id stringlengths 2 8 | text stringlengths 16 264k | dataset_id stringclasses 1 value |
|---|---|---|
6418051 | <reponame>QazmoQwerty/simple-settings-daemon<gh_stars>0
from typing import Set, Dict
from libssettings.utils import is_integer
from libssettings.exceptions import SSettingsError
class InvalidSettingValueError(SSettingsError):
    """Raised when a setting value fails validation against its rule."""
class Rule:
    """Abstract base for setting-value validators."""

    def validate(self, value: str) -> None:
        """Raise on an unacceptable *value*; concrete rules must override."""
        raise NotImplementedError
class OptionsRule(Rule):
    """Accepts only values drawn from a fixed set of options."""

    def __init__(self, options: Set[str]) -> None:
        self._options = options

    def validate(self, value: str) -> None:
        if value in self._options:
            return
        raise InvalidSettingValueError(f'Invalid value {repr(value)}, must be one of: {repr(self._options)}')
class IntegerRule(Rule):
    """Accepts any string that parses as an integer."""

    def validate(self, value: str) -> None:
        if is_integer(value):
            return
        raise InvalidSettingValueError(f'Invalid value {repr(value)}, must be an integer')
class PositiveIntegerRule(Rule):
    """Accepts integer strings with value >= 0.

    NOTE(review): despite the class name and error message, zero is
    accepted (the check is non-negative, not strictly positive) —
    confirm the intended semantics with the settings consumers.
    """

    def validate(self, value: str) -> None:
        if is_integer(value) and int(value) >= 0:
            return
        raise InvalidSettingValueError(f'Invalid value {repr(value)}, must be a positive integer')
class NegativeIntegerRule(Rule):
    """Accepts integer strings with value <= 0.

    NOTE(review): zero is accepted here as well (non-positive rather than
    strictly negative) — mirror of PositiveIntegerRule; confirm intent.
    """

    def validate(self, value: str) -> None:
        if is_integer(value) and int(value) <= 0:
            return
        raise InvalidSettingValueError(f'Invalid value {repr(value)}, must be a negative integer')
class Rules:
    """Registry mapping setting keys to their validation rules.

    Keys without a registered rule accept any value.
    """

    _rules: Dict[str, Rule]

    def __init__(self) -> None:
        self._rules = {}

    def set(self, key: str, rule: Rule) -> None:
        """Register (or replace) the rule for *key*."""
        self._rules[key] = rule

    def reset(self, key: str) -> None:
        """Remove the rule for *key*; a missing key is ignored."""
        self._rules.pop(key, None)

    def validate(self, key: str, value: str) -> None:
        """Validate *value* against the rule for *key*, if one is registered."""
        registered = self._rules.get(key)
        if registered is not None:
            registered.validate(value)
| StarcoderdataPython |
4981955 | <gh_stars>0
# -*- coding: utf-8 -*-
# cython: language_level=3
# BSD 3-Clause License
#
# Copyright (c) 2020-2022, <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# pyright: reportIncompatibleMethodOverride=none
# pyright: reportUnknownMemberType=none
# pyright: reportPrivateUsage=none
# This leads to too many false-positives around mocks.
import typing
from unittest import mock
import hikari
import pytest
import tanjun
def test_as_message_menu():
    """Every keyword option should be reflected on the built message-menu command."""
    callback = mock.Mock()
    decorator = tanjun.as_message_menu("eat", always_defer=True, default_to_ephemeral=False, is_global=False)
    result = decorator(callback)

    assert isinstance(result, tanjun.MenuCommand)
    assert result.type is hikari.CommandType.MESSAGE
    assert result.name == "eat"
    assert result._always_defer is True
    assert result.defaults_to_ephemeral is False
    assert result.is_global is False
    assert result.callback is callback
    assert result._wrapped_command is None
def test_as_message_menu_with_defaults():
    """Only the name given: all other options take their documented defaults."""
    callback = mock.Mock()
    result = tanjun.as_message_menu("yeet")(callback)

    assert isinstance(result, tanjun.MenuCommand)
    assert result.type is hikari.CommandType.MESSAGE
    assert result.name == "yeet"
    assert result._always_defer is False
    assert result.defaults_to_ephemeral is None
    assert result.is_global is True
    assert result.callback is callback
    assert result._wrapped_command is None
@pytest.mark.parametrize(
    "other_command",
    [
        tanjun.SlashCommand[typing.Any](mock.Mock(), "e", "a"),
        tanjun.MessageCommand[typing.Any](mock.Mock(), "b"),
        tanjun.MenuCommand[typing.Any, typing.Any](mock.Mock(), hikari.CommandType.MESSAGE, "a"),
    ],
)
def test_as_message_menu_when_wrapping_command(
    other_command: typing.Union[
        tanjun.SlashCommand[typing.Any],
        tanjun.MessageCommand[typing.Any],
        tanjun.MenuCommand[typing.Any, typing.Any],
    ]
):
    """Wrapping an existing command reuses its callback and records the wrapped command."""
    wrapping = tanjun.as_message_menu("c")(other_command)

    assert isinstance(wrapping, tanjun.MenuCommand)
    assert wrapping.type is hikari.CommandType.MESSAGE
    assert wrapping.callback is other_command.callback
    assert wrapping._wrapped_command is other_command
def test_as_user_menu():
    """Every keyword option should be reflected on the built user-menu command."""
    callback = mock.Mock()
    decorator = tanjun.as_user_menu("uoy", always_defer=True, default_to_ephemeral=False, is_global=False)
    result = decorator(callback)

    assert isinstance(result, tanjun.MenuCommand)
    assert result.type is hikari.CommandType.USER
    assert result.name == "uoy"
    assert result._always_defer is True
    assert result.defaults_to_ephemeral is False
    assert result.is_global is False
    assert result.callback is callback
    assert result._wrapped_command is None
def test_as_user_menu_with_defaults():
    """Only the name given: all other options take their documented defaults."""
    callback = mock.Mock()
    result = tanjun.as_user_menu("you")(callback)

    assert isinstance(result, tanjun.MenuCommand)
    assert result.type is hikari.CommandType.USER
    assert result.name == "you"
    assert result._always_defer is False
    assert result.defaults_to_ephemeral is None
    assert result.is_global is True
    assert result.callback is callback
    assert result._wrapped_command is None
@pytest.mark.parametrize(
    "other_command",
    [
        tanjun.SlashCommand[typing.Any](mock.Mock(), "e", "a"),
        tanjun.MessageCommand[typing.Any](mock.Mock(), "b"),
        tanjun.MenuCommand[typing.Any, typing.Any](mock.Mock(), hikari.CommandType.MESSAGE, "a"),
    ],
)
def test_as_user_menu_when_wrapping_command(
    other_command: typing.Union[
        tanjun.SlashCommand[typing.Any],
        tanjun.MessageCommand[typing.Any],
        tanjun.MenuCommand[typing.Any, typing.Any],
    ]
):
    """Wrapping an existing command reuses its callback and records the wrapped command."""
    wrapping = tanjun.as_user_menu("c")(other_command)

    assert isinstance(wrapping, tanjun.MenuCommand)
    assert wrapping.type is hikari.CommandType.USER
    assert wrapping.callback is other_command.callback
    assert wrapping._wrapped_command is other_command
class TestMenuCommand:
    """Unit tests for tanjun.MenuCommand: construction, properties, builders
    and component loading."""

    def test__init__when_name_too_long(self):
        # Names are capped at 32 characters.
        with pytest.raises(
            ValueError,
            match="Command name must be between 1-32 characters in length",
        ):
            tanjun.commands.MenuCommand(mock.Mock(), hikari.CommandType.MESSAGE, "x" * 33)

    def test__init__when_no_name(self):
        # An empty name is rejected with the same length error.
        with pytest.raises(
            ValueError,
            match="Command name must be between 1-32 characters in length",
        ):
            tanjun.commands.MenuCommand(mock.Mock(), hikari.CommandType.MESSAGE, "")

    @pytest.mark.parametrize(
        "inner_command",
        [
            tanjun.SlashCommand[typing.Any](mock.Mock(), "a", "b"),
            tanjun.MessageCommand[typing.Any](mock.Mock(), "a"),
            tanjun.MenuCommand[typing.Any, typing.Any](mock.Mock(), hikari.CommandType.MESSAGE, "e"),
        ],
    )
    def test___init___when_command_object(
        self,
        inner_command: typing.Union[
            tanjun.SlashCommand[tanjun.abc.CommandCallbackSig],
            tanjun.MessageCommand[tanjun.abc.CommandCallbackSig],
            tanjun.MenuCommand[typing.Any, typing.Any],
        ],
    ):
        # Passing another command object unwraps it to its underlying callback.
        assert tanjun.MenuCommand(inner_command, hikari.CommandType.MESSAGE, "woow").callback is inner_command.callback

    @pytest.mark.asyncio()
    async def test_call_dunder_method(self):
        # Awaiting the command forwards args/kwargs straight to the callback.
        mock_callback: typing.Any = mock.AsyncMock()
        command = tanjun.MenuCommand(mock_callback, hikari.CommandType.MESSAGE, "a")

        await command(123, 321, "ea", b=32)

        mock_callback.assert_awaited_once_with(123, 321, "ea", b=32)

    def test_callback_property(self):
        mock_callback = mock.Mock()
        command = tanjun.MenuCommand[typing.Any, typing.Any](mock_callback, hikari.CommandType.MESSAGE, "a")

        assert command.callback is mock_callback

    def test_defaults_to_ephemeral_property(self):
        command = tanjun.MenuCommand[typing.Any, typing.Any](
            mock.Mock(), hikari.CommandType.MESSAGE, "a", default_to_ephemeral=True
        )

        assert command.defaults_to_ephemeral is True

    def test_defaults_to_ephemeral_property_when_unset(self):
        # Tri-state flag: unset is None, not False.
        command = tanjun.MenuCommand[typing.Any, typing.Any](mock.Mock(), hikari.CommandType.MESSAGE, "a")

        assert command.defaults_to_ephemeral is None

    def test_is_global_property(self):
        command = tanjun.MenuCommand[typing.Any, typing.Any](
            mock.Mock(), hikari.CommandType.MESSAGE, "a", is_global=False
        )

        assert command.is_global is False

    def test_is_global_property_when_default(self):
        command = tanjun.MenuCommand[typing.Any, typing.Any](mock.Mock(), hikari.CommandType.MESSAGE, "a")

        assert command.is_global is True

    def test_name_property(self):
        command = tanjun.MenuCommand[typing.Any, typing.Any](mock.Mock(), hikari.CommandType.MESSAGE, "uwu")

        assert command.name == "uwu"

    def test_tracked_command_property(self):
        # No tracked application command until set_tracked_command is called.
        command = tanjun.MenuCommand[typing.Any, typing.Any](mock.Mock(), hikari.CommandType.MESSAGE, "uwu")

        assert command.tracked_command is None

    def test_tracked_command_id_property(self):
        command = tanjun.MenuCommand[typing.Any, typing.Any](mock.Mock(), hikari.CommandType.MESSAGE, "uwu")

        assert command.tracked_command_id is None

    @pytest.mark.parametrize("command_type", [hikari.CommandType.MESSAGE, hikari.CommandType.USER])
    def test_type_property(self, command_type: hikari.CommandType):
        command = tanjun.MenuCommand[typing.Any, typing.Any](mock.Mock(), command_type, "uwu")  # type: ignore

        assert command.type is command_type

    def test_build(self):
        # build() produces a hikari command builder mirroring the command.
        command = tanjun.MenuCommand[typing.Any, typing.Any](mock.Mock(), hikari.CommandType.USER, "owo")

        builder = command.build()

        assert builder.name == "owo"
        assert builder.type is hikari.CommandType.USER
        assert builder.id is hikari.UNDEFINED

    def test_build_when_all_fields_set(self):
        command = tanjun.MenuCommand[typing.Any, typing.Any](mock.Mock(), hikari.CommandType.MESSAGE, "pat")

        builder = command.build()

        assert builder.name == "pat"
        assert builder.type is hikari.CommandType.MESSAGE
        assert builder.id is hikari.UNDEFINED

    def test_set_tracked_command(self):
        # Fluent setter: returns self and exposes the tracked command and its id.
        command = tanjun.MenuCommand[typing.Any, typing.Any](mock.Mock(), hikari.CommandType.MESSAGE, "pat")
        mock_command = mock.Mock(hikari.ContextMenuCommand)

        result = command.set_tracked_command(mock_command)

        assert result is command
        assert command.tracked_command is mock_command
        assert command.tracked_command_id is mock_command.id

    def test_set_ephemeral_default(self):
        command = tanjun.MenuCommand[typing.Any, typing.Any](mock.Mock(), hikari.CommandType.MESSAGE, "pat")

        result = command.set_ephemeral_default(True)

        assert result is command
        assert command.defaults_to_ephemeral is True

    @pytest.mark.asyncio()
    async def test_check_context(self):
        # check_context should gather all registered checks and temporarily set
        # the command on the context (set then reset to None).
        mock_callback = mock.Mock()
        mock_other_callback = mock.Mock()
        mock_context = mock.Mock()
        command = (
            tanjun.MenuCommand[typing.Any, typing.Any](mock.Mock(), hikari.CommandType.USER, "pat")
            .add_check(mock_callback)
            .add_check(mock_other_callback)
        )

        with mock.patch.object(tanjun.utilities, "gather_checks", new=mock.AsyncMock()) as gather_checks:
            result = await command.check_context(mock_context)

        gather_checks.assert_awaited_once_with(mock_context, [mock_callback, mock_other_callback])
        assert result is gather_checks.return_value
        mock_context.set_command.assert_has_calls([mock.call(command), mock.call(None)])

    @pytest.mark.skip(reason="TODO")
    def test_copy(self):
        ...

    @pytest.mark.skip(reason="TODO")
    @pytest.mark.asyncio()
    async def test_execute(self):
        ...

    def test_load_into_component(self):
        # Loading registers the command on the component.
        command = tanjun.MenuCommand[typing.Any, typing.Any](mock.Mock(), hikari.CommandType.MESSAGE, "pat")
        mock_component = mock.Mock()

        command.load_into_component(mock_component)

        mock_component.add_menu_command.assert_called_once_with(command)

    def test_load_into_component_when_wrapped_command(self):
        # A plain wrapped command is NOT recursively loaded.
        mock_other_command = mock.Mock()
        command = tanjun.MenuCommand[typing.Any, typing.Any](
            mock.Mock(), hikari.CommandType.MESSAGE, "pat", _wrapped_command=mock_other_command
        )
        mock_component = mock.Mock()

        command.load_into_component(mock_component)

        mock_component.add_menu_command.assert_called_once_with(command)
        mock_other_command.load_into_component.assert_not_called()

    def test_load_into_component_when_wrapped_command_is_loader(self):
        # A wrapped command that is itself a component loader IS loaded too.
        mock_other_command = mock.Mock(tanjun.components.AbstractComponentLoader)
        command = tanjun.MenuCommand[typing.Any, typing.Any](
            mock.Mock(), hikari.CommandType.MESSAGE, "pat", _wrapped_command=mock_other_command
        )
        mock_component = mock.Mock()

        command.load_into_component(mock_component)

        mock_component.add_menu_command.assert_called_once_with(command)
        mock_other_command.load_into_component.assert_called_once_with(mock_component)
| StarcoderdataPython |
188478 | <reponame>nkuhzx/VSG-IA<filename>vsgia_model/dataset/videotargetatt.py<gh_stars>0
import glob
import numpy as np
import pandas as pd
import os
import math
import h5py
from PIL import Image
import torch
import torch.nn.functional as F
from torch.utils.data.dataset import Dataset
from torch.utils.data import DataLoader
from torchvision import transforms
import torchvision.transforms.functional as TF
import matplotlib.pyplot as plt
from vsgia_model.utils import img_utils
from vsgia_model.models.utils.misc import nested_tensor_from_tensor_list
# from preprocess_tools.utils.utils import visualization_mask
import warnings
warnings.filterwarnings("ignore")
class VideoTargetAttLoader(object):
    """Bundles the train/validation datasets and their DataLoaders."""

    def __init__(self, opt):
        dataset_cfg = opt.DATASET

        self.train_gaze = VideoTargetAttDataset(dataset_cfg.train_anno, 'train', opt, show=False)
        self.val_gaze = VideoTargetAttDataset(dataset_cfg.test_anno, 'test', opt, show=False)

        # Training batches are shuffled; validation keeps annotation order.
        self.train_loader = DataLoader(
            self.train_gaze,
            batch_size=dataset_cfg.train_batch_size,
            num_workers=dataset_cfg.load_workers,
            shuffle=True,
            collate_fn=collate_fn,
        )
        self.val_loader = DataLoader(
            self.val_gaze,
            batch_size=dataset_cfg.test_batch_size,
            num_workers=dataset_cfg.load_workers,
            shuffle=False,
            collate_fn=collate_fn,
        )
# Treats each annotated VideoAttentionTarget frame as an independent image sample.
class VideoTargetAttDataset(Dataset):
    """Frame-level gaze-target dataset.

    Each sample pairs a frame image, per-person instance masks, the target
    person's head crop/location, pre-extracted scene-graph features (from an
    HDF5 file keyed by sample index), and the gaze-point label/heatmap.
    Training samples are augmented (jitter, crop, flip, color).
    """

    def __init__(self, csv_path, type, opt, show=False):
        # NOTE(review): parameter name `type` shadows the builtin.
        test = True if type == "test" else False
        if test:
            df = pd.read_csv(os.path.join(csv_path), index_col=False)
            self.length = len(df)
            # Pre-computed graph features: node_num / visual_feature / spatial_feature.
            self.graph_info = h5py.File(opt.DATASET.test_graph, 'r')
        else:
            df = pd.read_csv(os.path.join(csv_path), index_col=False)
            self.length = len(df)
            self.graph_info = h5py.File(opt.DATASET.train_graph, 'r')

        self.data_dir = opt.DATASET.root_dir
        self.mask_dir = opt.DATASET.mask_dir
        self.df = df

        # Resize -> tensor -> ImageNet normalization, applied to both frame and face crop.
        transform_list = []
        transform_list.append(transforms.Resize((opt.TRAIN.input_size, opt.TRAIN.input_size)))
        transform_list.append(transforms.ToTensor())
        transform_list.append(transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]))
        self.transform = transforms.Compose(transform_list)

        self.test = test
        self.input_size = opt.TRAIN.input_size
        self.output_size = opt.TRAIN.output_size
        self.imshow = show

    def __getitem__(self, index):
        """Return a dict of model inputs and labels for sample *index*."""
        # index=self.maplist[index]
        row = self.df.loc[index]

        sub_id = row['sub_id']
        act_id = row['act_id']  # index of the target person in the mask stack
        show_index = row['show_index']
        show_name = row['show_name']
        frame_scope = row['frame_scope']
        img_name = row['img_name']
        # Head bounding box and gaze point in pixel coordinates.
        x_min = row['x_min']
        y_min = row['y_min']
        x_max = row['x_max']
        y_max = row['y_max']
        gaze_x = row['gaze_x']
        gaze_y = row['gaze_y']

        if self.test:
            pass
        else:
            # Extra bound box used only to constrain the training-time random crop.
            b_x_min = row['bound_x_min']
            b_y_min = row['bound_y_min']
            b_x_max = row['bound_x_max']
            b_y_max = row['bound_y_max']

        # map to float
        x_min, y_min, x_max, y_max = map(float, [x_min, y_min, x_max, y_max])
        gaze_x, gaze_y = map(float, [gaze_x, gaze_y])

        # print(sub_id,act_id,show_index,show_name,frame_scope,img_name)
        # print(x_min,y_min,x_max,y_max,gaze_x,gaze_y)

        img_path = os.path.join(self.data_dir, show_name, frame_scope, img_name)
        mask_path = os.path.join(self.mask_dir, show_name, frame_scope, img_name.replace('jpg', 'npy'))

        # load the image
        img = Image.open(img_path)
        img = img.convert('RGB')
        width, height = img.size

        # load the per-person mask stack (one channel per person)
        maskimg = np.load(mask_path)

        # move the target person's mask to the top of the stack
        target_mask = np.expand_dims(maskimg[act_id, :, :], axis=0)
        maskimg = np.delete(maskimg, obj=act_id, axis=0)
        maskimg = np.insert(maskimg, obj=0, values=target_mask, axis=0)

        # Drop channels whose corner value is negative — these appear to be
        # padding/invalid entries; confirm against the mask pre-processing.
        del_ids = np.where(maskimg[:, 0, 0] < 0)[0]
        maskimg = np.delete(maskimg, obj=del_ids, axis=0)

        # With a single person, add the scene complement so at least two
        # channels remain (target + rest-of-scene).
        if maskimg.shape[0] == 1:
            all_sense_mask = 1 - maskimg
            maskimg = np.concatenate([maskimg, all_sense_mask], axis=0)

        # get the node_num, visual feat and spatial feat for this sample
        node_num = self.graph_info[str(index)]["node_num"][()]
        visual_feat = self.graph_info[str(index)]["visual_feature"]
        spatial_feat = self.graph_info[str(index)]["spatial_feature"]
        spatial_feat = np.array(spatial_feat)

        # gaze inside the frame or not ((-1, -1) marks "outside")
        if gaze_x == -1 and gaze_y == -1:
            gaze_inside = False
        else:
            if gaze_x < 0: gaze_x = 0
            if gaze_y < 0: gaze_y = 0
            gaze_inside = True

        # gaze_pixel=[math.ceil(gaze_x*width),math.ceil(gaze_y*height)]
        imsize = torch.IntTensor([width, height])

        if self.imshow:
            org_show = img.resize((224, 224))
            orgmask_show = maskimg

        if self.test:
            # Test: only normalize the gaze point to [0, 1].
            if gaze_inside:
                gaze_x, gaze_y = gaze_x / width, gaze_y / height
        else:
            ## data augmentation (training only)

            # Jitter (expansion-only) bounding box size
            if np.random.random_sample() <= 0.5:
                k = np.random.random_sample() * 0.2
                x_min -= k * abs(x_max - x_min)
                y_min -= k * abs(y_max - y_min)
                x_max += k * abs(x_max - x_min)
                y_max += k * abs(y_max - y_min)

            # Random Crop
            if np.random.random_sample() <= 0.5:
            # if True:
                # Calculate the minimum valid range of the crop that doesn't exclude the face and the gaze target
                if gaze_inside:
                    crop_x_min = np.min([gaze_x, x_min, x_max, b_x_min, b_x_max])
                    crop_y_min = np.min([gaze_y, y_min, y_max, b_y_min, b_y_max])
                    crop_x_max = np.max([gaze_x, x_min, x_max, b_x_min, b_x_max])
                    crop_y_max = np.max([gaze_y, y_min, y_max, b_y_min, b_y_max])
                else:
                    crop_x_min = np.min([x_min, x_max, b_x_min, b_x_max])
                    crop_y_min = np.min([y_min, y_max, b_y_min, b_y_max])
                    crop_x_max = np.max([x_min, x_max, b_x_min, b_x_max])
                    crop_y_max = np.max([y_min, y_max, b_y_min, b_y_max])

                # Randomly select a random top left corner
                if crop_x_min >= 0:
                    crop_x_min = np.random.uniform(0, crop_x_min)
                if crop_y_min >= 0:
                    crop_y_min = np.random.uniform(0, crop_y_min)

                # Find the range of valid crop width and height starting from the (crop_x_min, crop_y_min)
                crop_width_min = crop_x_max - crop_x_min
                crop_height_min = crop_y_max - crop_y_min
                crop_width_max = width - crop_x_min
                crop_height_max = height - crop_y_min

                # Randomly select a width and a height
                crop_width = np.random.uniform(crop_width_min, crop_width_max)
                crop_height = np.random.uniform(crop_height_min, crop_height_max)

                # Crop it
                img = TF.crop(img, crop_y_min, crop_x_min, crop_height, crop_width)

                # Apply the same crop to the mask stack (in normalized-then-scaled
                # mask pixel coordinates; assumes masks are square of side
                # maskimg.shape[1] — TODO confirm).
                crop_list = [crop_y_min / height, (crop_y_min + crop_height) / height, crop_x_min / width,
                             (crop_x_min + crop_width) / width]
                crop_list = np.clip(crop_list, 0, 1)
                crop_list = np.array(crop_list) * maskimg.shape[1]
                crop_list = crop_list.round().astype(int)
                maskimg = maskimg[:, crop_list[0]:crop_list[1], crop_list[2]:crop_list[3]]

                # Record the crop's (x, y) offset
                offset_x, offset_y = crop_x_min, crop_y_min

                # convert coordinates into the cropped frame
                x_min, y_min, x_max, y_max = x_min - offset_x, y_min - offset_y, x_max - offset_x, y_max - offset_y

                # normalize gaze to [0,1] relative to the crop
                if gaze_inside:
                    gaze_x, gaze_y = (gaze_x - offset_x) / float(crop_width), \
                                     (gaze_y - offset_y) / float(crop_height)
                    gaze_x = np.clip(gaze_x, 0, 1)
                    gaze_y = np.clip(gaze_y, 0, 1)
                # else:
                #     gaze_x = -1; gaze_y = -1

                # Re-express the spatial features in the cropped frame.
                # Columns [0,2,5,7,14] are treated as normalized x coordinates,
                # [1,3,6,8,15] as normalized y, [4,9] as normalized areas —
                # inferred from this rescaling; confirm against the graph builder.
                area_ratio = width * height / (float(crop_height) * float(crop_width))
                spatial_feat[:, [0, 2, 5, 7, 14]] = (spatial_feat[:, [0, 2, 5, 7, 14]] * width - offset_x) / float(crop_width)
                spatial_feat[:, [1, 3, 6, 8, 15]] = (spatial_feat[:, [1, 3, 6, 8, 15]] * height - offset_y) / float(crop_height)
                spatial_feat[:, [4, 9]] = spatial_feat[:, [4, 9]] * area_ratio

                width, height = crop_width, crop_height
            else:
                # No crop: normalize the gaze point with the original size.
                if gaze_inside:
                    gaze_x = gaze_x / width
                    gaze_y = gaze_y / height

            # Random flip (horizontal)
            if np.random.random_sample() <= 0.5:
            # if True:
                img = img.transpose(Image.FLIP_LEFT_RIGHT)
                maskimg = np.flip(maskimg, axis=2)
                # depthimg=depthimg.transpose(Image.FLIP_LEFT_RIGHT)
                x_max_2 = width - x_min
                x_min_2 = width - x_max
                x_max = x_max_2
                x_min = x_min_2
                if gaze_inside:
                    gaze_x = 1 - gaze_x

                # mirror the x-like entries of the spatial features
                spatial_feat[:, [0, 2, 5, 7, 14]] = 1 - spatial_feat[:, [0, 2, 5, 7, 14]]

            # Random color change
            if np.random.random_sample() <= 0.5:
                img = TF.adjust_brightness(img, brightness_factor=np.random.uniform(0.5, 1.5))
                img = TF.adjust_contrast(img, contrast_factor=np.random.uniform(0.5, 1.5))
                img = TF.adjust_saturation(img, saturation_factor=np.random.uniform(0, 1.5))

        # Binary channel encoding the head-box location at input resolution.
        head_channel = img_utils.get_head_box_channel(x_min, y_min, x_max, y_max, width, height,
                                                      resolution=self.input_size, coordconv=False).unsqueeze(0)

        # Crop the face
        face = img.crop((int(x_min), int(y_min), int(x_max), int(y_max)))

        if self.imshow:
            img_show = img
            face_show = face

        if self.transform is not None:
            img = self.transform(img)
            face = self.transform(face)

        # Resize the mask stack to the network input resolution.
        maskimg = maskimg.astype(np.float32)
        maskimg = torch.from_numpy(maskimg)
        maskimg = maskimg.unsqueeze(1)
        if maskimg.shape[2] != self.input_size or maskimg.shape[3] != self.input_size:
            maskimg = F.interpolate(maskimg, (self.input_size, self.input_size), mode="bilinear")
        maskimg = maskimg.squeeze(1)

        # generate the heat map used for deconv prediction
        gaze_heatmap = torch.zeros(self.output_size, self.output_size)
        if gaze_inside:
            gaze_heatmap = img_utils.draw_labelmap(gaze_heatmap, [gaze_x * self.output_size, gaze_y * self.output_size],
                                                   3,
                                                   type='Gaussian')

        # Drop the target-person channel; remaining channels should match the
        # graph's non-target nodes (node_num - 1).
        maskimg = maskimg[1:, :, :]
        if maskimg.shape[0] != node_num - 1:
            # Debug aid for mask/graph mismatches.
            print(maskimg.shape[0], node_num - 1)
            print(maskimg, node_num)

        all_data = {}
        # X train
        all_data['img'] = img
        all_data['maskimg'] = maskimg
        all_data["face"] = face
        all_data["headloc"] = head_channel
        all_data["nodenum"] = node_num
        all_data["visfeature"] = visual_feat
        all_data["spafeature"] = spatial_feat
        # Y label
        all_data["gaze_heatmap"] = gaze_heatmap
        all_data["gaze_inside"] = gaze_inside

        cont_gaze = np.array([gaze_x, gaze_y])
        cont_gaze = torch.FloatTensor(cont_gaze)
        all_data["gaze_label"] = cont_gaze
        all_data["imsize"] = imsize

        return all_data

    def __len__(self):
        """Number of annotated frames."""
        return self.length
def collate_fn(batch):
    """Collate VideoTargetAttDataset samples into model-ready batch tensors.

    Images/faces/head maps become NestedTensors (variable sizes allowed);
    masks and graph features are concatenated across the batch; labels are
    stacked.
    """
    # (output key, per-sample key) pairs, in batch-dict insertion order.
    field_map = [
        ("img", "img"),
        ("maskimg", "maskimg"),
        ("headloc", "headloc"),
        ("face", "face"),
        ("node_num", "nodenum"),
        ("vis_feat", "visfeature"),
        ("spa_feat", "spafeature"),
        ("gaze_heatmap", "gaze_heatmap"),
        ("gaze_inside", "gaze_inside"),
        ("gaze_label", "gaze_label"),
        ("img_size", "imsize"),
    ]
    batch_data = {out_key: [sample[in_key] for sample in batch] for out_key, in_key in field_map}

    # train data
    batch_data["img"] = nested_tensor_from_tensor_list(batch_data["img"])
    batch_data["face"] = nested_tensor_from_tensor_list(batch_data["face"])
    batch_data["headloc"] = nested_tensor_from_tensor_list(batch_data["headloc"])
    batch_data["maskimg"] = torch.cat(batch_data["maskimg"], dim=0)
    batch_data['vis_feat'] = torch.FloatTensor(np.concatenate(batch_data['vis_feat'], axis=0))
    batch_data['spa_feat'] = torch.FloatTensor(np.concatenate(batch_data['spa_feat'], axis=0))

    # label data
    batch_data["gaze_heatmap"] = torch.stack(batch_data["gaze_heatmap"], 0)
    batch_data["gaze_inside"] = torch.as_tensor(batch_data["gaze_inside"])
    batch_data["gaze_label"] = torch.stack(batch_data["gaze_label"], 0)
    batch_data["img_size"] = torch.stack(batch_data["img_size"], 0)

    return batch_data
| StarcoderdataPython |
1693707 | <gh_stars>1-10
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Project panel.helloyuna.io
#
# Author: <NAME>
# <EMAIL>
# <EMAIL>
# http://ardz.xyz
#
# File Created: Thursday, 1st March 2018 1:56:43 pm
# Last Modified: Sunday, 4th March 2018 6:03:42 pm
# Modified By: <NAME> (<EMAIL>)
#
# Hand-crafted & Made with Love
# Copyright - 2017 Yuna & Co, https://helloyuna.io
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++
from __future__ import unicode_literals
from django.views.generic import TemplateView
from django.shortcuts import redirect
from django.contrib import messages
from django.contrib.auth.models import User
from libs.view import ProtectedMixin
from libs.datatable import Datatable
from ..forms import UserForm
class UserView(ProtectedMixin, TemplateView):
    """Superuser page listing auth users, backed by a datatable endpoint."""

    template_name = "superuser/user.html"

    def get(self, request, *args, **kwargs):
        # Datatable AJAX polls carry a 'draw' query parameter; plain page
        # loads do not.
        if request.GET.get('draw', None) is not None:
            return self.datatable(request)
        return self.render_to_response({})

    def delete(self, request):
        # Body arrives url-encoded as 'id=<pk>'; take the value part.
        target_id = request.body.split("=")[1]
        user = User.objects.filter(id__exact=target_id).first()
        user.delete()
        return self.render_to_response({})

    def datatable(self, request):
        """Serve the JSON payload consumed by the datatable widget."""
        queryset = User.objects.all()
        columns = ['id', 'username', 'first_name', 'last_name', 'date_joined']
        table = Datatable(request, queryset, columns, key="id")
        return table.get_data()
class UserFormView(ProtectedMixin, TemplateView):
    """Create/edit form for Django auth users.

    With an ``?edit=<id>`` query parameter the form is bound to that existing
    user; otherwise it creates a new one.
    """

    template_name = "superuser/user.form.html"

    def get(self, request, *args, **kwargs):
        edit = request.GET.get("edit")
        if edit:
            user = User.objects.get(id=edit)
            # Pre-select the user's current groups in the form widget.
            form = UserForm(instance=user, initial={'groups': user.groups.all()})
        else:
            form = UserForm()

        return self.render_to_response({"form": form})

    def post(self, request):
        edit = request.GET.get("edit")
        if edit:
            form = UserForm(request.POST, instance=User.objects.get(id=edit))
        else:
            form = UserForm(request.POST)

        if form.is_valid():
            # Defer the first save so the password can be hashed before writing;
            # an empty password field leaves the stored password untouched.
            user = form.save(commit=False)
            if request.POST.get("password") and request.POST.get("password") != "":
                user.set_password(request.POST.get("password"))
            user.save()
            # Groups are a many-to-many relation, so the user row must exist
            # before assignment; hence the second save.
            user.groups = form.cleaned_data['groups']
            user.save()

            messages.success(request, 'User (%s) has been saved.' % user.username)
            return redirect("superuser:user")
        else:
            return self.render_to_response({"form": form})
3403112 | <filename>src/commands/rcon.py
from src.commands.command import Command
class RconCommand(Command):
    """Placeholder RCON command; behavior not yet implemented."""

    def __init__(self):
        # No state needed yet.
        pass

    def execute(self):
        # TODO: implement the RCON command execution.
        pass
| StarcoderdataPython |
6629009 | <gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Python bot for comment a list of urls in YouTube
import time
import numpy as np
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.firefox.firefox_binary import FirefoxBinary
def youtube_login(email, password):
    """Open Firefox via Selenium and sign the given Google account into YouTube.

    Returns the logged-in webdriver instance.

    NOTE(review): Python 2 era code relying on a hard-coded Windows Firefox
    path, fixed sleeps, and Google's sign-in DOM (element ids/classes) —
    all brittle and likely to break as the sign-in flow changes.
    """
    # Browser
    binary = FirefoxBinary('C:\\Program Files (x86)\\Mozilla Firefox\\firefox.exe')
    driver = webdriver.Firefox(firefox_binary=binary)

    driver.get(
        'https://accounts.google.com/ServiceLogin?hl=tr&continue=https%3A%2F%2Fwww.youtube.com%2Fsignin%3Fhl%3Den%26feature%3Dsign_in_button%26app%3Ddesktop%26action_handle_signin%3Dtrue%26next%3D%252F&uilel=3&passive=true&service=youtube#identifier')

    # find email, send data and submit
    EMAIL_FIELD = driver.find_element_by_id('identifierId')
    EMAIL_FIELD.click()
    EMAIL_FIELD.clear()
    EMAIL_FIELD.send_keys(email)
    EMAIL_FIELD.send_keys(Keys.ENTER)

    time.sleep(3)
    # advance past the account-chooser screen
    driver.find_element_by_class_name('CeoRYc').click()
    time.sleep(4)

    # find password, send data and submit
    PASSWD_FIELD = driver.find_element_by_name('password')
    PASSWD_FIELD.click()
    PASSWD_FIELD.clear()
    PASSWD_FIELD.send_keys(password)
    PASSWD_FIELD.send_keys(Keys.ENTER)

    time.sleep(3)
    time.sleep(3)
    return driver
def comment_page(driver, urls, comment):
    """Pop URLs from *urls* and post *comment* on each video, recursively.

    Skips unavailable videos and videos with comments disabled. Returns []
    when the URL list is exhausted.

    NOTE(review): Python 2 code (print statement). The recursion depth grows
    with the number of URLs, so very long lists can hit the recursion limit.
    """
    # Check if there still urls
    if len(urls) == 0:
        print 'Youtube Comment Bot: Finished!'
        return []

    # Pop a URL from the array (taken from the end of the list)
    url = urls.pop()

    # Visit the page
    driver.get(url)
    driver.implicitly_wait(1)

    # Is video unavailable (deleted, private)? Then move on.
    if not check_exists_by_xpath(driver, '//*[@id="movie_player"]'):
        return comment_page(driver, urls, random_comment())

    # Scroll, wait for load comment box
    driver.execute_script("window.scrollTo(0, 500);")

    # Comments are disabled? Then move on.
    if check_exists_by_xpath(driver, '//*[@id="comments-disabled-message"]/div/span'):
        return comment_page(driver, urls, random_comment())

    # Lets wait for comment box
    WebDriverWait(driver, 15).until(EC.presence_of_element_located((By.ID, "comment-section-renderer")))

    # Activate box for comments
    driver.find_element_by_class_name('comment-simplebox-renderer-collapsed-content').click()
    # driver.find_element_by_xpath("//div[@id='comment-section-renderer']/div/div[2]/div").click()

    # Send comment and post
    driver.implicitly_wait(5)
    driver.find_element_by_xpath('//*[@id="comment-simplebox"]/div[1]').send_keys(_convert(comment))
    driver.find_element_by_xpath('//*[@id="comment-simplebox"]/div[1]').send_keys(Keys.ENTER + Keys.ENTER)

    # Is post ready to be clicked? comment-simplebox-submit
    driver.find_element_by_class_name('comment-simplebox-submit').click()

    # Wait a random 2-4s between posts to look less bot-like
    r = np.random.randint(2, 5)
    time.sleep(r)

    # Recurse with a freshly chosen comment for the next video
    return comment_page(driver, urls, random_comment())
def _convert(param):
if isinstance(param, str):
return param.decode('utf-8')
else:
return param
def random_comment():
    """Return one comment text chosen at random from the canned list."""
    messages = [
        'Müzik Caddesi Uyguluması müzik indirme ve dinleme programı telefonuza şarkı keyfi yaşatır. Google Play\'den indirebilir veya https://play.google.com/store/apps/details?id=com.muzikcaddesi.muzikcaddesi'
    ]
    choice = np.random.randint(0, len(messages))
    return messages[choice]
def check_exists_by_xpath(driver, xpath):
    """Return True when *xpath* matches an element on the current page."""
    try:
        driver.find_element_by_xpath(xpath)
        return True
    except NoSuchElementException:
        return False
if __name__ == '__main__':
    # Credentials (left blank intentionally; fill in before running)
    email = ''
    password = ''

    # URLs to comment on, read from urls.txt (one or more per line).
    # Fixes: the file handle is now closed via `with`, and the list no longer
    # starts with a spurious empty-string URL (which would previously be
    # visited via driver.get('')).
    urls = []
    with open("urls.txt", "r") as inp:
        for line in inp:
            urls.extend(line.split())

    # Login in youtube
    driver = youtube_login(email, password)

    # Post a random comment on every url
    comment_page(driver, urls, random_comment())
12848443 | <filename>tests/views/test_plot_configuration_dialog.py<gh_stars>0
# This file is part of spot_motion_monitor.
#
# Developed for LSST System Integration, Test and Commissioning.
#
# See the LICENSE file at the top-level directory of this distribution
# for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.
from PyQt5.QtWidgets import QDialogButtonBox
from spot_motion_monitor.utils import AutoscaleState
from spot_motion_monitor.views import PlotConfigurationDialog
class TestPlotConfigurationDialog:
    """Tests for PlotConfigurationDialog: tab layout, configuration delegation
    to the centroid/PSD tabs, and OK-button gating on input validity."""

    def test_parametersAfterConstruction(self, qtbot):
        # The dialog hosts exactly two tabs (centroid and PSD).
        pcDialog = PlotConfigurationDialog()
        qtbot.addWidget(pcDialog)
        pcDialog.show()

        assert pcDialog.tabWidget.count() == 2

    def test_setPlotConfiguration(self, qtbot, mocker):
        # Applying configurations delegates once to each tab's setConfiguration.
        pcDialog = PlotConfigurationDialog()
        mockCentroidTabSetConfig = mocker.patch.object(pcDialog.centroidPlotConfigTab, 'setConfiguration')
        mockPsdTabSetConfig = mocker.patch.object(pcDialog.psdPlotConfigTab, 'setConfiguration')
        qtbot.addWidget(pcDialog)
        pcDialog.show()

        centroidConfig = {'xCentroid': {'autoscale': AutoscaleState.OFF.name, 'pixelAddition': None,
                                        'minimum': 10, 'maximum': 1000},
                          'yCentroid': {'autoscale': AutoscaleState.ON.name, 'pixelAddition': None,
                                        'minimum': None, 'maximum': None},
                          'scatterPlot': {'numHistogramBins': 50}}
        psdConfig = {'waterfall': {'numBins': 15, 'colorMap': None},
                     'xPSD': {'autoscale': True},
                     'yPSD': {'autoscale': False, 'maximum': 1320.0}}

        pcDialog.setPlotConfiguration(centroidConfig, psdConfig)

        assert mockCentroidTabSetConfig.call_count == 1
        assert mockPsdTabSetConfig.call_count == 1

    def test_getPlotConfiguration(self, qtbot, mocker):
        # Reading configurations delegates once to each tab's getConfiguration
        # and returns both (mocked, hence non-None) results.
        pcDialog = PlotConfigurationDialog()
        mockCentroidTabGetConfig = mocker.patch.object(pcDialog.centroidPlotConfigTab, 'getConfiguration')
        mockPsdTabGetConfig = mocker.patch.object(pcDialog.psdPlotConfigTab, 'getConfiguration')
        qtbot.addWidget(pcDialog)
        pcDialog.show()

        centroidConfig, psdConfig = pcDialog.getPlotConfiguration()

        assert mockCentroidTabGetConfig.call_count == 1
        assert mockPsdTabGetConfig.call_count == 1
        assert centroidConfig is not None
        assert psdConfig is not None

    def test_validInputFromTabs(self, qtbot):
        # The OK button is disabled while any tab holds an invalid value
        # and re-enabled once the value becomes valid again.
        pcDialog = PlotConfigurationDialog()
        qtbot.addWidget(pcDialog)
        pcDialog.show()

        pcDialog.centroidPlotConfigTab.pixelAdditionXLineEdit.setText(str(-1))
        assert pcDialog.buttonBox.button(QDialogButtonBox.Ok).isEnabled() is False
        pcDialog.centroidPlotConfigTab.pixelAdditionXLineEdit.setText(str(10))
        assert pcDialog.buttonBox.button(QDialogButtonBox.Ok).isEnabled()
        pcDialog.psdPlotConfigTab.waterfallNumBinsLineEdit.setText(str(0))
        assert pcDialog.buttonBox.button(QDialogButtonBox.Ok).isEnabled() is False
1684390 | <gh_stars>1-10
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 2.0.4
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
"""
This module defines internals of BLPAPI-Py and the following classes:
- CorrelationId: a key to track requests and subscriptions
Copyright 2012. Bloomberg Finance L.P.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions: The above
copyright notice and this permission notice shall be included in all copies
or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""
from sys import version_info
# Locate and load the compiled '_internals' extension module that backs this
# wrapper.  On Python >= 2.6 the extension is looked up next to this file via
# imp.find_module, so the binary shipped with the package wins over any other
# '_internals' on sys.path; if that lookup fails (or on older interpreters)
# a plain import is used instead.
if version_info >= (2,6,0):
    def swig_import_helper():
        from os.path import dirname
        import imp
        fp = None
        try:
            # Search only the directory containing this wrapper module.
            fp, pathname, description = imp.find_module('_internals', [dirname(__file__)])
        except ImportError:
            # Fall back to the normal import machinery.
            import _internals
            return _internals
        if fp is not None:
            try:
                _mod = imp.load_module('_internals', fp, pathname, description)
            finally:
                # find_module returns an open file handle; always close it.
                fp.close()
            return _mod
    _internals = swig_import_helper()
    del swig_import_helper
else:
    import _internals
# Only needed during module loading; remove from the module namespace.
del version_info
# 'property' was added in Python 2.2; on older interpreters SWIG proxies
# simply go without property-based accessors.
try:
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError(name)
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Base class for generated proxies: new-style 'object' where available
# (_newclass=1), otherwise a plain old-style placeholder (_newclass=0).
# _newclass gates features such as staticmethod() elsewhere in this file.
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object : pass
    _newclass = 0
# Re-export of the C-level constants from the '_internals' extension so they
# are reachable at module level.  Values are defined by the BLPAPI C SDK.
# -- TopicList creation status --
TOPICLIST_NOT_CREATED = _internals.TOPICLIST_NOT_CREATED
TOPICLIST_CREATED = _internals.TOPICLIST_CREATED
TOPICLIST_FAILURE = _internals.TOPICLIST_FAILURE
# -- ResolutionList entry status --
RESOLUTIONLIST_UNRESOLVED = _internals.RESOLUTIONLIST_UNRESOLVED
RESOLUTIONLIST_RESOLVED = _internals.RESOLUTIONLIST_RESOLVED
RESOLUTIONLIST_RESOLUTION_FAILURE_BAD_SERVICE = _internals.RESOLUTIONLIST_RESOLUTION_FAILURE_BAD_SERVICE
RESOLUTIONLIST_RESOLUTION_FAILURE_SERVICE_AUTHORIZATION_FAILED = _internals.RESOLUTIONLIST_RESOLUTION_FAILURE_SERVICE_AUTHORIZATION_FAILED
RESOLUTIONLIST_RESOLUTION_FAILURE_BAD_TOPIC = _internals.RESOLUTIONLIST_RESOLUTION_FAILURE_BAD_TOPIC
RESOLUTIONLIST_RESOLUTION_FAILURE_TOPIC_AUTHORIZATION_FAILED = _internals.RESOLUTIONLIST_RESOLUTION_FAILURE_TOPIC_AUTHORIZATION_FAILED
# -- Schema / element bounds --
ELEMENTDEFINITION_UNBOUNDED = _internals.ELEMENTDEFINITION_UNBOUNDED
ELEMENT_INDEX_END = _internals.ELEMENT_INDEX_END
# -- Service registration priorities (LOW is re-exported further below) --
SERVICEREGISTRATIONOPTIONS_PRIORITY_MEDIUM = _internals.SERVICEREGISTRATIONOPTIONS_PRIORITY_MEDIUM
SERVICEREGISTRATIONOPTIONS_PRIORITY_HIGH = _internals.SERVICEREGISTRATIONOPTIONS_PRIORITY_HIGH
# -- CorrelationId value types (also exposed as CorrelationId.*_TYPE) --
CORRELATION_TYPE_UNSET = _internals.CORRELATION_TYPE_UNSET
CORRELATION_TYPE_INT = _internals.CORRELATION_TYPE_INT
CORRELATION_TYPE_POINTER = _internals.CORRELATION_TYPE_POINTER
CORRELATION_TYPE_AUTOGEN = _internals.CORRELATION_TYPE_AUTOGEN
CORRELATION_MAX_CLASS_ID = _internals.CORRELATION_MAX_CLASS_ID
# -- Managed pointer operations --
MANAGEDPTR_COPY = _internals.MANAGEDPTR_COPY
MANAGEDPTR_DESTROY = _internals.MANAGEDPTR_DESTROY
# -- Datetime "parts" bitmask components --
DATETIME_YEAR_PART = _internals.DATETIME_YEAR_PART
DATETIME_MONTH_PART = _internals.DATETIME_MONTH_PART
DATETIME_DAY_PART = _internals.DATETIME_DAY_PART
DATETIME_OFFSET_PART = _internals.DATETIME_OFFSET_PART
DATETIME_HOURS_PART = _internals.DATETIME_HOURS_PART
DATETIME_MINUTES_PART = _internals.DATETIME_MINUTES_PART
DATETIME_SECONDS_PART = _internals.DATETIME_SECONDS_PART
DATETIME_MILLISECONDS_PART = _internals.DATETIME_MILLISECONDS_PART
DATETIME_FRACSECONDS_PART = _internals.DATETIME_FRACSECONDS_PART
DATETIME_DATE_PART = _internals.DATETIME_DATE_PART
DATETIME_TIME_PART = _internals.DATETIME_TIME_PART
DATETIME_TIMEMILLI_PART = _internals.DATETIME_TIMEMILLI_PART
DATETIME_TIMEFRACSECONDS_PART = _internals.DATETIME_TIMEFRACSECONDS_PART
# -- Event types delivered by sessions --
EVENTTYPE_ADMIN = _internals.EVENTTYPE_ADMIN
EVENTTYPE_SESSION_STATUS = _internals.EVENTTYPE_SESSION_STATUS
EVENTTYPE_SUBSCRIPTION_STATUS = _internals.EVENTTYPE_SUBSCRIPTION_STATUS
EVENTTYPE_REQUEST_STATUS = _internals.EVENTTYPE_REQUEST_STATUS
EVENTTYPE_RESPONSE = _internals.EVENTTYPE_RESPONSE
EVENTTYPE_PARTIAL_RESPONSE = _internals.EVENTTYPE_PARTIAL_RESPONSE
EVENTTYPE_SUBSCRIPTION_DATA = _internals.EVENTTYPE_SUBSCRIPTION_DATA
EVENTTYPE_SERVICE_STATUS = _internals.EVENTTYPE_SERVICE_STATUS
EVENTTYPE_TIMEOUT = _internals.EVENTTYPE_TIMEOUT
EVENTTYPE_AUTHORIZATION_STATUS = _internals.EVENTTYPE_AUTHORIZATION_STATUS
EVENTTYPE_RESOLUTION_STATUS = _internals.EVENTTYPE_RESOLUTION_STATUS
EVENTTYPE_TOPIC_STATUS = _internals.EVENTTYPE_TOPIC_STATUS
EVENTTYPE_TOKEN_STATUS = _internals.EVENTTYPE_TOKEN_STATUS
EVENTTYPE_REQUEST = _internals.EVENTTYPE_REQUEST
# -- Schema status --
STATUS_ACTIVE = _internals.STATUS_ACTIVE
STATUS_DEPRECATED = _internals.STATUS_DEPRECATED
STATUS_INACTIVE = _internals.STATUS_INACTIVE
STATUS_PENDING_DEPRECATION = _internals.STATUS_PENDING_DEPRECATION
# -- Subscription lifecycle status --
SUBSCRIPTIONSTATUS_UNSUBSCRIBED = _internals.SUBSCRIPTIONSTATUS_UNSUBSCRIBED
SUBSCRIPTIONSTATUS_SUBSCRIBING = _internals.SUBSCRIPTIONSTATUS_SUBSCRIBING
SUBSCRIPTIONSTATUS_SUBSCRIBED = _internals.SUBSCRIPTIONSTATUS_SUBSCRIBED
SUBSCRIPTIONSTATUS_CANCELLED = _internals.SUBSCRIPTIONSTATUS_CANCELLED
SUBSCRIPTIONSTATUS_PENDING_CANCELLATION = _internals.SUBSCRIPTIONSTATUS_PENDING_CANCELLATION
# -- Session client modes --
CLIENTMODE_AUTO = _internals.CLIENTMODE_AUTO
CLIENTMODE_DAPI = _internals.CLIENTMODE_DAPI
CLIENTMODE_SAPI = _internals.CLIENTMODE_SAPI
CLIENTMODE_COMPAT_33X = _internals.CLIENTMODE_COMPAT_33X
# -- Provider-session resolve modes --
RESOLVEMODE_DONT_REGISTER_SERVICES = _internals.RESOLVEMODE_DONT_REGISTER_SERVICES
RESOLVEMODE_AUTO_REGISTER_SERVICES = _internals.RESOLVEMODE_AUTO_REGISTER_SERVICES
# -- Seat types --
SEATTYPE_INVALID_SEAT = _internals.SEATTYPE_INVALID_SEAT
SEATTYPE_BPS = _internals.SEATTYPE_BPS
SEATTYPE_NONBPS = _internals.SEATTYPE_NONBPS
SERVICEREGISTRATIONOPTIONS_PRIORITY_LOW = _internals.SERVICEREGISTRATIONOPTIONS_PRIORITY_LOW
# -- Element data types --
DATATYPE_BOOL = _internals.DATATYPE_BOOL
DATATYPE_CHAR = _internals.DATATYPE_CHAR
DATATYPE_BYTE = _internals.DATATYPE_BYTE
DATATYPE_INT32 = _internals.DATATYPE_INT32
DATATYPE_INT64 = _internals.DATATYPE_INT64
DATATYPE_FLOAT32 = _internals.DATATYPE_FLOAT32
DATATYPE_FLOAT64 = _internals.DATATYPE_FLOAT64
DATATYPE_STRING = _internals.DATATYPE_STRING
DATATYPE_BYTEARRAY = _internals.DATATYPE_BYTEARRAY
DATATYPE_DATE = _internals.DATATYPE_DATE
DATATYPE_TIME = _internals.DATATYPE_TIME
DATATYPE_DECIMAL = _internals.DATATYPE_DECIMAL
DATATYPE_DATETIME = _internals.DATATYPE_DATETIME
DATATYPE_ENUMERATION = _internals.DATATYPE_ENUMERATION
DATATYPE_SEQUENCE = _internals.DATATYPE_SEQUENCE
DATATYPE_CHOICE = _internals.DATATYPE_CHOICE
DATATYPE_CORRELATION_ID = _internals.DATATYPE_CORRELATION_ID
# SWIG-generated thin wrappers around '_internals' helper functions.  Each
# def is immediately rebound to the C function itself, so the Python shim is
# bypassed at call time; the def exists only for introspection/pickling.
def blpapi_Service_printHelper(*args):
  return _internals.blpapi_Service_printHelper(*args)
blpapi_Service_printHelper = _internals.blpapi_Service_printHelper
def blpapi_SchemaElementDefinition_printHelper(*args):
  return _internals.blpapi_SchemaElementDefinition_printHelper(*args)
blpapi_SchemaElementDefinition_printHelper = _internals.blpapi_SchemaElementDefinition_printHelper
def blpapi_SchemaTypeDefinition_printHelper(*args):
  return _internals.blpapi_SchemaTypeDefinition_printHelper(*args)
blpapi_SchemaTypeDefinition_printHelper = _internals.blpapi_SchemaTypeDefinition_printHelper
def blpapi_SchemaTypeDefinition_hasElementDefinition(*args):
  return _internals.blpapi_SchemaTypeDefinition_hasElementDefinition(*args)
blpapi_SchemaTypeDefinition_hasElementDefinition = _internals.blpapi_SchemaTypeDefinition_hasElementDefinition
def blpapi_ConstantList_hasConstant(*args):
  return _internals.blpapi_ConstantList_hasConstant(*args)
blpapi_ConstantList_hasConstant = _internals.blpapi_ConstantList_hasConstant
def blpapi_Service_hasEventDefinition(*args):
  return _internals.blpapi_Service_hasEventDefinition(*args)
blpapi_Service_hasEventDefinition = _internals.blpapi_Service_hasEventDefinition
def blpapi_Service_hasOperation(*args):
  return _internals.blpapi_Service_hasOperation(*args)
blpapi_Service_hasOperation = _internals.blpapi_Service_hasOperation
def blpapi_SubscriptionList_addHelper(*args):
  return _internals.blpapi_SubscriptionList_addHelper(*args)
blpapi_SubscriptionList_addHelper = _internals.blpapi_SubscriptionList_addHelper
def blpapi_Name_hasName(*args):
  return _internals.blpapi_Name_hasName(*args)
blpapi_Name_hasName = _internals.blpapi_Name_hasName
def blpapi_TopicList_createFromResolutionList(*args):
  return _internals.blpapi_TopicList_createFromResolutionList(*args)
blpapi_TopicList_createFromResolutionList = _internals.blpapi_TopicList_createFromResolutionList
class intArray(_object):
    """SWIG proxy for a C 'int' array (carray.i helper).

    Attribute access is routed through the SWIG setter/getter tables; the
    underlying C storage is owned via the 'this' SwigPyObject.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, intArray, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, intArray, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        # Create the underlying C array and attach its SwigPyObject handle.
        this = _internals.new_intArray(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _internals.delete_intArray
    __del__ = lambda self : None;
    def __getitem__(self, *args): return _internals.intArray___getitem__(self, *args)
    def __setitem__(self, *args): return _internals.intArray___setitem__(self, *args)
    def cast(self): return _internals.intArray_cast(self)
    # frompointer wraps an existing C pointer without copying.
    __swig_getmethods__["frompointer"] = lambda x: _internals.intArray_frompointer
    if _newclass:frompointer = staticmethod(_internals.intArray_frompointer)
# Register the Python proxy class with the extension so C-created objects
# surface as intArray instances.
intArray_swigregister = _internals.intArray_swigregister
intArray_swigregister(intArray)
def intArray_frompointer(*args):
  return _internals.intArray_frompointer(*args)
intArray_frompointer = _internals.intArray_frompointer
# C-level equality used by CorrelationId.__eq__ below.
def CorrelationId_t_equals(*args):
  return _internals.CorrelationId_t_equals(*args)
CorrelationId_t_equals = _internals.CorrelationId_t_equals
class CorrelationId(_object):
    """
    A key used to identify individual subscriptions or requests.

    CorrelationId([value[, classId=0]]) constructs a CorrelationId object.
    If 'value' is integer (either int or long) then created CorrelationId
    will have type() == CorrelationId.INT_TYPE. Otherwise it will have
    type() == CorrelationId.OBJECT_TYPE. If no arguments are specified
    then it will have type() == CorrelationId.UNSET_TYPE.

    Two CorrelationIds are considered equal if they have the same
    type() and:
      - holds the same (not just equal!) objects in case of
        type() == CorrelationId.OBJECT_TYPE
      - holds equal integers in case of
        type() == CorrelationId.INT_TYPE or
        type() == CorrelationId.AUTOGEN_TYPE
      - True otherwise
        (i.e. in case of type() == CorrelationId.UNSET_TYPE)

    It is possible that an user constructed CorrelationId and a
    CorrelationId generated by the API could return the same
    result for value(). However, they will not compare equal because
    they have different type().

    CorrelationId objects are passed to many of the Session object
    methods which initiate an asynchronous operations and are
    obtained from Message objects which are delivered as a result
    of those asynchronous operations.

    When subscribing or requesting information an application has
    the choice of providing a CorrelationId they construct
    themselves or allowing the session to construct one for
    them. If the application supplies a CorrelationId it must not
    re-use the value contained in it in another CorrelationId
    whilst the original request or subscription is still active.

    Class attributes:
        Possible return values for type() method:
        UNSET_TYPE    The CorrelationId is unset. That is, it was created by
                      the default CorrelationId constructor.
        INT_TYPE      The CorrelationId was created from an integer (or long)
                      supplied by the user.
        OBJECT_TYPE   The CorrelationId was created from an object supplied
                      by the user.
        AUTOGEN_TYPE  The CorrelationId was created internally by API.
        MAX_CLASS_ID  The maximum value allowed for classId.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, CorrelationId, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, CorrelationId, name)
    __repr__ = _swig_repr
    # Value-type constants (aliases of the module-level CORRELATION_TYPE_*).
    UNSET_TYPE = _internals.CORRELATION_TYPE_UNSET
    INT_TYPE = _internals.CORRELATION_TYPE_INT
    OBJECT_TYPE = _internals.CORRELATION_TYPE_POINTER
    AUTOGEN_TYPE = _internals.CORRELATION_TYPE_AUTOGEN
    MAX_CLASS_ID = _internals.CORRELATION_MAX_CLASS_ID
    # Human-readable names used by __str__.
    __TYPE_NAMES = {
        _internals.CORRELATION_TYPE_UNSET: "UNSET",
        _internals.CORRELATION_TYPE_INT: "INTEGER",
        _internals.CORRELATION_TYPE_POINTER: "OBJECT",
        _internals.CORRELATION_TYPE_AUTOGEN: "AUTOGEN"
    }
    def __str__(self):
        """x.__str__() <==> str(x)"""
        valueType = self.type()
        valueTypeName = CorrelationId.__TYPE_NAMES[valueType]
        if valueType == CorrelationId.UNSET_TYPE:
            return valueTypeName
        else:
            return "({0}: {1!r}, ClassId: {2})".format(
                valueTypeName, self.value(), self.classId())
    def __hash__(self):
        # Hash on (type, classId, integer form) so equal ids hash equally.
        return hash((self.type(), self.classId(), self.__toInteger()))
    def __eq__(self, other):
        """x.__eq__(y) <==> x==y"""
        try:
            return CorrelationId_t_equals(self, other)
        except Exception:
            # 'other' is not comparable at the C level.
            return NotImplemented
    def __ne__(self, other):
        """x.__ne__(y) <==> x!=y"""
        equal = self.__eq__(other)
        return NotImplemented if equal is NotImplemented else not equal
    def value(self):
        """Return the value of this CorrelationId object. The return value
        depends on this CorrelationId's value type and could be:
            - integer (type() == CorrelationId.INT_TYPE
                       or type() == CorrelationId.AUTOGEN_TYPE)
            - object (type() == CorrelationId.OBJECT_TYPE)
            - None (type() == CorrelationId.UNSET_TYPE)
        """
        valueType = self.type()
        if valueType == CorrelationId.INT_TYPE \
                or valueType == CorrelationId.AUTOGEN_TYPE:
            return self.__asInteger()
        elif valueType == CorrelationId.OBJECT_TYPE:
            return self.__asObject()
        else:
            return None
    def _handle(self):
        # Internal: the SWIG-level handle is the object itself.
        return self
    def __init__(self, *args):
        # Build the C-level correlation id and attach its handle.
        this = _internals.new_CorrelationId(*args)
        try: self.this.append(this)
        except: self.this = this
    __swig_destroy__ = _internals.delete_CorrelationId
    __del__ = lambda self : None;
    def type(self):
        """Return the type of this CorrelationId object (see xxx_TYPE class attributes)"""
        return _internals.CorrelationId_type(self)
    def classId(self):
        """Return the user defined classification of this CorrelationId object"""
        return _internals.CorrelationId_classId(self)
    # Private C-level accessors used by value() and __hash__.
    def __asObject(self): return _internals.CorrelationId___asObject(self)
    def __asInteger(self): return _internals.CorrelationId___asInteger(self)
    def __toInteger(self): return _internals.CorrelationId___toInteger(self)
CorrelationId_swigregister = _internals.CorrelationId_swigregister
CorrelationId_swigregister(CorrelationId)
def blpapi_Element_setElementFloat(*args):
return _internals.blpapi_Element_setElementFloat(*args)
blpapi_Element_setElementFloat = _internals.blpapi_Element_setElementFloat
def blpapi_Element_setValueFloat(*args):
return _internals.blpapi_Element_setValueFloat(*args)
blpapi_Element_setValueFloat = _internals.blpapi_Element_setValueFloat
def blpapi_Element_printHelper(*args):
return _internals.blpapi_Element_printHelper(*args)
blpapi_Element_printHelper = _internals.blpapi_Element_printHelper
def blpapi_Element_name(*args):
return _internals.blpapi_Element_name(*args)
blpapi_Element_name = _internals.blpapi_Element_name
def blpapi_Element_nameString(*args):
return _internals.blpapi_Element_nameString(*args)
blpapi_Element_nameString = _internals.blpapi_Element_nameString
def blpapi_Element_definition(*args):
return _internals.blpapi_Element_definition(*args)
blpapi_Element_definition = _internals.blpapi_Element_definition
def blpapi_Element_datatype(*args):
return _internals.blpapi_Element_datatype(*args)
blpapi_Element_datatype = _internals.blpapi_Element_datatype
def blpapi_Element_isComplexType(*args):
return _internals.blpapi_Element_isComplexType(*args)
blpapi_Element_isComplexType = _internals.blpapi_Element_isComplexType
def blpapi_Element_isArray(*args):
return _internals.blpapi_Element_isArray(*args)
blpapi_Element_isArray = _internals.blpapi_Element_isArray
def blpapi_Element_isReadOnly(*args):
return _internals.blpapi_Element_isReadOnly(*args)
blpapi_Element_isReadOnly = _internals.blpapi_Element_isReadOnly
def blpapi_Element_numValues(*args):
return _internals.blpapi_Element_numValues(*args)
blpapi_Element_numValues = _internals.blpapi_Element_numValues
def blpapi_Element_numElements(*args):
return _internals.blpapi_Element_numElements(*args)
blpapi_Element_numElements = _internals.blpapi_Element_numElements
def blpapi_Element_isNullValue(*args):
return _internals.blpapi_Element_isNullValue(*args)
blpapi_Element_isNullValue = _internals.blpapi_Element_isNullValue
def blpapi_Element_isNull(*args):
return _internals.blpapi_Element_isNull(*args)
blpapi_Element_isNull = _internals.blpapi_Element_isNull
def blpapi_Element_getElementAt(*args):
return _internals.blpapi_Element_getElementAt(*args)
blpapi_Element_getElementAt = _internals.blpapi_Element_getElementAt
def blpapi_Element_getElement(*args):
return _internals.blpapi_Element_getElement(*args)
blpapi_Element_getElement = _internals.blpapi_Element_getElement
def blpapi_Element_hasElementEx(*args):
return _internals.blpapi_Element_hasElementEx(*args)
blpapi_Element_hasElementEx = _internals.blpapi_Element_hasElementEx
def blpapi_Element_getValueAsBool(*args):
return _internals.blpapi_Element_getValueAsBool(*args)
blpapi_Element_getValueAsBool = _internals.blpapi_Element_getValueAsBool
def blpapi_Element_getValueAsChar(*args):
return _internals.blpapi_Element_getValueAsChar(*args)
blpapi_Element_getValueAsChar = _internals.blpapi_Element_getValueAsChar
def blpapi_Element_getValueAsInt32(*args):
return _internals.blpapi_Element_getValueAsInt32(*args)
blpapi_Element_getValueAsInt32 = _internals.blpapi_Element_getValueAsInt32
def blpapi_Element_getValueAsInt64(*args):
return _internals.blpapi_Element_getValueAsInt64(*args)
blpapi_Element_getValueAsInt64 = _internals.blpapi_Element_getValueAsInt64
def blpapi_Element_getValueAsFloat64(*args):
return _internals.blpapi_Element_getValueAsFloat64(*args)
blpapi_Element_getValueAsFloat64 = _internals.blpapi_Element_getValueAsFloat64
def blpapi_Element_getValueAsString(*args):
return _internals.blpapi_Element_getValueAsString(*args)
blpapi_Element_getValueAsString = _internals.blpapi_Element_getValueAsString
def blpapi_Element_getValueAsDatetime(*args):
return _internals.blpapi_Element_getValueAsDatetime(*args)
blpapi_Element_getValueAsDatetime = _internals.blpapi_Element_getValueAsDatetime
def blpapi_Element_getValueAsHighPrecisionDatetime(*args):
return _internals.blpapi_Element_getValueAsHighPrecisionDatetime(*args)
blpapi_Element_getValueAsHighPrecisionDatetime = _internals.blpapi_Element_getValueAsHighPrecisionDatetime
def blpapi_Element_getValueAsElement(*args):
return _internals.blpapi_Element_getValueAsElement(*args)
blpapi_Element_getValueAsElement = _internals.blpapi_Element_getValueAsElement
def blpapi_Element_getValueAsName(*args):
return _internals.blpapi_Element_getValueAsName(*args)
blpapi_Element_getValueAsName = _internals.blpapi_Element_getValueAsName
def blpapi_Element_getChoice(*args):
return _internals.blpapi_Element_getChoice(*args)
blpapi_Element_getChoice = _internals.blpapi_Element_getChoice
def blpapi_Element_setValueBool(*args):
return _internals.blpapi_Element_setValueBool(*args)
blpapi_Element_setValueBool = _internals.blpapi_Element_setValueBool
def blpapi_Element_setValueInt32(*args):
return _internals.blpapi_Element_setValueInt32(*args)
blpapi_Element_setValueInt32 = _internals.blpapi_Element_setValueInt32
def blpapi_Element_setValueInt64(*args):
return _internals.blpapi_Element_setValueInt64(*args)
blpapi_Element_setValueInt64 = _internals.blpapi_Element_setValueInt64
def blpapi_Element_setValueString(*args):
return _internals.blpapi_Element_setValueString(*args)
blpapi_Element_setValueString = _internals.blpapi_Element_setValueString
def blpapi_Element_setValueDatetime(*args):
return _internals.blpapi_Element_setValueDatetime(*args)
blpapi_Element_setValueDatetime = _internals.blpapi_Element_setValueDatetime
def blpapi_Element_setValueHighPrecisionDatetime(*args):
return _internals.blpapi_Element_setValueHighPrecisionDatetime(*args)
blpapi_Element_setValueHighPrecisionDatetime = _internals.blpapi_Element_setValueHighPrecisionDatetime
def blpapi_Element_setValueFromName(*args):
return _internals.blpapi_Element_setValueFromName(*args)
blpapi_Element_setValueFromName = _internals.blpapi_Element_setValueFromName
def blpapi_Element_setElementBool(*args):
return _internals.blpapi_Element_setElementBool(*args)
blpapi_Element_setElementBool = _internals.blpapi_Element_setElementBool
def blpapi_Element_setElementInt32(*args):
return _internals.blpapi_Element_setElementInt32(*args)
blpapi_Element_setElementInt32 = _internals.blpapi_Element_setElementInt32
def blpapi_Element_setElementInt64(*args):
return _internals.blpapi_Element_setElementInt64(*args)
blpapi_Element_setElementInt64 = _internals.blpapi_Element_setElementInt64
def blpapi_Element_setElementString(*args):
return _internals.blpapi_Element_setElementString(*args)
blpapi_Element_setElementString = _internals.blpapi_Element_setElementString
def blpapi_Element_setElementDatetime(*args):
return _internals.blpapi_Element_setElementDatetime(*args)
blpapi_Element_setElementDatetime = _internals.blpapi_Element_setElementDatetime
def blpapi_Element_setElementHighPrecisionDatetime(*args):
return _internals.blpapi_Element_setElementHighPrecisionDatetime(*args)
blpapi_Element_setElementHighPrecisionDatetime = _internals.blpapi_Element_setElementHighPrecisionDatetime
def blpapi_Element_setElementFromName(*args):
return _internals.blpapi_Element_setElementFromName(*args)
blpapi_Element_setElementFromName = _internals.blpapi_Element_setElementFromName
def blpapi_Element_appendElement(*args):
return _internals.blpapi_Element_appendElement(*args)
blpapi_Element_appendElement = _internals.blpapi_Element_appendElement
def blpapi_Element_setChoice(*args):
return _internals.blpapi_Element_setChoice(*args)
blpapi_Element_setChoice = _internals.blpapi_Element_setChoice
def blpapi_EventFormatter_setValueFloat(*args):
return _internals.blpapi_EventFormatter_setValueFloat(*args)
blpapi_EventFormatter_setValueFloat = _internals.blpapi_EventFormatter_setValueFloat
def blpapi_EventFormatter_appendValueFloat(*args):
return _internals.blpapi_EventFormatter_appendValueFloat(*args)
blpapi_EventFormatter_appendValueFloat = _internals.blpapi_EventFormatter_appendValueFloat
def blpapi_EventFormatter_create(*args):
return _internals.blpapi_EventFormatter_create(*args)
blpapi_EventFormatter_create = _internals.blpapi_EventFormatter_create
def blpapi_EventFormatter_destroy(*args):
return _internals.blpapi_EventFormatter_destroy(*args)
blpapi_EventFormatter_destroy = _internals.blpapi_EventFormatter_destroy
def blpapi_EventFormatter_appendMessage(*args):
return _internals.blpapi_EventFormatter_appendMessage(*args)
blpapi_EventFormatter_appendMessage = _internals.blpapi_EventFormatter_appendMessage
def blpapi_EventFormatter_appendMessageSeq(*args):
return _internals.blpapi_EventFormatter_appendMessageSeq(*args)
blpapi_EventFormatter_appendMessageSeq = _internals.blpapi_EventFormatter_appendMessageSeq
def blpapi_EventFormatter_appendResponse(*args):
return _internals.blpapi_EventFormatter_appendResponse(*args)
blpapi_EventFormatter_appendResponse = _internals.blpapi_EventFormatter_appendResponse
def blpapi_EventFormatter_appendRecapMessage(*args):
return _internals.blpapi_EventFormatter_appendRecapMessage(*args)
blpapi_EventFormatter_appendRecapMessage = _internals.blpapi_EventFormatter_appendRecapMessage
def blpapi_EventFormatter_appendRecapMessageSeq(*args):
return _internals.blpapi_EventFormatter_appendRecapMessageSeq(*args)
blpapi_EventFormatter_appendRecapMessageSeq = _internals.blpapi_EventFormatter_appendRecapMessageSeq
def blpapi_EventFormatter_setValueBool(*args):
return _internals.blpapi_EventFormatter_setValueBool(*args)
blpapi_EventFormatter_setValueBool = _internals.blpapi_EventFormatter_setValueBool
def blpapi_EventFormatter_setValueChar(*args):
return _internals.blpapi_EventFormatter_setValueChar(*args)
blpapi_EventFormatter_setValueChar = _internals.blpapi_EventFormatter_setValueChar
def blpapi_EventFormatter_setValueInt32(*args):
return _internals.blpapi_EventFormatter_setValueInt32(*args)
blpapi_EventFormatter_setValueInt32 = _internals.blpapi_EventFormatter_setValueInt32
def blpapi_EventFormatter_setValueInt64(*args):
return _internals.blpapi_EventFormatter_setValueInt64(*args)
blpapi_EventFormatter_setValueInt64 = _internals.blpapi_EventFormatter_setValueInt64
def blpapi_EventFormatter_setValueDatetime(*args):
return _internals.blpapi_EventFormatter_setValueDatetime(*args)
blpapi_EventFormatter_setValueDatetime = _internals.blpapi_EventFormatter_setValueDatetime
def blpapi_EventFormatter_setValueString(*args):
return _internals.blpapi_EventFormatter_setValueString(*args)
blpapi_EventFormatter_setValueString = _internals.blpapi_EventFormatter_setValueString
def blpapi_EventFormatter_setValueFromName(*args):
return _internals.blpapi_EventFormatter_setValueFromName(*args)
blpapi_EventFormatter_setValueFromName = _internals.blpapi_EventFormatter_setValueFromName
def blpapi_EventFormatter_pushElement(*args):
return _internals.blpapi_EventFormatter_pushElement(*args)
blpapi_EventFormatter_pushElement = _internals.blpapi_EventFormatter_pushElement
def blpapi_EventFormatter_popElement(*args):
return _internals.blpapi_EventFormatter_popElement(*args)
blpapi_EventFormatter_popElement = _internals.blpapi_EventFormatter_popElement
def blpapi_EventFormatter_appendValueBool(*args):
return _internals.blpapi_EventFormatter_appendValueBool(*args)
blpapi_EventFormatter_appendValueBool = _internals.blpapi_EventFormatter_appendValueBool
def blpapi_EventFormatter_appendValueChar(*args):
return _internals.blpapi_EventFormatter_appendValueChar(*args)
blpapi_EventFormatter_appendValueChar = _internals.blpapi_EventFormatter_appendValueChar
def blpapi_EventFormatter_appendValueInt32(*args):
return _internals.blpapi_EventFormatter_appendValueInt32(*args)
blpapi_EventFormatter_appendValueInt32 = _internals.blpapi_EventFormatter_appendValueInt32
def blpapi_EventFormatter_appendValueInt64(*args):
return _internals.blpapi_EventFormatter_appendValueInt64(*args)
blpapi_EventFormatter_appendValueInt64 = _internals.blpapi_EventFormatter_appendValueInt64
def blpapi_EventFormatter_appendValueDatetime(*args):
return _internals.blpapi_EventFormatter_appendValueDatetime(*args)
blpapi_EventFormatter_appendValueDatetime = _internals.blpapi_EventFormatter_appendValueDatetime
def blpapi_EventFormatter_appendValueString(*args):
return _internals.blpapi_EventFormatter_appendValueString(*args)
blpapi_EventFormatter_appendValueString = _internals.blpapi_EventFormatter_appendValueString
def blpapi_EventFormatter_appendValueFromName(*args):
return _internals.blpapi_EventFormatter_appendValueFromName(*args)
blpapi_EventFormatter_appendValueFromName = _internals.blpapi_EventFormatter_appendValueFromName
def blpapi_EventFormatter_appendElement(*args):
return _internals.blpapi_EventFormatter_appendElement(*args)
blpapi_EventFormatter_appendElement = _internals.blpapi_EventFormatter_appendElement
def Session_createHelper(*args):
return _internals.Session_createHelper(*args)
Session_createHelper = _internals.Session_createHelper
def Session_destroyHelper(*args):
return _internals.Session_destroyHelper(*args)
Session_destroyHelper = _internals.Session_destroyHelper
def blpapi_EventDispatcher_create(*args):
return _internals.blpapi_EventDispatcher_create(*args)
blpapi_EventDispatcher_create = _internals.blpapi_EventDispatcher_create
def blpapi_EventDispatcher_destroy(*args):
return _internals.blpapi_EventDispatcher_destroy(*args)
blpapi_EventDispatcher_destroy = _internals.blpapi_EventDispatcher_destroy
def blpapi_EventDispatcher_start(*args):
return _internals.blpapi_EventDispatcher_start(*args)
blpapi_EventDispatcher_start = _internals.blpapi_EventDispatcher_start
def blpapi_EventDispatcher_stop(*args):
return _internals.blpapi_EventDispatcher_stop(*args)
blpapi_EventDispatcher_stop = _internals.blpapi_EventDispatcher_stop
def ProviderSession_createHelper(*args):
return _internals.ProviderSession_createHelper(*args)
ProviderSession_createHelper = _internals.ProviderSession_createHelper
def ProviderSession_destroyHelper(*args):
return _internals.ProviderSession_destroyHelper(*args)
ProviderSession_destroyHelper = _internals.ProviderSession_destroyHelper
UNKNOWN_CLASS = _internals.UNKNOWN_CLASS
INVALIDSTATE_CLASS = _internals.INVALIDSTATE_CLASS
INVALIDARG_CLASS = _internals.INVALIDARG_CLASS
IOERROR_CLASS = _internals.IOERROR_CLASS
CNVERROR_CLASS = _internals.CNVERROR_CLASS
BOUNDSERROR_CLASS = _internals.BOUNDSERROR_CLASS
NOTFOUND_CLASS = _internals.NOTFOUND_CLASS
FLDNOTFOUND_CLASS = _internals.FLDNOTFOUND_CLASS
UNSUPPORTED_CLASS = _internals.UNSUPPORTED_CLASS
ERROR_UNKNOWN = _internals.ERROR_UNKNOWN
ERROR_ILLEGAL_ARG = _internals.ERROR_ILLEGAL_ARG
ERROR_ILLEGAL_ACCESS = _internals.ERROR_ILLEGAL_ACCESS
ERROR_INVALID_SESSION = _internals.ERROR_INVALID_SESSION
ERROR_DUPLICATE_CORRELATIONID = _internals.ERROR_DUPLICATE_CORRELATIONID
ERROR_INTERNAL_ERROR = _internals.ERROR_INTERNAL_ERROR
ERROR_RESOLVE_FAILED = _internals.ERROR_RESOLVE_FAILED
ERROR_CONNECT_FAILED = _internals.ERROR_CONNECT_FAILED
ERROR_ILLEGAL_STATE = _internals.ERROR_ILLEGAL_STATE
ERROR_CODEC_FAILURE = _internals.ERROR_CODEC_FAILURE
ERROR_INDEX_OUT_OF_RANGE = _internals.ERROR_INDEX_OUT_OF_RANGE
ERROR_INVALID_CONVERSION = _internals.ERROR_INVALID_CONVERSION
ERROR_ITEM_NOT_FOUND = _internals.ERROR_ITEM_NOT_FOUND
ERROR_IO_ERROR = _internals.ERROR_IO_ERROR
ERROR_CORRELATION_NOT_FOUND = _internals.ERROR_CORRELATION_NOT_FOUND
ERROR_SERVICE_NOT_FOUND = _internals.ERROR_SERVICE_NOT_FOUND
ERROR_LOGON_LOOKUP_FAILED = _internals.ERROR_LOGON_LOOKUP_FAILED
ERROR_DS_LOOKUP_FAILED = _internals.ERROR_DS_LOOKUP_FAILED
ERROR_UNSUPPORTED_OPERATION = _internals.ERROR_UNSUPPORTED_OPERATION
ERROR_DS_PROPERTY_NOT_FOUND = _internals.ERROR_DS_PROPERTY_NOT_FOUND
# Re-export of the C error-description lookup. The SWIG-generated def wrapper
# that preceded this assignment was dead code (immediately shadowed by this
# rebinding) and has been removed.
blpapi_getLastErrorDescription = _internals.blpapi_getLastErrorDescription
# blpapi_SessionOptions_* C functions re-exported from _internals.
# The SWIG-generated pure-Python `def` wrapper that preceded each assignment
# was dead code -- the name was immediately rebound to the _internals
# attribute -- so only the direct bindings are kept.
blpapi_SessionOptions_create = _internals.blpapi_SessionOptions_create
blpapi_SessionOptions_destroy = _internals.blpapi_SessionOptions_destroy
blpapi_SessionOptions_setServerHost = _internals.blpapi_SessionOptions_setServerHost
blpapi_SessionOptions_setServerPort = _internals.blpapi_SessionOptions_setServerPort
blpapi_SessionOptions_setServerAddress = _internals.blpapi_SessionOptions_setServerAddress
blpapi_SessionOptions_removeServerAddress = _internals.blpapi_SessionOptions_removeServerAddress
blpapi_SessionOptions_setConnectTimeout = _internals.blpapi_SessionOptions_setConnectTimeout
blpapi_SessionOptions_setDefaultServices = _internals.blpapi_SessionOptions_setDefaultServices
blpapi_SessionOptions_setDefaultSubscriptionService = _internals.blpapi_SessionOptions_setDefaultSubscriptionService
blpapi_SessionOptions_setDefaultTopicPrefix = _internals.blpapi_SessionOptions_setDefaultTopicPrefix
blpapi_SessionOptions_setAllowMultipleCorrelatorsPerMsg = _internals.blpapi_SessionOptions_setAllowMultipleCorrelatorsPerMsg
blpapi_SessionOptions_setClientMode = _internals.blpapi_SessionOptions_setClientMode
blpapi_SessionOptions_setMaxPendingRequests = _internals.blpapi_SessionOptions_setMaxPendingRequests
blpapi_SessionOptions_setAutoRestartOnDisconnection = _internals.blpapi_SessionOptions_setAutoRestartOnDisconnection
blpapi_SessionOptions_setAuthenticationOptions = _internals.blpapi_SessionOptions_setAuthenticationOptions
blpapi_SessionOptions_setNumStartAttempts = _internals.blpapi_SessionOptions_setNumStartAttempts
blpapi_SessionOptions_setDefaultKeepAliveInactivityTime = _internals.blpapi_SessionOptions_setDefaultKeepAliveInactivityTime
blpapi_SessionOptions_setDefaultKeepAliveResponseTimeout = _internals.blpapi_SessionOptions_setDefaultKeepAliveResponseTimeout
blpapi_SessionOptions_serverHost = _internals.blpapi_SessionOptions_serverHost
blpapi_SessionOptions_serverPort = _internals.blpapi_SessionOptions_serverPort
blpapi_SessionOptions_numServerAddresses = _internals.blpapi_SessionOptions_numServerAddresses
blpapi_SessionOptions_getServerAddress = _internals.blpapi_SessionOptions_getServerAddress
blpapi_SessionOptions_connectTimeout = _internals.blpapi_SessionOptions_connectTimeout
blpapi_SessionOptions_defaultServices = _internals.blpapi_SessionOptions_defaultServices
blpapi_SessionOptions_defaultSubscriptionService = _internals.blpapi_SessionOptions_defaultSubscriptionService
blpapi_SessionOptions_defaultTopicPrefix = _internals.blpapi_SessionOptions_defaultTopicPrefix
blpapi_SessionOptions_allowMultipleCorrelatorsPerMsg = _internals.blpapi_SessionOptions_allowMultipleCorrelatorsPerMsg
blpapi_SessionOptions_clientMode = _internals.blpapi_SessionOptions_clientMode
blpapi_SessionOptions_maxPendingRequests = _internals.blpapi_SessionOptions_maxPendingRequests
blpapi_SessionOptions_autoRestartOnDisconnection = _internals.blpapi_SessionOptions_autoRestartOnDisconnection
blpapi_SessionOptions_authenticationOptions = _internals.blpapi_SessionOptions_authenticationOptions
blpapi_SessionOptions_numStartAttempts = _internals.blpapi_SessionOptions_numStartAttempts
blpapi_SessionOptions_maxEventQueueSize = _internals.blpapi_SessionOptions_maxEventQueueSize
blpapi_SessionOptions_slowConsumerWarningHiWaterMark = _internals.blpapi_SessionOptions_slowConsumerWarningHiWaterMark
blpapi_SessionOptions_slowConsumerWarningLoWaterMark = _internals.blpapi_SessionOptions_slowConsumerWarningLoWaterMark
blpapi_SessionOptions_defaultKeepAliveInactivityTime = _internals.blpapi_SessionOptions_defaultKeepAliveInactivityTime
blpapi_SessionOptions_defaultKeepAliveResponseTimeout = _internals.blpapi_SessionOptions_defaultKeepAliveResponseTimeout
# blpapi_Name_* C functions re-exported from _internals. The dead SWIG def
# wrappers (each immediately shadowed by these rebindings) have been removed.
blpapi_Name_create = _internals.blpapi_Name_create
blpapi_Name_destroy = _internals.blpapi_Name_destroy
blpapi_Name_equalsStr = _internals.blpapi_Name_equalsStr
blpapi_Name_string = _internals.blpapi_Name_string
blpapi_Name_length = _internals.blpapi_Name_length
blpapi_Name_findName = _internals.blpapi_Name_findName
# blpapi_SubscriptionList_* C functions re-exported from _internals. The dead
# SWIG def wrappers (each immediately shadowed by these rebindings) have been
# removed.
blpapi_SubscriptionList_create = _internals.blpapi_SubscriptionList_create
blpapi_SubscriptionList_destroy = _internals.blpapi_SubscriptionList_destroy
blpapi_SubscriptionList_clear = _internals.blpapi_SubscriptionList_clear
blpapi_SubscriptionList_append = _internals.blpapi_SubscriptionList_append
blpapi_SubscriptionList_size = _internals.blpapi_SubscriptionList_size
blpapi_SubscriptionList_correlationIdAt = _internals.blpapi_SubscriptionList_correlationIdAt
blpapi_SubscriptionList_topicStringAt = _internals.blpapi_SubscriptionList_topicStringAt
class blpapi_Datetime_tag(_object):
    """SWIG proxy for the C `blpapi_Datetime_tag` struct.

    Attribute reads and writes are routed through the SWIG helper tables
    (`__swig_getmethods__` / `__swig_setmethods__`) to the underlying C
    struct fields exposed by the _internals extension module.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, blpapi_Datetime_tag, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, blpapi_Datetime_tag, name)
    __repr__ = _swig_repr
    # Struct fields exposed through SWIG getter/setter pairs; on new-style
    # class builds they are additionally wrapped as properties.
    __swig_setmethods__["parts"] = _internals.blpapi_Datetime_tag_parts_set
    __swig_getmethods__["parts"] = _internals.blpapi_Datetime_tag_parts_get
    if _newclass:
        parts = _swig_property(_internals.blpapi_Datetime_tag_parts_get, _internals.blpapi_Datetime_tag_parts_set)
    __swig_setmethods__["hours"] = _internals.blpapi_Datetime_tag_hours_set
    __swig_getmethods__["hours"] = _internals.blpapi_Datetime_tag_hours_get
    if _newclass:
        hours = _swig_property(_internals.blpapi_Datetime_tag_hours_get, _internals.blpapi_Datetime_tag_hours_set)
    __swig_setmethods__["minutes"] = _internals.blpapi_Datetime_tag_minutes_set
    __swig_getmethods__["minutes"] = _internals.blpapi_Datetime_tag_minutes_get
    if _newclass:
        minutes = _swig_property(_internals.blpapi_Datetime_tag_minutes_get, _internals.blpapi_Datetime_tag_minutes_set)
    __swig_setmethods__["seconds"] = _internals.blpapi_Datetime_tag_seconds_set
    __swig_getmethods__["seconds"] = _internals.blpapi_Datetime_tag_seconds_get
    if _newclass:
        seconds = _swig_property(_internals.blpapi_Datetime_tag_seconds_get, _internals.blpapi_Datetime_tag_seconds_set)
    __swig_setmethods__["milliSeconds"] = _internals.blpapi_Datetime_tag_milliSeconds_set
    __swig_getmethods__["milliSeconds"] = _internals.blpapi_Datetime_tag_milliSeconds_get
    if _newclass:
        milliSeconds = _swig_property(_internals.blpapi_Datetime_tag_milliSeconds_get, _internals.blpapi_Datetime_tag_milliSeconds_set)
    __swig_setmethods__["month"] = _internals.blpapi_Datetime_tag_month_set
    __swig_getmethods__["month"] = _internals.blpapi_Datetime_tag_month_get
    if _newclass:
        month = _swig_property(_internals.blpapi_Datetime_tag_month_get, _internals.blpapi_Datetime_tag_month_set)
    __swig_setmethods__["day"] = _internals.blpapi_Datetime_tag_day_set
    __swig_getmethods__["day"] = _internals.blpapi_Datetime_tag_day_get
    if _newclass:
        day = _swig_property(_internals.blpapi_Datetime_tag_day_get, _internals.blpapi_Datetime_tag_day_set)
    __swig_setmethods__["year"] = _internals.blpapi_Datetime_tag_year_set
    __swig_getmethods__["year"] = _internals.blpapi_Datetime_tag_year_get
    if _newclass:
        year = _swig_property(_internals.blpapi_Datetime_tag_year_get, _internals.blpapi_Datetime_tag_year_set)
    __swig_setmethods__["offset"] = _internals.blpapi_Datetime_tag_offset_set
    __swig_getmethods__["offset"] = _internals.blpapi_Datetime_tag_offset_get
    if _newclass:
        offset = _swig_property(_internals.blpapi_Datetime_tag_offset_get, _internals.blpapi_Datetime_tag_offset_set)

    def __init__(self):
        """Allocate the underlying C struct and attach it to this proxy."""
        this = _internals.new_blpapi_Datetime_tag()
        # `self.this` may already be a SWIG pointer container; append to it
        # when possible, otherwise store the new pointer directly.
        try:
            self.this.append(this)
        except Exception:
            self.this = this

    __swig_destroy__ = _internals.delete_blpapi_Datetime_tag
    __del__ = lambda self: None

# Register the proxy class with the SWIG runtime.
blpapi_Datetime_tag_swigregister = _internals.blpapi_Datetime_tag_swigregister
blpapi_Datetime_tag_swigregister(blpapi_Datetime_tag)
class blpapi_HighPrecisionDatetime_tag(_object):
    """SWIG proxy for the C `blpapi_HighPrecisionDatetime_tag` struct.

    Exposes a `datetime` field and a `picoseconds` field; attribute access
    is routed through the SWIG helper tables to the C struct fields exposed
    by the _internals extension module.
    """
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, blpapi_HighPrecisionDatetime_tag, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, blpapi_HighPrecisionDatetime_tag, name)
    __repr__ = _swig_repr
    __swig_setmethods__["datetime"] = _internals.blpapi_HighPrecisionDatetime_tag_datetime_set
    __swig_getmethods__["datetime"] = _internals.blpapi_HighPrecisionDatetime_tag_datetime_get
    if _newclass:
        datetime = _swig_property(_internals.blpapi_HighPrecisionDatetime_tag_datetime_get, _internals.blpapi_HighPrecisionDatetime_tag_datetime_set)
    __swig_setmethods__["picoseconds"] = _internals.blpapi_HighPrecisionDatetime_tag_picoseconds_set
    __swig_getmethods__["picoseconds"] = _internals.blpapi_HighPrecisionDatetime_tag_picoseconds_get
    if _newclass:
        picoseconds = _swig_property(_internals.blpapi_HighPrecisionDatetime_tag_picoseconds_get, _internals.blpapi_HighPrecisionDatetime_tag_picoseconds_set)

    def __init__(self):
        """Allocate the underlying C struct and attach it to this proxy."""
        this = _internals.new_blpapi_HighPrecisionDatetime_tag()
        # `self.this` may already be a SWIG pointer container; append to it
        # when possible, otherwise store the new pointer directly.
        try:
            self.this.append(this)
        except Exception:
            self.this = this

    __swig_destroy__ = _internals.delete_blpapi_HighPrecisionDatetime_tag
    __del__ = lambda self: None

# Register the proxy class with the SWIG runtime.
blpapi_HighPrecisionDatetime_tag_swigregister = _internals.blpapi_HighPrecisionDatetime_tag_swigregister
blpapi_HighPrecisionDatetime_tag_swigregister(blpapi_HighPrecisionDatetime_tag)
# blpapi_HighPrecisionDatetime_* C functions re-exported from _internals.
# The dead SWIG def wrappers (each immediately shadowed by these rebindings)
# have been removed.
blpapi_HighPrecisionDatetime_compare = _internals.blpapi_HighPrecisionDatetime_compare
blpapi_HighPrecisionDatetime_print = _internals.blpapi_HighPrecisionDatetime_print
# blpapi_Constant_* C functions re-exported from _internals. The dead SWIG
# def wrappers (each immediately shadowed by these rebindings) have been
# removed.
blpapi_Constant_name = _internals.blpapi_Constant_name
blpapi_Constant_description = _internals.blpapi_Constant_description
blpapi_Constant_status = _internals.blpapi_Constant_status
blpapi_Constant_datatype = _internals.blpapi_Constant_datatype
blpapi_Constant_getValueAsInt64 = _internals.blpapi_Constant_getValueAsInt64
blpapi_Constant_getValueAsFloat64 = _internals.blpapi_Constant_getValueAsFloat64
blpapi_Constant_getValueAsDatetime = _internals.blpapi_Constant_getValueAsDatetime
blpapi_Constant_getValueAsString = _internals.blpapi_Constant_getValueAsString
# blpapi_ConstantList_* C functions re-exported from _internals. The dead
# SWIG def wrappers (each immediately shadowed by these rebindings) have been
# removed.
blpapi_ConstantList_name = _internals.blpapi_ConstantList_name
blpapi_ConstantList_description = _internals.blpapi_ConstantList_description
blpapi_ConstantList_numConstants = _internals.blpapi_ConstantList_numConstants
blpapi_ConstantList_datatype = _internals.blpapi_ConstantList_datatype
blpapi_ConstantList_status = _internals.blpapi_ConstantList_status
blpapi_ConstantList_getConstant = _internals.blpapi_ConstantList_getConstant
blpapi_ConstantList_getConstantAt = _internals.blpapi_ConstantList_getConstantAt
# blpapi_SchemaElementDefinition_* C functions re-exported from _internals.
# The dead SWIG def wrappers (each immediately shadowed by these rebindings)
# have been removed.
blpapi_SchemaElementDefinition_name = _internals.blpapi_SchemaElementDefinition_name
blpapi_SchemaElementDefinition_description = _internals.blpapi_SchemaElementDefinition_description
blpapi_SchemaElementDefinition_status = _internals.blpapi_SchemaElementDefinition_status
blpapi_SchemaElementDefinition_type = _internals.blpapi_SchemaElementDefinition_type
blpapi_SchemaElementDefinition_numAlternateNames = _internals.blpapi_SchemaElementDefinition_numAlternateNames
blpapi_SchemaElementDefinition_getAlternateName = _internals.blpapi_SchemaElementDefinition_getAlternateName
blpapi_SchemaElementDefinition_minValues = _internals.blpapi_SchemaElementDefinition_minValues
blpapi_SchemaElementDefinition_maxValues = _internals.blpapi_SchemaElementDefinition_maxValues
# blpapi_SchemaTypeDefinition_* C functions re-exported from _internals.
# The dead SWIG def wrappers (each immediately shadowed by these rebindings)
# have been removed.
blpapi_SchemaTypeDefinition_name = _internals.blpapi_SchemaTypeDefinition_name
blpapi_SchemaTypeDefinition_description = _internals.blpapi_SchemaTypeDefinition_description
blpapi_SchemaTypeDefinition_status = _internals.blpapi_SchemaTypeDefinition_status
blpapi_SchemaTypeDefinition_datatype = _internals.blpapi_SchemaTypeDefinition_datatype
blpapi_SchemaTypeDefinition_isComplexType = _internals.blpapi_SchemaTypeDefinition_isComplexType
blpapi_SchemaTypeDefinition_isSimpleType = _internals.blpapi_SchemaTypeDefinition_isSimpleType
blpapi_SchemaTypeDefinition_isEnumerationType = _internals.blpapi_SchemaTypeDefinition_isEnumerationType
blpapi_SchemaTypeDefinition_numElementDefinitions = _internals.blpapi_SchemaTypeDefinition_numElementDefinitions
blpapi_SchemaTypeDefinition_getElementDefinition = _internals.blpapi_SchemaTypeDefinition_getElementDefinition
blpapi_SchemaTypeDefinition_getElementDefinitionAt = _internals.blpapi_SchemaTypeDefinition_getElementDefinitionAt
blpapi_SchemaTypeDefinition_enumeration = _internals.blpapi_SchemaTypeDefinition_enumeration
# blpapi_Request_* C functions re-exported from _internals. The dead SWIG
# def wrappers (each immediately shadowed by these rebindings) have been
# removed.
blpapi_Request_destroy = _internals.blpapi_Request_destroy
blpapi_Request_elements = _internals.blpapi_Request_elements
blpapi_Request_setPreferredRoute = _internals.blpapi_Request_setPreferredRoute
# blpapi_Operation_* C functions re-exported from _internals. The dead SWIG
# def wrappers (each immediately shadowed by these rebindings) have been
# removed.
blpapi_Operation_name = _internals.blpapi_Operation_name
blpapi_Operation_description = _internals.blpapi_Operation_description
blpapi_Operation_requestDefinition = _internals.blpapi_Operation_requestDefinition
blpapi_Operation_numResponseDefinitions = _internals.blpapi_Operation_numResponseDefinitions
blpapi_Operation_responseDefinition = _internals.blpapi_Operation_responseDefinition
# blpapi_Service_* C functions re-exported from _internals. The dead SWIG
# def wrappers (each immediately shadowed by these rebindings) have been
# removed.
blpapi_Service_name = _internals.blpapi_Service_name
blpapi_Service_description = _internals.blpapi_Service_description
blpapi_Service_numOperations = _internals.blpapi_Service_numOperations
blpapi_Service_numEventDefinitions = _internals.blpapi_Service_numEventDefinitions
blpapi_Service_addRef = _internals.blpapi_Service_addRef
blpapi_Service_release = _internals.blpapi_Service_release
blpapi_Service_authorizationServiceName = _internals.blpapi_Service_authorizationServiceName
blpapi_Service_getOperation = _internals.blpapi_Service_getOperation
blpapi_Service_getOperationAt = _internals.blpapi_Service_getOperationAt
blpapi_Service_getEventDefinition = _internals.blpapi_Service_getEventDefinition
blpapi_Service_getEventDefinitionAt = _internals.blpapi_Service_getEventDefinitionAt
blpapi_Service_createRequest = _internals.blpapi_Service_createRequest
blpapi_Service_createAuthorizationRequest = _internals.blpapi_Service_createAuthorizationRequest
blpapi_Service_createPublishEvent = _internals.blpapi_Service_createPublishEvent
blpapi_Service_createAdminEvent = _internals.blpapi_Service_createAdminEvent
blpapi_Service_createResponseEvent = _internals.blpapi_Service_createResponseEvent
# blpapi_Message_* C functions re-exported from _internals. The dead SWIG
# def wrappers (each immediately shadowed by these rebindings) have been
# removed.
blpapi_Message_messageType = _internals.blpapi_Message_messageType
blpapi_Message_topicName = _internals.blpapi_Message_topicName
blpapi_Message_service = _internals.blpapi_Message_service
blpapi_Message_numCorrelationIds = _internals.blpapi_Message_numCorrelationIds
blpapi_Message_correlationId = _internals.blpapi_Message_correlationId
blpapi_Message_elements = _internals.blpapi_Message_elements
blpapi_Message_fragmentType = _internals.blpapi_Message_fragmentType
blpapi_Message_addRef = _internals.blpapi_Message_addRef
blpapi_Message_release = _internals.blpapi_Message_release
# blpapi_Event_* C functions re-exported from _internals. The dead SWIG def
# wrappers (each immediately shadowed by these rebindings) have been removed.
blpapi_Event_eventType = _internals.blpapi_Event_eventType
blpapi_Event_release = _internals.blpapi_Event_release
# blpapi_EventQueue_* C functions re-exported from _internals. The dead SWIG
# def wrappers (each immediately shadowed by these rebindings) have been
# removed.
blpapi_EventQueue_create = _internals.blpapi_EventQueue_create
blpapi_EventQueue_destroy = _internals.blpapi_EventQueue_destroy
blpapi_EventQueue_nextEvent = _internals.blpapi_EventQueue_nextEvent
blpapi_EventQueue_purge = _internals.blpapi_EventQueue_purge
blpapi_EventQueue_tryNextEvent = _internals.blpapi_EventQueue_tryNextEvent
# blpapi_MessageIterator_* C functions re-exported from _internals. The dead
# SWIG def wrappers (each immediately shadowed by these rebindings) have been
# removed.
blpapi_MessageIterator_create = _internals.blpapi_MessageIterator_create
blpapi_MessageIterator_destroy = _internals.blpapi_MessageIterator_destroy
blpapi_MessageIterator_next = _internals.blpapi_MessageIterator_next
# blpapi_Identity_* C functions re-exported from _internals. The dead SWIG
# def wrappers (each immediately shadowed by these rebindings) have been
# removed.
blpapi_Identity_release = _internals.blpapi_Identity_release
blpapi_Identity_addRef = _internals.blpapi_Identity_addRef
blpapi_Identity_hasEntitlements = _internals.blpapi_Identity_hasEntitlements
blpapi_Identity_isAuthorized = _internals.blpapi_Identity_isAuthorized
blpapi_Identity_getSeatType = _internals.blpapi_Identity_getSeatType
# blpapi_AbstractSession_* C functions re-exported from _internals. The dead
# SWIG def wrappers (each immediately shadowed by these rebindings) have been
# removed.
blpapi_AbstractSession_cancel = _internals.blpapi_AbstractSession_cancel
blpapi_AbstractSession_sendAuthorizationRequest = _internals.blpapi_AbstractSession_sendAuthorizationRequest
blpapi_AbstractSession_openService = _internals.blpapi_AbstractSession_openService
blpapi_AbstractSession_openServiceAsync = _internals.blpapi_AbstractSession_openServiceAsync
blpapi_AbstractSession_generateToken = _internals.blpapi_AbstractSession_generateToken
blpapi_AbstractSession_getService = _internals.blpapi_AbstractSession_getService
blpapi_AbstractSession_createIdentity = _internals.blpapi_AbstractSession_createIdentity
# blpapi_Session_* C functions re-exported from _internals. The dead SWIG
# def wrappers (each immediately shadowed by these rebindings) have been
# removed.
blpapi_Session_start = _internals.blpapi_Session_start
blpapi_Session_startAsync = _internals.blpapi_Session_startAsync
blpapi_Session_stop = _internals.blpapi_Session_stop
blpapi_Session_stopAsync = _internals.blpapi_Session_stopAsync
blpapi_Session_nextEvent = _internals.blpapi_Session_nextEvent
blpapi_Session_tryNextEvent = _internals.blpapi_Session_tryNextEvent
blpapi_Session_subscribe = _internals.blpapi_Session_subscribe
blpapi_Session_resubscribe = _internals.blpapi_Session_resubscribe
blpapi_Session_unsubscribe = _internals.blpapi_Session_unsubscribe
blpapi_Session_setStatusCorrelationId = _internals.blpapi_Session_setStatusCorrelationId
blpapi_Session_sendRequest = _internals.blpapi_Session_sendRequest
blpapi_Session_getAbstractSession = _internals.blpapi_Session_getAbstractSession
# --- ResolutionList: generated pass-through bindings -----------------------
# Pattern: def delegates to _internals, then the name is rebound to the
# _internals callable directly, superseding the wrapper.
def blpapi_ResolutionList_extractAttributeFromResolutionSuccess(*args):
    return _internals.blpapi_ResolutionList_extractAttributeFromResolutionSuccess(*args)
blpapi_ResolutionList_extractAttributeFromResolutionSuccess = _internals.blpapi_ResolutionList_extractAttributeFromResolutionSuccess
def blpapi_ResolutionList_create(*args):
    return _internals.blpapi_ResolutionList_create(*args)
blpapi_ResolutionList_create = _internals.blpapi_ResolutionList_create
def blpapi_ResolutionList_destroy(*args):
    return _internals.blpapi_ResolutionList_destroy(*args)
blpapi_ResolutionList_destroy = _internals.blpapi_ResolutionList_destroy
def blpapi_ResolutionList_add(*args):
    return _internals.blpapi_ResolutionList_add(*args)
blpapi_ResolutionList_add = _internals.blpapi_ResolutionList_add
def blpapi_ResolutionList_addFromMessage(*args):
    return _internals.blpapi_ResolutionList_addFromMessage(*args)
blpapi_ResolutionList_addFromMessage = _internals.blpapi_ResolutionList_addFromMessage
def blpapi_ResolutionList_addAttribute(*args):
    return _internals.blpapi_ResolutionList_addAttribute(*args)
blpapi_ResolutionList_addAttribute = _internals.blpapi_ResolutionList_addAttribute
def blpapi_ResolutionList_correlationIdAt(*args):
    return _internals.blpapi_ResolutionList_correlationIdAt(*args)
blpapi_ResolutionList_correlationIdAt = _internals.blpapi_ResolutionList_correlationIdAt
def blpapi_ResolutionList_topicString(*args):
    return _internals.blpapi_ResolutionList_topicString(*args)
blpapi_ResolutionList_topicString = _internals.blpapi_ResolutionList_topicString
def blpapi_ResolutionList_topicStringAt(*args):
    return _internals.blpapi_ResolutionList_topicStringAt(*args)
blpapi_ResolutionList_topicStringAt = _internals.blpapi_ResolutionList_topicStringAt
def blpapi_ResolutionList_status(*args):
    return _internals.blpapi_ResolutionList_status(*args)
blpapi_ResolutionList_status = _internals.blpapi_ResolutionList_status
def blpapi_ResolutionList_statusAt(*args):
    return _internals.blpapi_ResolutionList_statusAt(*args)
blpapi_ResolutionList_statusAt = _internals.blpapi_ResolutionList_statusAt
def blpapi_ResolutionList_attribute(*args):
    return _internals.blpapi_ResolutionList_attribute(*args)
blpapi_ResolutionList_attribute = _internals.blpapi_ResolutionList_attribute
def blpapi_ResolutionList_attributeAt(*args):
    return _internals.blpapi_ResolutionList_attributeAt(*args)
blpapi_ResolutionList_attributeAt = _internals.blpapi_ResolutionList_attributeAt
def blpapi_ResolutionList_message(*args):
    return _internals.blpapi_ResolutionList_message(*args)
blpapi_ResolutionList_message = _internals.blpapi_ResolutionList_message
def blpapi_ResolutionList_messageAt(*args):
    return _internals.blpapi_ResolutionList_messageAt(*args)
blpapi_ResolutionList_messageAt = _internals.blpapi_ResolutionList_messageAt
def blpapi_ResolutionList_size(*args):
    return _internals.blpapi_ResolutionList_size(*args)
blpapi_ResolutionList_size = _internals.blpapi_ResolutionList_size
# --- Topic: generated pass-through bindings --------------------------------
# Pattern: def delegates to _internals, then the name is rebound to the
# _internals callable directly, superseding the wrapper.
def blpapi_Topic_create(*args):
    return _internals.blpapi_Topic_create(*args)
blpapi_Topic_create = _internals.blpapi_Topic_create
def blpapi_Topic_destroy(*args):
    return _internals.blpapi_Topic_destroy(*args)
blpapi_Topic_destroy = _internals.blpapi_Topic_destroy
def blpapi_Topic_compare(*args):
    return _internals.blpapi_Topic_compare(*args)
blpapi_Topic_compare = _internals.blpapi_Topic_compare
def blpapi_Topic_service(*args):
    return _internals.blpapi_Topic_service(*args)
blpapi_Topic_service = _internals.blpapi_Topic_service
def blpapi_Topic_isActive(*args):
    return _internals.blpapi_Topic_isActive(*args)
blpapi_Topic_isActive = _internals.blpapi_Topic_isActive
# --- TopicList: generated pass-through bindings ----------------------------
# Pattern: def delegates to _internals, then the name is rebound to the
# _internals callable directly, superseding the wrapper.
def blpapi_TopicList_create(*args):
    return _internals.blpapi_TopicList_create(*args)
blpapi_TopicList_create = _internals.blpapi_TopicList_create
def blpapi_TopicList_destroy(*args):
    return _internals.blpapi_TopicList_destroy(*args)
blpapi_TopicList_destroy = _internals.blpapi_TopicList_destroy
def blpapi_TopicList_add(*args):
    return _internals.blpapi_TopicList_add(*args)
blpapi_TopicList_add = _internals.blpapi_TopicList_add
def blpapi_TopicList_addFromMessage(*args):
    return _internals.blpapi_TopicList_addFromMessage(*args)
blpapi_TopicList_addFromMessage = _internals.blpapi_TopicList_addFromMessage
def blpapi_TopicList_correlationIdAt(*args):
    return _internals.blpapi_TopicList_correlationIdAt(*args)
blpapi_TopicList_correlationIdAt = _internals.blpapi_TopicList_correlationIdAt
def blpapi_TopicList_topicString(*args):
    return _internals.blpapi_TopicList_topicString(*args)
blpapi_TopicList_topicString = _internals.blpapi_TopicList_topicString
def blpapi_TopicList_topicStringAt(*args):
    return _internals.blpapi_TopicList_topicStringAt(*args)
blpapi_TopicList_topicStringAt = _internals.blpapi_TopicList_topicStringAt
def blpapi_TopicList_status(*args):
    return _internals.blpapi_TopicList_status(*args)
blpapi_TopicList_status = _internals.blpapi_TopicList_status
def blpapi_TopicList_statusAt(*args):
    return _internals.blpapi_TopicList_statusAt(*args)
blpapi_TopicList_statusAt = _internals.blpapi_TopicList_statusAt
def blpapi_TopicList_message(*args):
    return _internals.blpapi_TopicList_message(*args)
blpapi_TopicList_message = _internals.blpapi_TopicList_message
def blpapi_TopicList_messageAt(*args):
    return _internals.blpapi_TopicList_messageAt(*args)
blpapi_TopicList_messageAt = _internals.blpapi_TopicList_messageAt
def blpapi_TopicList_size(*args):
    return _internals.blpapi_TopicList_size(*args)
blpapi_TopicList_size = _internals.blpapi_TopicList_size
# --- ProviderSession: generated pass-through bindings ----------------------
# Pattern: def delegates to _internals, then the name is rebound to the
# _internals callable directly, superseding the wrapper.
def blpapi_ProviderSession_create(*args):
    return _internals.blpapi_ProviderSession_create(*args)
blpapi_ProviderSession_create = _internals.blpapi_ProviderSession_create
def blpapi_ProviderSession_destroy(*args):
    return _internals.blpapi_ProviderSession_destroy(*args)
blpapi_ProviderSession_destroy = _internals.blpapi_ProviderSession_destroy
def blpapi_ProviderSession_start(*args):
    return _internals.blpapi_ProviderSession_start(*args)
blpapi_ProviderSession_start = _internals.blpapi_ProviderSession_start
def blpapi_ProviderSession_startAsync(*args):
    return _internals.blpapi_ProviderSession_startAsync(*args)
blpapi_ProviderSession_startAsync = _internals.blpapi_ProviderSession_startAsync
def blpapi_ProviderSession_stop(*args):
    return _internals.blpapi_ProviderSession_stop(*args)
blpapi_ProviderSession_stop = _internals.blpapi_ProviderSession_stop
def blpapi_ProviderSession_stopAsync(*args):
    return _internals.blpapi_ProviderSession_stopAsync(*args)
blpapi_ProviderSession_stopAsync = _internals.blpapi_ProviderSession_stopAsync
def blpapi_ProviderSession_nextEvent(*args):
    return _internals.blpapi_ProviderSession_nextEvent(*args)
blpapi_ProviderSession_nextEvent = _internals.blpapi_ProviderSession_nextEvent
def blpapi_ProviderSession_tryNextEvent(*args):
    return _internals.blpapi_ProviderSession_tryNextEvent(*args)
blpapi_ProviderSession_tryNextEvent = _internals.blpapi_ProviderSession_tryNextEvent
def blpapi_ProviderSession_registerService(*args):
    return _internals.blpapi_ProviderSession_registerService(*args)
blpapi_ProviderSession_registerService = _internals.blpapi_ProviderSession_registerService
def blpapi_ProviderSession_registerServiceAsync(*args):
    return _internals.blpapi_ProviderSession_registerServiceAsync(*args)
blpapi_ProviderSession_registerServiceAsync = _internals.blpapi_ProviderSession_registerServiceAsync
def blpapi_ProviderSession_resolve(*args):
    return _internals.blpapi_ProviderSession_resolve(*args)
blpapi_ProviderSession_resolve = _internals.blpapi_ProviderSession_resolve
def blpapi_ProviderSession_resolveAsync(*args):
    return _internals.blpapi_ProviderSession_resolveAsync(*args)
blpapi_ProviderSession_resolveAsync = _internals.blpapi_ProviderSession_resolveAsync
def blpapi_ProviderSession_createTopics(*args):
    return _internals.blpapi_ProviderSession_createTopics(*args)
blpapi_ProviderSession_createTopics = _internals.blpapi_ProviderSession_createTopics
def blpapi_ProviderSession_createTopicsAsync(*args):
    return _internals.blpapi_ProviderSession_createTopicsAsync(*args)
blpapi_ProviderSession_createTopicsAsync = _internals.blpapi_ProviderSession_createTopicsAsync
def blpapi_ProviderSession_getTopic(*args):
    return _internals.blpapi_ProviderSession_getTopic(*args)
blpapi_ProviderSession_getTopic = _internals.blpapi_ProviderSession_getTopic
def blpapi_ProviderSession_createTopic(*args):
    return _internals.blpapi_ProviderSession_createTopic(*args)
blpapi_ProviderSession_createTopic = _internals.blpapi_ProviderSession_createTopic
def blpapi_ProviderSession_createServiceStatusTopic(*args):
    return _internals.blpapi_ProviderSession_createServiceStatusTopic(*args)
blpapi_ProviderSession_createServiceStatusTopic = _internals.blpapi_ProviderSession_createServiceStatusTopic
def blpapi_ProviderSession_publish(*args):
    return _internals.blpapi_ProviderSession_publish(*args)
blpapi_ProviderSession_publish = _internals.blpapi_ProviderSession_publish
def blpapi_ProviderSession_sendResponse(*args):
    return _internals.blpapi_ProviderSession_sendResponse(*args)
blpapi_ProviderSession_sendResponse = _internals.blpapi_ProviderSession_sendResponse
def blpapi_ProviderSession_getAbstractSession(*args):
    return _internals.blpapi_ProviderSession_getAbstractSession(*args)
blpapi_ProviderSession_getAbstractSession = _internals.blpapi_ProviderSession_getAbstractSession
# --- ServiceRegistrationOptions: generated pass-through bindings -----------
# Pattern: def delegates to _internals, then the name is rebound to the
# _internals callable directly, superseding the wrapper.
# Note: create() takes no arguments, unlike the other generated wrappers.
def blpapi_ServiceRegistrationOptions_create():
    return _internals.blpapi_ServiceRegistrationOptions_create()
blpapi_ServiceRegistrationOptions_create = _internals.blpapi_ServiceRegistrationOptions_create
def blpapi_ServiceRegistrationOptions_duplicate(*args):
    return _internals.blpapi_ServiceRegistrationOptions_duplicate(*args)
blpapi_ServiceRegistrationOptions_duplicate = _internals.blpapi_ServiceRegistrationOptions_duplicate
def blpapi_ServiceRegistrationOptions_destroy(*args):
    return _internals.blpapi_ServiceRegistrationOptions_destroy(*args)
blpapi_ServiceRegistrationOptions_destroy = _internals.blpapi_ServiceRegistrationOptions_destroy
def blpapi_ServiceRegistrationOptions_copy(*args):
    return _internals.blpapi_ServiceRegistrationOptions_copy(*args)
blpapi_ServiceRegistrationOptions_copy = _internals.blpapi_ServiceRegistrationOptions_copy
def blpapi_ServiceRegistrationOptions_setGroupId(*args):
    return _internals.blpapi_ServiceRegistrationOptions_setGroupId(*args)
blpapi_ServiceRegistrationOptions_setGroupId = _internals.blpapi_ServiceRegistrationOptions_setGroupId
def blpapi_ServiceRegistrationOptions_setServicePriority(*args):
    return _internals.blpapi_ServiceRegistrationOptions_setServicePriority(*args)
blpapi_ServiceRegistrationOptions_setServicePriority = _internals.blpapi_ServiceRegistrationOptions_setServicePriority
def blpapi_ServiceRegistrationOptions_getGroupId(*args):
    return _internals.blpapi_ServiceRegistrationOptions_getGroupId(*args)
blpapi_ServiceRegistrationOptions_getGroupId = _internals.blpapi_ServiceRegistrationOptions_getGroupId
def blpapi_ServiceRegistrationOptions_getServicePriority(*args):
    return _internals.blpapi_ServiceRegistrationOptions_getServicePriority(*args)
blpapi_ServiceRegistrationOptions_getServicePriority = _internals.blpapi_ServiceRegistrationOptions_getServicePriority
# This file is compatible with both classic and new-style classes.
| StarcoderdataPython |
1694610 | #### standard provider code ####
# import the correct PROVIDER_SUBMODULE and PROVIDER_ID constants for your provider
from .constants import PROVIDER_ID
from ..constants import PROVIDER_SUBMODULE
# define common provider functions based on the constants
from ckan_cloud_operator.providers import manager as providers_manager
# One-line delegators binding this provider's PROVIDER_SUBMODULE/PROVIDER_ID
# constants to the generic providers manager API (naming, labels,
# annotations, registration, and namespaced config get/set).
def _get_resource_name(suffix=None, short=False): return providers_manager.get_resource_name(PROVIDER_SUBMODULE, PROVIDER_ID, suffix=suffix, short=short)
def _get_resource_labels(for_deployment=False): return providers_manager.get_resource_labels(PROVIDER_SUBMODULE, PROVIDER_ID, for_deployment=for_deployment)
def _get_resource_annotations(suffix=None): return providers_manager.get_resource_annotations(PROVIDER_SUBMODULE, PROVIDER_ID, suffix=suffix)
def _set_provider(): providers_manager.set_provider(PROVIDER_SUBMODULE, PROVIDER_ID)
def _config_set(key=None, value=None, values=None, namespace=None, is_secret=False, suffix=None): providers_manager.config_set(PROVIDER_SUBMODULE, PROVIDER_ID, key=key, value=value, values=values, namespace=namespace, is_secret=is_secret, suffix=suffix)
def _config_get(key=None, default=None, required=False, namespace=None, is_secret=False, suffix=None): return providers_manager.config_get(PROVIDER_SUBMODULE, PROVIDER_ID, key=key, default=default, required=required, namespace=namespace, is_secret=is_secret, suffix=suffix)
def _config_interactive_set(default_values, namespace=None, is_secret=False, suffix=None, from_file=False): providers_manager.config_interactive_set(PROVIDER_SUBMODULE, PROVIDER_ID, default_values, namespace, is_secret, suffix, from_file)
################################
# custom provider code starts here
#
import tempfile
import traceback
import json
import datetime
import subprocess
import time
from ruamel import yaml
from ckan_cloud_operator import logs
from ckan_cloud_operator import kubectl
from ckan_cloud_operator.drivers.kubectl import rbac as kubectl_rbac_driver
from ckan_cloud_operator.routers import manager as routers_manager
from ckan_cloud_operator.crds import manager as crds_manager
from ckan_cloud_operator.providers.apps.constants import APP_CRD_SINGULAR
from ckan_cloud_operator.drivers.helm import driver as helm_driver
def create(tiller_namespace_name=None, chart_repo=None, chart_version=None, chart_release_name=None,
           values=None, values_filename=None, with_service_account=False, chart_name=None,
           chart_repo_name=None, app_type=None, values_json=None,
           **create_kwargs):
    """Create a helm-based app instance via the apps manager.

    At most one of *values* (dict), *values_filename* (path to a YAML file)
    or *values_json* (JSON string) may be provided; the chosen values are
    merged with the chart coordinates into the spec passed on to
    apps_manager.create.

    Raises ValueError when more than one values source is given or when
    ``create_kwargs`` contains a ``values`` key.  (These were ``assert``
    statements before, which vanish under ``python -O``.)
    """
    # Validate the mutually exclusive values sources up front.
    sources = [s for s in (values, values_filename, values_json) if s]
    if len(sources) > 1:
        raise ValueError('values, values_filename and values_json are mutually exclusive')
    # Defensive: a keyword 'values' normally binds the named parameter, but
    # guard against it arriving via kwargs expansion regardless.
    if 'values' in create_kwargs:
        raise ValueError("create_kwargs may not contain a 'values' key")
    if values_filename:
        with open(values_filename) as f:
            values = yaml.safe_load(f.read())
    elif values_json:
        values = json.loads(values_json)
    values = values or {}
    # Only truthy options are recorded on the spec; values are merged last.
    spec = {
        **({'tiller-namespace-name': tiller_namespace_name} if tiller_namespace_name else {}),
        **({'chart-name': chart_name} if chart_name else {}),
        **({'chart-repo': chart_repo} if chart_repo else {}),
        **({'chart-repo-name': chart_repo_name} if chart_repo_name else {}),
        **({'chart-version': chart_version} if chart_version else {}),
        **({'chart-release-name': chart_release_name} if chart_release_name else {}),
        **({'with-service-account': True} if with_service_account else {}),
        **({'app-type': app_type} if app_type else {}),
        **values,
    }
    # Imported lazily — presumably to avoid a circular import at module load.
    from ckan_cloud_operator.providers.apps import manager as apps_manager
    return apps_manager.create('helm', values=spec, **create_kwargs)
def update(instance_id, instance, dry_run=False):
    """Deploy or upgrade the helm release backing *instance*.

    Resolves chart coordinates from instance['spec'], persists the resolved
    values back onto the stored spec, then runs the helm deployment,
    bracketed by the app-type manager's pre/post deploy hooks when an
    app-type is configured.
    """
    tiller_namespace_name = _get_tiller_namespace_name(instance_id, instance)
    logs.debug('Updating helm-based instance deployment',
               instance_id=instance_id, tiller_namespace_name=tiller_namespace_name)
    # chart-repo-name is always required; chart-repo may be omitted only for
    # the well-known 'stable' repo.
    chart_repo_name = instance['spec'].get("chart-repo-name")
    assert chart_repo_name, 'missing spec attribute: chart-repo-name'
    logs.info(chart_repo_name=chart_repo_name)
    chart_repo = instance['spec'].get("chart-repo")
    assert chart_repo or chart_repo_name in ['stable'], 'missing spec attribute: chart-repo'
    logs.info(chart_repo=chart_repo)
    chart_name = instance['spec'].get('chart-name')
    assert chart_name, 'missing spec attribute: chart-name'
    logs.info(chart_name=chart_name)
    # chart-version may be empty, meaning no pinned version.
    chart_version = instance['spec'].get("chart-version", "")
    logs.info(chart_version=chart_version)
    release_name = _get_helm_release_name(instance_id, instance)
    logs.info(release_name=release_name,)
    # Persist the resolved chart coordinates on the stored spec so later
    # operations see exactly what was deployed.
    _pre_update_hook_modify_spec(instance_id, instance, lambda i: i.update(**{
        'release-name': release_name,
        'chart-version': chart_version,
        'chart-name': chart_name,
        'chart-repo': chart_repo,
        'chart-repo-name': chart_repo_name,
    }))
    deploy_kwargs = dict(
        values=instance['spec'].get('values', {}),
        tiller_namespace_name=tiller_namespace_name,
        chart_repo=chart_repo,
        chart_version=chart_version,
        chart_name=chart_name,
        release_name=release_name,
        instance_id=instance_id,
        dry_run=dry_run,
        chart_repo_name=chart_repo_name
    )
    app_type = instance['spec'].get('app-type')
    if app_type:
        # The hook receives deploy_kwargs before the deploy runs.
        _get_app_type_manager(app_type).pre_deploy_hook(instance_id, instance, deploy_kwargs)
    _helm_deploy(**deploy_kwargs)
    if app_type:
        _get_app_type_manager(app_type).post_deploy_hook(instance_id, instance, deploy_kwargs)
def get(instance_id, instance=None):
    """Return status metadata for a helm-based instance.

    Always includes a ``helm_metadata`` section describing where and when the
    status was generated; an app-type manager (when configured) may fill in
    ``ready`` and extend the result.
    """
    res = {
        'ready': None,
        'helm_metadata': {
            'ckan_instance_id': instance_id,
            'namespace': instance_id,
            'status_generated_at': datetime.datetime.now(),
            'status_generated_from': subprocess.check_output(["hostname"]).decode().strip(),
        }
    }
    # Bug fix: *instance* defaults to None but was dereferenced
    # unconditionally (instance['spec']), making the documented default
    # unusable.  Guard so a missing instance simply skips app-type handling.
    app_type = instance['spec'].get('app-type') if instance else None
    if app_type:
        _get_app_type_manager(app_type).get(instance_id, instance, res)
    return res
def delete(instance_id, instance):
    """Delete the instance's helm release and its namespace.

    Errors are accumulated rather than raised immediately so both deletions
    are attempted; an AssertionError listing the failures is raised at the
    end if anything failed.
    """
    tiller_namespace_name = _get_tiller_namespace_name(instance_id, instance)
    release_name = _get_helm_release_name(instance_id, instance)
    logs.info(tiller_namespace_name=tiller_namespace_name, release_name=release_name)
    errors = []
    try:
        logs.info(f'Deleting helm release {release_name}')
        delete_kwargs=dict(tiller_namespace=tiller_namespace_name, release_name=release_name)
        app_type = instance['spec'].get('app-type')
        if app_type:
            # app-type managers can adjust/augment the deletion
            _get_app_type_manager(app_type).pre_delete_hook(instance_id, instance, delete_kwargs)
        helm_driver.delete(**delete_kwargs)
        if app_type:
            _get_app_type_manager(app_type).post_delete_hook(instance_id, instance, delete_kwargs)
    except Exception as e:
        logs.warning(traceback.format_exc())
        errors.append(f'Failed to delete helm release')
    # Namespace deletion is attempted even when the release deletion failed;
    # --wait=false returns without blocking on namespace finalization.
    if kubectl.call(f'delete --wait=false namespace {instance_id}') != 0:
        errors.append(f'Failed to delete namespace')
    assert len(errors) == 0, ', '.join(errors)
def get_backend_url(instance_id, instance):
    """Return the instance's backend URL, or None when it cannot be built.

    spec['backend-url'] is treated as a format template with an
    ``{instance_id}`` placeholder.  Bug fix: the non-app-type branch used to
    crash with AttributeError when 'backend-url' was absent and did not
    tolerate bad templates the way the app-type branch did; both branches now
    share the same tolerant resolution.
    """
    app_type = instance['spec'].get('app-type')
    template = instance['spec'].get('backend-url')
    backend_url = None
    if template:
        try:
            backend_url = template.format(instance_id=instance_id)
        except Exception:
            # malformed template — fall back to no backend url
            backend_url = None
    if app_type:
        # the app-type manager gets the final say on the backend url
        return _get_app_type_manager(app_type).get_backend_url(instance_id, instance, backend_url)
    return backend_url
def pre_update_hook(instance_id, instance, override_spec, skip_route=False, dry_run=False):
    """Prepare an instance for deployment: namespace, spec overrides, routing.

    Returns a dict of values collected for the caller (e.g. root/sub domain);
    an app-type manager, when configured, may extend both the result and the
    persisted spec through its own pre_update_hook.
    """
    _init_namespace(instance_id, instance, dry_run=dry_run)
    # overrides are applied in-memory only (not persisted)
    _pre_update_hook_override_spec(override_spec, instance)
    res = {}
    sub_domain, root_domain = _pre_update_hook_route(instance_id, skip_route, instance, res, dry_run=dry_run)
    app_type = instance['spec'].get('app-type')
    logs.info(app_type=app_type)
    if app_type:
        logs.info(f'Running {app_type} app pre_update_hook')
        # the lambda gives the app-type manager a way to persist spec changes
        _get_app_type_manager(app_type).pre_update_hook(
            instance_id, instance, res, sub_domain, root_domain,
            lambda callback: _pre_update_hook_modify_spec(instance_id, instance, callback, dry_run=dry_run)
        )
    return res
def _init_namespace(instance_id, instance, dry_run=False):
    """Create the instance namespace (idempotent) and, when the spec sets
    'with-service-account', a service account plus a namespaced role/binding
    granting access to secrets and pods."""
    logs.debug('Initializing helm-based instance deployment namespace', namespace=instance_id)
    if kubectl.get('ns', instance_id, required=False):
        logs.info(f'instance namespace already exists ({instance_id})')
    else:
        logs.info(f'creating instance namespace ({instance_id})')
        kubectl.apply(kubectl.get_resource('v1', 'Namespace', instance_id, {}), dry_run=dry_run)
    if instance['spec'].get('with-service-account'):
        service_account_name = instance['spec'].get('service-account-name', f'ckan-{instance_id}-operator')
        logs.info('Creating service account', service_account_name=service_account_name)
        if not dry_run:
            kubectl_rbac_driver.update_service_account(service_account_name, {}, namespace=instance_id)
        role_name = f'{service_account_name}-role'
        logs.debug('Creating role and binding to the service account', role_name=role_name)
        if not dry_run:
            # Default rules; can be overridden per instance through
            # spec['service-account-rules'].
            rbac_rules = instance['spec'].get('service-account-rules', [
                {
                    "apiGroups": [
                        "*"
                    ],
                    "resources": [
                        'secrets', 'pods', 'pods/exec', 'pods/portforward'
                    ],
                    "verbs": [
                        "list", "get", "create"
                    ]
                }
            ])
            kubectl_rbac_driver.update_role(role_name, {}, rbac_rules, namespace=instance_id)
            kubectl_rbac_driver.update_role_binding(
                name=f'{service_account_name}-rolebinding',
                role_name=f'{service_account_name}-role',
                namespace=instance_id,
                service_account_name=service_account_name,
                labels={}
            )
def _pre_update_hook_route(instance_id, skip_route, instance, res, dry_run=False):
    """Reconcile the instance's routing-related spec fields.

    Returns ``(sub_domain, root_domain)`` — both None when the spec sets
    'skipRoute'.  Unless *skip_route* is passed, reconciles (and persists)
    'domain', 'withSansSSL', 'registerSubdomain' and 'siteUrl' against the
    derived sub/root domain, warning on every value it overwrites.
    """
    if instance["spec"].get("skipRoute"):
        sub_domain, root_domain = None, None
    else:
        root_domain = routers_manager.get_default_root_domain()
        sub_domain = instance['spec'].get('sub-domain', f'ckan-cloud-app-{instance_id}')
    if not skip_route:
        # full domain to route to the instance
        instance_domain = instance['spec'].get('domain')
        if instance_domain and instance_domain != f'{sub_domain}.{root_domain}':
            logs.warning(f'instance domain was changed from {instance_domain} to {sub_domain}.{root_domain}')
        _pre_update_hook_modify_spec(instance_id, instance,
                                     lambda i: i.update(domain=f'{sub_domain}.{root_domain}'),
                                     dry_run=dry_run)
        # instance is added to router only if this is true, as all routers must use SSL and may use sans SSL too
        with_sans_ssl = instance['spec'].get('withSansSSL')
        if not with_sans_ssl:
            logs.warning(f'forcing with_sans_ssl, even though withSansSSL is disabled')
            _pre_update_hook_modify_spec(instance_id, instance,
                                         lambda i: i.update(withSansSSL=True),
                                         dry_run=dry_run)
        # subdomain to register on the default root domain
        register_subdomain = instance['spec'].get('registerSubdomain')
        if register_subdomain != sub_domain:
            logs.warning(f'instance register sub domain was changed from {register_subdomain} to {sub_domain}')
            _pre_update_hook_modify_spec(instance_id, instance,
                                         lambda i: i.update(registerSubdomain=sub_domain),
                                         dry_run=dry_run)
        res.update(**{'root-domain': root_domain, 'sub-domain': sub_domain})
        # siteUrl follows the routed domain unless explicitly pinned
        if not instance['spec'].get('forceKeepSiteUrl'):
            site_url = instance['spec'].get('siteUrl')
            if site_url != f'https://{sub_domain}.{root_domain}':
                logs.warning(f'instance siteUrl was changed from {site_url} to https://{sub_domain}.{root_domain}')
                _pre_update_hook_modify_spec(instance_id, instance,
                                             lambda i: i.update(siteUrl=f'https://{sub_domain}.{root_domain}'),
                                             dry_run=dry_run)
    return sub_domain, root_domain
def _pre_update_hook_override_spec(override_spec, instance):
# applies override spec, but doesn't persist
if override_spec:
for k, v in override_spec.items():
logs.info(f'Applying override spec {k}={v}')
if k != 'values':
instance['spec'][k] = v
else:
instance['spec'].setdefault('values', {}).update(v)
def _pre_update_hook_modify_spec(instance_id, instance, callback, dry_run=False):
    # applies changes to both the non-persistent spec and persists the changes on latest instance spec
    # *callback* receives a spec dict and mutates it in place; it is applied
    # to the in-memory instance AND to the freshly-fetched stored CRD, which
    # is then re-applied via kubectl.
    latest_instance = crds_manager.get(APP_CRD_SINGULAR, name=instance_id, required=True)
    callback(instance['spec'])
    callback(latest_instance['spec'])
    kubectl.apply(latest_instance, dry_run=dry_run)
def _helm_deploy(values, tiller_namespace_name, chart_repo, chart_name, chart_version, release_name, instance_id,
                 dry_run=False, chart_repo_name=None):
    """Run the helm deployment: init tiller, dump *values* to a temp YAML
    file, and invoke the helm driver against the instance's namespace."""
    assert chart_repo_name, 'chart-repo-name is required'
    helm_driver.init(tiller_namespace_name)
    time.sleep(10)  # wait for tiller pod to be ready
    logs.info(f'Deploying helm chart {chart_repo_name} {chart_repo} {chart_version} {chart_name} to release {release_name} '
              f'(instance_id={instance_id})')
    # The values file must stay alive (and be flushed) for the duration of
    # the deploy call, hence the surrounding context manager.
    with tempfile.NamedTemporaryFile('w') as f:
        yaml.dump(values, f, default_flow_style=False)
        f.flush()
        helm_driver.deploy(tiller_namespace=tiller_namespace_name,
                           chart_repo=chart_repo,
                           chart_name=chart_name,
                           chart_version=chart_version,
                           release_name=release_name,
                           values_filename=f.name,
                           namespace=instance_id,
                           dry_run=dry_run,
                           chart_repo_name=chart_repo_name)
def _get_tiller_namespace_name(instance_id, instance):
return instance['spec'].get('tiller-namespace-name', _get_resource_name('tiller'))
def _get_helm_release_name(instance_id, instance):
return instance['spec'].get('chart-release-name', _get_resource_name(instance_id, short=False))
def _get_app_type_manager(app_type):
if app_type == 'provisioning':
from . import type_provisioning as app_type_manager
elif app_type == 'jenkins':
from . import type_jenkins as app_type_manager
elif app_type == 'elk':
from . import type_elk as app_type_manager
else:
raise NotImplementedError(f'Unknown app type: {app_type}')
return app_type_manager
| StarcoderdataPython |
3492918 | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020 Alibaba Group Holding Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import sys
from urllib.parse import urlparse
import vineyard
from hdfs3 import HDFileSystem
import pyarrow as pa
from vineyard.io.byte import ByteStreamBuilder
def read_hdfs_bytes(vineyard_socket, path, proc_num, proc_index):
    """Stream this worker's share of an HDFS file into a vineyard byte stream.

    The file is split evenly across *proc_num* workers; worker *proc_index*
    reads its slice — aligned to newline boundaries so no line is split
    between workers — and writes it chunk by chunk into a vineyard byte
    stream.  The stream's object id is printed as JSON on stdout so the
    coordinating process can pick it up.
    """
    client = vineyard.connect(vineyard_socket)
    builder = ByteStreamBuilder(client)
    host, port = urlparse(path).netloc.split(':')
    # short-circuit reads explicitly disabled — presumably so reads don't
    # require datanode-local access; confirm before changing.
    hdfs = HDFileSystem(host=host, port=int(port), pars={"dfs.client.read.shortcircuit": "false"})
    # Reader options arrive as URL fragment parameters, e.g.
    # hdfs://host:port/file#header_row=true&delimiter=%5Ct
    header_row = False
    fragments = urlparse(path).fragment.split('&')
    path = urlparse(path).path
    for frag in fragments:
        try:
            k, v = frag.split('=')
        except ValueError:
            # Fix: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt.  Only the key=value unpacking can fail here
            # (no '=' or more than one), and that raises ValueError.
            pass
        else:
            if k == 'header_row':
                header_row = (v.upper() == 'TRUE')
                if header_row:
                    builder[k] = '1'
                else:
                    builder[k] = '0'
            elif k == 'delimiter':
                # undo URL-level escaping such as "\\t" -> tab
                builder[k] = bytes(v, "utf-8").decode("unicode_escape")
            elif k == 'include_all_columns':
                if v.upper() == 'TRUE':
                    builder[k] = '1'
                else:
                    builder[k] = '0'
            else:
                builder[k] = v
    offset = 0
    chunk_size = 1024 * 1024 * 4
    # First line (up to '\n') is always recorded on the stream metadata.
    header_line = hdfs.read_block(path, 0, 1, b'\n')
    builder['header_line'] = header_line.decode('unicode_escape')
    if header_row:
        offset = len(header_line)
    stream = builder.seal(client)
    client.persist(stream)
    # Report the stream id to the coordinating process via stdout.
    ret = {'type': 'return'}
    ret['content'] = repr(stream.id)
    print(json.dumps(ret), flush=True)
    writer = stream.open_writer(client)
    # Split [offset, total_size) evenly across proc_num workers; the last
    # worker absorbs the remainder.
    total_size = hdfs.info(path)['size']
    begin = (total_size - offset) // proc_num * proc_index + offset
    end = (total_size - offset) // proc_num + begin
    if proc_index + 1 == proc_num:
        end = total_size
    if proc_index:
        # align the start to the next newline so no line is split
        begin = next_delimiter(hdfs, path, begin, end, b'\n')
    else:
        begin -= int(header_row)
    offset = begin
    while offset < end:
        buf = hdfs.read_block(path, offset, min(chunk_size, end - offset), b'\n')
        size = len(buf)
        if not size:
            break
        # NOTE(review): advances by size-1, overlapping one byte with the
        # next read — presumably for delimiter alignment; confirm against
        # hdfs3 read_block semantics.
        offset += size - 1
        chunk = writer.next(size)
        buf_writer = pa.FixedSizeBufferWriter(chunk)
        buf_writer.write(buf)
        buf_writer.close()
    writer.finish()
def next_delimiter(hdfs, path, begin, end, delimiter):
    """Scan forward from *begin* (while below *end*) and return the offset of
    the first occurrence of *delimiter*; if none is found the last probed
    offset past *end* is returned."""
    step = 1024
    position = begin
    while position < end:
        window = hdfs.read_block(path, position, step)
        found = window.find(delimiter)
        if found == -1:
            position += step
        else:
            position += found
            break
    return position
if __name__ == '__main__':
    # CLI entry point: read_hdfs_bytes <ipc_socket> <hdfs path> <proc num> <proc index>
    if len(sys.argv) < 5:
        print('usage: ./read_hdfs_bytes <ipc_socket> <hdfs path> <proc num> <proc index>')
        exit(1)
    ipc_socket = sys.argv[1]
    hdfs_path = sys.argv[2]
    proc_num = int(sys.argv[3])
    proc_index = int(sys.argv[4])
    read_hdfs_bytes(ipc_socket, hdfs_path, proc_num, proc_index)
| StarcoderdataPython |
6621850 | """
File for running tests programmatically.
"""
import pytest
def main():
    """Run the hydsensread pytest suite and abort with pytest's exit code
    when any test fails."""
    args = ['-x', 'hydsensread', '-v', '-rw', '--durations=10',
            '--cov=hydsensread']
    exit_code = pytest.main(args)
    if exit_code != 0:
        raise SystemExit(exit_code)


if __name__ == '__main__':
    main()
| StarcoderdataPython |
5156952 | import basilica
import tweepy
from decouple import config
from .models import DB, Tweets, User
# Seed accounts consumed by add_default_users().
TWITTER_USERS = ['elonmusk', 'nasa', 'lockheedmartin', 'bigdata', 'buzzfeed','theeconomist', 'funnyordie']

# Twitter API client; credentials come from the environment via decouple.config.
TWITTER_AUTH = tweepy.OAuthHandler(config('TWITTER_CONSUMER_KEY'),
                                   config('TWITTER_CONSUMER_SECRET'))
TWITTER_AUTH.set_access_token(config('TWITTER_ACCESS_TOKEN'),
                              config('TWITTER_ACCESS_TOKEN_SECRET'))
TWITTER = tweepy.API(TWITTER_AUTH)

# Basilica connection used to embed tweet text.
BASILICA = basilica.Connection(config('BASILICA_KEY'))
def add_or_update_user(username):
    """Add or update a user and their Tweets, error if not a Twitter user.

    Fetches the profile, stores/refreshes the User row, then embeds (via
    Basilica) and stores up to 200 recent original tweets — no replies or
    retweets, and only those newer than the last stored tweet.  Everything
    is committed in a single transaction; any failure is printed and
    re-raised.
    """
    try:
        # Get user info from tweepy API
        twitter_user = TWITTER.get_user(username)
        # Fetch the existing row, or build a new one on first sight.
        db_user = (User.query.get(twitter_user.id) or
                   User(id=twitter_user.id,
                        username=username,
                        followers=twitter_user.followers_count))
        # Fix: keep the follower count current for users we have already
        # stored — previously it was only set at creation time.
        db_user.followers = twitter_user.followers_count
        DB.session.add(db_user)
        # Add as many recent non-retweet/reply tweets as possible;
        # 200 is a Twitter API limit for a single request.
        tweets = twitter_user.timeline(count=200,
                                       exclude_replies=True,
                                       include_rts=False,
                                       tweet_mode='extended',
                                       since_id=db_user.newest_tweet_id)
        if tweets:
            # assumes timeline() returns newest-first — confirm with tweepy docs
            db_user.newest_tweet_id = tweets[0].id
        for tweet in tweets:
            # Basilica embedding, used downstream for tweet comparisons.
            embedding = BASILICA.embed_sentence(tweet.full_text, model='twitter')
            db_tweet = Tweets(id=tweet.id,
                              text=tweet.full_text[:300],
                              embedding=embedding)
            db_user.tweets.append(db_tweet)
            DB.session.add(db_tweet)
    except Exception as e:
        print('Error processing {}: {}'.format(username, e))
        # Fix: bare `raise` re-raises the active exception with its original
        # traceback intact, instead of `raise e` adding this frame.
        raise
    else:
        DB.session.commit()
    return
def add_default_users(users=TWITTER_USERS):
    """
    Add/update each username in *users* (strings of user names).

    May take awhile, so run "offline" (flask shell).
    """
    for username in users:
        add_or_update_user(username)
def update_all_users():
    """Update all Tweets for all Users in the User table."""
    for user in User.query.all():
        # Fix: the model is constructed with `username=` (see
        # add_or_update_user), so `user.name` appears to be a typo for the
        # actual field.
        add_or_update_user(user.username)
| StarcoderdataPython |
5121267 | from io import BytesIO
from itertools import chain, product
from pathlib import Path
import random
from tempfile import TemporaryDirectory
import unittest
import h5py as h5
import numpy as np
from pentinsula import ChunkBuffer
from pentinsula.chunkbuffer import _chunk_slices
try:
from .utils import random_string, capture_variables, random_int_tuple, product_range, repeat
except ImportError:
from utils import random_string, capture_variables, random_int_tuple, product_range, repeat
N_REPEAT_TEST_CASE = 5
class TestChunkBuffer(unittest.TestCase):
    @repeat(N_REPEAT_TEST_CASE)
    def test_construction(self):
        """Exercise ChunkBuffer construction: explicit shape/dtype with and
        without maxshape, construction from an existing array, construction
        from an array with reshaping, and the error cases (impossible
        reshape, maxshape rank mismatch)."""
        # valid arguments, individual shape, dtype
        for dtype, maxshape in product((int, float, np.float32, np.int32, None), (None, (None,))):
            filename = random_string(random.randint(1, 10))
            dataset_name = random_string(random.randint(1, 10))
            shape = random_int_tuple(1, 10, 4)
            # expand (None,) to one None per dimension
            maxshape = maxshape if maxshape is None else maxshape * len(shape)
            buffer = ChunkBuffer(filename, dataset_name,
                                 shape=shape, dtype=dtype,
                                 maxshape=maxshape)
            self.assertEqual(buffer.filename, Path(filename))
            self.assertEqual(buffer.dataset_name.relative_to("/"), Path(dataset_name))
            self.assertEqual(buffer.shape, shape)
            self.assertEqual(buffer.data.shape, shape)
            # dtype=None must fall back to float64
            self.assertEqual(buffer.dtype, dtype if dtype else np.float64)
            self.assertEqual(buffer.data.dtype, dtype if dtype else np.float64)
            self.assertEqual(buffer.maxshape, (None,) * len(shape))
        # valid arguments, from array
        for dtype in (int, float, np.float32, np.int32, None):
            shape = random_int_tuple(1, 10, 4)
            array = np.random.uniform(-10, 10, shape).astype(dtype)
            buffer = ChunkBuffer(random_string(random.randint(1, 10)), random_string(random.randint(1, 10)),
                                 data=array)
            self.assertEqual(buffer.shape, shape)
            self.assertEqual(buffer.dtype, dtype if dtype else np.float64)
            np.testing.assert_allclose(array, buffer.data)
        # valid arguments, from array with reshaping
        in_shape = (10, 4)
        for target_shape in ((20, 2), (40,), (5, 8)):
            array = np.random.uniform(-10, 10, in_shape)
            buffer = ChunkBuffer(random_string(random.randint(1, 10)), random_string(random.randint(1, 10)),
                                 data=array, shape=target_shape)
            self.assertEqual(buffer.shape, target_shape)
        # invalid reshaping
        array = np.random.uniform(-10, 10, (4, 10))
        with self.assertRaises(ValueError):
            ChunkBuffer("test.h5", "test", data=array, shape=(3,))
        # invalid maxshape (rank must match shape)
        with self.assertRaises(ValueError):
            ChunkBuffer("test.h5", "test", shape=(1, 2), maxshape=(1,))
        with self.assertRaises(ValueError):
            ChunkBuffer("test.h5", "test", shape=(1, 2), maxshape=(1, 2, 3))
@repeat(N_REPEAT_TEST_CASE)
def test_load(self):
for ndim in range(1, 4):
chunk_shape = random_int_tuple(1, 10, ndim)
nchunks = random_int_tuple(1, 4, ndim)
total_shape = tuple(n * c for n, c in zip(chunk_shape, nchunks))
array = np.random.uniform(-10, 10, total_shape)
stream = BytesIO()
with h5.File(stream, "w") as h5f:
h5f.create_dataset("data", data=array, chunks=chunk_shape)
# valid, load all chunks, positive indices
for chunk_index in product_range(nchunks):
buffer = ChunkBuffer.load(stream, "data", chunk_index)
np.testing.assert_allclose(buffer.data, array[_chunk_slices(chunk_index, chunk_shape)],
err_msg=capture_variables(ndim=ndim,
chunk_shape=chunk_shape,
nchunks=nchunks,
chunk_index=chunk_index))
# negative index
neg_index = (-1,) * ndim
pos_index = tuple(n - 1 for n in nchunks)
buffer = ChunkBuffer.load(stream, "data", neg_index)
np.testing.assert_allclose(buffer.data, array[_chunk_slices(pos_index, chunk_shape)],
err_msg=capture_variables(ndim=ndim,
chunk_shape=chunk_shape,
nchunks=nchunks,
chunk_index=neg_index))
# invalid, load non-existent chunk
# outside of maxshape, discoverable through maxshape
with self.assertRaises(IndexError):
ChunkBuffer.load(stream, "data", nchunks)
# outside of maxshape, not discoverable through maxshape
with self.assertRaises(IndexError):
ChunkBuffer.load(stream, "data", (nchunks[0] + 1,) + nchunks[1:])
# within maxshape but not stored
with h5.File(stream, "w") as h5f:
h5f.create_dataset("partially_filled", shape=total_shape, chunks=chunk_shape,
maxshape=tuple(n * 2 for n in total_shape))
with self.assertRaises(IndexError):
ChunkBuffer.load(stream, "partially_filled", (nchunks[0] + 1,) + nchunks[1:])
# invalid, contiguous dataset
stream = BytesIO()
with h5.File(stream, "w") as h5f:
h5f.create_dataset("data", data=np.random.uniform(-10, 10, (5, 3)))
with self.assertRaises(RuntimeError):
ChunkBuffer.load(stream, "data", (0, 0))
@repeat(N_REPEAT_TEST_CASE)
def test_dataset_creation(self):
for ndim in range(1, 4):
max_nchunks = random_int_tuple(1, 4, ndim)
for chunk_index in product_range(max_nchunks):
chunk_shape = random_int_tuple(1, 10, ndim)
for fill_level in chain((None,), product_range((1,) * ndim, chunk_shape)):
if fill_level is None:
total_shape = tuple(n * (i + 1)
for n, i in zip(chunk_shape, chunk_index))
else:
total_shape = tuple(n * i + fl
for n, i, fl in zip(chunk_shape, chunk_index, fill_level))
chunk_data = np.random.uniform(-10, 10, chunk_shape).astype(random.choice((float, int)))
stream = BytesIO()
buffer = ChunkBuffer(stream, "data", data=chunk_data, maxshape=(None,) * ndim)
buffer.select(chunk_index)
buffer.create_dataset(stream if random.random() < 0.5 else None, filemode="w",
write=True, fill_level=fill_level)
with h5.File(stream, "r") as h5f:
dataset = h5f["data"]
self.assertEqual(dataset.shape, total_shape)
self.assertEqual(dataset.chunks, chunk_shape)
self.assertEqual(dataset.dtype, chunk_data.dtype)
self.assertEqual(dataset.maxshape, buffer.maxshape)
fill_slices = tuple(map(slice, fill_level)) if fill_level is not None else ...
np.testing.assert_allclose(ChunkBuffer.load(h5f, "data", chunk_index).data[fill_slices],
chunk_data[fill_slices])
@repeat(N_REPEAT_TEST_CASE)
def test_select(self):
for ndim in range(1, 5):
chunk_shape = random_int_tuple(1, 10, ndim)
nchunks = random_int_tuple(1, 4, ndim)
maxshape = tuple(f * n if random.random() < 0.25 else None
for f, n in zip(nchunks, chunk_shape))
buffer = ChunkBuffer("file", "data", shape=chunk_shape, maxshape=maxshape)
# valid calls
for chunk_index in product_range(nchunks):
buffer.select(chunk_index)
self.assertEqual(buffer.chunk_index, chunk_index)
def random_chunk_index():
return tuple(map(lambda n: random.randint(0, n - 1), nchunks))
# invalid number of dimensions
too_many_dims = random_chunk_index() + (0,)
with self.assertRaises(IndexError):
buffer.select(too_many_dims)
too_few_dims = random_chunk_index()[:-1]
with self.assertRaises(IndexError):
buffer.select(too_few_dims)
# index out of bounds
for dim in range(ndim):
chunk_index = random_chunk_index()
negative = chunk_index[:dim] + (random.randint(-10, -1),) + chunk_index[dim + 1:]
with self.assertRaises(IndexError):
buffer.select(negative)
if maxshape[dim] is not None:
too_large = chunk_index[:dim] + (nchunks[dim] + random.randint(1, 10),) + chunk_index[dim + 1:]
with self.assertRaises(IndexError):
buffer.select(too_large)
@repeat(N_REPEAT_TEST_CASE)
def test_read(self):
for ndim in range(1, 4):
chunk_shape = random_int_tuple(1, 10, ndim)
nchunks = random_int_tuple(1, 4, ndim)
for fill_level in chain((None,), product_range((1,) * ndim, chunk_shape)):
if fill_level is None:
total_shape = tuple(n * c for n, c in zip(chunk_shape, nchunks))
else:
total_shape = tuple(n * (c - 1) + fl
for n, c, fl in zip(chunk_shape, nchunks, fill_level))
array = np.random.uniform(-10, 10, total_shape).astype(random.choice((int, float)))
stream = BytesIO()
with h5.File(stream, "w") as h5f:
h5f.create_dataset("data", data=array, chunks=chunk_shape, maxshape=(None,) * ndim)
def validate_fill_level(chunk_index, actual_fill_level):
target_fill_level = chunk_shape if fill_level is None else fill_level
for idx, n, length, actual, target in zip(chunk_index, nchunks, chunk_shape,
actual_fill_level, target_fill_level):
if idx == n - 1:
self.assertEqual(actual, target)
else:
self.assertEqual(actual, length)
# valid
buffer = ChunkBuffer(stream, "data", shape=chunk_shape, dtype=array.dtype)
for chunk_index in product_range(nchunks):
# separate select / read
buffer.select(chunk_index)
read_fill_level = buffer.read()
validate_fill_level(chunk_index, read_fill_level)
fill_slices = tuple(map(slice, fill_level)) if fill_level is not None else ...
np.testing.assert_allclose(buffer.data[fill_slices],
array[_chunk_slices(chunk_index, chunk_shape)][fill_slices])
# read with index arg
buffer.data[...] = np.random.uniform(-20, 20, chunk_shape).astype(buffer.dtype)
read_fill_level = buffer.read(chunk_index)
validate_fill_level(chunk_index, read_fill_level)
np.testing.assert_allclose(buffer.data[fill_slices],
array[_chunk_slices(chunk_index, chunk_shape)][fill_slices])
# index out of bounds
with self.assertRaises(IndexError):
buffer.read(nchunks)
# dataset does not exist
buffer = ChunkBuffer(stream, "wrong_name", shape=chunk_shape, dtype=array.dtype)
with self.assertRaises(KeyError):
buffer.read()
# invalid chunk shape
buffer = ChunkBuffer(stream, "data", shape=tuple(random.randint(1, 10) + n for n in chunk_shape))
with self.assertRaises(RuntimeError):
buffer.read()
# invalid datatype
buffer = ChunkBuffer(stream, "data", shape=chunk_shape, dtype=np.float32)
with self.assertRaises(RuntimeError):
buffer.read()
# invalid maxshape
buffer = ChunkBuffer(stream, "data", shape=chunk_shape, dtype=array.dtype, maxshape=chunk_shape)
with self.assertRaises(RuntimeError):
buffer.read()
@repeat(N_REPEAT_TEST_CASE)
def test_write_overwrite(self):
for ndim in range(1, 4):
chunk_shape = random_int_tuple(1, 10, ndim)
nchunks = random_int_tuple(1, 4, ndim)
total_shape = tuple(n * c for n, c in zip(chunk_shape, nchunks))
stream = BytesIO()
chunk = np.random.uniform(-10, 10, chunk_shape).astype(random.choice((int, float)))
file_content = np.random.uniform(-10, 10, total_shape).astype(chunk.dtype)
with h5.File(stream, "w") as h5f:
h5f.create_dataset("data", data=file_content, chunks=chunk_shape, maxshape=(None,) * ndim)
buffer = ChunkBuffer(stream, "data", data=chunk)
# valid indices
for chunk_index in product_range(nchunks):
with h5.File(stream, "a") as h5f:
h5f["data"][...] = file_content
buffer.select(chunk_index)
buffer.write(must_exist=True)
desired_file_content = file_content.copy()
desired_file_content[_chunk_slices(chunk_index, chunk_shape)] = chunk
with h5.File(stream, "r") as h5f:
np.testing.assert_allclose(h5f["data"][()], desired_file_content)
# index out of bounds
for dim in range(ndim):
chunk_index = tuple(map(lambda n: random.randint(0, n - 1), nchunks))
chunk_index = chunk_index[:dim] + (nchunks[dim] + random.randint(1, 10),) + chunk_index[dim + 1:]
buffer.select(chunk_index)
with self.assertRaises(RuntimeError):
buffer.write(must_exist=True)
@repeat(N_REPEAT_TEST_CASE)
def test_write_extend(self):
for ndim in range(1, 4):
chunk_shape = random_int_tuple(1, 10, ndim)
nchunks = random_int_tuple(1, 5, ndim)
chunks = []
stream = BytesIO()
with h5.File(stream, "w") as h5f:
h5f.create_dataset("data", shape=chunk_shape, dtype=float,
chunks=chunk_shape, maxshape=(None,) * ndim)
buffer = ChunkBuffer(stream, "data", shape=chunk_shape, dtype=float)
for chunk_index in product_range(nchunks):
chunks.append((_chunk_slices(chunk_index, chunk_shape), np.random.uniform(-10, 10, chunk_shape)))
buffer.select(chunk_index)
buffer.data[...] = chunks[-1][1]
buffer.write(must_exist=False)
with h5.File(stream, "r") as f:
dataset = f["data"]
for chunk_slice, expected in chunks:
np.testing.assert_allclose(dataset[chunk_slice], expected)
def test_real_files(self):
with TemporaryDirectory() as tempdir:
filename = Path(tempdir) / "test_file.h5"
chunk_shape = (1, 2, 3)
array = np.random.uniform(-10, 10, chunk_shape)
buffer = ChunkBuffer(filename, "data", data=array)
buffer.create_dataset(filemode="w")
self.assertTrue(filename.exists())
with h5.File(filename, "r") as h5f:
np.testing.assert_allclose(h5f["data"][()], array)
# extend dataset with stored filename
array = np.random.uniform(-10, 10, chunk_shape)
buffer.select((1, 0, 0))
buffer.data[...] = array
buffer.write(must_exist=False)
with h5.File(filename, "r") as h5f:
np.testing.assert_allclose(h5f["data"][1:, :, :], array)
# extend dataset with passed in filename
array = np.random.uniform(-10, 10, chunk_shape)
buffer.select((1, 1, 0))
buffer.data[...] = array
buffer.write(must_exist=False, file=filename)
with h5.File(filename, "r") as h5f:
np.testing.assert_allclose(h5f["data"][1:, 2:, :], array)
# extend dataset with passed in dataset
array = np.random.uniform(-10, 10, chunk_shape)
buffer.select((1, 0, 1))
buffer.data[...] = array
with h5.File(filename, "r+") as h5f:
dataset = h5f["data"]
buffer.write(must_exist=False, dataset=dataset)
np.testing.assert_allclose(dataset[1:, :2, 3:], array)
# wrong filename
with self.assertRaises(ValueError):
buffer.write(must_exist=False, file="wrong_file.h5")
# wrong dataset
with h5.File(filename, "a") as h5f:
wrong_dataset = h5f.create_dataset("wrong_data", (1,))
with self.assertRaises(ValueError):
buffer.write(must_exist=False, dataset=wrong_dataset)
# Allow running this test module directly (python test_chunkbuffer.py).
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
6563564 | <filename>src/beer/admin.py<gh_stars>0
from django.contrib import admin
from beer import models
# Expose the Beer model in the Django admin site with the default ModelAdmin.
admin.site.register(models.Beer)
8026803 | from .fks_partition import FKSPartition
| StarcoderdataPython |
4932434 | import torch
__all__ = ['MeterShapeNet']

# Default mapping from shape (category) name to the list of part-segmentation
# class ids that belong to it; here each shape owns exactly one part class.
default_shape_name_to_part_classes = {
    'Bag': [0],
    'Box': [1],
    'Cylinder': [2],
    'RobotFrame': [3],
}
class MeterShapeNet:
    """Running mean of per-shape part IoU for ShapeNet-style part segmentation.

    Each shape category owns a contiguous range of part classes; predictions
    for a sample are restricted to the range of its own category (identified
    by the sample's first target label) before the IoU is computed.
    """

    def __init__(self, num_classes=4, num_shapes=4, shape_name_to_part_classes=None):
        super().__init__()
        self.num_classes = num_classes
        self.num_shapes = num_shapes
        if shape_name_to_part_classes is None:
            shape_name_to_part_classes = default_shape_name_to_part_classes
        self.shape_name_to_part_classes = shape_name_to_part_classes
        # For every part class, record the (start, end) half-open class range
        # of the shape it belongs to, so update() can look it up by class id.
        lookup = []
        for part_classes in self.shape_name_to_part_classes.values():
            span = (part_classes[0], part_classes[-1] + 1)
            lookup.extend(span for _ in range(span[0], span[1]))
        self.part_class_to_shape_part_classes = lookup
        self.reset()

    def reset(self):
        """Clear the accumulated IoU sum and shape count."""
        self.iou_sum = 0
        self.shape_count = 0

    def update(self, outputs: torch.Tensor, targets: torch.Tensor):
        """Accumulate per-shape mean part IoU.

        outputs: B x num_classes x num_points logits, targets: B x num_points labels.
        """
        for sample in range(outputs.size(0)):
            # The shape's class range is derived from the sample's first label.
            lo, hi = self.part_class_to_shape_part_classes[targets[sample, 0].item()]
            predicted = lo + torch.argmax(outputs[sample, lo:hi, :], dim=0)
            labels = targets[sample, :]
            total = 0.0
            for part in range(lo, hi):
                in_label = labels == part
                in_pred = predicted == part
                union = (in_label | in_pred).sum().item()
                if union == 0:
                    # Part absent from both prediction and target counts as perfect.
                    total += 1.0
                else:
                    total += (in_label & in_pred).sum().item() / union
            self.iou_sum += total / (hi - lo)
            self.shape_count += 1

    def compute(self):
        """Return the mean IoU over all shapes seen since the last reset."""
        return self.iou_sum / self.shape_count
8181451 | <gh_stars>0
from stockroom import StockRoom
from stockroom import make_torch_dataset
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import DataLoader
import matplotlib.pyplot as plt
import numpy as np
from tqdm import tqdm
def imshow(img):
    """Display a CHW image tensor/array by transposing it to HWC for matplotlib."""
    plt.imshow(np.transpose(img, (1, 2, 0)))
    plt.show()
class Net(nn.Module):
    """Small LeNet-style CNN for 3x32x32 inputs with 10 output classes."""

    def __init__(self):
        super(Net, self).__init__()
        # Feature extractor: two 5x5 convolutions, each followed by 2x2 max-pooling.
        self.conv1 = nn.Conv2d(in_channels=3, out_channels=6, kernel_size=5)
        self.pool = nn.MaxPool2d(kernel_size=2, stride=2)
        self.conv2 = nn.Conv2d(in_channels=6, out_channels=16, kernel_size=5)
        # Classifier head: 16 feature maps of 5x5 flatten to 400 inputs.
        self.fc1 = nn.Linear(in_features=16 * 5 * 5, out_features=120)
        self.fc2 = nn.Linear(in_features=120, out_features=84)
        self.fc3 = nn.Linear(in_features=84, out_features=10)

    def forward(self, x):
        h = self.pool(F.relu(self.conv1(x)))
        h = self.pool(F.relu(self.conv2(h)))
        h = h.view(-1, 16 * 5 * 5)
        h = F.relu(self.fc1(h))
        h = F.relu(self.fc2(h))
        return self.fc3(h)
# --- Training loop: fine-tune the CNN on CIFAR-10 stored in a StockRoom repo ---
stock = StockRoom()
imgcol = stock.data['cifar10-train-image']
lblcol = stock.data['cifar10-train-label']
# imshow(imgcol[11])

lr = 0.001
momentum = 0.9
check_every = 500  # iterations between loss averaging / checkpoint attempts

net = Net()
dset = make_torch_dataset([imgcol, lblcol])
dloader = DataLoader(dset, batch_size=64)
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(net.parameters(), lr=lr, momentum=momentum)

for epoch in range(2):
    running_loss = 0.0
    current_loss = 99999
    best_loss = 99999
    p = tqdm(dloader)
    # Bug fix: progress-bar text said "epcoh" instead of "epoch".
    p.set_description('[epoch: %d, iteration: %d] loss: %5d' % (epoch + 1, 1, current_loss))
    for i, data in enumerate(p):
        inputs, labels = data
        optimizer.zero_grad()
        outputs = net(inputs)
        loss = criterion(outputs, labels)
        loss.backward()
        optimizer.step()
        running_loss += loss.item()
        if i % check_every == check_every - 1:
            current_loss = running_loss / check_every
            running_loss = 0.0
            p.set_description('[epoch: %d, iteration: %d] loss: %.6f' % (epoch + 1, i + 1, current_loss))
            if current_loss < best_loss:
                # Persist hyper-parameters and weights only when the loss improves.
                with stock.enable_write():
                    stock.experiment['lr'] = lr
                    stock.experiment['momentum'] = momentum
                    stock.model['cifarmodel'] = net.state_dict()
                best_loss = current_loss

print(stock.model.keys())
print('Finished Training')
| StarcoderdataPython |
4881848 | # import json
#
# from django.http import HttpResponse
# from django.utils.safestring import mark_safe
# from django.views.decorators.csrf import csrf_exempt
# from markdown import markdown
#
# from libs.util.auth import login_auth_view
#
#
# @csrf_exempt
# @login_auth_view
# def change_to_markdown(request):
# """
# 博客文本 to markdown
# :param request:
# :return:
# """
# content = request.POST.get('content', '')
# content = mark_safe(markdown(content))
# return HttpResponse(json.dumps({'content': content}), content_type="application/json")
| StarcoderdataPython |
4842378 | import pytest
from towel import *
class TestUpdate:
    """Tests for Query.remove().

    NOTE(review): despite the class/method names mentioning "update", these
    tests exercise row *deletion*; consider renaming for clarity.
    """
    def test_deletes(self, base, cursor, fish_fixtures):
        """Removing one instance takes it out of the table."""
        Fish, fishes = fish_fixtures
        fish = fishes[0]
        fish.objects().remove()
        assert not fish.as_namedtuple() in Fish.objects().get_all()
    def test_removes_multiple_without_filter(self, base, cursor, fish_fixtures):
        """remove() without a filter deletes every row."""
        Fish, fix = fish_fixtures
        Fish.objects().remove()
        cursor.execute("select * from fish")
        assert len(cursor.fetchall()) == 0
    def test_updates_on_multiple_with_filter(self, base, cursor, fish_fixtures):
        """remove() with a filter deletes only the matching rows."""
        Fish, fix = fish_fixtures
        Fish.objects().filter("id", "<", 10).remove()
        assert all(x.id > 10 for x in Fish.objects().get_all())
4995510 | from kirby_transform.schema.input.schema import CommonInput, NestedInputData
from kirby_transform.schema.validator import (FieldValidator, UnixEpoch,
ValidationError)
| StarcoderdataPython |
1978372 | <gh_stars>0
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from google.appengine.ext import ndb
from config.template_middleware import TemplateResponse
from article.article_model import Article
from gaecookie.decorator import no_csrf
from gaepermission.decorator import permissions, login_not_required
from permission_app.model import ADMIN
from routes.articles import edit
from routes.articles.new import salvar
from tekton.gae.middleware.redirect import RedirectResponse
from tekton.router import to_path
__author__ = 'marcos'
# @permissions(ADMIN)
@no_csrf
@login_not_required
def index():
    """Render the article admin listing with per-article edit/delete links."""
    edit_base = to_path(edit)
    delete_base = to_path(deletar)
    articles = Article.query_ordenada_por_nome().fetch()
    for article in articles:
        article_id = article.key.id()
        article.edit_path = to_path(edit_base, article_id)
        article.deletar_path = to_path(delete_base, article_id)
    return TemplateResponse({'salvar_path': to_path(salvar),
                             'articles': articles},
                            'articles/article_home.html')
@login_not_required
def deletar(article_id):
    """Delete the Article with the given id, then redirect to the listing."""
    ndb.Key(Article, int(article_id)).delete()
    return RedirectResponse(index)
| StarcoderdataPython |
301699 | import os
#=========== SIMULATION DETAILS ========
projectname = "project"
basename = "experimentname"
# seed = -1 — presumably "pick a random seed"; TODO confirm against the runner.
seed = -1
N_measurements = 1
measurements = range(N_measurements)
params1 = range(3)
params2 = range(3)
params3 = range(3)
params4 = range(3)
params5 = range(3)
params6 = range(3)
# (name, values) pairs swept externally; the None entry repeats per measurement.
external_parameters = [
    ( 'p1', params1[:2] ),
    ( 'p2', params2 ),
    ( None , measurements ),
]
# NOTE(review): both entries are labelled 'p3' but the second uses params4 —
# the second label looks like it should be 'p4'; confirm before relying on it.
internal_parameters = [
    ('p3', params3[:1]),
    ('p3', params4[:]),
]
# Fixed single-value parameters.
standard_parameters = [
    ( 'p5', params5[1] ),
    ( 'p6', params6[2] ),
]
only_save_times = False
#============== QUEUE ==================
queue = "SGE"
memory = "1G"
priority = 0
#============ CLUSTER SETTINGS ============
username = "user"
server = "localhost"
useratserver = username + u'@' + server
shell = "/bin/bash"
pythonpath = "python"
# Experiment name encodes the measurement count and save mode.
name = basename + "_NMEAS_" + str(N_measurements) + "_ONLYSAVETIME_" + str(only_save_times)
serverpath = "/home/"+username +"/"+ projectname + "/" + name
resultpath = serverpath + "/results"
#============== LOCAL SETTINGS ============
localpath = os.path.join(os.getcwd(),"results_"+name)
n_local_cpus = 1
#========================
# Repositories to clone/install on the cluster before running.
git_repos = [
    ( "/path/to/repo", pythonpath + " setup.py install --user" )
]
| StarcoderdataPython |
4973827 | # coding=utf-8
# Copyright 2018-2020 EVA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyspark.sql import SparkSession
from pyspark.conf import SparkConf
from src.configuration.configuration_manager import ConfigurationManager
from src.utils.logging_manager import LoggingManager
class Session(object):
    """
    Wrapper around Spark Session.

    Implemented as a singleton: every ``Session()`` call returns the same
    instance backed by a single SparkSession.
    """
    _instance = None
    _session = None

    def __new__(cls):
        # Create the singleton instance on first use only.
        if cls._instance is None:
            cls._instance = super(Session, cls).__new__(cls)
        return cls._instance

    def __init__(self):
        # Bug fix: __init__ runs on *every* Session() call even though __new__
        # returns the cached instance, so the SparkSession was rebuilt on each
        # call. Guard so initialization happens exactly once.
        if self._session is not None:
            return
        self._config = ConfigurationManager()
        name = self._config.get_value('core', 'application')
        self.init_spark_session(name)

    def init_spark_session(self, application_name, spark_master=None):
        """Setup a spark session.

        :param application_name: name shown for this Spark application.
        :param spark_master: A master parameter used by spark session builder.
            Use default value (None) to use system
            environment configured spark cluster.
            Use 'local[*]' to run on a local box.
        :return: spark_session: A spark session
        """
        eva_spark_conf = SparkConf()
        # Forward every configured pyspark property into the Spark conf.
        pyspark_config = self._config.get_value('pyspark', 'property')
        for key, value in pyspark_config.items():
            eva_spark_conf.set(key, value)
        session_builder = SparkSession \
            .builder \
            .appName(application_name) \
            .config(conf=eva_spark_conf)
        if spark_master:
            session_builder.master(spark_master)
        # Gets an existing SparkSession or, if there is no existing one,
        # creates a new one based on the options set in this builder.
        self._session = session_builder.getOrCreate()
        # Mirror the configured logging level into Spark's log4j logger.
        log4j_level = LoggingManager().getLog4JLevel()
        spark_context = self._session.sparkContext
        spark_context.setLogLevel(log4j_level)

    def get_session(self):
        """Return the underlying SparkSession."""
        return self._session

    def get_context(self):
        """Return the underlying SparkContext."""
        return self._session.sparkContext

    def stop(self):
        """Stop the underlying SparkSession."""
        self._session.stop()

    def __del__(self):
        # Bug fix: guard against _session being None (interpreter teardown or
        # a never-initialized instance) before calling stop().
        if self._session is not None:
            self._session.stop()
| StarcoderdataPython |
1815943 | # Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
class OrderedSet(collections.abc.Set):
    """
    A set that preserves the insertion order of its elements.

    Backed by an OrderedDict, so iteration yields elements in first-seen
    order while membership tests stay O(1).  Note that ``==`` inherited
    from ``collections.abc.Set`` compares contents, not order.

    Bug fix: inherits from ``collections.abc.Set`` — the ``collections.Set``
    alias was deprecated since Python 3.3 and removed in Python 3.10.
    """
    def __init__(self, iterable=()):
        self.d = collections.OrderedDict.fromkeys(iterable)

    def __len__(self):
        return len(self.d)

    def __contains__(self, element):
        return element in self.d

    def __iter__(self):
        return iter(self.d)
def compare_columns(left_col_list, right_col_list):
    """
    Compare the left and right column schemas: the (deduplicated) column
    names must match *and* appear in the same order.

    Bug fix: the previous implementation compared two OrderedSets with
    ``==``, but Set equality is order-insensitive, so differently ordered
    schemas were reported as equal — contradicting this function's intent
    of checking "ordering and column names".

    :param left_col_list: column names of the left table
    :param right_col_list: column names of the right table
    :return: True if both schemas list the same columns in the same order
    """
    # dict.fromkeys deduplicates while preserving first-seen order,
    # mirroring OrderedSet's iteration order.
    return list(dict.fromkeys(left_col_list)) == list(dict.fromkeys(right_col_list))
def get_table_ref(client, table_id_str):
    """
    Build a TableReference from a "dataset.table" identifier string.

    :param client: BigQuery Client
    :param table_id_str: identifier of the form "<dataset>.<table>"
    :return: google.cloud.bigquery.table.TableReference
    :raises ValueError: if table_id_str is empty/None
    """
    if not table_id_str:
        raise ValueError('Table name not found')
    parts = table_id_str.split('.')
    return client.dataset(parts[0]).table(parts[1])
def get_console_link_for_table_ref(table_ref):
    """
    Build the GCP console URL for a given TableReference.

    :param table_ref: google.cloud.bigquery.table.TableReference
    :return: string link to the BigQuery table in the GCP Console
    """
    template = ('https://console.cloud.google.com/bigquery?'
                'project={project}'
                '&p={project}'
                '&t={table}'
                '&d=validation&orgonly=true'
                '&supportedpurview=organizationId&page=table')
    return template.format(project=table_ref.project, table=table_ref.table_id)
def get_console_link_for_query_job(query_job):
    """Build the GCP console URL showing the results of a BigQuery query job."""
    template = ('https://console.cloud.google.com/bigquery?'
                'project={project}'
                '&j=bq:{location}:{job_id}'
                '&page=queryresults&orgonly=true&supportedpurview=organizationId')
    return template.format(project=query_job.project,
                           location=query_job.location,
                           job_id=query_job.job_id)
def get_full_columns_list(client, columns_list, primary_keys, l_table_name, r_table_name):
    """
    Return the primary-key columns (minus excluded columns) ordered as in the
    source table, or None if the two tables' schemas differ.

    The source (left) table's schema is fetched first so the returned list
    preserves the source table's column order.
    :param client: BigQuery client
    :param columns_list: list of columns to exclude
    :param primary_keys: list of primary keys
    :param l_table_name: left table name ("dataset.table")
    :param r_table_name: right table name ("dataset.table")
    :return: ordered list of column names, or None on schema mismatch
    """
    # Fetch both schemas and drop the excluded columns from each.
    l_table = client.get_table(get_table_ref(client, l_table_name))
    r_table = client.get_table(get_table_ref(client, r_table_name))
    l_compress = ["{0}".format(schema.name) for schema in l_table.schema]
    r_compress = ["{0}".format(schema.name) for schema in r_table.schema]
    l_columns = list(OrderedSet(l_compress) - columns_list)
    r_columns = list(OrderedSet(r_compress) - columns_list)
    if l_columns == r_columns:
        # Keep only the primary keys that were not excluded, emitted in the
        # left table's schema order.
        config_column_list = set(primary_keys).difference(set(columns_list))
        source_ordered_column_list = []
        for source_table_column in l_table.schema:
            if source_table_column.name in config_column_list:
                source_ordered_column_list.append(source_table_column.name)
        return source_ordered_column_list
    else:
        # NOTE(review): prints and returns None instead of raising/logging —
        # callers must handle the None case explicitly.
        print('Table Schemas for table `{0}` and `{1}` are not equal!'.format(l_table_name, r_table_name))
        return None
| StarcoderdataPython |
307632 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019/6/8 上午10:53
# @Author : Ethan
# @Site :
# @File : my_crnn.py
# @Software: PyCharm
import torch
import torch.nn.functional as F
import torch.nn as nn
class Vgg_16(torch.nn.Module):
    """VGG-style feature extractor for CRNN.

    Takes a 1-channel image and reduces the height to 1 while keeping a wide
    feature sequence (e.g. b x 1 x 32 x 100 -> b x 512 x 1 x 22).  All
    parameters are frozen at construction time.
    """

    def __init__(self):
        super(Vgg_16, self).__init__()
        self.convolution1 = torch.nn.Conv2d(1, 64, 3, padding=1)
        self.pooling1 = torch.nn.MaxPool2d(2, stride=2)
        self.convolution2 = torch.nn.Conv2d(64, 128, 3, padding=1)
        self.pooling2 = torch.nn.MaxPool2d(2, stride=2)
        self.convolution3 = torch.nn.Conv2d(128, 256, 3, padding=1)
        self.convolution4 = torch.nn.Conv2d(256, 256, 3, padding=1)
        # Non-square pooling: shrinks height faster than width so the
        # horizontal sequence stays long.
        self.pooling3 = torch.nn.MaxPool2d((1, 2), stride=(2, 1))
        self.convolution5 = torch.nn.Conv2d(256, 512, 3, padding=1)
        self.BatchNorm1 = torch.nn.BatchNorm2d(512)
        self.convolution6 = torch.nn.Conv2d(512, 512, 3, padding=1)
        self.BatchNorm2 = torch.nn.BatchNorm2d(512)
        self.pooling4 = torch.nn.MaxPool2d((1, 2), stride=(2, 1))
        self.convolution7 = torch.nn.Conv2d(512, 512, 2)
        # Freeze the backbone: no gradients flow into these layers.
        for param in self.parameters():
            param.requires_grad = False

    def forward(self, x):
        x = self.pooling1(F.relu(self.convolution1(x), inplace=True))
        x = self.pooling2(F.relu(self.convolution2(x), inplace=True))
        x = F.relu(self.convolution3(x), inplace=True)
        x = self.pooling3(F.relu(self.convolution4(x), inplace=True))
        x = F.relu(self.BatchNorm1(self.convolution5(x)), inplace=True)
        x = self.pooling4(F.relu(self.BatchNorm2(self.convolution6(x)), inplace=True))
        # Final 2x2 conv collapses the remaining height to 1.
        return F.relu(self.convolution7(x), inplace=True)
class RNN(torch.nn.Module):
    """Two stacked bidirectional LSTMs with linear projections for CRNN.

    Input:  (seq_len, batch, 512) feature sequence from the CNN.
    Output: (seq_len, batch, class_num) per-timestep class scores.
    """

    def __init__(self, class_num, hidden_unit):
        super(RNN, self).__init__()
        self.Bidirectional_LSTM1 = torch.nn.LSTM(512, hidden_unit, bidirectional=True)
        self.embedding1 = torch.nn.Linear(hidden_unit * 2, 512)
        self.Bidirectional_LSTM2 = torch.nn.LSTM(512, hidden_unit, bidirectional=True)
        # Final projection onto the character classes (fine-tuned head).
        self.embedding_finetune = torch.nn.Linear(hidden_unit * 2, class_num)

    def forward(self, x):
        # LSTM returns (output, (h_n, c_n)); only the output sequence is used.
        recurrent, _ = self.Bidirectional_LSTM1(x)
        seq_len, batch, hidden = recurrent.size()
        # Flatten time and batch so the Linear layer sees a 2-D input,
        # then restore the (seq_len, batch, features) layout.
        x = self.embedding1(recurrent.view(seq_len * batch, hidden)).view(seq_len, batch, -1)
        recurrent, _ = self.Bidirectional_LSTM2(x)
        seq_len, batch, hidden = recurrent.size()
        return self.embedding_finetune(recurrent.view(seq_len * batch, hidden)).view(seq_len, batch, -1)
class CRNN(torch.nn.Module):
    """CRNN text recognizer: VGG feature extractor followed by a BiLSTM head.

    Input:  (batch, 1, H, W) grayscale images whose CNN output height is 1.
    Output: (seq_len, batch, class_num) per-timestep class scores.
    """

    def __init__(self, class_num, hidden_unit=256):
        super(CRNN, self).__init__()
        self.cnn = torch.nn.Sequential()
        self.cnn.add_module('vgg_16', Vgg_16())
        self.rnn = torch.nn.Sequential()
        self.rnn.add_module('rnn', RNN(class_num, hidden_unit))

    def forward(self, x):
        features = self.cnn(x)
        batch, channels, height, width = features.size()
        # The CNN must have collapsed the height completely.
        assert height == 1
        # (b, c, 1, w) -> (b, c, w) -> (w, b, c) = (seq_len, batch, input_size)
        sequence = features.squeeze(2).permute(2, 0, 1)
        return self.rnn(sequence)
| StarcoderdataPython |
289936 | <filename>releasescrawler/spiders/releases_games.py<gh_stars>0
# -*- coding: utf-8 -*-
import hashlib
import logging
import re
from datetime import datetime
from scrapy.linkextractors import LinkExtractor
from scrapy.selector import Selector
from scrapy.spiders import CrawlSpider, Rule
from releasescrawler.items import ReleasescrawlerItem
class ReleasesGamesException(Exception):
    """Raised for errors while crawling/parsing releases.com game pages."""
    pass
class ReleasesGamesSpider(CrawlSpider):
name = 'releases_games'
allowed_domains = ['releases.com']
restricted_xpaths = [
'//*[contains(@class,\'calendar-item-title subpage-trigg\')]'
]
months_ref = {
'january': 1,
'february': 2,
'march': 3,
'april': 4,
'may': 5,
'june': 6,
'july': 7,
'august': 8,
'september': 9,
'october': 10,
'november': 11,
'december': 12,
}
quarter_ref = {
'q1': 1,
'q2': 2,
'q3': 3,
'q4': 4,
}
quarter_months_ref = {
1: [1, 2, 3],
2: [4, 5, 6],
3: [7, 8, 9],
4: [10, 11, 12],
}
rules = (
Rule(
LinkExtractor(
allow=(),
restrict_xpaths=restricted_xpaths,
),
callback='parse_links',
follow=False,
),
)
def __init__(self, *a, **kw):
self.log = logging.getLogger(self.name)
self.start_urls = self.get_start_urls()
super(ReleasesGamesSpider, self).__init__(*a, **kw)
def get_start_urls(self):
start_urls = []
current_year = datetime.now().year
current_month = datetime.now().month
for x in range(0, (12 - current_month) + 1):
start_urls.append(
'https://www.releases.com/l/Games/{year}/{month}/'.format(
year=current_year,
month=int(current_month + x)
)
)
self.log.info('start_urls:\n%s', '\n'.join(start_urls))
return start_urls
def get_xpathstring(self, content, xpath_str, str_sep='\x20'):
return str_sep.join(content.xpath(xpath_str).extract()).strip()
def get_countryfromflag(self, content_path):
regex = r'.*\-([a-zA-Z]+)\..*'
found = re.findall(regex, content_path)
return ''.join(found)
@staticmethod
def get_quarter_month(month):
if month in [1, 2, 3]:
return 1
if month in [4, 5, 6]:
return 2
if month in [7, 8, 9]:
return 3
if month in [10, 11, 12]:
return 4
def get_date_object(self, date_string):
datestring_regex = r'(Q[0-9])\x20+([0-9]{4})|'\
r'([A-Za-z]+)\x20+([0-9]{1,2})\,\x20+([0-9]{4})|'\
r'([A-Za-z]+)\x20+([0-9]{4})|([0-9]{4})'
founds = re.findall(datestring_regex, date_string)
date_obj = {}
for item in founds:
quarter = self.quarter_ref.get(item[0].lower())
month_str = item[2] if item[2] != '' else item[5]
month = self.months_ref.get(month_str.lower())
year = item[1] if item[1] != '' else item[4]\
if item[4] != '' else item[6] if item[6] != '' else item[7]\
if item[7] != '' else None
day = item[3] if item[3] != '' else None
if year:
date_obj.update({'year': int(year)})
if day:
date_obj.update({'day': int(day)})
if month:
date_obj.update({'months': [month]})
if not month and year:
date_obj.update(
{'months': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]})
if quarter:
date_obj.update(
{'quarter': quarter, 'months': self.quarter_months_ref.get(quarter)})
if not quarter and month:
date_obj.update({'quarter': self.get_quarter_month(month)})
return date_obj
def parse_links(self, response):
    """Parse a game detail page, yielding one item per regional release row.

    Each ``ReleasescrawlerItem`` gets a stable ``_id`` (sha256 of
    name:platform:country) so the same release updates in place on
    re-crawls.
    """
    try:
        name = self.get_xpathstring(
            response, '//*[contains(@itemprop, \'name\')]/text()'
        )
        description = self.get_xpathstring(
            response, '//*[contains(@itemprop, \'description\')]/text()'
        )
        tags = response.xpath(
            '//*[contains(@class, \'p-details-tags\')]/li/a/text()'
        ).extract()
        # One "tracking" row per platform/region release of this title.
        trackings = response.xpath(
            '//*[contains(@class, \'rl-row rl-tracking\')]'
        ).extract()
        for track in trackings:
            item = ReleasescrawlerItem()
            # Re-parse the row's raw HTML so the xpaths below stay row-local.
            track_element = Selector(text=track)
            release_flag = self.get_xpathstring(
                track_element,
                '//*[contains(@class, \'date-region-flag\')]'
            )
            item['name'] = name
            item['description'] = description
            item['tags'] = tags
            item['release_version'] = self.get_xpathstring(
                track_element,
                '//*[contains(@class, \'rl-row rl-tracking\')]/@data-version-id'
            )
            # The country code is embedded in the flag image filename.
            item['release_country'] = self.get_countryfromflag(
                release_flag
            )
            item['release_platform'] = self.get_xpathstring(
                track_element,
                '//*[contains(@class, \'version\')]/text()'
            )
            release_date_string = self.get_xpathstring(
                track_element,
                '//*[contains(@class, \'date-details\')]/span[contains(@class, \'date\')]/text()'
            )
            # Keep both the raw string and the structured parse.
            item['release_date_string'] = release_date_string
            item['release_date'] = self.get_date_object(
                release_date_string)
            item['release_status'] = self.get_xpathstring(
                track_element,
                '//*[contains(@class, \'date-details\')]/span[contains(@class, \'status\')]/text()'
            )
            rid = '{name}:{platform}:{country}'.format(
                name=name,
                platform=item['release_platform'],
                country=item['release_country']
            )
            item['_id'] = hashlib.sha256(str.encode(rid)).hexdigest()
            yield item
    except ReleasesGamesException as ex:
        self.log.exception('%s', ex)
| StarcoderdataPython |
9735462 | import sys, os, hashlib
import numpy as np
# Python 2/3 compatibility: make `xrange` name the lazy range type on both.
if sys.version_info > (3,):
    xrange = range
class Frame(np.ndarray):
    """Marker ndarray subclass for video frames.

    Adds no behavior of its own; presumably exists so frame arrays can be
    distinguished from plain arrays and carry extra per-frame attributes
    set elsewhere -- confirm against the callers.
    """
    pass
class SlicedView(object):
def __init__(self, parent, indexes, properties=()):
self.parent = parent
self.range = xrange(*indexes.indices(len(parent)))
self.properties = properties
def __getitem__(self, item):
return self.parent[self.range[item]]
def __len__(self):
return len(self.range)
def __getattr__(self, item):
if item in self.properties:
return self.properties[item](self.range)
return getattr(self.parent, item)
def __getstate__(self):
state = dict(self.__dict__)
del state['properties'] # FIXME
return state
def __setstate__(self, state):
self.__dict__.update(state)
self.properties = ()
cache_dir = os.path.join(os.path.expanduser('~'), ".cache", "vi3o")


def index_file(fn, extradata=None):
    """Return the cache-index path for the file *fn*, creating the cache dir.

    The cache key is an MD5 of the file's absolute path, size and mtime
    (plus optional *extradata*), so the index is naturally invalidated
    whenever the underlying file changes.

    :param fn: path of the file being indexed (must exist; it is stat'ed)
    :param extradata: optional extra value mixed into the cache key
    :return: path of the ``.idx`` cache file (the file itself is not created)
    """
    stats = os.stat(fn)
    key = str((os.path.abspath(fn), stats.st_size, stats.st_mtime, extradata))
    key = hashlib.md5(key.encode()).hexdigest()  # non-cryptographic use: cache key only
    path = os.path.join(cache_dir, key + '.idx')
    # exist_ok avoids the check-then-create race of the previous
    # `if not os.path.exists(d): os.makedirs(d)` pair, which could raise
    # FileExistsError when two processes built their caches concurrently.
    os.makedirs(os.path.dirname(path), exist_ok=True)
    return path
| StarcoderdataPython |
353134 | import pygame
import os
import random
import sys
import math
from pygame.locals import *
from gamestate import *
from battle import battle
from repair import repair
from events import events
from shop import shop
from gameover import game_over, game_win
from escape import Escape
# Index helpers for IconTree.levels: the most recently appended level,
# and the first level above the (hidden) root, respectively.
LAST = -1
FIRST = 1
def get_rand():
    """Draw a node count from a skewed distribution favoring small values.

    Returns 2-6, with 2 and 3 three times as likely as 5 or 6.
    """
    weighted_counts = [2, 2, 2, 3, 3, 3, 4, 4, 5, 6]
    return random.choice(weighted_counts)
class Icon(pygame.sprite.Sprite):
    """A single node on the sector map (minion, boss, shop, unknown, repair).

    Holds a normal image plus a highlighted "shadow" image drawn when the
    player currently occupies the node, and the list of child nodes
    reachable from it.
    """

    def __init__(self, image, shadow_image, type=None, x=0, y=0):
        super().__init__()
        self.x = x
        self.y = y
        self.type = type  # encounter kind: 'minion'/'boss'/'shop'/'unknown'/'repair'
        self.image = image
        self.shadow_image = shadow_image
        self.rect = self.image.get_rect(topleft=(self.x, self.y))
        self.bounding_rect = self.image.get_bounding_rect()
        self.bounding_rect.center = self.rect.center
        self.children = []

    def connect(self, screen):
        """Draw a line from this node's top to each child's bottom."""
        # Fixed: was a side-effect-only list comprehension that built and
        # discarded a throwaway list; a plain loop states the intent.
        for child in self.children:
            pygame.draw.line(screen, ORCHIRD, self.rect.midtop, child.rect.midbottom)

    def draw(self, screen, position, access=False):
        """Blit the node (highlighted if it is *position*) and its child links."""
        # Skip nodes scrolled well outside the visible map area.
        if self.y < -50 or self.y > SCREEN_HEIGHT + 50:
            return
        if self == position:
            screen.blit(self.shadow_image, self.rect)
        else:
            screen.blit(self.image, self.rect)
        self.connect(screen)

    def up(self):
        """Move the node down on screen (map scrolled up)."""
        self.y += MAP_DELTA
        self.update()

    def down(self):
        """Move the node up on screen (map scrolled down)."""
        self.y -= MAP_DELTA
        self.update()

    def collide(self, position):
        """Return True if the (x, y) point *position* hits this node."""
        return self.bounding_rect.collidepoint(position)

    def copy(self):
        """Return a fresh Icon with the same images/type/position (children not copied)."""
        return Icon(self.image, self.shadow_image, self.type, self.x, self.y)

    def update(self):
        """Recompute rects after a position change.

        NOTE(review): anchors on ``center`` whereas ``__init__`` anchors on
        ``topleft`` -- appears intentional for repositioning, but confirm.
        """
        self.rect = self.image.get_rect(center=(self.x, self.y))
        self.bounding_rect = self.image.get_bounding_rect()
        self.bounding_rect.center = self.rect.center

    def is_child(self, parent):
        """Return True if this node is directly reachable from *parent*."""
        return self in parent.children
class IconTree(pygame.sprite.Group):
    """Procedurally generated DAG of map nodes, organized bottom-up by level.

    ``self.levels[0]`` is a hidden root, the last two levels are always the
    pre-boss repair row and the boss itself; the levels in between are
    randomly sized and randomly connected encounter nodes.
    """

    def __init__(self, images):
        """Generate the whole map; *images* maps node type -> (image, shadow)."""
        super().__init__()
        # placeholder surface until actual images are filled in
        image = pygame.surface.Surface((1, 1))
        # start with 3-5 nodes for the player to start at
        starting_nodes = random.randint(3, 5)
        last_level = [Icon(image, image) for i in range(1, starting_nodes + 1)]
        self.root = Icon(image, image)
        self.root.children = last_level
        # holds node list for each level of the tree
        self.levels = [[self.root], last_level]
        while len(self.levels) < 14:
            new_level = []  # nodes that are being newly added
            '''
            Generate between 2 and 6 nodes for the next level
            Each node can have at most half of the previous nodes connecting to it.
            Only the last node connected
            '''
            new_level = [Icon(image, image) for _ in range(get_rand())]  # random.randint(2, 6))]
            has_parent = [False for _ in range(len(new_level))]
            max_conn = math.ceil(len(last_level) / 2)
            start_idx = 0
            # Walk the previous level, letting each parent claim a random
            # window of the new nodes; windows overlap by one so paths stay
            # connected left-to-right.
            for parent in last_level:
                if max_conn <= 1:
                    new_conn = 1
                else:
                    new_conn = random.randint(1, max_conn)
                end = min(start_idx + new_conn, len(new_level))
                for i in range(start_idx, end):
                    parent.children.append(new_level[i])
                    has_parent[i] = True
                start_idx = min(start_idx + new_conn - 1, len(new_level) - 1)
            # Hook up orphaned nodes (anything no parent reached).
            for chld, parent in zip(new_level, has_parent):
                if not parent:
                    last_level[-1].children.append(chld)
            # Get ready to move onto next level
            self.levels.append(new_level)
            last_level = new_level
        '''
        Add final repair nodes.
        Before the boss players will have the option to repair.
        '''
        # NOTE(review): `repair` here shadows the imported repair() function,
        # but only within __init__'s local scope.
        repair = []
        repair_nodes = min(4, len(self.levels[LAST]))
        last = 0  # NOTE(review): unused -- looks like leftover scaffolding
        if repair_nodes == len(self.levels[LAST]):
            nodes_per_repair = 1
        else:
            nodes_per_repair = len(self.levels[LAST]) // repair_nodes
        for i in range(repair_nodes):
            temp = Icon(*images['repair'])
            temp.type = 'repair'
            repair.append(temp)
            # NOTE(review): the second loop re-appends for the parents already
            # covered by the first slice, so those parents hold the repair
            # node twice in children -- confirm this duplication is intended.
            for parent in self.levels[LAST][i * nodes_per_repair : (i + 1) * nodes_per_repair]:
                parent.children.append(temp)
            for parent in self.levels[LAST][i * nodes_per_repair:]:
                parent.children.append(temp)
        self.levels.append(repair)
        ''' Add final boss node. '''
        boss = Icon(*images['boss'])
        boss.type = 'boss'
        for parent in self.levels[LAST]:
            parent.children.append(boss)
        self.levels.append([boss])
        '''
        Generate actual icons at each node positoin in each map
        Rest, Shop, Unknown, Minion
        '''
        # Encounter mix: ~30% unknown, ~40% minion; the first two levels are
        # drawn only from those two kinds, then repair/shop (~15% each) join
        # the pool for the remaining levels, topped up with minions.
        total_encounters = sum([len(lvl) for lvl in self.levels[1:-2]])
        selections = ['unknown'] * int(total_encounters * 0.30) + \
            ['minion'] * int(total_encounters * 0.40)
        random.shuffle(selections)
        for nodes in self.levels[1:3]:
            for node in nodes:
                node.type = selections.pop()
                node.image, node.shadow_image = images[node.type]
                node.update()
        selections += ['repair'] * int(total_encounters * .15) + \
            ['shop'] * int(total_encounters * .15)
        rem_nodes = sum(len(lvl) for lvl in self.levels[3:-2])
        while len(selections) < rem_nodes:
            selections.append('minion')
        random.shuffle(selections)
        for nodes in self.levels[3:-2]:
            for node in nodes:
                node.type = selections.pop()
                node.image, node.shadow_image = images[node.type]
                node.update()
        # Add all newly created nodes to the group
        for l in self.levels:
            self.add(*l)
        # generate positions for each node based on the number of nodes per level
        self.position()

    def draw(self, screen, position):
        """Draw every node except the hidden root; *position* is the player's node."""
        for icon in self.sprites():
            if icon != self.root:
                icon.draw(screen, position)

    def position(self):
        '''
        Establishes positions of procedurally generated map icons.
        Only needs to be called directly after new map initialization.
        '''
        delta_y = 150
        y_coord = SCREEN_HEIGHT - 60
        # establish positions of current level, skip root level
        for current in self.levels[1:]:
            # NOTE(review): `n` is first the level size, then reused as the
            # loop variable over nodes below -- works, but shadows itself.
            n = len(current)
            field = int(SCREEN_WIDTH * .9)
            delta_x = (field - n * 40) / (n + 1)
            x_coord = delta_x + int(SCREEN_WIDTH * .1)
            for n in current:
                n.x = x_coord
                n.y = y_coord
                x_coord += delta_x + 40
            y_coord -= delta_y
        self.update()

    def scroll(self, screen, player_loc, bg, legend, up, down, up_rect, down_rect):
        '''
        Enables scrolling of map with arrow icons at bottom right hand side.
        Will stop when the last row of icons display is going off the screen.
        '''
        # Keep scrolling while the mouse button is held over either arrow.
        while pygame.mouse.get_pressed()[MOUSE_ONE]:
            pygame.time.Clock().tick(40)
            screen.blit(bg, (0, 0))
            screen.blit(up, up_rect)
            screen.blit(down, down_rect)
            pos = pygame.mouse.get_pos()
            if down_rect.collidepoint(pos):
                self.down()
            elif up_rect.collidepoint(pos):
                self.up()
            self.draw(screen, player_loc)
            screen.blit(legend, (580, 20))
            pygame.display.update()
            pygame.event.pump()

    def up(self):
        """Scroll the whole map up (all nodes shift down on screen)."""
        # Stop scrolling when last level is about to go off the bottom
        if self.levels[LAST][0].y >= SCREEN_HEIGHT:
            return
        for sp in self.sprites():
            sp.up()

    def down(self):
        """Scroll the whole map down (all nodes shift up on screen)."""
        # Stop scrolling when first level is about to go off the top
        if self.levels[FIRST][0].y <= 0:
            return
        for sp in self.sprites():
            sp.down()
class Map:
    """Owns the sector-map assets and runs the map screen's event loop."""

    def __init__(self):
        """Load and scale all node/background/UI images once up front."""
        # type name -> (normal image, highlighted "-here" image)
        self.images = {}
        # Boss
        bi = pygame.transform.scale(pygame.image.load('assets/map_icons/battle-mech.png').convert_alpha(), (70, 70))
        bi_shadow = pygame.transform.scale(pygame.image.load('assets/map_icons/battle-mech-here.png').convert_alpha(), (70, 70))
        self.images['boss'] = (bi, bi_shadow)
        # Minions
        minion = pygame.transform.scale(pygame.image.load('assets/map_icons/spider-bot.png').convert_alpha(), (40, 40))
        minion_shadow = pygame.transform.scale(pygame.image.load('assets/map_icons/spider-bot-here.png').convert_alpha(), (40, 40))
        self.images['minion'] = (minion, minion_shadow)
        # Stores
        store = pygame.transform.scale(pygame.image.load('assets/map_icons/energy-tank.png').convert_alpha(), (40, 40))
        store_shadow = pygame.transform.scale(pygame.image.load('assets/map_icons/energy-tank-here.png').convert_alpha(), (40, 40))
        self.images['shop'] = (store, store_shadow)
        # Unknown
        unk = pygame.transform.scale(pygame.image.load('assets/map_icons/uncertainty.png').convert_alpha(), (40, 40))
        unk_shadow = pygame.transform.scale(pygame.image.load('assets/map_icons/uncertainty-here.png').convert_alpha(), (40, 40))
        self.images['unknown'] = (unk, unk_shadow)
        # Repair
        rep = pygame.transform.scale(pygame.image.load('assets/map_icons/auto-repair.png').convert_alpha(), (40, 40))
        rep_shadow = pygame.transform.scale(pygame.image.load('assets/map_icons/auto-repair-here.png').convert_alpha(), (40, 40))
        self.images['repair'] = (rep, rep_shadow)
        # Background
        self.bg = pygame.transform.scale(pygame.image.load(os.path.join(BACKGROUND_PATH, "nebula/nebula09.png")), (SCREEN_WIDTH, SCREEN_HEIGHT))
        # Legend
        self.legend = pygame.transform.scale(pygame.image.load('assets/map_icons/Legend.png'), (200, 50))
        # Up/Down buttons
        self.up = pygame.transform.scale(pygame.image.load(os.path.join(ICON_PATH, "upgrade.png")), (ICON_SIZE, ICON_SIZE))
        self.down = pygame.transform.scale(pygame.image.load(os.path.join(ICON_PATH, "downgrade.png")), (ICON_SIZE, ICON_SIZE))
        self.down_rect = self.down.get_rect(bottomright=(SCREEN_WIDTH, SCREEN_HEIGHT))
        self.up_rect = self.up.get_rect(topright=self.down_rect.topleft)

    def main_map(self, screen, player, assets):
        """Run the map loop until the player dies or the boss fight resolves.

        Clicking a child of the current node moves there and triggers its
        encounter; ends with game_over/game_win screens.
        """
        escape_call = Escape()
        sector_map = IconTree(self.images)
        sector_map.update()
        player_loc = sector_map.root
        alive = True
        win = None  # None = boss not fought yet; True/False once it is
        while alive and win is None:
            screen.blit(self.bg, (0, 0))
            screen.blit(self.legend, (580, 20))
            screen.blit(self.up, self.up_rect)
            screen.blit(self.down, self.down_rect)
            for event in pygame.event.get():
                if event.type == QUIT:
                    pygame.quit()
                    sys.exit()
                    break
                elif event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_ESCAPE:
                        escape_call.escape_menu(screen)
                        break
                elif event.type == MOUSEBUTTONDOWN:
                    position = pygame.mouse.get_pos()
                    # Arrow buttons take over input while held (scrolling).
                    if self.up_rect.collidepoint(position) or self.down_rect.collidepoint(position):
                        sector_map.scroll(screen, player_loc, self.bg,
                                          self.legend, self.up, self.down,
                                          self.up_rect, self.down_rect)
                    # Only children of the current node are valid moves.
                    for sp in sector_map.sprites():
                        if sp.is_child(player_loc) and sp.collide(position):
                            player_loc = sp
                            if sp.type == 'minion':
                                alive = battle(screen, player, assets, escape_call)
                            elif sp.type == 'boss':
                                win = battle(screen, player, assets, escape_call, boss=True)
                            elif sp.type == 'unknown':
                                alive = events(screen, player, assets, escape_call)
                            elif sp.type == 'repair':
                                repair(screen, player, assets, escape_call)
                            elif sp.type == 'shop':
                                shop(screen, player, assets, escape_call)
            if alive:
                sector_map.draw(screen, player_loc)
                pygame.display.update()
            if player.current_health <= 0:
                break
        if not win or player.current_health <= 0:
            game_over(screen)
        if (win):
            game_win(screen)
| StarcoderdataPython |
8011108 | <filename>cfg/weechat/__main__.py
from libdotfiles.packages import try_install
from libdotfiles.util import HOME_DIR, PKG_DIR, create_symlinks
try_install("weechat")

# Mirror this package's WeeChat *.conf files into ~/.weechat.
create_symlinks(
    [
        (path, HOME_DIR / ".weechat" / path.name)
        for path in PKG_DIR.glob("*.conf")
    ]
)

# NOTE(review): this links files from the package's python/ subdirectory into
# ~/.weechat/python (WeeChat's python-script directory), yet globs "*.conf"
# rather than "*.py" -- confirm the pattern is intended.
create_symlinks(
    [
        (path, HOME_DIR / ".weechat" / "python" / path.name)
        for path in (PKG_DIR / "python").glob("*.conf")
    ]
)
| StarcoderdataPython |
1817765 | # @lc app=leetcode id=497 lang=python3
#
# [497] Random Point in Non-overlapping Rectangles
#
# https://leetcode.com/problems/random-point-in-non-overlapping-rectangles/description/
#
# algorithms
# Medium (39.08%)
# Likes: 355
# Dislikes: 562
# Total Accepted: 32.1K
# Total Submissions: 82.2K
# Testcase Example: '["Solution","pick","pick","pick","pick","pick"]\n' +
# '[[[[-2,-2,1,1],[2,2,4,6]]],[],[],[],[],[]]'
#
# You are given an array of non-overlapping axis-aligned rectangles rects where
# rects[i] = [ai, bi, xi, yi] indicates that (ai, bi) is the bottom-left corner
# point of the i^th rectangle and (xi, yi) is the top-right corner point of the
# i^th rectangle. Design an algorithm to pick a random integer point inside the
# space covered by one of the given rectangles. A point on the perimeter of a
# rectangle is included in the space covered by the rectangle.
#
# Any integer point inside the space covered by one of the given rectangles
# should be equally likely to be returned.
#
# Note that an integer point is a point that has integer coordinates.
#
# Implement the Solution class:
#
#
# Solution(int[][] rects) Initializes the object with the given rectangles
# rects.
# int[] pick() Returns a random integer point [u, v] inside the space covered
# by one of the given rectangles.
#
#
#
# Example 1:
#
#
# Input
# ["Solution", "pick", "pick", "pick", "pick", "pick"]
# [[[[-2, -2, 1, 1], [2, 2, 4, 6]]], [], [], [], [], []]
# Output
# [null, [1, -2], [1, -1], [-1, -2], [-2, -2], [0, 0]]
#
# Explanation
# Solution solution = new Solution([[-2, -2, 1, 1], [2, 2, 4, 6]]);
# solution.pick(); // return [1, -2]
# solution.pick(); // return [1, -1]
# solution.pick(); // return [-1, -2]
# solution.pick(); // return [-2, -2]
# solution.pick(); // return [0, 0]
#
#
#
# Constraints:
#
#
# 1 <= rects.length <= 100
# rects[i].length == 4
# -10^9 <= ai < xi <= 10^9
# -10^9 <= bi < yi <= 10^9
# xi - ai <= 2000
# yi - bi <= 2000
# All the rectangles do not overlap.
# At most 10^4 calls will be made to pick.
#
#
#
# @lc tags=Unknown
# @lc imports=start
from warnings import resetwarnings
from imports import *
# @lc imports=end
# @lc idea=start
#
# 在不重叠的给定矩形区域内随机选择一个整数点。
# 根据每个矩形区域的大小,即整数点个数,来确定权重,根据随机值与总权重确定矩形位置。之后再根据剩余值,确定在这个矩形内的位置,加上偏移量即可。
#
# @lc idea=end
# @lc group=
# @lc rank=
# @lc code=start
class Solution:
    """Uniformly sample integer lattice points from non-overlapping rectangles.

    Each rectangle is weighted by its count of integer points.  A prefix-sum
    table maps a random index in ``[0, total)`` to the owning rectangle, and
    the remainder is decoded into an (x, y) offset inside it.
    """

    def __init__(self, rects: List[List[int]]):
        self.rects = rects
        self.orders = []  # prefix sums: points contained in rects before index i
        self.total = 0    # total integer points across all rectangles
        for left, bottom, right, top in rects:
            self.orders.append(self.total)
            self.total += (right - left + 1) * (top - bottom + 1)

    def pick(self) -> List[int]:
        """Return a uniformly random integer point [x, y] covered by some rectangle."""
        r = random.randint(0, self.total - 1)
        idx = bisect_right(self.orders, r) - 1
        r -= self.orders[idx]
        left, bottom, right, top = self.rects[idx]
        width = right - left + 1
        # Row-major decode of the remainder within the chosen rectangle.
        return [left + r % width, bottom + r // width]
# @lc code=end
# @lc main=start
if __name__ == '__main__':
    # Demo run mirroring the problem statement's Example 1.
    # (The original called `Solution().__init__(error)` with the undefined
    # name `error` and without the required constructor argument, so it
    # always crashed.)
    print('Example 1:')
    print('Input : ')
    print('[[[[-2, -2, 1, 1], [2, 2, 4, 6]]], [], [], [], [], []]')
    print('Output :')
    demo = Solution([[-2, -2, 1, 1], [2, 2, 4, 6]])
    print(str([demo.pick() for _ in range(5)]))
    print()
# @lc main=end | StarcoderdataPython |
1853109 | # -*- coding: utf-8 -*-
from __future__ import annotations
import typing
from typing import Optional
from collections import namedtuple
from dataclasses import dataclass
import functools
import warnings
import numpy as np
import pandas as pd
import scipy.signal
from endaq.calc.stats import L2_norm
from endaq.calc import utils
def _absolute_acceleration_coefficients(omega, Q, T):
"""
Calculate the coefficients of the Z-domain transfer function for the
absolute acceleration response according to ISO 18431-4.
:param omega: the natural frequency of the system
:param Q: the quality factor of the system
:param T: the time step in seconds
:return: the coefficients of the Z-domain transfer function b, a
.. seealso::
- `ISO 18431-4 Mechanical vibration and shock — Signal processing — Part 4: Shock-response spectrum analysis`
Explicit implementations of digital filter coefficients for shock spectra.
"""
A = omega*T/(2.*Q)
B = omega*T*np.sqrt(1. - 1./(4.*(Q**2)))
b = (
1. - np.exp(-A)*np.sin(B)/B,
2.*np.exp(-A)*(np.sin(B)/B - np.cos(B)),
np.exp(-2*A) - np.exp(-A)*np.sin(B)/B,
)
a = (
1.,
-2.*np.exp(-A)*np.cos(B),
np.exp(-2.*A),
)
return b, a
def absolute_acceleration(accel: pd.DataFrame, omega: float, damp: float = 0.0) -> pd.DataFrame:
    """
    Calculate the absolute acceleration for a SDOF system.

    The absolute acceleration follows the transfer function:

        `H(s) = L{x"(t)}(s) / L{y"(t)}(s) = X(s)/Y(s)`

    for the PDE:

        `x" + (2ζω)x' + (ω²)x = (2ζω)y' + (ω²)y`

    :param accel: the absolute acceleration `y"`
    :param omega: the natural frequency `ω` of the SDOF system
    :param damp: the damping coefficient `ζ` of the SDOF system; ``0``
        (the default) denotes an undamped system
    :return: the absolute acceleration `x"` of the SDOF system

    .. seealso::

        - `An Introduction To The Shock Response Spectrum <http://www.vibrationdata.com/tutorials2/srs_intr.pdf>`_
        - `SciPy biquad filter <https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.lfilter.html>`__
        - `ISO 18431-4 Mechanical vibration and shock — Signal processing — Part 4: Shock-response spectrum analysis`
    """
    T = utils.sample_spacing(accel)
    # BUGFIX: damp == 0 (the documented default) previously raised
    # ZeroDivisionError; an undamped system has infinite quality factor,
    # which the coefficient formulas handle cleanly (1/Q terms -> 0).
    Q = np.inf if damp == 0.0 else 1. / (2. * damp)

    return accel.apply(
        functools.partial(
            scipy.signal.lfilter,
            *_absolute_acceleration_coefficients(omega, Q, T),
            axis=0,
        ),
        raw=True,
    )
def _relative_velocity_coefficients(omega, Q, T):
    """
    Calculate the coefficients of the Z-domain transfer function for the
    relative velocity response according to ISO 18431-4.

    :param omega: the natural frequency of the system
    :param Q: the quality factor of the system
    :param T: the time step in seconds
    :return: the coefficients of the Z-domain transfer function b, a

    .. seealso::

        - `ISO 18431-4 Mechanical vibration and shock — Signal processing — Part 4: Shock-response spectrum analysis`
          Explicit implementations of digital filter coefficients for shock spectra.
    """
    # Intermediate terms from the ISO 18431-4 closed-form filter:
    # A = per-sample decay exponent, B = damped natural frequency times T.
    A = omega*T/(2.*Q)
    B = omega*T*np.sqrt(1. - 1./(4.*(Q**2.)))
    C = np.exp(-A)*np.sin(B)/np.sqrt(4.*(Q**2.) - 1.)
    # D normalizes the numerator for velocity output.
    D = T*(omega**2.)
    b = (
        (-1. + np.exp(-A)*np.cos(B) + C)/D,
        (1. - np.exp(-2.*A) - 2.*C)/D,
        (np.exp(-2.*A) - np.exp(-A)*np.cos(B) + C)/D,
    )
    # Denominator is the damped-oscillator recursion shared by all the
    # response types in this module.
    a = (
        1.,
        -2.*np.exp(-A)*np.cos(B),
        np.exp(-2.*A),
    )
    return b, a
def relative_velocity(accel: pd.DataFrame, omega: float, damp: float = 0.0) -> pd.DataFrame:
    """
    Calculate the relative velocity for a SDOF system.

    The relative velocity follows the transfer function:

        `H(s) = L{z'(t)}(s) / L{y"(t)}(s) = (1/s)(Z(s)/Y(s))`

    for the PDE:

        `z" + (2ζω)z' + (ω²)z = -y"`

    :param accel: the absolute acceleration y"
    :param omega: the natural frequency ω of the SDOF system
    :param damp: the damping coefficient ζ of the SDOF system; ``0``
        (the default) denotes an undamped system
    :return: the relative velocity z' of the SDOF system

    .. seealso::

        - `Pseudo Velocity Shock Spectrum Rules For Analysis Of Mechanical Shock <https://info.endaq.com/hubfs/pvsrs_rules.pdf>`_
        - `SciPy biquad filter <https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.lfilter.html>`__
        - `ISO 18431-4 Mechanical vibration and shock — Signal processing — Part 4: Shock-response spectrum analysis`
    """
    T = utils.sample_spacing(accel)
    # BUGFIX: damp == 0 (the documented default) previously raised
    # ZeroDivisionError; infinite Q yields the undamped coefficients.
    Q = np.inf if damp == 0.0 else 1. / (2. * damp)

    return accel.apply(
        functools.partial(
            scipy.signal.lfilter,
            *_relative_velocity_coefficients(omega, Q, T),
            axis=0,
        ),
        raw=True,
    )
def _relative_displacement_coefficients(omega, Q, T):
    """
    Calculate the coefficients of the Z-domain transfer function for the
    relative displacement response according to ISO 18431-4.

    :param omega: the natural frequency of the system
    :param Q: the quality factor of the system
    :param T: the time step in seconds
    :return: the coefficients of the Z-domain transfer function b, a

    .. seealso::

        - `ISO 18431-4 Mechanical vibration and shock — Signal processing — Part 4: Shock-response spectrum analysis`
          Explicit implementations of digital filter coefficients for shock spectra.
    """
    # A = per-sample decay exponent, B = damped natural frequency times T.
    A = omega*T/(2.*Q)
    B = omega*T*np.sqrt(1. - 1./(4.*(Q**2.)))
    # Same numerator structure as _pseudo_velocity_coefficients /
    # _relative_displacement_static_coefficients; only the normalization C
    # differs (here omega**3, matching plain displacement z).
    C = T*(omega**3.)
    q = (1./(2.*(Q**2.)) - 1.)/np.sqrt(1. - 1./(4.*(Q**2.)))
    b = (
        ((1. - np.exp(-A)*np.cos(B))/Q - q*np.exp(-A)*np.sin(B) - omega*T)/C,
        (2.*np.exp(-A)*np.cos(B)*omega*T -
         (1. - np.exp(-2.*A))/Q +
         2*q*np.exp(-A)*np.sin(B))/C,
        (-np.exp(-2.*A)*(omega*T + 1./Q) +
         np.exp(-A)*np.cos(B)/Q -
         q*np.exp(-A)*np.sin(B))/C,
    )
    a = (
        1.,
        -2.*np.exp(-A)*np.cos(B),
        np.exp(-2.*A),
    )
    return b, a
def relative_displacement(accel: pd.DataFrame, omega: float, damp: float = 0.0) -> pd.DataFrame:
    """
    Calculate the relative displacement for a SDOF system.

    The relative displacement follows the transfer function:

        `H(s) = L{z(t)}(s) / L{y"(t)}(s) = (1/s²)(Z(s)/Y(s))`

    for the PDE:

        `z" + (2ζω)z' + (ω²)z = -y"`

    :param accel: the absolute acceleration y"
    :param omega: the natural frequency ω of the SDOF system
    :param damp: the damping coefficient ζ of the SDOF system; ``0``
        (the default) denotes an undamped system
    :return: the relative displacement z of the SDOF system

    .. seealso::

        - `Pseudo Velocity Shock Spectrum Rules For Analysis Of Mechanical Shock <https://info.endaq.com/hubfs/pvsrs_rules.pdf>`_
        - `SciPy biquad filter <https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.lfilter.html>`__
        - `ISO 18431-4 Mechanical vibration and shock — Signal processing — Part 4: Shock-response spectrum analysis`
    """
    T = utils.sample_spacing(accel)
    # BUGFIX: damp == 0 (the documented default) previously raised
    # ZeroDivisionError; infinite Q yields the undamped coefficients.
    Q = np.inf if damp == 0.0 else 1. / (2. * damp)

    return accel.apply(
        functools.partial(
            scipy.signal.lfilter,
            *_relative_displacement_coefficients(omega, Q, T),
            axis=0,
        ),
        raw=True,
    )
def _pseudo_velocity_coefficients(omega, Q, T):
    """
    Calculate the coefficients of the Z-domain transfer function for the
    pseudo-velocity response according to ISO 18431-4.

    :param omega: the natural frequency of the system
    :param Q: the quality factor of the system
    :param T: the time step in seconds
    :return: the coefficients of the Z-domain transfer function b, a

    .. seealso::

        - `ISO 18431-4 Mechanical vibration and shock — Signal processing — Part 4: Shock-response spectrum analysis`
          Explicit implementations of digital filter coefficients for shock spectra.
    """
    # A = per-sample decay exponent, B = damped natural frequency times T.
    A = omega*T/(2.*Q)
    B = omega*T*np.sqrt(1. - 1./(4.*(Q**2)))
    # Same numerator as _relative_displacement_coefficients; normalization
    # uses omega**2, i.e. one extra factor of omega (pseudo-velocity = ω·z).
    C = T*(omega**2)
    q = (1./(2.*(Q**2.)) - 1.)/np.sqrt(1. - 1./(4.*(Q**2.)))
    b = (
        ((1. - np.exp(-A)*np.cos(B))/Q - q*np.exp(-A)*np.sin(B) - omega*T)/C,
        (2.*np.exp(-A)*np.cos(B)*omega*T - (1. - np.exp(-2.*A))/Q + 2.*q*np.exp(-A)*np.sin(B))/C,
        (-np.exp(-2.*A)*(omega*T + 1./Q) + np.exp(-A)*np.cos(B)/Q - q*np.exp(-A)*np.sin(B))/C,
    )
    a = (
        1.,
        -2.*np.exp(-A)*np.cos(B),
        np.exp(-2.*A),
    )
    return b, a
def pseudo_velocity(accel: pd.DataFrame, omega: float, damp: float = 0.0) -> pd.DataFrame:
    """
    Calculate the pseudo-velocity for a SDOF system.

    The pseudo-velocity follows the transfer function:

        `H(s) = L{ωz(t)}(s) / L{y"(t)}(s) = (ω/s²)(Z(s)/Y(s))`

    for the PDE:

        `z" + (2ζω)z' + (ω²)z = -y"`

    :param accel: the absolute acceleration y"
    :param omega: the natural frequency ω of the SDOF system
    :param damp: the damping coefficient ζ of the SDOF system; ``0``
        (the default) denotes an undamped system
    :return: the pseudo-velocity of the SDOF system

    .. seealso::

        - `Pseudo Velocity Shock Spectrum Rules For Analysis Of Mechanical Shock <https://info.endaq.com/hubfs/pvsrs_rules.pdf>`_
        - `SciPy biquad filter <https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.lfilter.html>`__
        - `ISO 18431-4 Mechanical vibration and shock — Signal processing — Part 4: Shock-response spectrum analysis`
    """
    T = utils.sample_spacing(accel)
    # BUGFIX: damp == 0 (the documented default) previously raised
    # ZeroDivisionError; infinite Q yields the undamped coefficients.
    Q = np.inf if damp == 0.0 else 1. / (2. * damp)

    return accel.apply(
        functools.partial(
            scipy.signal.lfilter,
            *_pseudo_velocity_coefficients(omega, Q, T),
            axis=0,
        ),
        raw=True,
    )
def _relative_displacement_static_coefficients(omega, Q, T):
    """
    Calculate the coefficients of the Z-domain transfer function for the
    relative displacement response expressed as equivalent static acceleration
    according to ISO 18431-4.

    :param omega: the natural frequency of the system
    :param Q: the quality factor of the system
    :param T: the time step in seconds
    :return: the coefficients of the Z-domain transfer function b, a

    .. seealso::

        - `ISO 18431-4 Mechanical vibration and shock — Signal processing — Part 4: Shock-response spectrum analysis`
          Explicit implementations of digital filter coefficients for shock spectra.
    """
    # A = per-sample decay exponent, B = damped natural frequency times T.
    A = omega*T/(2.*Q)
    B = omega*T*np.sqrt(1. - 1/(4.*(Q**2.)))
    # Same numerator as _relative_displacement_coefficients; normalization
    # uses a single omega (static-acceleration output = ω²·z).
    C = (T*omega)
    q = (1./(2.*(Q**2.)) - 1.)/(np.sqrt(1. - 1./(4.*(Q**2.))))
    b = (
        ((1 - np.exp(-A)*np.cos(B))/Q - q*np.exp(-A)*np.sin(B) - omega*T)/C,
        (2*np.exp(-A)*np.cos(B)*omega*T - (1 - np.exp(-2.*A))/Q + 2.*q*np.exp(-A)*np.sin(B))/C,
        (-np.exp(-2.*A)*(omega*T + 1./Q) + np.exp(-A)*np.cos(B)/Q - q*np.exp(-A)*np.sin(B))/C,
    )
    a = (
        1.,
        -2.*np.exp(-A)*np.cos(B),
        np.exp(-2.*A),
    )
    return b, a
def relative_displacement_static(accel: pd.DataFrame, omega: float, damp: float = 0.0) -> pd.DataFrame:
    """
    Calculate the relative displacement expressed as equivalent static
    acceleration for a SDOF system.

    The relative displacement as static acceleration follows the transfer
    function:

        `H(s) = L{ω²z(t)}(s) / L{y"(t)}(s) = (ω²/s²)(Z(s)/Y(s))`

    for the PDE:

        `z" + (2ζω)z' + (ω²)z = -y"`

    :param accel: the absolute acceleration y"
    :param omega: the natural frequency ω of the SDOF system
    :param damp: the damping coefficient ζ of the SDOF system; ``0``
        (the default) denotes an undamped system
    :return: the relative displacement of the SDOF system expressed as
        equivalent static acceleration

    .. seealso::

        - `Pseudo Velocity Shock Spectrum Rules For Analysis Of Mechanical Shock <https://info.endaq.com/hubfs/pvsrs_rules.pdf>`_
        - `SciPy biquad filter <https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.lfilter.html>`__
        - `ISO 18431-4 Mechanical vibration and shock — Signal processing — Part 4: Shock-response spectrum analysis`
    """
    T = utils.sample_spacing(accel)
    # BUGFIX: damp == 0 (the documented default) previously raised
    # ZeroDivisionError; infinite Q yields the undamped coefficients.
    Q = np.inf if damp == 0.0 else 1. / (2. * damp)

    return accel.apply(
        functools.partial(
            scipy.signal.lfilter,
            *_relative_displacement_static_coefficients(omega, Q, T),
            axis=0,
        ),
        raw=True,
    )
def shock_spectrum(
    accel: pd.DataFrame,
    freqs: np.ndarray,
    damp: float = 0.0,
    mode: typing.Literal["srs", "pvss"] = "srs",
    two_sided: bool = False,
    aggregate_axes: bool = False,
) -> pd.DataFrame:
    """
    Calculate the shock spectrum of an acceleration signal.

    :param accel: the absolute acceleration `y"`
    :param freqs: the natural frequencies across which to calculate the spectrum
    :param damp: the damping coefficient `ζ`, related to the Q-factor by
        `ζ = 1/(2Q)`; defaults to 0 (undamped)
    :param mode: the type of spectrum to calculate:

        - `'srs'` (default) specifies the Shock Response Spectrum (SRS)
        - `'pvss'` specifies the Pseudo-Velocity Shock Spectrum (PVSS)
    :param two_sided: whether to return for each frequency:
        both the maximum negative and positive shocks (`True`),
        or simply the maximum absolute shock (`False`; default)
    :param aggregate_axes: whether to calculate the column-wise resultant (`True`)
        or calculate spectra along each column independently (`False`; default)
    :return: the shock spectrum

    .. seealso::

        - `Pseudo Velocity Shock Spectrum Rules For Analysis Of Mechanical Shock <https://info.endaq.com/hubfs/pvsrs_rules.pdf>`_
        - `An Introduction To The Shock Response Spectrum <http://www.vibrationdata.com/tutorials2/srs_intr.pdf>`_
        - `SciPy biquad filter <https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.lfilter.html>`__
    """
    if two_sided and aggregate_axes:
        raise ValueError("cannot enable both options `two_sided` and `aggregate_axes`")
    freqs = np.asarray(freqs)
    if freqs.ndim != 1:
        raise ValueError("target frequencies must be in a 1D-array")

    omega = 2 * np.pi * freqs
    if mode == "srs":
        make_coeffs = _absolute_acceleration_coefficients
    elif mode == "pvss":
        make_coeffs = _pseudo_velocity_coefficients
    else:
        # BUGFIX: was `{mode:r}` -- an invalid *format spec* that itself
        # raised "Invalid format specifier"; `!r` is the repr conversion.
        raise ValueError(f"invalid spectrum mode {mode!r}")

    # results[0] holds the (negated) minima, results[1] the maxima.
    results = np.empty(
        (2,) + freqs.shape + ((1,) if aggregate_axes else accel.shape[1:]),
        dtype=np.float64,
    )

    dt = utils.sample_spacing(accel)
    T_padding = 1 / (
        freqs.min() * np.sqrt(1 - damp ** 2)
    )  # uses lowest damped frequency
    if not two_sided:
        T_padding /= 2

    zi = np.zeros((2,) + accel.shape[1:])
    zero_padding = np.zeros((int(T_padding // dt) + 1,) + accel.shape[1:])
    # BUGFIX: an undamped system (damp == 0, the default) implies Q = ∞;
    # the bare reciprocal `1./(2.*damp)` raised ZeroDivisionError.
    Q = np.inf if damp == 0.0 else 1. / (2. * damp)

    for i_nd in np.ndindex(freqs.shape[0]):
        # Filter the record itself, keeping the final filter state `zf`...
        rd, zf = scipy.signal.lfilter(
            *make_coeffs(omega[i_nd], Q, dt),
            accel.to_numpy(),
            zi=zi,
            axis=0,
        )
        # ...then let the oscillator "ring out" over zero-padded input so a
        # peak occurring just past the record's end is still captured.
        rd_padding, _ = scipy.signal.lfilter(
            *make_coeffs(omega[i_nd], Q, dt), zero_padding, zi=zf, axis=0
        )

        if aggregate_axes:
            rd = L2_norm(rd, axis=-1, keepdims=True)
            rd_padding = L2_norm(rd_padding, axis=-1, keepdims=True)

        results[(0,) + i_nd] = -np.minimum(rd.min(axis=0), rd_padding.min(axis=0))
        results[(1,) + i_nd] = np.maximum(rd.max(axis=0), rd_padding.max(axis=0))

    if aggregate_axes or not two_sided:
        return pd.DataFrame(
            np.maximum(results[0], results[1]),
            index=pd.Series(freqs, name="frequency (Hz)"),
            columns=(["resultant"] if aggregate_axes else accel.columns),
        )

    return namedtuple("PseudoVelocityResults", "neg pos")(
        *(
            pd.DataFrame(
                r, index=pd.Series(freqs, name="frequency (Hz)"), columns=accel.columns
            )
            for r in results
        )
    )
@dataclass
class HalfSineWavePulse:
    """
    The output data type for :py:func:`enveloping_half_sine`.

    The significant data members are `amplitude` and `duration`, which can
    simply be unpacked as if from a plain tuple:

    .. testsetup::

        import pandas as pd
        df_pvss = pd.DataFrame([1, 1], index=[200, 400])

        from endaq.calc.shock import enveloping_half_sine

    .. testcode::

        ampl, T = enveloping_half_sine(df_pvss)

    However, users can also elect to use the other methods of this class to
    generate other kinds of outputs.

    .. note:: This class is not intended to be instantiated manually.
    """

    # Peak acceleration of each half-sine pulse (one entry per data axis).
    amplitude: pd.Series
    # Duration of each half-sine pulse (one entry per data axis).
    duration: pd.Series

    def __iter__(self):
        """Support plain tuple-style unpacking: ``ampl, T = pulse``."""
        return iter((self.amplitude, self.duration))

    def to_time_series(
        self,
        tstart: Optional[float] = None,
        tstop: Optional[float] = None,
        dt: Optional[float] = None,
        tpulse: Optional[float] = None,
    ) -> pd.DataFrame:
        """
        Generate a time-series of the half-sine pulse.

        :param tstart: the starting time of the resulting waveform; if `None`
            (default), the range starts at `tpulse`
        :param tstop: the ending time of the resulting waveform; if `None`
            (default), the range ends at `tpulse + duration`
        :param dt: the sampling period of the resulting waveform; defaults to
            1/20th of the pulse duration
        :param tpulse: the starting time of the pulse within the resulting
            waveform; if `None` (default), the pulse starts at either:

            - ``tstart``, if provided
            - ``tstop - self.duration.max())``, if `tstop` is provided
            - ``0.0`` otherwise
        :return: a time-series of the half-sine pulse
        """
        if dt is None:
            dt = self.duration.min() / 20
        if dt > self.duration.min() / 8:
            warnings.warn(
                f"the sampling period {dt} is large relative to the pulse duration"
                f" {self.duration.min()}; the waveform may not accurately represent"
                f" the half-sine pulse's shock intensity"
            )

        # Resolve the three optional time parameters against one another.
        default_start = 0.0
        if tstop is not None:
            default_start = tstop - self.duration.max()

        if tpulse is None and tstart is None:
            tpulse = tstart = default_start
        elif tpulse is None:
            tpulse = tstart
        elif tstart is None:
            tstart = tpulse

        if tstop is None:
            tstop = tpulse + self.duration.max()

        if not (tstart <= tpulse <= tstop - self.duration.max()):
            warnings.warn(
                "half-sine pulse extends beyond the bounds of the time series"
            )

        t = np.arange(tstart, tstop, dt)

        # Broadcast the time axis against the per-axis pulse parameters.
        data = np.zeros((len(t), len(self.amplitude)), dtype=float)
        t_data, ampl_data, T_data = np.broadcast_arrays(
            t[..., None], self.amplitude.to_numpy(), self.duration.to_numpy()
        )
        t_mask = np.nonzero((t_data >= tpulse) & (t_data < tpulse + T_data))
        # BUG FIX: the sine's phase must be measured from the pulse start
        # (t - tpulse), not from absolute time t; otherwise any nonzero
        # `tpulse` produced a mis-phased (even negative) "half-sine".
        data[t_mask] = ampl_data[t_mask] * np.sin(
            np.pi * (t_data[t_mask] - tpulse) / T_data[t_mask]
        )

        return pd.DataFrame(
            data,
            index=pd.Series(t, name="timestamp"),
            columns=self.amplitude.index,
        )
def enveloping_half_sine(
    pvss: pd.DataFrame,
    damp: float = 0.0,
) -> HalfSineWavePulse:
    """
    Characterize a half-sine pulse whose PVSS envelopes the input.

    :param pvss: the PVSS to envelope
    :param damp: the damping factor used to generate the input PVSS
    :return: a tuple of amplitudes and periods, each pair of which describes a
        half-sine pulse

    .. seealso::

        `Pseudo Velocity Shock Spectrum Rules For Analysis Of Mechanical Shock, Howard Gaberson <https://info.endaq.com/hubfs/pvsrs_rules.pdf>`_
    """

    def attenuation_factor(zeta):
        """
        PVSS amplitude attenuation of a half-sine pulse at damping *zeta*.

        The flat low-frequency plateau of a half-sine pulse's PVSS shrinks as
        the damping coefficient grows; this computes that shrink factor.
        """
        # This evaluates the PVSS value as ω->0. Since the maximum is taken
        # *over time* and ω only appears in the product (ωt), the result is
        # the same for any ω>0 — so ω=1 is used without loss of generality.
        pole = np.exp(1j * np.arccos(-zeta))  # = -zeta + 1j * sqrt(1 - zeta**2)
        # Peak time from d/dt[e^{-ζt} sin(sqrt(1-ζ²) t)] = 0
        # (see WolframAlpha derivation referenced in the project docs).
        peak_time = (2 / np.imag(pole)) * np.arctan2(np.imag(pole), 1 - np.real(pole))
        return (1 / np.imag(pole)) * np.imag(np.exp(pole * peak_time))

    plateau = pvss.max()  # low-frequency plateau level per axis
    knee = pvss.mul(pvss.index, axis=0).max()  # max of f·PVSS per axis

    return HalfSineWavePulse(
        amplitude=2 * np.pi * knee,
        duration=plateau / (4 * attenuation_factor(damp) * knee),
    )
| StarcoderdataPython |
3398801 | <gh_stars>0
# Connection settings for a MongoDB server used by the surrounding tooling.
name = "ServerName"  # logical name of the target server
user = "mongo"  # account used to authenticate
japd = None  # presumably the account's password/credential — TODO confirm
host = "hostname"  # server host name or address
port = 27017  # default MongoDB port
auth = False  # whether authentication is required
repset = None  # replica-set name, if any
repset_hosts = None  # explicit replica-set host list, if any
auth_db = "admin"  # database to authenticate against
use_arg = True  # connect via keyword arguments (vs. a URI) — see use_uri
use_uri = False  # connect via a MongoDB URI string instead of args
| StarcoderdataPython |
1659970 | <filename>calculator.py
from evaluation import *
# this is the main function for the calculator
def calculator():
    """Run a simple interactive calculator read-eval-print loop.

    Each line read from stdin is evaluated via ``splitExp`` and the result
    printed, until ``check`` signals that the user asked to stop.
    """
    # Initial usage instructions.
    print('To stop the calculator enter \'end\' and to get help enter \'help\'')
    while True:
        expression = input('')
        # The explicit comparison with False is kept deliberately: the
        # original code stopped on *any* value of check() other than False
        # — presumably check() returns False for "keep going". TODO confirm
        # against evaluation.check before simplifying to a truthiness test.
        if check(expression) != False:
            break
        # Evaluate and echo the expression's result.
        print(splitExp(expression))


if __name__ == '__main__':
    # Run the calculator only when executed as a script, not on import.
    calculator()
| StarcoderdataPython |
11273499 | <reponame>Livioni/Cloud-Workflow-Scheduling-base-on-Deep-Reinforcement-Learning
import gym, torch, copy, os, xlwt, random
import torch.nn as nn
from datetime import datetime
import numpy as np
# Global DAG-scheduling cluster environment shared by all search routines below.
env = gym.make("clusterEnv-v0").unwrapped
# Dimensions of the env's observation and action spaces.
state_dim, action_dim = env.return_dim_info()

####### initialize environment hyperparameters ######

max_ep_len = 1000  # max timesteps in one episode
auto_save = 1  # episodes averaged into each Excel row
total_test_episodes = 100 * auto_save  # total num of testing episodes
def initial_excel():
    """Create the xlwt workbook/worksheet used to log makespan results."""
    global worksheet, workbook
    # Create the workbook with ASCII as the default character encoding.
    workbook = xlwt.Workbook(encoding='ascii')
    # Add one sheet; the argument is the sheet name.
    worksheet = workbook.add_sheet('makespan')
    # Set column widths: 3 columns, 12 units wide each (256 is xlwt's base width unit).
    for i in range(3):
        worksheet.col(i).width = 256 * 12
    # Set row 1's height to 25 units (20 is xlwt's base height unit).
    worksheet.row(1).height_mismatch = True
    worksheet.row(1).height = 20 * 25
    # Save the (still empty) spreadsheet so the output file exists up front.
    workbook.save('data/makespan_MCTSAE.xls')
def read_current_state():
    """
    Snapshot the global ``env``'s current state.

    All mutable containers are deep-copied so the snapshot survives further
    env mutation; scalar fields are returned directly.

    :return: an 11-tuple of (state, ready_list, done_job, tasks,
        wait_duration, cpu_demand, memory_demand, tasks_remaing_time,
        cpu_res, memory_res, time)
    """
    snapshot = copy.deepcopy
    return (
        snapshot(env.state),
        snapshot(env.ready_list),
        snapshot(env.done_job),
        snapshot(env.tasks),
        snapshot(env.wait_duration),
        snapshot(env.cpu_demand),
        snapshot(env.memory_demand),
        snapshot(env.tasks_remaing_time),
        env.cpu_res,
        env.memory_res,
        env.time,
    )
def load_current_state(state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time,
                       cpu_res, memory_res, time):
    """Rewind the global ``env`` to a previously captured state snapshot.

    Sequence fields are passed as shallow copies (``[:]``) so the env cannot
    alias the caller's snapshot; scalar fields are assigned directly.
    """
    assignments = (
        (env.set_state, state[:]),
        (env.set_ready_list, ready_list[:]),
        (env.set_done_job, done_job[:]),
        (env.set_tasks, tasks[:]),
        (env.set_wait_duration, wait_duration[:]),
        (env.set_cpu_demand, cpu_demand[:]),
        (env.set_memory_demand, memory_demand[:]),
        (env.set_tasks_remaing_time, tasks_remaing_time),
        (env.set_cpu_res, cpu_res),
        (env.set_memory_res, memory_res),
        (env.set_time, time),
    )
    for setter, value in assignments:
        setter(value)
    return
class TreeNode(object):
    """A node of the Monte-Carlo search tree.

    Each node stores a full snapshot of the scheduler environment so the
    global ``env`` can be rewound to this point before expansion/rollout.
    """

    def __init__(self, parent, state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand,
                 tasks_remaing_time, cpu_res, memory_res, time):
        self._parent = parent
        self._children = {}  # a map from action to TreeNode
        self._n_visits = 0
        # Best makespan propagated through this node, stored negated
        # (larger is better) — see update().
        self._makespan = 0
        self._total_makespan = 0  # NOTE(review): never updated — appears unused
        self._state = state
        self._ready_list = ready_list
        self._done_job = done_job
        self._tasks = tasks
        self._wait_duration = wait_duration
        self._cpu_demand = cpu_demand
        self._memory_demand = memory_demand
        self._tasks_remaing_time = tasks_remaing_time
        self._cpu_res = cpu_res
        self._memory_res = memory_res
        self._time = time
        self._c = 40  # UCB exploration constant
        self._value = 0
        if self._parent != None:
            self.get_value()

    def expand(self):
        '''
        Expand the tree: create one child per currently available action.
        '''
        # Rewind the global env to this node's snapshot before querying it.
        load_current_state(self._state, self._ready_list, self._done_job, self._tasks, self._wait_duration,
                           self._cpu_demand, self._memory_demand, self._tasks_remaing_time, self._cpu_res,
                           self._memory_res, self._time)
        available_action = env.return_action_list()
        if available_action:
            for action in available_action:
                # Re-rewind for each action, since env.step() mutates env.
                load_current_state(self._state, self._ready_list, self._done_job, self._tasks, self._wait_duration,
                                   self._cpu_demand, self._memory_demand, self._tasks_remaing_time, self._cpu_res,
                                   self._memory_res, self._time)
                if action not in self._children:
                    env.step(action)
                    state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time, cpu_res, memory_res, time = read_current_state()
                    self._children[action] = TreeNode(self, state, ready_list, done_job, tasks, wait_duration,
                                                      cpu_demand, memory_demand, tasks_remaing_time, cpu_res,
                                                      memory_res, time)
        else:
            print("done")

    def get_average_makespan(self):
        """Return the (negated) best makespan recorded at this node."""
        return self._makespan

    def get_value(self):
        """UCB-style score: exploitation term plus exploration bonus."""
        self._value = self._makespan + self._c * np.sqrt(np.log(self._parent._n_visits + 1) / (self._n_visits + 1))
        return self._value

    def select(self):
        '''
        Select the child that currently has the highest search value.
        '''
        return max(self._children.items(), key=lambda act_node: act_node[1].get_value())[1]

    def update(self, makespan):
        """Record one visit and fold in a rollout makespan (stored negated)."""
        # Count visit.
        self._n_visits += 1
        if self._makespan == 0:
            self._makespan = -makespan
        else:
            if -makespan > self._makespan:
                self._makespan = -makespan
        if self._parent != None:
            self._value = self.get_value()

    def update_recursive(self, leaf_value):
        """Backpropagate a rollout result from this node up to the root."""
        # If it is not root, this node's parent should be updated first.
        if self._parent:
            self._parent.update_recursive(leaf_value)
        self.update(leaf_value)

    def is_leaf(self):
        # A node with no expanded children is a leaf of the search tree.
        return self._children == {}

    def is_root(self):
        return self._parent is None
class MCTS(object):
    """Monte-Carlo tree search over scheduling actions from a given env state."""

    def __init__(self, state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time,
                 cpu_res, memory_res, time, depth):
        self._root = TreeNode(None, state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand,
                              tasks_remaing_time, cpu_res, memory_res, time)
        self._root.expand()  # initial expansion of the root's children
        self._initial_buget = 100  # [sic] simulation budget at depth 1
        self._min_buget = 10  # floor on the per-call budget at deep levels
        self._depth = depth

    def playout(self):
        """Spend the simulation budget, then return the root action whose
        child has the best recorded makespan."""
        # Fewer simulations are spent the deeper into the episode we are.
        buget = max(self._initial_buget / self._depth, self._min_buget)
        for j in range(int(buget)):
            node = self._root
            while True:
                if node.is_leaf():
                    if node._n_visits == 0:
                        # First visit: evaluate with a random rollout and
                        # backpropagate the resulting makespan.
                        cur_state, cur_ready_list, cur_done_job, cur_tasks, cur_wait_duration, cur_cpu_demand, cur_memory_demand, cur_tasks_remaing_time, cur_cpu_res, cur_memory_res, cur_time = node._state, node._ready_list, node._done_job, node._tasks, node._wait_duration, node._cpu_demand, node._memory_demand, node._tasks_remaing_time, node._cpu_res, node._memory_res, node._time
                        makespan = self._roll_out(cur_state, cur_ready_list, cur_done_job, cur_tasks, cur_wait_duration,
                                                  cur_cpu_demand, cur_memory_demand, cur_tasks_remaing_time,
                                                  cur_cpu_res, cur_memory_res, cur_time)
                        node.update_recursive(makespan)
                        break
                    else:
                        # Visited leaf: expand it and descend one level.
                        node.expand()
                        node = node.select()
                else:
                    node = node.select()
        node = self._root
        return max(node._children.items(), key=lambda act_node: act_node[1].get_average_makespan())[0]

    def _roll_out(self, cur_state, cur_ready_list, cur_done_job, cur_tasks, cur_wait_duration, cur_cpu_demand,
                  cur_memory_demand, cur_tasks_remaing_time, cur_cpu_res, cur_memory_res, cur_time):
        """Play random valid actions until the episode ends; return the
        resulting makespan (read from the terminal observation)."""
        load_current_state(cur_state, cur_ready_list, cur_done_job, cur_tasks, cur_wait_duration, cur_cpu_demand,
                           cur_memory_demand, cur_tasks_remaing_time, cur_cpu_res, cur_memory_res, cur_time)
        state = cur_state
        max_ep_len = 1000  # max timesteps in one episode
        for t in range(1, max_ep_len + 1):
            # Actions are sampled uniformly; -1 shifts into the env's range.
            action = random.choice(range(action_dim)) - 1
            state, reward, done, info = env.step(action)
            while (info[0] == False):
                # info[0] False presumably flags an invalid action — resample.
                action = random.choice(range(action_dim)) - 1
                state, reward, done, info = env.step(action)
            next_state, reward, done, _ = state, reward, done, info
            # break; if the episode is over
            state = next_state
            if done:
                makespan = state[0]
                break
        return makespan
if __name__ == '__main__':
    initial_excel()
    makespans = []
    line = 0  # next Excel row to write
    start_time = datetime.now().replace(microsecond=0)
    print("Started training at (GMT) : ", start_time)
    print("============================================================================================")
    for ep in range(1, total_test_episodes + 1):
        initial_state = env.reset()
        state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time, cpu_res, memory_res, time = read_current_state()
        # One fresh MCTS per decision step; `depth` shrinks its budget.
        for depth in range(1, max_ep_len + 1):
            tree = MCTS(state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand,
                        tasks_remaing_time, cpu_res, memory_res, time, depth=depth)
            best_action = tree.playout()
            # Rewind env to the tree's root state, then commit the chosen action.
            load_current_state(tree._root._state, tree._root._ready_list, tree._root._done_job, tree._root._tasks,
                               tree._root._wait_duration, tree._root._cpu_demand, tree._root._memory_demand,
                               tree._root._tasks_remaing_time, tree._root._cpu_res, tree._root._memory_res,
                               tree._root._time)
            observation, reward, done, info = env.step(best_action)
            state, ready_list, done_job, tasks, wait_duration, cpu_demand, memory_demand, tasks_remaing_time, cpu_res, memory_res, time = read_current_state()
            del tree
            if done:
                # Episode finished: the makespan is the first observation entry.
                makespan = observation[0]
                makespans.append(makespan)
                print("Episode:", ep, "Makespan:", makespan)
                if ep % auto_save == 0:
                    # Periodically flush the running average to the spreadsheet.
                    average_makespan = np.mean(makespans)
                    worksheet.write(line, 1, float(average_makespan))
                    workbook.save('data/makespan_MCTSAE.xls')
                    print('MCTS : Episode: {}, Makespan: {:.3f}s'.format((line + 1) * auto_save, average_makespan))
                    line += 1
                    makespans = []
                    end_time = datetime.now().replace(microsecond=0)
                    print("Finished testing at (GMT) : ", end_time)
                    print("Total testing time : ", end_time - start_time)
                    start_time = end_time
                break
    workbook.save('data/makespan_MCTSAE.xls')
    env.close()
| StarcoderdataPython |
1817300 | import matplotlib.pyplot as plt
from chombopy.plotting import PltFile, setup_mpl_latex
import matplotlib.cm as cm
# Render every AMR level of a Chombo plot file and save the figure.
pf = PltFile("../tests/data/plt000100.2d.hdf5")
setup_mpl_latex(14)  # LaTeX-styled matplotlib text at font size 14
fig = plt.figure()
ax = plt.gca()
cmap = "viridis"
field = "Temperature"
# Get data for the temperature variable on each refinement level.
for level in pf.get_levels():
    temperature = pf.get_level_data(field, level)
    # temperature is an xarray.DataSet object, which can be plotted using matplotlib
    x, y = pf.get_mesh_grid_for_level(level=level, grow=True)
    ax.pcolormesh(x, y, temperature, cmap=cmap)
    # Or you can do some analysis using the xarray/numpy functionality
    print(temperature.mean())
# Overlay the outlines of each level's grid boxes.
pf.plot_outlines(ax)
# Colorbar spanning the field's full value range across all levels.
cbar = fig.colorbar(cm.ScalarMappable(norm=pf.get_norm(field), cmap=cmap), ax=ax)
cbar.ax.set_ylabel(field)
plt.savefig("../docs/images/plt000100.jpg")
plt.show()
| StarcoderdataPython |
171796 | #!/usr/bin/env python
from distutils.core import setup
# Package metadata for the `pattern` distribution.
# NOTE(review): distutils is deprecated (PEP 632) and removed in Python 3.12;
# consider migrating to `from setuptools import setup`.
setup(name='pattern',
      version='0.1~alpha0',
      author='<NAME>',
      author_email='<EMAIL>',
      description='Parsing strings according to python formats',
      url='http://github.com/integralws/pattern',
      license='MIT License',
      packages=['pattern'],
      )
1731271 | <filename>const/sub_categories/service/media.py
from typing import Any, Dict

from ..base_category import BaseCategory
# CD錄音帶 cd-and-tape
class CdAndTape(object):
    """Sub-categories of the "CD錄音帶" (CDs and tapes) service group."""

    @staticmethod  # was a bare function: instance calls raised TypeError
    def cd_and_tape() -> Dict[str, Any]:
        """Return the category descriptor: display name, id, and sub-category map."""
        return {
            'name': 'CD錄音帶',
            'id': 'cd-and-tape',
            'sub': {
                'CD-tapes-manufacturers': 'CD片、錄音帶製造',
                'music-shops': '唱片行',
            },
        }
# 影帶影碟光碟 dvd-and-vcd
class DvdAndVcd(object):
    """Sub-categories of the "影帶影碟光碟" (video tapes and discs) service group."""

    @staticmethod  # was a bare function: instance calls raised TypeError
    def dvd_and_vcd() -> Dict[str, Any]:
        """Return the category descriptor: display name, id, and sub-category map."""
        return {
            'name': '影帶影碟光碟',
            'id': 'dvd-and-vcd',
            'sub': {
                'video-tapes-DVD-manufacturers': '錄影帶、DVD製造',
                'video-tapes-rental': '錄影帶出租',
                'video-tapes-DVD': '錄影帶、DVD',
            },
        }
# 錄音錄影 recording-video
class RecordingVideo(object):
    """Sub-categories of the "錄音錄影" (audio/video recording) service group."""

    @staticmethod  # was a bare function: instance calls raised TypeError
    def recording_video() -> Dict[str, Any]:
        """Return the category descriptor: display name, id, and sub-category map."""
        return {
            'name': '錄音錄影',
            'id': 'recording-video',
            'sub': {
                'production-service': '視聽製作服務',
                'recording-equip-manufacturers': '影音設備製造',
            },
        }
# 眼鏡 glasses
class Glasses(object):
    """Sub-categories of the "眼鏡" (glasses) service group."""

    @staticmethod  # was a bare function: instance calls raised TypeError
    def glasses() -> Dict[str, Any]:
        """Return the category descriptor: display name, id, and sub-category map."""
        return {
            'name': '眼鏡',
            'id': 'glasses',
            'sub': {
                'glasses-contact-lenses': '眼鏡、隱形眼鏡',
                'glasses-manufacturers': '眼鏡製造',
            },
        }
# 鐘錶 watches-and-clocks
class WatchesAndClocks(object):
    """Sub-categories of the "鐘錶" (watches and clocks) service group."""

    @staticmethod  # was a bare function: instance calls raised TypeError
    def watches_and_clocks() -> Dict[str, Any]:
        """Return the category descriptor: display name, id, and sub-category map."""
        return {
            'name': '鐘錶',
            'id': 'watches-and-clocks',
            'sub': {
                'watch-manufacturers': '鐘錶製造',
                'watches': '鐘錶',
            },
        }
# 攝影 photography
class Photography(object):
    """Sub-categories of the "攝影" (photography) service group."""

    @staticmethod  # was a bare function: instance calls raised TypeError
    def photography() -> Dict[str, Any]:
        """Return the category descriptor: display name, id, and sub-category map."""
        return {
            'name': '攝影',
            'id': 'photography',
            'sub': {
                'image-equip-materials': '影像設備材料',
                'photo-processing': '相片沖洗',
                'camera-supplies': '相機、攝影器材製造',
                'camera': '相機、攝影機',
                'photo-service': '攝影服務',
            },
        }
# 視聽工程器材 media-supplies
class MediaSupplies(object):
    """Sub-categories of the "視聽工程器材" (A/V engineering equipment) group."""

    @staticmethod  # was a bare function: instance calls raised TypeError
    def media_supplies() -> Dict[str, Any]:
        """Return the category descriptor: display name, id, and sub-category map."""
        return {
            'name': '視聽工程器材',
            'id': 'media-supplies',
            'sub': {
                'cable-tv-equip': '有線電視設備',
                'stage-engineering': '舞台工程',
                'audio-video-engineering': '視聽工程',
            },
        }
# 儀器 instrument
class Instrument(object):
    """Sub-categories of the "儀器" (instruments) service group."""

    @staticmethod  # was a bare function: instance calls raised TypeError
    def instrument() -> Dict[str, Any]:
        """Return the category descriptor: display name, id, and sub-category map."""
        return {
            'name': '儀器',
            'id': 'instrument',
            'sub': {
                'optical': '光學儀器',
                'weighting': '度量衡儀器',
                'surveying': '測量儀器',
                'temp-humidity': '溫濕度儀器',
                'laboratory': '科學實驗室設備',
                'instrument': '儀器',
            },
        }
# 數位錄放設備 digital-record-device
class DigitalRecordDevice(object):
    """Sub-categories of the "數位錄放設備" (digital recording devices) group."""

    @staticmethod  # was a bare function: instance calls raised TypeError
    def digital_record_device() -> Dict[str, Any]:
        """Return the category descriptor: display name, id, and sub-category map."""
        return {
            'name': '數位錄放設備',
            'id': 'digital-record-device',
            'sub': {
                'digital-recording-equip': '數位錄放設備',
            },
        }
# 聲光影視 media
# Aggregates every sub-category mixin of the "聲光影視" (media) service group.
class Media(BaseCategory, CdAndTape, DvdAndVcd, RecordingVideo, Glasses, WatchesAndClocks,
            Photography, MediaSupplies, Instrument, DigitalRecordDevice):
    # Identifier pair consumed by BaseCategory — presumably for lookup or
    # serialization; confirm against base_category's implementation.
    category_name = '聲光影視'
    category_id = 'media'
6433942 | import inspect
import io
import os
import platform
import numpy
import cupy
try:
import cupy.cuda.thrust as thrust
except ImportError:
thrust = None
try:
import cupy_backends.cuda.libs.cudnn as cudnn
except ImportError:
cudnn = None
try:
import cupy.cuda.nccl as nccl
except ImportError:
nccl = None
try:
import cupy.cuda.cub as cub
except ImportError:
cub = None
try:
import cupy_backends.cuda.libs.cutensor as cutensor
except ImportError:
cutensor = None
try:
import scipy
except ImportError:
scipy = None
def _eval_or_error(func, errors):
# Evaluates `func` and return the result.
# If an error specified by `errors` occured, it returns a string
# representing the error.
try:
return func()
except errors as e:
return repr(e)
class _InstallInfo(object):
# TODO(niboshi): Add is_binary_distribution
def __init__(self):
cupy_package_root = self._get_cupy_package_root()
if cupy_package_root is not None:
data_root = os.path.join(cupy_package_root, '.data')
data_paths = {
'lib': _dir_or_none(os.path.join(data_root, 'lib')),
'include': _dir_or_none(os.path.join(data_root, 'include')),
}
else:
data_paths = {
'lib': None,
'include': None,
}
self.cupy_package_root = cupy_package_root
self.data_paths = data_paths
def get_data_path(self, data_type):
if data_type not in self.data_paths:
raise ValueError('Invalid data type: {}'.format(data_type))
return self.data_paths[data_type]
def _get_cupy_package_root(self):
try:
cupy_path = inspect.getfile(cupy)
except TypeError:
return None
return os.path.dirname(cupy_path)
class _RuntimeInfo(object):
    """Snapshot of the build/runtime versions of CUDA components and key
    Python dependencies; ``str()`` renders an aligned diagnostic report."""

    cupy_version = None
    cuda_path = None

    # CUDA Driver
    cuda_build_version = None
    cuda_driver_version = None

    # CUDA Runtime
    cuda_runtime_version = None

    # CUDA Toolkit
    cublas_version = None
    cufft_version = None
    curand_version = None
    cusolver_version = None
    cusparse_version = None
    nvrtc_version = None
    thrust_version = None

    # Optional Libraries
    cudnn_build_version = None
    cudnn_version = None
    nccl_build_version = None
    nccl_runtime_version = None
    cub_build_version = None
    cutensor_version = None

    numpy_version = None
    scipy_version = None

    def __init__(self):
        """Query each component; failures come back as repr() strings via
        _eval_or_error, so a broken backend never aborts the report."""
        self.cupy_version = cupy.__version__

        self.cuda_path = cupy.cuda.get_cuda_path()

        self.cuda_build_version = cupy.cuda.driver.get_build_version()
        self.cuda_driver_version = _eval_or_error(
            cupy.cuda.runtime.driverGetVersion,
            cupy.cuda.runtime.CUDARuntimeError)

        self.cuda_runtime_version = _eval_or_error(
            cupy.cuda.runtime.runtimeGetVersion,
            cupy.cuda.runtime.CUDARuntimeError)

        # cuBLAS/cuSPARSE version queries need a live handle on the device.
        self.cublas_version = _eval_or_error(
            lambda: cupy.cuda.cublas.getVersion(
                cupy.cuda.device.get_cublas_handle()),
            cupy.cuda.cublas.CUBLASError)
        self.cufft_version = _eval_or_error(
            cupy.cuda.cufft.getVersion,
            cupy.cuda.cufft.CuFFTError)
        self.curand_version = _eval_or_error(
            cupy.cuda.curand.getVersion,
            cupy.cuda.curand.CURANDError)
        self.cusolver_version = _eval_or_error(
            cupy.cuda.cusolver._getVersion,
            cupy.cuda.cusolver.CUSOLVERError)
        self.cusparse_version = _eval_or_error(
            lambda: cupy.cuda.cusparse.getVersion(
                cupy.cuda.device.get_cusparse_handle()),
            cupy.cuda.cusparse.CuSparseError)
        self.nvrtc_version = _eval_or_error(
            cupy.cuda.nvrtc.getVersion,
            cupy.cuda.nvrtc.NVRTCError)

        # Optional modules are None when their import failed at module load.
        if thrust is not None:
            self.thrust_version = thrust.get_build_version()

        if cudnn is not None:
            self.cudnn_build_version = cudnn.get_build_version()
            self.cudnn_version = _eval_or_error(
                cudnn.getVersion, cudnn.CuDNNError)

        if nccl is not None:
            self.nccl_build_version = nccl.get_build_version()
            nccl_runtime_version = nccl.get_version()
            if nccl_runtime_version == 0:
                # NCCL reports 0 when the runtime version is unavailable.
                nccl_runtime_version = '(unknown)'
            self.nccl_runtime_version = nccl_runtime_version

        if cub is not None:
            self.cub_build_version = cub.get_build_version()

        if cutensor is not None:
            self.cutensor_version = cutensor.get_version()

        self.numpy_version = numpy.version.full_version
        if scipy is not None:
            self.scipy_version = scipy.version.full_version

    def __str__(self):
        """Render all collected versions plus per-device info as an
        aligned 'Key: value' table."""
        records = [
            ('OS', platform.platform()),
            ('CuPy Version', self.cupy_version),
            ('NumPy Version', self.numpy_version),
            ('SciPy Version', self.scipy_version),
            ('CUDA Root', self.cuda_path),

            ('CUDA Build Version', self.cuda_build_version),
            ('CUDA Driver Version', self.cuda_driver_version),
            ('CUDA Runtime Version', self.cuda_runtime_version),
        ]

        records += [
            ('cuBLAS Version', self.cublas_version),
            ('cuFFT Version', self.cufft_version),
            ('cuRAND Version', self.curand_version),
            ('cuSOLVER Version', self.cusolver_version),
            ('cuSPARSE Version', self.cusparse_version),
            ('NVRTC Version', self.nvrtc_version),
            ('Thrust Version', self.thrust_version),
            ('CUB Build Version', self.cub_build_version),
        ]

        records += [
            ('cuDNN Build Version', self.cudnn_build_version),
            ('cuDNN Version', self.cudnn_version),
            ('NCCL Build Version', self.nccl_build_version),
            ('NCCL Runtime Version', self.nccl_runtime_version),
            ('cuTENSOR Version', self.cutensor_version),
        ]

        # One name/compute-capability pair per visible CUDA device.
        for device_id in range(cupy.cuda.runtime.getDeviceCount()):
            with cupy.cuda.Device(device_id) as device:
                props = cupy.cuda.runtime.getDeviceProperties(device_id)
                records += [
                    ('Device {} Name'.format(device_id),
                     props['name'].decode('utf-8')),
                    ('Device {} Compute Capability'.format(device_id),
                     device.compute_capability),
                ]

        # Pad keys to the widest label so values line up in one column.
        width = max([len(r[0]) for r in records]) + 2
        fmt = '{:' + str(width) + '}: {}\n'
        s = io.StringIO()
        for k, v in records:
            s.write(fmt.format(k, v))
        return s.getvalue()
def get_runtime_info():
    """Collect and return a fresh :class:`_RuntimeInfo` snapshot."""
    return _RuntimeInfo()
def get_install_info():
    """Collect and return a fresh :class:`_InstallInfo` snapshot."""
    return _InstallInfo()
def _dir_or_none(path):
"""Returns None if path does not exist."""
if os.path.isdir(path):
return path
return None
| StarcoderdataPython |
4897968 |
import argparse
import inspect
import os
import pathlib
from shutil import copy2
from glob import glob
def parseargs(f):
    """Build an argparse CLI from *f*'s signature and invoke *f* with the
    parsed values.

    Parameters of *f* without defaults become required positional arguments;
    parameters with defaults become optional ``--name`` flags.
    """
    argspec = inspect.getfullargspec(f)
    # Use the named FullArgSpec attributes instead of magic tuple indices.
    argnames = argspec.args
    defaults = list(argspec.defaults or ())
    reqlen = len(argnames) - len(defaults)
    parser = argparse.ArgumentParser(description=('signature = ' + str(inspect.signature(f))))
    # All required parameters are consumed as one positional list.
    parser.add_argument('arglist', nargs=reqlen)
    for option, default in zip(argnames[reqlen:], defaults):
        parser.add_argument('--' + option, required=False, default=default)
    args = parser.parse_args()
    # Everything except the positional bundle is forwarded as keywords.
    d = vars(args).copy()
    del d['arglist']
    f(*(args.arglist), **d)
def restructure(DATA_PATH, NEW_PATH):
    """Regroup a flat images/labels dataset into per-disaster folders.

    Assumes filenames follow "<disaster>_<rest>" and that paths use '/'
    separators (rindex('/') — TODO confirm this holds on Windows).
    Copies every file sharing a disaster prefix into
    NEW_PATH/<disaster>/{images,labels}.
    """
    images = sorted(glob(DATA_PATH+'/images/*.png'))
    labels = sorted(glob(DATA_PATH+'/labels/*.json'))
    # Each image is expected to pair 1:1 with a label file.
    assert len(images) == len(labels)
    seen = set()
    for impath, lbpath in zip(images, labels):
        imsplit = impath.split('_')
        lbsplit = lbpath.split('_')
        # Disaster name = text between the last '/' and the first '_'.
        disaster = imsplit[0][impath.rindex('/')+1:]
        if disaster not in seen:
            print('copying ', disaster)
            imdir = os.path.join(NEW_PATH, disaster, 'images')
            lbdir = os.path.join(NEW_PATH, disaster, 'labels')
            if not os.path.isdir(os.path.join(NEW_PATH, disaster)):
                pathlib.Path(imdir).mkdir(parents=True, exist_ok=True)
                pathlib.Path(lbdir).mkdir(parents=True, exist_ok=True)
            # Copy every file sharing this disaster's prefix in one pass.
            for file in glob(imsplit[0] + '*'):
                copy2(file, imdir)
            for file in glob(lbsplit[0] + '*'):
                copy2(file, lbdir)
            seen.add(disaster)
    print('done')
# Script entry point: build a CLI from restructure()'s signature and run it.
if __name__ == '__main__':
    parseargs(restructure)
65286 | <gh_stars>0
import FWCore.ParameterSet.Config as cms
# CMSSW EDAnalyzer configuration for the IsolatedTracksCone study
# (isolated-track selection with cone-based calorimeter matching).
isolatedTracksCone= cms.EDAnalyzer("IsolatedTracksCone",
                                   doMC = cms.untracked.bool(False),
                                   Verbosity = cms.untracked.int32( 1 ),
                                   # Jet-trigger-based event selection (off by default).
                                   useJetTrigger = cms.untracked.bool(False),
                                   drLeadJetVeto = cms.untracked.double(1.2),
                                   ptMinLeadJet = cms.untracked.double(15.0),
                                   DebugTracks = cms.untracked.int32(0),
                                   PrintTrkHitPattern=cms.untracked.bool(True),
                                   # Track kinematic cuts.
                                   minTrackP = cms.untracked.double(1.0),
                                   maxTrackEta = cms.untracked.double(2.6),
                                   maxNearTrackP = cms.untracked.double(1.0),
                                   DebugEcalSimInfo= cms.untracked.bool(False),
                                   ApplyEcalIsolation=cms.untracked.bool(True),
                                   DebugL1Info = cms.untracked.bool(False),
                                   # L1 extra jet collections used for trigger matching.
                                   L1extraTauJetSource = cms.InputTag("l1extraParticles", "Tau"),
                                   L1extraCenJetSource = cms.InputTag("l1extraParticles", "Central"),
                                   L1extraFwdJetSource = cms.InputTag("l1extraParticles", "Forward"),
                                   # Standard TrackAssociator settings for propagating
                                   # tracks to the calorimeters (muon matching disabled).
                                   TrackAssociatorParameters = cms.PSet(
                                       muonMaxDistanceSigmaX = cms.double(0.0),
                                       muonMaxDistanceSigmaY = cms.double(0.0),
                                       CSCSegmentCollectionLabel = cms.InputTag("cscSegments"),
                                       dRHcal = cms.double(9999.0),
                                       dREcal = cms.double(9999.0),
                                       CaloTowerCollectionLabel = cms.InputTag("towerMaker"),
                                       useEcal = cms.bool(True),
                                       dREcalPreselection = cms.double(0.05),
                                       HORecHitCollectionLabel = cms.InputTag("horeco"),
                                       dRMuon = cms.double(9999.0),
                                       crossedEnergyType = cms.string('SinglePointAlongTrajectory'),
                                       muonMaxDistanceX = cms.double(5.0),
                                       muonMaxDistanceY = cms.double(5.0),
                                       useHO = cms.bool(False),
                                       accountForTrajectoryChangeCalo = cms.bool(False),
                                       DTRecSegment4DCollectionLabel = cms.InputTag("dt4DSegments"),
                                       EERecHitCollectionLabel = cms.InputTag("ecalRecHit","EcalRecHitsEE"),
                                       dRHcalPreselection = cms.double(0.2),
                                       useMuon = cms.bool(False),
                                       useCalo = cms.bool(True),
                                       EBRecHitCollectionLabel = cms.InputTag("ecalRecHit","EcalRecHitsEB"),
                                       dRMuonPreselection = cms.double(0.2),
                                       truthMatch = cms.bool(False),
                                       HBHERecHitCollectionLabel = cms.InputTag("hbhereco"),
                                       useHcal = cms.bool(True),
                                       usePreshower = cms.bool(False),
                                       dRPreshowerPreselection = cms.double(0.2),
                                       trajectoryUncertaintyTolerance = cms.double(-1.0)
                                   )
                                   )
| StarcoderdataPython |
9728016 | # 2) Write a script that generates all the possible ungapped alignments of two sequences, scores them and identifies
# the best scoring ones.
#
# These are all the possible ungapped alingments of the two sequences: TCA and GA:
#
# --TCA -TCA TCA TCA TCA- TCA--
# GA--- GA-- GA- -GA --GA ---GA
#
# Using the following scoring scheme:# 2) Write a script that generates all the possible ungapped alignments of two sequences, scores them and identifies
# the best scoring ones.
#
# These are all the possible ungapped alingments of the two sequences: TCA and GA:
#
# --TCA -TCA TCA TCA TCA- TCA--
# GA--- GA-- GA- -GA --GA ---GA
#
# Using the following scoring scheme:
# Substitution score for every ordered base pair (gap penalty handled in
# score_fun).
matrix = {'AA': 2, 'AC': -1, 'AT': -1, 'AG': -2, 'CC': 2, 'CT': 0, 'CG': -1,
          'TT': 2, 'TG': -1, 'GG': 2, 'CA': -1, 'TA': -1, 'GA': -2, 'TC': 0,
          'GC': -1, 'GT': -1, }

# Read each sequence, joining all lines into a single string.
# BUG FIX: the first loop previously appended the human sequence to seq2,
# leaving seq1 empty; files are now also closed via context managers.
with open('./data/titin_hu.txt', 'r') as human:
    seq1 = ''.join(line.rstrip() for line in human)
with open('./data/titin_mo.txt', 'r') as mouse:
    seq2 = ''.join(line.rstrip() for line in mouse)

# seq1 = 'TT'
# seq2 = 'GTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTT'

len_seq1 = len(seq1)
len_seq2 = len(seq2)
# Number of ungapped alignment positions to slide through.
iters = len_seq1 + len_seq2
same_size = False

# `short`/`long` shadow builtins but are kept for compatibility with the
# sliding-alignment loop below.
if len_seq1 < len_seq2:
    short = seq1
    long = seq2
elif len_seq1 > len_seq2:
    short = seq2
    long = seq1
else:
    same_size = True
    short = seq1
    long = seq2

len_short = len(short)
len_long = len(long)
# Pad with gaps so the short sequence starts fully left of the long one.
long = long + '-' * len_short
short = '-' * len_long + short
short = list(short)
long = list(long)

highest = False  # sentinel: replaced by the first computed score
best_seq1 = ''
best_seq2 = ''
def score_fun(s1, s2, scoring_matrix):
    """Score one ungapped alignment of two equal-length gapped sequences.

    Any position containing a gap ('-') costs the flat gap penalty; every
    other position is scored by ``scoring_matrix[base1 + base2]``. Both
    sequences and the score are printed before the score is returned.
    """
    gap_penalty = -2
    total = 0
    for base1, base2 in zip(s1, s2):
        if '-' in (base1, base2):
            total += gap_penalty
        else:
            total += scoring_matrix[base1 + base2]
    print(''.join(s1), ''.join(s2), total, sep='\n')
    return total
# Slide the two padded sequences past each other, scoring every ungapped
# alignment and tracking the best one.
# NOTE(review): each iteration calls score_fun twice (the first result is
# discarded), and `highest == False` also matches a legitimate score of 0.
# Also, best_seq1/best_seq2 are only set when a later score strictly beats
# the first one — if the first alignment is the best, they stay empty.
for i in range(iters - 1):
    score = score_fun(long, short, matrix)
    if long[-1] == '-' and short[0] == '-':
        # Trim one gap from each end: sequences slide one step closer.
        del short[0]
        del long[-1]
        score = score_fun(long, short, matrix)
    elif long[-1] != '-' and short[0] == '-':
        # Rotate a gap from the front of `short` to its back.
        short.append('-')
        del short[0]
        score = score_fun(long, short, matrix)
    else:
        # Extend both with gaps to keep lengths aligned while sliding.
        long.insert(0, '-')
        short.append('-')
        score = score_fun(long, short, matrix)
    if highest == False:
        highest = score
    if score > highest:
        best_seq1 = ''.join(long)
        best_seq2 = ''.join(short)
        highest = score

print(highest)
# Build a '|' marker line under matching positions of the best alignment.
comp = ''
for base1, base2 in zip(best_seq1, best_seq2):
    if base1 == base2:
        comp += '|'
    else:
        comp += ' '
print(best_seq1, comp, best_seq2, sep = '\n')
print('The best alignment score is:', highest)
| StarcoderdataPython |
5119148 | <reponame>DEADSEC-SECURITY/CODEX<gh_stars>10-100
#-----------Welcome to DeAdSeC Python Codex----------#
#-------Made By DeAdSeC-------#
#---Version 1.0.0---#
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ANSI escape codes for colorizing console output.
W = '\033[0m' # white (normal)
R = '\033[31m' # red
G = '\033[32m' # green
O = '\033[33m' # orange
B = '\033[34m' # blue
P = '\033[35m' # purple
C = '\033[36m' # cyan
GR = '\033[37m' # gray
D = '\033[2m' # dims current color. {W} resets.

# Preformatted message prefixes.
Plus = f'{W}{D}[{W}{G}+{W}{D}]{W}' #[+]  success / info
Danger = f'{O}[{R}!{O}]{W}' #[!]  warning / danger
WTF = f'{W}[{C}?{W}]' #[?]  question / prompt
| StarcoderdataPython |
8003030 | from fastapi import APIRouter
from app.api.api_v1.endpoints import (
market_currency, wallet_asset, user_trade
)
# Aggregate all API v1 endpoint routers under a single router, each with
# its own URL prefix and OpenAPI tag.
api_router = APIRouter()
api_router.include_router(market_currency.router, prefix="/currency", tags=["currency"])
api_router.include_router(wallet_asset.router, prefix="/wallet", tags=["wallet"])
api_router.include_router(user_trade.router, prefix="/trades", tags=["trades"])
| StarcoderdataPython |
270202 | from django import forms
from .models import Fee, StudentFee, StudentReceipt
class FeeForm(forms.ModelForm):
    """ModelForm exposing every Fee field, styled with Bootstrap widgets."""

    class Meta:
        model = Fee
        fields = '__all__'
        # Apply Bootstrap's "form-control" class to each rendered input.
        widgets = {
            'name': forms.TextInput(attrs={'class': 'form-control'}),
            'amount': forms.NumberInput(attrs={'class': 'form-control'}),
            'session': forms.Select(attrs={'class': 'form-control'}),
        }
| StarcoderdataPython |
112465 | <filename>days/d11.py
import os
import time
import numpy as np
import pytest
from scipy.signal import convolve
day = os.path.basename(__file__).split(".")[0]
def parse(file: str):
    """Load the puzzle input as a 2-D float array of single-digit energies."""
    # delimiter=1 means fixed-width fields of one character, i.e. one digit per cell
    grid = np.genfromtxt(file, delimiter=1)
    return grid
def p1(data: np.array):
mask = np.array([[1, 1, 1], [1, 0, 1], [1, 1, 1]])
flashes = 0
for _ in range(100):
data += 1
while (data > 9).any():
flash = data > 9
flashes += flash.sum()
data += convolve(flash, mask, "same")
data[flash] = -np.inf
data[data < 0] = 0
return flashes
def p2(data: np.array):
mask = np.array([[1, 1, 1], [1, 0, 1], [1, 1, 1]])
flashes = 0
for step in range(1, 1000):
data += 1
while (data > 9).any():
flash = data > 9
flashes += flash.sum()
data += convolve(flash, mask, "same")
data[flash] = -np.inf
data[data < 0] = 0
if (data == 0).all():
return step
@pytest.fixture
def test_data():
    """Parsed example grid from the day's sample input file on disk."""
    return parse(f"inputs/{day}.input.test")
def test_p1(test_data):
    # Puzzle statement example: 1656 total flashes after 100 steps.
    assert p1(test_data) == 1656
def test_p2(test_data):
    # Puzzle statement example: first synchronised flash on step 195.
    assert p2(test_data) == 195
def main():
    """Solve both parts on the real input, printing each answer and wall time."""
    for label, solver in (("P1", p1), ("P2", p2)):
        start = time.perf_counter()
        answer = solver(parse(f"inputs/{day}.input"))  # fresh parse: solvers mutate the grid
        elapsed_ms = (time.perf_counter() - start) * 1000
        print(f"{label}: {answer} {elapsed_ms:.2f} ms")

if __name__ == "__main__":
    main()
| StarcoderdataPython |
1957527 | <reponame>azavodov/social-apis
from social_apis.networks.network import Network
class Vkontakte(Network):
    """Thin client for the VK (Vkontakte) REST API.

    Each method wraps one API endpoint (see https://dev.vk.com/method);
    keyword arguments are passed straight through as request parameters.
    """
    api_url = 'https://api.vk.com'  # API host
    api_version = '5.103'  # default API version, sent as the 'v' parameter when absent
    url = f"{api_url}/method"  # base URL that bare method names are appended to
    def __init__(self, **params):
        # Forward connection/auth options (e.g. access token) to the base client.
        super(Vkontakte, self).__init__(self.api_url, **params)
def request(self, endpoint, method='GET', params=None, json_encoded=False):
if 'v' not in params:
params['v'] = self.api_version
url = endpoint if endpoint.startswith('https://') else f'{self.url}/{endpoint}'
return self._request(url, method=method, params=params, api_call=url, json_encoded=json_encoded)
# Account
def account_ban(self, **params):
"""Docs: https://dev.vk.com/method/account.ban"""
return self.post(f'account.ban', params=params)
def account_change_password(self, **params):
"""Docs: https://dev.vk.com/method/account.changePassword"""
return self.post(f'account.changePassword', params=params)
def account_get_active_offers(self, **params):
"""Docs: https://dev.vk.com/method/account.getActiveOffers"""
return self.post(f'account.getActiveOffers', params=params)
def account_get_app_permissions(self, **params):
"""Docs: https://dev.vk.com/method/account.getAppPermissions"""
return self.post(f'account.getAppPermissions', params=params)
def account_get_banned(self, **params):
"""Docs: https://dev.vk.com/method/account.getBanned"""
return self.post(f'account.getBanned', params=params)
def account_get_counters(self, **params):
"""Docs: https://dev.vk.com/method/account.getCounters"""
return self.post(f'account.getCounters', params=params)
def account_get_info(self, **params):
"""Docs: https://dev.vk.com/method/account.getInfo"""
return self.post(f'account.getInfo', params=params)
def account_get_profile_info(self, **params):
"""Docs: https://dev.vk.com/method/account.getProfileInfo"""
return self.post(f'account.getProfileInfo', params=params)
def account_get_push_settings(self, **params):
"""Docs: https://dev.vk.com/method/account.getPushSettings"""
return self.post(f'account.getPushSettings', params=params)
def account_lookup_contracts(self, **params):
"""Docs: https://dev.vk.com/method/account.lookupContacts"""
return self.post(f'account.lookupContacts', params=params)
def account_register_device(self, **params):
"""Docs: https://dev.vk.com/method/account.registerDevice"""
return self.post(f'account.registerDevice', params=params)
def account_save_profile_info(self, **params):
"""Docs: https://dev.vk.com/method/account.saveProfileInfo"""
return self.post(f'account.saveProfileInfo', params=params)
def account_set_info(self, **params):
"""Docs: https://dev.vk.com/method/account.setInfo"""
return self.post(f'account.setInfo', params=params)
def account_set_name_in_menu(self, **params):
"""Docs: https://dev.vk.com/method/account.setNameInMenu"""
return self.post(f'account.setNameInMenu', params=params)
def account_set_offline(self, **params):
"""Docs: https://dev.vk.com/method/account.setOffline"""
return self.post(f'account.setOffline', params=params)
def account_set_online(self, **params):
"""Docs: https://dev.vk.com/method/account.setOnline"""
return self.post(f'account.setOnline', params=params)
def account_set_push_settings(self, **params):
"""Docs: https://dev.vk.com/method/account.setPushSettings"""
return self.post(f'account.setPushSettings', params=params)
def account_set_silence_mode(self, **params):
"""Docs: https://dev.vk.com/method/account.setSilenceMode"""
return self.post(f'account.', params=params)
def account_unban(self, **params):
"""Docs: https://dev.vk.com/method/account.unban"""
return self.post(f'account.unban', params=params)
def account_unregister_device(self, **params):
"""Docs: https://dev.vk.com/method/account.unregisterDevice"""
return self.post(f'account.unregisterDevice', params=params)
# Ads
def ads_add_office_users(self, **params):
"""Docs: https://dev.vk.com/method/ads.addOfficeUsers"""
return self.post(f'ads.addOfficeUsers', params=params)
def ads_check_link(self, **params):
"""Docs: https://dev.vk.com/method/ads.checkLink"""
return self.post(f'ads.checkLink', params=params)
def ads_create_ads(self, **params):
"""Docs: https://dev.vk.com/method/ads.createAds"""
return self.post(f'ads.createAds', params=params)
def ads_create_campaigns(self, **params):
"""Docs: https://dev.vk.com/method/ads.createCampaigns"""
return self.post(f'ads.createCampaigns', params=params)
def ads_create_clients(self, **params):
"""Docs: https://dev.vk.com/method/ads.createClients"""
return self.post(f'ads.createClients', params=params)
def ads_create_lookalike_request(self, **params):
"""Docs: https://dev.vk.com/method/ads.createLookalikeRequest"""
return self.post(f'ads.createLookalikeRequest', params=params)
def ads_create_target_group(self, **params):
"""Docs: https://dev.vk.com/method/ads.createTargetGroup"""
return self.post(f'ads.createTargetGroup', params=params)
def ads_create_target_pixel(self, **params):
"""Docs: https://dev.vk.com/method/ads.createTargetPixel"""
return self.post(f'ads.createTargetPixel', params=params)
def ads_delete_ads(self, **params):
"""Docs: https://dev.vk.com/method/ads.deleteAds"""
return self.post(f'ads.deleteAds', params=params)
def ads_delete_campaigns(self, **params):
"""Docs: https://dev.vk.com/method/ads.deleteCampaigns"""
return self.post(f'ads.deleteCampaigns', params=params)
def ads_delete_clients(self, **params):
"""Docs: https://dev.vk.com/method/ads.deleteClients"""
return self.post(f'ads.deleteClients', params=params)
def ads_delete_target_group(self, **params):
"""Docs: https://dev.vk.com/method/ads.deleteTargetGroup"""
return self.post(f'ads.deleteTargetGroup', params=params)
def ads_delete_target_pixel(self, **params):
"""Docs: https://dev.vk.com/method/ads.deleteTargetPixel"""
return self.post(f'ads.delete_target_pixel', params=params)
def ads_get_accounts(self, **params):
"""Docs: https://dev.vk.com/method/ads.getAccounts"""
return self.post(f'ads.get_accounts', params=params)
def ads_get_ads(self, **params):
"""Docs: https://dev.vk.com/method/ads.getAds"""
return self.post(f'ads.getAds', params=params)
ads_get_ads.iter_key = 'response.items'
ads_get_ads.iter_field = 'offset'
ads_get_ads.iter_mode = 'offset'
ads_get_ads.iter_next = 'response.items'
def ads_get_ads_layout(self, **params):
"""Docs: https://dev.vk.com/method/ads.getAdsLayout"""
return self.post(f'ads.getAdsLayout', params=params)
def ads_get_ads_posts_reach(self, **params):
"""Docs: https://dev.vk.com/method/ads.getAdsPostsReach"""
return self.post(f'ads.getAdsPostsReach', params=params)
def ads_get_ads_targeting(self, **params):
"""Docs: https://dev.vk.com/method/ads.getAdsTargeting"""
return self.post(f'ads.getAdsTargeting', params=params)
def ads_get_budget(self, **params):
"""Docs: https://dev.vk.com/method/ads.getBudget"""
return self.post(f'ads.getBudget', params=params)
def ads_get_campaigns(self, **params):
"""Docs: https://dev.vk.com/method/ads.getCampaigns"""
return self.post(f'ads.getCampaigns', params=params)
ads_get_campaigns.iter_key = 'response.items'
ads_get_campaigns.iter_field = 'offset'
ads_get_campaigns.iter_mode = 'offset'
ads_get_campaigns.iter_next = 'response.items'
def ads_get_categories(self, **params):
"""Docs: https://dev.vk.com/method/ads.getCategories"""
return self.post(f'ads.getCategories', params=params)
ads_get_categories.iter_key = 'response.items'
ads_get_categories.iter_field = 'offset'
ads_get_categories.iter_mode = 'offset'
ads_get_categories.iter_next = 'response.items'
def ads_get_clients(self, **params):
"""Docs: https://dev.vk.com/method/ads.getClients"""
return self.post(f'ads.getClients', params=params)
ads_get_clients.iter_key = 'response.items'
ads_get_clients.iter_field = 'offset'
ads_get_clients.iter_mode = 'offset'
ads_get_clients.iter_next = 'response.items'
def ads_get_demographics(self, **params):
"""Docs: https://dev.vk.com/method/ads.getDemographics"""
return self.post(f'ads.getDemographics', params=params)
def ads_get_flood_stats(self, **params):
"""Docs: https://dev.vk.com/method/ads.getFloodStats"""
return self.post(f'ads.getFloodStats', params=params)
def ads_get_lookalike_requests(self, **params):
"""Docs: https://dev.vk.com/method/ads.getLookalikeRequests"""
return self.post(f'ads.get_lookalike_requests', params=params)
def ads_get_musicians(self, **params):
"""Docs: https://dev.vk.com/method/ads.getMusicians"""
return self.post(f'ads.getMusicians', params=params)
def ads_get_musicians_by_ids(self, **params):
"""Docs: https://dev.vk.com/method/ads.getMusiciansByIds"""
return self.post(f'ads.getMusiciansByIds', params=params)
def ads_get_office_users(self, **params):
"""Docs: https://dev.vk.com/method/ads.getOfficeUsers"""
return self.post(f'ads.getOfficeUsers', params=params)
def ads_get_posts_reach(self, **params):
"""Docs: https://dev.vk.com/method/ads.getPostsReach"""
return self.post(f'ads.getPostsReach', params=params)
def ads_get_rejection_reason(self, **params):
"""Docs: https://dev.vk.com/method/ads.getRejectionReason"""
return self.post(f'ads.getRejectionReason', params=params)
def ads_get_statistics(self, **params):
"""Docs: https://dev.vk.com/method/ads.getStatistics"""
return self.post(f'ads.getStatistics', params=params)
def ads_get_suggestions(self, **params):
"""Docs: https://dev.vk.com/method/ads.getSuggestions"""
return self.post(f'ads.getSuggestions', params=params)
def ads_get_target_groups(self, **params):
"""Docs: https://dev.vk.com/method/ads.getTargetGroups"""
return self.post(f'ads.getTargetGroups', params=params)
def ads_get_target_pixels(self, **params):
"""Docs: https://dev.vk.com/method/ads.getTargetPixels"""
return self.post(f'ads.getTargetPixels', params=params)
def ads_get_targeting_stats(self, **params):
"""Docs: https://dev.vk.com/method/ads.getTargetingStats"""
return self.post(f'ads.getTargetingStats', params=params)
def ads_get_upload_url(self, **params):
"""Docs: https://dev.vk.com/method/ads.getUploadURL"""
return self.post(f'ads.getUploadURL', params=params)
def ads_get_video_upload_url(self, **params):
"""Docs: https://dev.vk.com/method/ads.getVideoUploadURL"""
return self.post(f'ads.getVideoUploadURL', params=params)
def ads_import_target_contacts(self, **params):
"""Docs: https://dev.vk.com/method/ads.importTargetContacts"""
return self.post(f'ads.importTargetContacts', params=params)
def ads_remove_office_users(self, **params):
"""Docs: https://dev.vk.com/method/ads.removeOfficeUsers"""
return self.post(f'ads.removeOfficeUsers', params=params)
def ads_remove_target_contacts(self, **params):
"""Docs: https://dev.vk.com/method/ads.removeTargetContacts"""
return self.post(f'ads.removeTargetContacts', params=params)
def ads_save_lookalike_request_result(self, **params):
"""Docs: https://dev.vk.com/method/ads.saveLookalikeRequestResult"""
return self.post(f'ads.saveLookalikeRequestResult', params=params)
def ads_share_target_group(self, **params):
"""Docs: https://dev.vk.com/method/ads.shareTargetGroup"""
return self.post(f'ads.shareTargetGroup', params=params)
def ads_update_ads(self, **params):
"""Docs: https://dev.vk.com/method/ads.updateAds"""
return self.post(f'ads.updateAds', params=params)
def ads_update_campaigns(self, **params):
"""Docs: https://dev.vk.com/method/ads.updateCampaigns"""
return self.post(f'ads.updateCampaigns', params=params)
def ads_update_clients(self, **params):
"""Docs: https://dev.vk.com/method/ads.updateClients"""
return self.post(f'ads.updateClients', params=params)
def ads_update_office_users(self, **params):
"""Docs: https://dev.vk.com/method/ads.updateOfficeUsers"""
return self.post(f'ads.updateOfficeUsers', params=params)
def ads_update_target_group(self, **params):
"""Docs: https://dev.vk.com/method/ads.updateTargetGroup"""
return self.post(f'ads.', params=params)
def ads_update_target_pixel(self, **params):
"""Docs: https://dev.vk.com/method/ads.updateTargetPixel"""
return self.post(f'ads.updateTargetPixel', params=params)
# Database
def database_get_chairs(self, **params):
"""Docs: https://dev.vk.com/method/database.getChairs"""
return self.post(f'database.getChairs', params=params)
database_get_chairs.iter_key = 'response.items'
database_get_chairs.iter_field = 'offset'
database_get_chairs.iter_mode = 'offset'
database_get_chairs.iter_next = 'response.items'
def database_get_cities(self, **params):
"""Docs: https://dev.vk.com/method/database.getCities"""
return self.post(f'database.getCities', params=params)
database_get_cities.iter_key = 'response.items'
database_get_cities.iter_field = 'offset'
database_get_cities.iter_mode = 'offset'
database_get_cities.iter_next = 'response.items'
def database_get_cities_by_id(self, **params):
"""Docs: https://dev.vk.com/method/database.getCitiesById"""
return self.post(f'database.getCitiesById', params=params)
database_get_cities_by_id.iter_key = 'response.items'
database_get_cities_by_id.iter_field = 'offset'
database_get_cities_by_id.iter_mode = 'offset'
database_get_cities_by_id.iter_next = 'response.items'
def database_get_countries(self, **params):
"""Docs: https://dev.vk.com/method/database.getCountries"""
return self.post(f'database.getCountries', params=params)
database_get_countries.iter_key = 'response.items'
database_get_countries.iter_field = 'offset'
database_get_countries.iter_mode = 'offset'
database_get_countries.iter_next = 'response.items'
def database_get_countries_by_id(self, **params):
"""Docs: https://dev.vk.com/method/database.getCountriesById"""
return self.post(f'database.getCountriesById', params=params)
database_get_countries_by_id.iter_key = 'response.items'
database_get_countries_by_id.iter_field = 'offset'
database_get_countries_by_id.iter_mode = 'offset'
database_get_countries_by_id.iter_next = 'response.items'
def database_get_faculties(self, **params):
"""Docs: https://dev.vk.com/method/database.getFaculties"""
return self.post(f'database.getFaculties', params=params)
database_get_faculties.iter_key = 'response.items'
database_get_faculties.iter_field = 'offset'
database_get_faculties.iter_mode = 'offset'
database_get_faculties.iter_next = 'response.items'
def database_get_metro_stations(self, **params):
"""Docs: https://dev.vk.com/method/database.getMetroStations"""
return self.post(f'database.getMetroStations', params=params)
database_get_metro_stations.iter_key = 'response.items'
database_get_metro_stations.iter_field = 'offset'
database_get_metro_stations.iter_mode = 'offset'
database_get_metro_stations.iter_next = 'response.items'
def database_get_metro_stations_by_id(self, **params):
"""Docs: https://dev.vk.com/method/database.getMetroStationsById"""
return self.post(f'database.getMetroStationsById', params=params)
database_get_metro_stations_by_id.iter_key = 'response.items'
database_get_metro_stations_by_id.iter_field = 'offset'
database_get_metro_stations_by_id.iter_mode = 'offset'
database_get_metro_stations_by_id.iter_next = 'response.items'
def database_get_regions(self, **params):
"""Docs: https://dev.vk.com/method/database.getRegions"""
return self.post(f'database.getRegions', params=params)
database_get_regions.iter_key = 'response.items'
database_get_regions.iter_field = 'offset'
database_get_regions.iter_mode = 'offset'
database_get_regions.iter_next = 'response.items'
def database_get_school_classes(self, **params):
"""Docs: https://dev.vk.com/method/database.getSchoolClasses"""
return self.post(f'database.getSchoolClasses', params=params)
database_get_school_classes.iter_key = 'response.items'
database_get_school_classes.iter_field = 'offset'
database_get_school_classes.iter_mode = 'offset'
database_get_school_classes.iter_next = 'response.items'
def database_get_schools(self, **params):
"""Docs: https://dev.vk.com/method/database.getSchools"""
return self.post(f'database.getSchools', params=params)
database_get_schools.iter_key = 'response.items'
database_get_schools.iter_field = 'offset'
database_get_schools.iter_mode = 'offset'
database_get_schools.iter_next = 'response.items'
def database_get_universities(self, **params):
"""Docs: https://dev.vk.com/method/database.getUniversities"""
return self.post(f'database.getUniversities', params=params)
database_get_universities.iter_key = 'response.items'
database_get_universities.iter_field = 'offset'
database_get_universities.iter_mode = 'offset'
database_get_universities.iter_next = 'response.items'
# Docs
def docs_add(self, **params):
"""Docs: https://dev.vk.com/method/docs.add"""
return self.post(f'docs.add', params=params)
def docs_delete(self, **params):
"""Docs: https://dev.vk.com/method/docs.delete"""
return self.post(f'docs.delete', params=params)
def docs_edit(self, **params):
"""Docs: https://dev.vk.com/method/docs.edit"""
return self.post(f'docs.edit', params=params)
def docs_get(self, **params):
"""Docs: https://dev.vk.com/method/docs.get"""
return self.post(f'docs.get', params=params)
def docs_get_by_id(self, **params):
"""Docs: https://dev.vk.com/method/docs.getById"""
return self.post(f'docs.getById', params=params)
def docs_get_messages_upload_server(self, **params):
"""Docs: https://dev.vk.com/method/docs.getMessagesUploadServer"""
return self.post(f'docs.getMessagesUploadServer', params=params)
def docs_get_types(self, **params):
"""Docs: https://dev.vk.com/method/docs.getTypes"""
return self.post(f'docs.getTypes', params=params)
def docs_get_upload_server(self, **params):
"""Docs: https://dev.vk.com/method/docs.getUploadServer"""
return self.post(f'docs.getUploadServer', params=params)
def docs_getWallUploadServer(self, **params):
"""Docs: https://dev.vk.com/method/docs.getWallUploadServer"""
return self.post(f'docs.getWallUploadServer', params=params)
def docs_save(self, **params):
"""Docs: https://dev.vk.com/method/docs.save"""
return self.post(f'docs.save', params=params)
def docs_search(self, **params):
"""Docs: https://dev.vk.com/method/docs.search"""
return self.post(f'docs.search', params=params)
# Donut
def donut_get_fiends(self, **params):
"""Docs: https://dev.vk.com/method/donut.getFriends"""
return self.post(f'donut.getFriends', params=params)
def donut_get_subscription(self, **params):
"""Docs: https://dev.vk.com/method/donut.getSubscription"""
return self.post(f'donut.getSubscription', params=params)
def donut_get_subscriptions(self, **params):
"""Docs: https://dev.vk.com/method/donut.getSubscriptions"""
return self.post(f'donut.getSubscriptions', params=params)
def donut_is_don(self, **params):
"""Docs: https://dev.vk.com/method/donut.isDon"""
return self.post(f'donut.isDon', params=params)
# Downloaded Games
def downloaded_games_get_paid_status(self, **params):
"""Docs: https://dev.vk.com/method/downloadedGames.getPaidStatus"""
return self.post(f'downloadedGames.getPaidStatus', params=params)
# Friends
def friends_add(self, **params):
"""Docs: https://dev.vk.com/method/friends.add"""
return self.post(f'friends.add', params=params)
def friends_add_list(self, **params):
"""Docs: https://dev.vk.com/method/friends.addList"""
return self.post(f'friends.addList', params=params)
def friends_are_friends(self, **params):
"""Docs: https://dev.vk.com/method/friends.areFriends"""
return self.post(f'friends.areFriends', params=params)
def friends_delete(self, **params):
"""Docs: https://dev.vk.com/method/friends.delete"""
return self.post(f'friends.delete', params=params)
def friends_delete_all_requests(self, **params):
"""Docs: https://dev.vk.com/method/friends.deleteAllRequests"""
return self.post(f'friends.deleteAllRequests', params=params)
def friends_delete_list(self, **params):
"""Docs: https://dev.vk.com/method/friends.deleteList"""
return self.post(f'friends.deleteList', params=params)
def friends_edit(self, **params):
"""Docs: https://dev.vk.com/method/friends.edit"""
return self.post(f'friends.edit', params=params)
def friends_edit_list(self, **params):
"""Docs: https://dev.vk.com/method/friends.editList"""
return self.post(f'friends.editList', params=params)
def friends_get(self, **params):
"""Docs: https://dev.vk.com/method/friends.get"""
return self.post(f'friends.get', params=params)
def friends_get_app_users(self, **params):
"""Docs: https://dev.vk.com/method/friends.getAppUsers"""
return self.post(f'friends.getAppUsers', params=params)
def friends_get_available_for_call(self, **params):
"""Docs: https://dev.vk.com/method/friends.getAvailableForCall"""
return self.post(f'friends.getAvailableForCall', params=params)
def friends_get_by_phones(self, **params):
"""Docs: https://dev.vk.com/method/friends.getByPhones"""
return self.post(f'friends.getByPhones', params=params)
def friends_get_lists(self, **params):
"""Docs: https://dev.vk.com/method/friends.getLists"""
return self.post(f'friends.getLists', params=params)
def friends_get_mutual(self, **params):
"""Docs: https://dev.vk.com/method/friends.getMutual"""
return self.post(f'friends.getMutual', params=params)
def friends_get_online(self, **params):
"""Docs: https://dev.vk.com/method/friends.getOnline"""
return self.post(f'friends.getOnline', params=params)
def friends_get_recent(self, **params):
"""Docs: https://dev.vk.com/method/friends.getRecent"""
return self.post(f'friends.getRecent', params=params)
def friends_get_requests(self, **params):
"""Docs: https://dev.vk.com/method/friends.getRequests"""
return self.post(f'friends.getRequests', params=params)
def friends_get_suggestions(self, **params):
"""Docs: https://dev.vk.com/method/friends.getSuggestions"""
return self.post(f'friends.getSuggestions', params=params)
def friends_search(self, **params):
"""Docs: https://dev.vk.com/method/friends.search"""
return self.post(f'friends.search', params=params)
# Gifts
def gifts_get(self, **params):
"""Docs: https://dev.vk.com/method/gifts.get"""
return self.post(f'gifts.get', params=params)
# Likes
def likes_add(self, **params):
"""Docs: https://dev.vk.com/method/likes.add"""
return self.post(f'likes.add', params=params)
def likes_delete(self, **params):
"""Docs: https://dev.vk.com/method/likes.delete"""
return self.post(f'likes.delete', params=params)
def likes_get_list(self, **params):
"""Docs: https://dev.vk.com/method/likes.getList"""
return self.post(f'likes.get_list', params=params)
def likes_is_liked(self, **params):
"""Docs: https://dev.vk.com/method/likes.isLiked"""
return self.post(f'likes.isLiked', params=params)
# Notifications
def notifications_get(self, **params):
"""Docs: https://dev.vk.com/method/notifications.get"""
return self.post(f'notifications.get', params=params)
def notifications_mark_as_viewed(self, **params):
"""Docs: https://dev.vk.com/method/notifications.markAsViewed"""
return self.post(f'notifications.markAsViewed', params=params)
def notifications_send_message(self, **params):
"""Docs: https://dev.vk.com/method/notifications.sendMessage"""
return self.post(f'notifications.sendMessage', params=params)
# Pages
def pages_clear_cache(self, **params):
"""Docs: https://dev.vk.com/method/pages.clearCache"""
return self.post(f'pages.clearCache', params=params)
def pages_get(self, **params):
"""Docs: https://dev.vk.com/method/pages.get"""
return self.post(f'pages.get', params=params)
def pages_get_history(self, **params):
"""Docs: https://dev.vk.com/method/pages.getHistory"""
return self.post(f'pages.getHistory', params=params)
def pages_get_titles(self, **params):
"""Docs: https://dev.vk.com/method/pages.getTitles"""
return self.post(f'pages.getTitles', params=params)
def pages_get_version(self, **params):
"""Docs: https://dev.vk.com/method/pages.getVersion"""
return self.post(f'pages.getVersion', params=params)
def pages_parse_wiki(self, **params):
"""Docs: https://dev.vk.com/method/pages.parseWiki"""
return self.post(f'pages.parseWiki', params=params)
def pages_preview(self, **params):
"""Docs: https://dev.vk.com/method/pages.preview"""
return self.post(f'pages.preview', params=params)
def pages_save(self, **params):
"""Docs: https://dev.vk.com/method/pages.save"""
return self.post(f'pages.save', params=params)
def pages_save_access(self, **params):
"""Docs: https://dev.vk.com/method/pages.saveAccess"""
return self.post(f'pages.saveAccess', params=params)
# Places
def places_get_checkins(self, **params):
"""Docs: https://dev.vk.com/method/places.getCheckins"""
return self.post(f'places.getCheckins', params=params)
# Podcasts
def podcasts_search_podcast(self, **params):
"""Docs: https://dev.vk.com/method/podcasts.searchPodcast"""
return self.post(f'podcasts.searchPodcast', params=params)
# Search
def places_get_hints(self, **params):
"""Docs: https://dev.vk.com/method/search.getHints"""
return self.post(f'search.getHints', params=params)
# Stats
def stats_get(self, **params):
"""Docs: https://dev.vk.com/method/stats.get"""
return self.post(f'stats.get', params=params)
def stats_get_post_reach(self, **params):
"""Docs: https://dev.vk.com/method/stats.getPostReach"""
return self.post(f'stats.getPostReach', params=params)
def stats_track_visitor(self, **params):
"""Docs: https://dev.vk.com/method/stats.trackVisitor"""
return self.post(f'stats.trackVisitor', params=params)
# Status
def status_get(self, **params):
"""Docs: https://dev.vk.com/method/status.get"""
return self.post(f'status.get', params=params)
def status_set(self, **params):
"""Docs: https://dev.vk.com/method/status.set"""
return self.post(f'status.set', params=params)
# Storage
def storage_get(self, **params):
"""Docs: https://dev.vk.com/method/storage.get"""
return self.post(f'storage.', params=params)
def storage_get_keys(self, **params):
"""Docs: https://dev.vk.com/method/storage.getKeys"""
return self.post(f'storage.getKeys', params=params)
def storage_set(self, **params):
"""Docs: https://dev.vk.com/method/storage.set"""
return self.post(f'storage.set', params=params)
# Users
def users_get(self, **params):
"""Docs: https://dev.vk.com/method/users.get"""
return self.post(f'users.get', params=params)
def users_get_followers(self, **params):
"""Docs: https://dev.vk.com/method/users.getFollowers"""
return self.post(f'users.getFollowers', params=params)
def users_get_subscriptions(self, **params):
"""Docs: https://dev.vk.com/method/users.getSubscriptions"""
return self.post(f'users.getSubscriptions', params=params)
def users_report(self, **params):
"""Docs: https://dev.vk.com/method/users.report"""
return self.post(f'users.report', params=params)
def users_search(self, **params):
"""Docs: https://dev.vk.com/method/users.search"""
return self.post(f'users.search', params=params)
# Utils
def utils_check_link(self, **params):
"""Docs: https://dev.vk.com/method/utils.checkLink"""
return self.post(f'utils.checkLink', params=params)
def utils_delete_from_last_shortened(self, **params):
"""Docs: https://dev.vk.com/method/utils.deleteFromLastShortened"""
return self.post(f'utils.deleteFromLastShortened', params=params)
def utils_get_last_shortened_links(self, **params):
"""Docs: https://dev.vk.com/method/utils.getLastShortenedLinks"""
return self.post(f'utils.getLastShortenedLinks', params=params)
def utils_get_link_stats(self, **params):
"""Docs: https://dev.vk.com/method/utils.getLinkStats"""
return self.post(f'utils.getLinkStats', params=params)
def utils_get_server_time(self, **params):
"""Docs: https://dev.vk.com/method/utils.getServerTime"""
return self.post(f'utils.getServerTime', params=params)
def utils_get_short_link(self, **params):
"""Docs: https://dev.vk.com/method/utils.getShortLink"""
return self.post(f'utils.getShortLink', params=params)
def utils_resolve_screen_name(self, **params):
"""Docs: https://dev.vk.com/method/utils.resolveScreenName"""
return self.post(f'utils.resolveScreenName', params=params)
# Wall
def wall_check_copyright_link(self, **params):
"""Docs: https://dev.vk.com/method/wall.checkCopyrightLink"""
return self.post(f'wall.checkCopyrightLink', params=params)
def wall_close_comments(self, **params):
"""Docs: https://dev.vk.com/method/wall.closeComments"""
return self.post(f'wall.closeComments', params=params)
def wall_create_comment(self, **params):
"""Docs: https://dev.vk.com/method/wall.createComment"""
return self.post(f'wall.createComment', params=params)
def wall_delete(self, **params):
"""Docs: https://dev.vk.com/method/wall.delete"""
return self.post(f'wall.delete', params=params)
def wall_delete_comment(self, **params):
"""Docs: https://dev.vk.com/method/wall.deleteComment"""
return self.post(f'wall.deleteComment', params=params)
def wall_delete_like(self, **params):
"""Docs: https://dev.vk.com/method/wall.deleteLike"""
return self.post(f'wall.deleteLike', params=params)
def wall_edit(self, **params):
"""Docs: https://dev.vk.com/method/wall.edit"""
return self.post(f'wall.edit', params=params)
def wall_edit_ads_stealth(self, **params):
"""Docs: https://dev.vk.com/method/wall.editAdsStealth"""
return self.post(f'wall.editAdsStealth', params=params)
def wall_edit_comment(self, **params):
"""Docs: https://dev.vk.com/method/wall.editComment"""
return self.post(f'wall.editComment', params=params)
def wall_get(self, **params):
"""Docs: https://dev.vk.com/method/wall.get"""
return self.post(f'wall.get', params=params)
def wall_get_by_id(self, **params):
"""Docs: https://dev.vk.com/method/wall.getById"""
return self.post(f'wall.getById', params=params)
def wall_get_comment(self, **params):
"""Docs: https://dev.vk.com/method/wall.getComment"""
return self.post(f'wall.getComment', params=params)
def wall_get_comments(self, **params):
"""Docs: https://dev.vk.com/method/wall.getComments"""
return self.post(f'wall.getComments', params=params)
def wall_get_likes(self, **params):
"""Docs: https://dev.vk.com/method/wall.getLikes"""
return self.post(f'wall.getLikes', params=params)
def wall_get_photo_upload_server(self, **params):
"""Docs: https://dev.vk.com/method/wall.getPhotoUploadServer"""
return self.post(f'wall.getPhotoUploadServer', params=params)
def wall_get_reposts(self, **params):
"""Docs: https://dev.vk.com/method/wall.getReposts"""
return self.post(f'wall.getReposts', params=params)
wall_get_reposts.iter_key = 'response.items'
wall_get_reposts.iter_field = 'offset'
wall_get_reposts.iter_mode = 'offset'
wall_get_reposts.iter_next = 'response.items'
def wall_open_comments(self, **params):
"""Docs: https://dev.vk.com/method/wall.openComments"""
return self.post(f'wall.openComments', params=params)
def wall_pin(self, **params):
"""Docs: https://dev.vk.com/method/wall.pin"""
return self.post(f'wall.pin', params=params)
def wall_post(self, **params):
"""Docs: https://dev.vk.com/method/wall.post"""
return self.post(f'wall.post', params=params)
def wall_post_ads_stealth(self, **params):
"""Docs: https://dev.vk.com/method/wall.postAdsStealth"""
return self.post(f'wall.postAdsStealth', params=params)
def wall_report_comment(self, **params):
"""Docs: https://dev.vk.com/method/wall.reportComment"""
return self.post(f'wall.reportComment', params=params)
def wall_report_post(self, **params):
"""Docs: https://dev.vk.com/method/wall.reportPost"""
return self.post(f'wall.report_post', params=params)
def wall_repost(self, **params):
    """Call the VK API method ``wall.repost``.

    Docs: https://dev.vk.com/method/wall.repost
    """
    # Plain string literal: the original used an f-string with no placeholders.
    return self.post('wall.repost', params=params)
def wall_restore(self, **params):
    """Call the VK API method ``wall.restore`` (restores a deleted post).

    Docs: https://dev.vk.com/method/wall.restore
    """
    # Plain string literal: the original used an f-string with no placeholders.
    return self.post('wall.restore', params=params)
def wall_restore_comment(self, **params):
    """Call the VK API method ``wall.restoreComment``.

    Docs: https://dev.vk.com/method/wall.restoreComment
    """
    # Plain string literal: the original used an f-string with no placeholders.
    return self.post('wall.restoreComment', params=params)
def wall_search(self, **params):
    """Call the VK API method ``wall.search``.

    Docs: https://dev.vk.com/method/wall.search
    """
    # Plain string literal: the original used an f-string with no placeholders.
    return self.post('wall.search', params=params)
# Pagination metadata consumed by the library's generic offset-based iterator.
wall_search.iter_key = 'response.items'
wall_search.iter_field = 'offset'
wall_search.iter_mode = 'offset'
wall_search.iter_next = 'response.items'
def wall_unpin(self, **params):
    """Call the VK API method ``wall.unpin``.

    Docs: https://dev.vk.com/method/wall.unpin
    """
    # Plain string literal: the original used an f-string with no placeholders.
    return self.post('wall.unpin', params=params)
| StarcoderdataPython |
9741486 | import sqlite3
from sqlite3 import Error
from flask import (Flask, render_template, request, flash, redirect, url_for,
current_app)
from flask_login import current_user, login_user, login_required, logout_user
import os
from . import auth
from .forms import RegistraForm, LoginForm
from .db_conn import insert_utente, login_ok, load_user, User, delete_user_notconfirmed
from werkzeug.security import generate_password_hash
from SingSong.email import send_email
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
# ----------------- RICEVERE I DATI UTENTE DA FORM REGISTRAZIONE -------------------------
#indicare il file del template in cui si trova il form
@auth.route('/registra', methods=['POST', 'GET'])
def registra():
    """Render and process the user registration form.

    On a valid submission: hashes the password, stores the new user, emails a
    confirmation link carrying a timed token, and schedules deletion of the
    account if it is never confirmed (so the address can register again).
    """
    # Users who are already logged in are sent straight to their profile page.
    if current_user.is_authenticated:
        return redirect(url_for('profile.user_can', nome=current_user.nome))
    form = RegistraForm()
    if form.validate_on_submit():
        hash_pss = generate_password_hash(form.password.data, "pbkdf2:sha256")
        # Take the first three submitted values — the columns inserted into the DB.
        # NOTE(review): this relies on the insertion order of form.data —
        # presumably (name, email, password); confirm against RegistraForm.
        user_data = list(form.data.values())[0:3]
        # Replace the clear-text password with its hash before insertion.
        user_data[2] = hash_pss
        insert_utente(user_data)
        mes_ins = "Data entered correctly"
        mes_log = "A confirmation email has been sent to you by email"
        # The user is now in the DB but cannot log in yet: send_email delivers a
        # confirmation link whose token (itsdangerous) is bound to the user id.
        # load_user (db_conn) returns the User object used to build that token.
        email = form.email.data
        id = login_ok(email)
        user = load_user(id)
        nome = user.nome
        # Bind the confirmation token to the user's identity.
        token = user.generate_confirmation_token()
        # Send the confirmation email with the token link.
        subject = "User account confirmation!"
        send_email(email, subject, "email_confirm", nome=nome, token=token)
        # db_conn helper: deletes the user if the token expires unconfirmed.
        delete_user_notconfirmed(id)
        return render_template('reg_index.html', form=form, mi=mes_ins, ml=mes_log)
    return render_template('reg_index.html', form=form)
# ------------------------- PAGINA DI LOGIN --------------------------------
# Ricevo i dati da login
@auth.route("/login", methods=['POST', 'GET'])
def login():
# se lo user è già loggato, gli impediamo di tornare sulla pagina di login
if current_user.is_authenticated:
return redirect(url_for('profile.user_can', nome=current_user.nome))
form = LoginForm()
if form.validate_on_submit():
email = form.email.data
"""Il processo del login funziona così: recupero l'id dell'utente che si sta loggando
e lo passo alla funzione load_user (in db_conn). Ritorna l'oggetto User (che
contiene user id, Nome, email e password cioè contiene tutta la riga dei dati relativi
all'utente). L'oggetto poi può essere passato alla funzione login_user per il login. """
id = login_ok(email)
Us = load_user(id)
""" login_user è il momento in cui l'utente viene loggato e le classi
is_active e is_authenticated risultano True.
parametro:remember se True, ad ogni login, viene lasciato nel browser un cookie
'remember_token' che permette di riaccedere al proprio profilo anche chiudendo
il browser. Bisogna però andare diretti al proprio profilo, altrimenti il cookie
verrà cancellato."""
# se l'utente non ha confermato il link non può entrare.
if Us.conferma == False:
flash('You must confirm your account first. Please check your email!')
return render_template('log_in.html', form=form)
login_user(Us, remember=form.remember_me.data)
return redirect(url_for('profile.user_can', nome=current_user.nome))
return render_template('log_in.html', form=form)
@auth.route("/confirm/<token>", methods=['GET', 'POST'])
def confirm(token):
s = Serializer(current_app.config['SECRET_KEY'])
# Con try vogliamo testare se il link è scaduto o non valido. In quest'ultimo
# caso restituisce errore e chiede di registrarsi di nuovo se l'utente non è già confermato.
try:
# .loads() decodifica il token appena ricevuto tramite email;
# il token diventa un dizionario {confirm:self.id}
data = s.loads(token)
except:
flash('The confirmation link is invalid or has expired.', 'invalid')
flash("""If you did not confirm your account trough email and can't login,
please register again.""", 'invalid')
return redirect(url_for('main.home'))
# prendo email dal token
email = data.get('email')
id = login_ok(email)
user = load_user(id)
# Se il campo Conferma del DB è True,quindi utente già predentemente confermato.
if user.conferma:
logout_user()
flash('Your account is already confirmed, you can login!', 'confirm')
return redirect(url_for('main.home'))
# Se l'utente ha appena confermato via mail viene richiamata la funzione confirm_email
# da db_conn per validare il token.
if user.confirm_email(data):
nome = user.nome
# Viene creata la cartella personale per i files delle canzoni
os.mkdir("SingSong/static/upload_song/" + nome)
flash('You have confirmed your account, thanks! You can login now.', 'confirm')
return redirect(url_for('main.home'))
else:
# In tutti gli altri casi la conferma non avviene.
flash('The confirmation link is invalid or has expired. Please register again.',
'invalid')
return redirect(url_for('main.home'))
| StarcoderdataPython |
8193157 | <gh_stars>0
#!/usr/bin/env python3
# <NAME>, 2020, based on an implementation by Isabelle and <NAME> distibuted under the MIT license.
#The three bug algorithms differ only in how they decide to leave the wall and return to the path through free space to the goal. To implement this, a single Bug class was created that contained all of the shared logic of the bug algorithms, with the main loop: while current_location.distance(tx, ty) > delta: hit_wall = bug.go_until_obstacle() if hit_wall: bug.follow_wall() print "Arrived at", (tx, ty)
#where follow_wall() loops until bug.should_leave_wall() is true.
#Bug0 implements logic to see if the path in the direction of the goal is clear. Bug1 implements logic to confirm circumnavigation occured and the robot is at the closest point. Bug2 implements logic to see if the slope of the line to the destination is the same as the slope at impact and the current position is closer.
# Code for navigating around obstacles as opposed to simply avoiding them.
# Implements bug 0 algorithm
# Assumes that the destination coordinates are provided and that all obstacles
# are convex.
# Bug 0
# 1) Head towards target coordinates
# 2) Follow obstacles (random initial turn) until can head towards goal again
# 3) Release control
# Bug 1
# 1) Head towards target coordinates
# 2) When obstacle encountered circumnavigate and remember the minimum distance
# between the robot and the target coordinates
# 3) Return to that closest point by wall following and release control
# Bug 2
# Description TODO
import math
import sys
import rospy
import tf.transformations as transform
from geometry_msgs.msg import Twist, Point
from srcp2_msgs import msg, srv
from sensor_msgs.msg import LaserScan, Imu
from nav_msgs.msg import Odometry
from std_msgs.msg import String
from obstacle.msg import Obstacles
import threading
import sys
import random
from signal import signal, SIGINT
from tqdm import tqdm # For progress bars
import Queue
# Constants
OBSTACLE_EDGE_TOL = 0.5 # meters; tolerance band around WALL_PADDING when edge-following
max_num_waypoints = 100
waypoint_bounds = 75 # meters. distance from center to edges
waypoint_queue = Queue.Queue( max_num_waypoints )  # pending navigation targets
# Limit on reaching a single waypoint (seconds); checked in Bug.stuck_handler
waypoint_timeout = 300
timed_out = False   # set by the stuck-check timer, raises TimedOutException in Bug.go()
start_time = 0      # rostime when the current waypoint attempt began
delta = 2 # meters. How close the robot tries to get to a waypoint
WALL_PADDING = 3 # meters. Balances not hitting objects vs. mistaking slopes for obstacles
# Track success stats (updated in bug_algorithm, reported in sigint_handler)
success_count = 0.0
success_distance = 0.0
success_time = 0.0
stats_printed = False
total_time_start = 0
status_msg = None        # last published navigation status ("Arrived!" / timeout tuple)
escape_waypoint = None   # reserved for the (disabled) escape-waypoint strategy
# Motion-command codes consumed by Bug.go()
STRAIGHT = 0
LEFT = 1
RIGHT = 2
BACK = 3
MSG_STOP = 4
# Wall-following direction codes returned by Bug.go_until_obstacle()
CLOCKWISE = 5
ANTICLOCKWISE = 6
# Shared state, replaced with real objects in main()
current_location = None
current_dists = None
class TimedOutException(Exception):
    """Raised by Bug.go() once the per-waypoint timeout flag has been set."""
    pass
def random_waypoint_generator( n_waypoints ):
    """Publish ``n_waypoints`` random targets on /small_scout_1/waypoints.

    x is drawn within +/- waypoint_bounds, y within +/- 40 m; one point is
    published every 0.1 s with a tqdm progress bar.
    """
    publisher = rospy.Publisher('/small_scout_1/waypoints', Point, queue_size=1)
    print("Generating Waypoints...")
    for _ in tqdm(range(n_waypoints)):
        x_coord = random.uniform(-waypoint_bounds, waypoint_bounds)
        y_coord = random.uniform(-40, 40)
        publisher.publish(Point(x_coord, y_coord, 0))
        rospy.sleep(0.1)
    print("Finished")
# Message Handlers
# Processes move to waypoint requests. Waypoints are given as geometry points
def waypoint_handler( msg ):
    """ROS callback for /small_scout_1/waypoints: queue the point and navigate.

    NOTE(review): bug_algorithm blocks until shutdown, so this subscriber
    callback never returns after the first waypoint — confirm that is intended.
    """
    waypoint_queue.put(msg)
    bug_algorithm(msg.x, msg.y, bug_type=0)
# Location is used to maintain a single current location of the robot in a
# thread-safe manner such that the event handlers and readers can all use it without
# issue
class Location:
    """Thread-safe holder for the robot's latest pose (x, y, heading, pitch).

    Written by odometry callbacks and read by the navigation loop, so every
    access goes through a single lock.
    """
    def __init__(self):
        self.m = threading.Lock() # global lock b/c easy and not a problem yet
        self.x = None
        self.y = None
        self.t = None       # heading (yaw) in radians, [-pi, pi]
        self.pitch = 0.0
        self.deltaT = 0.25 # how close to angle to be to go
    def update_location(self, x, y, t, pitch):
        """Atomically replace the stored pose."""
        self.m.acquire()
        self.x = x
        self.y = y
        self.t = t
        self.pitch = pitch
        self.m.release()
    def current_location(self):
        """Return a consistent snapshot (x, y, heading, pitch)."""
        self.m.acquire()
        x = self.x
        y = self.y
        t = self.t
        pitch = self.pitch
        self.m.release()
        return (x, y, t, pitch)
    def distance(self, x, y):
        """Euclidean distance from the current position to (x, y)."""
        (x0, y0, _, _) = self.current_location()
        if x0 == None or y0 == None:
            # will be None on the first iteration, before any odometry arrives
            return sys.maxint
        return math.sqrt((x-x0)**2 + (y-y0)**2)
    def facing_point(self, x, y):
        """True if the current heading points at (x, y) within deltaT radians."""
        (cx, cy, current_heading, _) = self.current_location()
        if None in (cx, cy, current_heading):
            return False
        n = necessary_heading(cx, cy, x, y)
        # TODO(exm) possible bug with boundary conditions?
        return n - self.deltaT <= current_heading <= n + self.deltaT
    def faster_left(self, x, y):
        """True if turning left is the shorter rotation toward (x, y).

        NOTE(review): the raw difference is not wrapped to [-pi, pi], so the
        answer can pick the longer way near the +/-pi seam — confirm.
        """
        (cx, cy, current_heading, _) = self.current_location()
        if None in (cx, cy, current_heading):
            return False
        return current_heading - necessary_heading(cx, cy, x, y) < 0
    def global_to_local(self, desired_angle):
        """Convert a global heading into an angle relative to the robot.

        NOTE(review): only the < -pi side is wrapped; a result > pi is
        returned unnormalized — looks asymmetric, confirm before relying on it.
        """
        (_, _, current_heading, _) = self.current_location()
        ans = desired_angle - current_heading
        if ans < -math.pi:
            ans += 2* math.pi
        return ans
def necessary_heading(cx, cy, tx, ty):
    """Return the absolute heading (radians, in [-pi, pi]) from the current
    position (cx, cy) to the target (tx, ty)."""
    dy = ty - cy
    dx = tx - cx
    return math.atan2(dy, dx)
class Dist:
    """Thread-safe cache of the latest LiDAR scan, summarized into the minimum
    distance seen in three sectors: front, left and right.

    Sector index ranges (into ``ranges``): right = [0, 40), front = [40, 60),
    left = [60, 100). When a sector has no in-range reading its minimum is
    reported as sys.maxint (this module targets Python 2).
    """
    def __init__(self):
        self.m = threading.Lock()
        self.left = 0
        self.front = 0
        self.raw = []     # most recent scan message; NOTE: angle_to_index/at
                          # are only valid after the first update()
        self.right = 0
    def update(self, data):
        """Recompute the sector minima from a new LaserScan-like message."""
        def getmin(a, b):
            # Keep only readings within the sensor's valid range.
            # Fix: was len(filter(...)), a Python-2-only pattern that breaks
            # under Python 3 where filter() returns an iterator.
            vsp = [r for r in data.ranges[a:b] if data.range_min <= r <= data.range_max]
            if vsp:
                return min(vsp)
            else:
                return sys.maxint
        newfront = getmin(40, 60)
        newleft = getmin(60, 100)
        newright = getmin(0, 40)
        self.m.acquire()
        self.left = newleft
        self.front = newfront
        self.right = newright
        self.raw = data
        self.m.release()
    def get(self):
        """Return a consistent (front, left, right) snapshot of the minima."""
        self.m.acquire()
        f = self.front
        l = self.left
        r = self.right
        self.m.release()
        return (f, l, r)
    def angle_to_index(self, angle):
        """Map an angle (radians) to the corresponding index into raw.ranges."""
        return int((angle - self.raw.angle_min)/self.raw.angle_increment)
    def at(self):
        """Return the overall minimum reading of the latest raw scan.

        Fix: reads raw under the lock — every other accessor synchronizes, and
        update() may swap self.raw from the LiDAR callback thread.
        """
        self.m.acquire()
        ans = min(self.raw.ranges)
        self.m.release()
        return ans
def init_listener():
    """Wire up all ROS subscriptions for the rover and wait for the brake service."""
    rospy.Subscriber('/small_scout_1/odometry/filtered', Odometry, estimated_location_handler)
    rospy.Subscriber('/small_scout_1/laser/scan', LaserScan, lidar_handler)
    rospy.Subscriber("/small_scout_1/imu", Imu, imu_handler)
    rospy.Subscriber('/small_scout_1/obstacle', Obstacles, obstacle_handler)
    rospy.logwarn("Waiting for brake service...")
    rospy.wait_for_service('/small_scout_1/brake_rover')
    # NOTE(review): this proxy is assigned to a local and never used; the
    # global brake_service is (re)created in bug_algorithm() — confirm intent.
    brakeService = rospy.ServiceProxy('/small_scout_1/brake_rover', srv.BrakeRoverSrv)
    rospy.logwarn("... active.")
    waypoint_topic = "/small_scout_1/waypoints"
    rospy.Subscriber('/small_scout_1/waypoints', Point, waypoint_handler)
    rospy.logwarn("Subscribing to"+ waypoint_topic)
def estimated_location_handler(data):
    """Odometry callback: store the filtered pose in estimated_current_location."""
    position = data.pose.pose.position
    orientation = data.pose.pose.orientation
    quat = (
        orientation.x,
        orientation.y,
        orientation.z,
        orientation.w)
    roll, pitch, yaw = transform.euler_from_quaternion(quat) # in [-pi, pi]
    estimated_current_location.update_location(position.x, position.y, yaw, pitch)
def actual_location_handler(data):
    """Odometry callback: store the ground-truth pose in actual_current_location."""
    position = data.pose.pose.position
    orientation = data.pose.pose.orientation
    quat = (
        orientation.x,
        orientation.y,
        orientation.z,
        orientation.w)
    roll, pitch, yaw = transform.euler_from_quaternion(quat) # in [-pi, pi]
    actual_current_location.update_location(position.x, position.y, yaw, pitch)
def lidar_handler(data):
    """LaserScan callback: refresh the shared front/left/right distance summary."""
    current_dists.update(data)
def imu_handler( data ):
    """IMU callback: unpack the orientation quaternion to Euler angles.

    NOTE(review): the computed roll/pitch/yaw are discarded — dead code or an
    unfinished feature; confirm before removing.
    """
    q = (
        data.orientation.x,
        data.orientation.y,
        data.orientation.z,
        data.orientation.w)
    roll, pitch, yaw = transform.euler_from_quaternion(q) # in [-pi, pi]
def obstacle_handler(data):
    """Obstacle-topic callback: intentionally a no-op (messages are ignored)."""
    pass
class Bug:
    """Base class for the Bug-family navigation algorithms (Python 2).

    Drives toward the target (tx, ty) through free space and follows obstacle
    walls; subclasses decide when to leave a wall via should_leave_wall().
    Depends on module globals: estimated_current_location, current_dists,
    brake_service, delta, WALL_PADDING and the direction constants.
    """
    def __init__(self, tx, ty):
        # Robot linear velocity in meters per second
        self.linear_vel = 5
        # Robot angular velocity in radians per second
        self.angular_vel = round(2*math.pi,2)
        self.pub = rospy.Publisher('/small_scout_1/skid_cmd_vel', Twist, queue_size=1)
        # Target coordinates for the current waypoint.
        self.tx = tx
        self.ty = ty
        self.stuck_linear_tol = 2
        self.stuck_angular_tol = math.pi/4
        # We only want one function driving at a time
        self.drive_mutex = threading.Lock()
        # Remember where we were before (sys.maxint = "no previous fix yet")
        self.last_x = sys.maxint
        self.last_y = sys.maxint
        self.last_h = sys.maxint
        # How long to wait between stuck checks (seconds)
        self.stuck_check_period = 20
        # Setup a timer to periodically check if we are stuck
        self.stuck_timer = rospy.Timer(rospy.Duration(self.stuck_check_period), self.stuck_handler)
    def apply_brakes(self):
        """Engage the rover's brakes via the brake service (force 100)."""
        brake_service.call(100)
    def release_brakes(self):
        """Release the rover's brakes via the brake service (force 0)."""
        brake_service.call(0)
    def stuck_handler(self, event=None):
        """Timer callback: flag waypoint timeouts and wiggle free when stuck.

        Sets the module-level timed_out flag when the per-waypoint timeout has
        expired (Bug.go() then raises TimedOutException). Otherwise, if the
        rover barely moved since the last check and is not yet at the goal, it
        publishes a random drive/spin command for a while to escape.
        """
        # Check for timeout first.
        elapsed_time = rospy.get_rostime().secs - start_time
        if elapsed_time > waypoint_timeout:
            global timed_out
            timed_out = True
            return
        x, y, h, pitch = estimated_current_location.current_location()
        # Displacement below tolerance and not at the goal => assume stuck.
        if estimated_current_location.distance(self.last_x, self.last_y) < self.stuck_linear_tol and estimated_current_location.distance(self.tx, self.ty) > delta:
            self.drive_mutex.acquire()
            cmd = Twist()
            # Randomly back up, stay, or go forward; if staying, spin instead.
            cmd.linear.x = self.linear_vel*random.randint(-1,1)
            if cmd.linear.x == 0:
                cmd.angular.z = self.angular_vel
            else:
                pass
            for i in range(10):
                self.pub.publish(cmd)
                rospy.sleep(3)
            self.drive_mutex.release()
        # (A commented-out alternative escape strategy based on pushing an
        # "escape waypoint" onto the queue was removed from the original here.)
        self.last_x = x
        self.last_y = y
        self.last_h = h
        self.stuck = False # We hope, if not this function will be exectuted again
    def go(self, direction):
        """Publish one motion command; direction is one of the module codes
        STRAIGHT/LEFT/RIGHT/BACK/MSG_STOP.

        Raises TimedOutException when the waypoint timeout flag is set.
        """
        # Check for timeout
        if timed_out:
            raise TimedOutException()
        # Do nothing special if someone else is driving (avoids blocking on the mutex)
        if self.drive_mutex.locked():
            pass
        # Add noise to the speeds so we don't get into repetitive loops.
        linear_vel = self.linear_vel + random.gauss(0, 1)
        angular_vel = self.angular_vel + random.gauss(0, 1)
        command_reps = 10
        self.drive_mutex.acquire()
        self.release_brakes()
        cmd = Twist()
        if direction == STRAIGHT:
            cmd.linear.x = linear_vel
        elif direction == LEFT:
            cmd.angular.z = angular_vel
        elif direction == RIGHT:
            cmd.angular.z = -angular_vel
        elif direction == BACK:
            cmd.linear.x = -linear_vel
        elif direction == MSG_STOP:
            cmd.angular.z = 0
            cmd.linear.x = 0
            self.apply_brakes()
        # Re-publish at ~10 Hz so the command takes effect.
        for i in range(command_reps):
            self.pub.publish(cmd)
            rospy.sleep(0.1)
        self.drive_mutex.release()
    def print_error(self):
        """Print estimated/actual distance and heading error to the target."""
        cx, cy, t, pitch = estimated_current_location.current_location()
        print "Estamated distance to target: ", round(estimated_current_location.distance(self.tx, self.ty)), "m"
        print "Actual distance to target: ", round(actual_current_location.distance(self.tx, self.ty)), "m"
        print "Angle Error: ", necessary_heading(cx, cy, self.tx, self.ty)-t, "rad"
    def go_until_obstacle(self):
        """Drive toward the target until it is reached or a wall is hit.

        Returns CLOCKWISE or ANTICLOCKWISE to select the wall-following
        direction, or False when the target was reached without obstruction.
        """
        while estimated_current_location.distance(self.tx, self.ty) > delta:
            (frontdist, leftdist, rightdist) = current_dists.get()
            _, _, _, pitch = estimated_current_location.current_location()
            # Wall ahead: back off and pick a circumnavigation direction based
            # on which side is also blocked.
            if frontdist <= WALL_PADDING and leftdist <= WALL_PADDING:
                self.go(BACK)
                return ANTICLOCKWISE
            elif frontdist <= WALL_PADDING and rightdist <= WALL_PADDING:
                self.go(BACK)
                return CLOCKWISE
            elif frontdist <= WALL_PADDING:
                self.go(BACK)
                return CLOCKWISE
            # Otherwise keep steering toward the target.
            if estimated_current_location.facing_point(self.tx, self.ty):
                self.go(STRAIGHT)
            elif estimated_current_location.faster_left(self.tx, self.ty):
                self.go(LEFT)
            else:
                self.go(RIGHT)
        return False
    def print_LiDAR_ranges(self):
        """Print the front/left/right LiDAR minima ('max' when out of range)."""
        front_range, left_range, right_range = current_dists.get()
        if left_range > 100:
            left_range = "max"
        if front_range > 100:
            front_range = "max"
        if right_range > 100:
            right_range = "max"
        print "LiDAR range. Front:", front_range, "m. Left: ", left_range, "m. Right: ", right_range, "m"
    def follow_wall_anticlockwise(self):
        """Follow the obstacle keeping it on the LEFT until should_leave_wall().

        Also returns early once all three sectors read clear of the obstacle.
        """
        # First align: rotate right until the front is clear.
        while current_dists.get()[0] <= WALL_PADDING:
            self.go(RIGHT)
            rospy.sleep(0.1)
        while not self.should_leave_wall() and estimated_current_location.distance(self.tx, self.ty) > delta:
            rospy.sleep(0.1)
            (front, left, right) = current_dists.get()
            if front <= WALL_PADDING:
                # Still blocked ahead: keep turning away from the wall.
                self.go(RIGHT)
            elif WALL_PADDING - OBSTACLE_EDGE_TOL <= left <= WALL_PADDING + OBSTACLE_EDGE_TOL:
                # Inside the follow band: track the obstacle edge.
                self.go(STRAIGHT)
            elif left > WALL_PADDING + 0.5:
                # Drifting away from the obstacle: steer back toward it.
                self.go(LEFT)
            elif front > WALL_PADDING and left > WALL_PADDING and right > WALL_PADDING:
                self.go(STRAIGHT)
                # All sectors clear: free of the obstacle.
                return
            else:
                self.go(RIGHT)
    def follow_wall_clockwise(self):
        """Follow the obstacle keeping it on the RIGHT until should_leave_wall()."""
        # First align: rotate left until the front is clear.
        while current_dists.get()[0] <= WALL_PADDING:
            self.go(LEFT)
            rospy.sleep(0.1)
        while not self.should_leave_wall() and estimated_current_location.distance(self.tx, self.ty) > delta:
            rospy.sleep(0.1)
            (front, left, right) = current_dists.get()
            if front <= WALL_PADDING:
                self.go(LEFT)
            elif WALL_PADDING - OBSTACLE_EDGE_TOL <= right <= WALL_PADDING + OBSTACLE_EDGE_TOL:
                self.go(STRAIGHT)
            elif left > WALL_PADDING + 0.5:
                # NOTE(review): tests the *left* range although this routine
                # follows the wall on the right; the anticlockwise twin tests
                # left at the same spot, so this looks like a copy-paste slip
                # (should probably be `right`) — confirm before changing.
                self.go(RIGHT)
            elif front > WALL_PADDING and left > WALL_PADDING and right > WALL_PADDING:
                self.go(STRAIGHT)
                return
            else:
                self.go(LEFT)
    def should_leave_wall(self):
        """Abstract hook: subclasses decide when to stop wall-following."""
        print "You dolt! You need to subclass bug to know how to leave the wall"
        sys.exit(0.1)
class Bug0(Bug):
    """Bug 0: leave the wall as soon as the goal direction is roughly clear."""
    def should_leave_wall(self):
        """True when the robot roughly faces the goal and the front is open (> 5 m)."""
        (x, y, t, _) = estimated_current_location.current_location()
        goal_heading = necessary_heading(x, y, self.tx, self.ty)
        dir_to_go = estimated_current_location.global_to_local(goal_heading)
        front_clear = current_dists.get()[0] > 5
        return abs(dir_to_go - t) < math.pi / 4 and front_clear
class Bug1(Bug):
    """Bug 1: circumnavigate the whole obstacle, remember the closest point to
    the goal, and leave the wall only when back at that closest point."""
    def __init__(self, tx, ty):
        Bug.__init__(self, tx, ty)
        # (None, None) means "no wall encountered yet".
        self.closest_point = (None, None)
        self.origin = (None, None)          # where the wall was first touched
        self.circumnavigated = False
    def should_leave_wall(self):
        """Track circumnavigation progress; True once back at the closest point."""
        (x, y, t, _) = estimated_current_location.current_location()
        # First call after hitting the wall: record the touch point as both
        # origin and provisional closest point.
        if None in self.closest_point:
            self.origin = (x, y)
            self.closest_point = (x, y)
            self.closest_distance = estimated_current_location.distance(self.tx, self.ty)
            self.left_origin_point = False
            return False
        d = estimated_current_location.distance(self.tx, self.ty)
        if d < self.closest_distance:
            print "New closest point at", (x, y)
            self.closest_distance = d
            self.closest_point = (x, y)
        (ox, oy) = self.origin
        if not self.left_origin_point and not near(x, y, ox, oy):
            # we have now left the point where we hit the wall
            print "Left original touch point"
            self.left_origin_point = True
        elif near(x, y, ox, oy) and self.left_origin_point:
            # circumnavigation achieved!
            print "Circumnavigated obstacle"
            self.circumnavigated = True
        (cx, ct) = self.closest_point
        if self.circumnavigated and near(x, y, cx, ct):
            # Back at the closest point after a full lap: reset state and leave.
            self.closest_point = (None, None)
            self.origin = (None, None)
            self.circumnavigated = False
            self.left_origin_point = False
            print "Leaving wall"
            return True
        else:
            return False
class Bug2(Bug):
    """Bug 2: leave the wall when crossing the original goal line (same slope
    to the goal as at impact) at a point closer to the goal than the impact."""
    def __init__(self, tx, ty):
        Bug.__init__(self, tx, ty)
        self.lh = None                        # heading to goal recorded at impact
        self.encountered_wall_at = (None, None)
    def face_goal(self):
        """Rotate right until the robot faces the goal point."""
        while not estimated_current_location.facing_point(self.tx, self.ty):
            self.go(RIGHT)
            rospy.sleep(.01)
    def follow_wall(self):
        """Follow the wall, then re-face the goal.

        NOTE(review): Bug defines follow_wall_clockwise/_anticlockwise but no
        follow_wall, so Bug.follow_wall(self) would raise AttributeError —
        this path looks dead/unfinished; confirm.
        """
        Bug.follow_wall(self)
        self.face_goal()
    def should_leave_wall(self):
        """True when back on the impact-time goal line, closer than the impact point."""
        (x, y, _, _) = estimated_current_location.current_location()
        # First call after hitting the wall: record the impact point and the
        # heading to the goal at that moment.
        if None in self.encountered_wall_at:
            self.encountered_wall_at = (x, y)
            self.lh = necessary_heading(x, y, self.tx, self.ty)
            return False
        t_angle = necessary_heading(x, y, self.tx, self.ty)
        (ox, oy) = self.encountered_wall_at
        od = math.sqrt((ox-self.tx)**2 + (oy-self.ty)**2)   # impact -> goal distance
        cd = math.sqrt( (x-self.tx)**2 + (y-self.ty)**2)    # current -> goal distance
        dt = 0.01
        if self.lh - dt <= t_angle <= self.lh + dt and not near(x, y, ox, oy):
            if cd < od:
                print "Leaving wall"
                return True
        return False
def near(cx, cy, x, y):
    """Return True if (cx, cy) lies inside a 0.3 m axis-aligned box around (x, y)."""
    within_x = (x - .3) <= cx <= (x + .3)
    within_y = (y - .3) <= cy <= (y + .3)
    return within_x and within_y
def bug_algorithm(tx, ty, bug_type):
    """Consume the waypoint queue with the chosen Bug variant (0, 1 or 2).

    Waits for odometry and the brake service, then loops until rospy shuts
    down: for each queued waypoint it alternates go_until_obstacle() and the
    matching wall-follow routine, publishes progress on
    /small_scout_1/bug_nav_status, and accumulates the module-level success
    statistics.
    """
    # Track success stats
    global success_count
    global success_distance
    global success_time
    global stats_printed
    global total_time_start
    print "Waiting for location data on '/small_scout_1/odom/filtered...'"
    # NOTE(review): waits on 'odom/filtered' while init_listener subscribes to
    # 'odometry/filtered' — one of the two topic names looks wrong; confirm.
    rospy.wait_for_message('/small_scout_1/odom/filtered', Odometry,)
    print "... received."
    print("Waiting for break service...")
    rospy.wait_for_service('/small_scout_1/brake_rover')
    global brake_service
    brake_service = rospy.ServiceProxy('/small_scout_1/brake_rover', srv.BrakeRoverSrv)
    print("... active.")
    # Instantiate the requested Bug variant.
    if bug_type == 0:
        bug = Bug0(tx,ty)
    elif bug_type == 1:
        bug = Bug1(tx,ty)
    elif bug_type == 2:
        bug = Bug2(tx,ty)
    else:
        print "Unknown Bug algorithm", bug_type
        sys.exit(3)
    # Status messages so other nodes know when we are done or if we failed.
    status_topic = '/small_scout_1/bug_nav_status'
    bug_nav_status_publisher = rospy.Publisher(status_topic, String, queue_size=10)
    print "Publishing status messages on", status_topic
    # Add the command line waypoint to the queue.
    waypoint_queue.put(Point(tx, ty, 0))
    # Track total time spent.
    total_time_start = rospy.get_rostime().secs
    # Idle rate between queue checks.
    idle = rospy.Rate(10)
    # Main waypoint consumer loop - runs until the node shuts down.
    while not rospy.is_shutdown():
        rospy.sleep(0.1)
        while not waypoint_queue.empty():
            waypoint = waypoint_queue.get()
            wtx = waypoint.x
            wty = waypoint.y
            bug.tx = wtx
            bug.ty = wty
            # Begin the per-waypoint timeout timer (read by Bug.stuck_handler).
            global start_time
            start_time = rospy.get_rostime().secs
            est_distance_to_cover = estimated_current_location.distance(wtx, wty)
            act_distance_to_cover = actual_current_location.distance(wtx, wty)
            print("Est (x,y):", (estimated_current_location.current_location()[0] , estimated_current_location.current_location()[1]))
            print("Actual (x,y):", (actual_current_location.current_location()[0] , actual_current_location.current_location()[1]))
            print "Moving to coordinates from waypoint:", (round(wtx,2), round(wty,2)), "Distance: ", round(est_distance_to_cover,2), "m."
            print "Actual Distance: ", round(act_distance_to_cover,2), "m."
            global status_msg
            while estimated_current_location.distance(wtx, wty) > delta:
                try:
                    # Heart of the algorithm: drive free space, then follow the
                    # wall in the direction go_until_obstacle() selected.
                    circumnavigate_obstacle = bug.go_until_obstacle()
                    if circumnavigate_obstacle == CLOCKWISE:
                        bug.follow_wall_clockwise()
                    elif circumnavigate_obstacle == ANTICLOCKWISE:
                        bug.follow_wall_anticlockwise()
                except TimedOutException:
                    elapsed_time = rospy.get_rostime().secs - start_time
                    print "Failed to reach", (round(wtx,2), round(wty,2)), " after", round(elapsed_time), "(sim) seconds. Distance: ", round(estimated_current_location.distance(wtx, wty),2)
                    # NOTE(review): this assigns a tuple, not a string, and is
                    # then published as a String message — confirm intent.
                    status_msg = "Timeout:", (wtx, wty)
                    bug_nav_status_publisher.publish(status_msg)
                    global timed_out
                    timed_out = False
                    break
            # Confirm the target location was reached.
            if estimated_current_location.distance(wtx, wty) < delta:
                elapsed_time = rospy.get_rostime().secs - start_time
                print "Arrived at", (round(wtx,2), round(wty,2)), " after", round(elapsed_time), "seconds. Distance: ", round(actual_current_location.distance(wtx, wty),2)
                status_msg = "Arrived!"
                bug_nav_status_publisher.publish(status_msg)
                # Escape maneuvers don't count toward the success stats.
                if escape_waypoint == None:
                    success_count += 1.0
                    success_distance += act_distance_to_cover
                    success_time += elapsed_time
            bug.apply_brakes()
            print "There are", waypoint_queue.qsize(), "waypoints remaining."
        # Queue drained: print the summary statistics exactly once.
        if not stats_printed:
            try:
                success_perc = round((success_count/max_num_waypoints)*100)
            except ZeroDivisionError:
                success_perc = 0.0
            print "Succeeded: ", success_perc, "% of the time."
            print "Distance covered: ", round(success_distance,2), "m"
            print "Time spent on successful runs: ", round(success_time,2), "s"
            try:
                avg_speed = round(success_distance/success_time,2)
            except ZeroDivisionError:
                avg_speed = 0.0
            print "Avg Speed: ", avg_speed, "m/s"
            # Track total time spent.
            total_time_elapsed = rospy.get_rostime().secs - total_time_start
            print "Total Time: ", round(total_time_elapsed,2), "s"
            stats_printed = True
        idle.sleep()
def sigint_handler(signal_received, frame):
    """SIGINT handler: print the accumulated success statistics and exit."""
    waypoints_processed = max_num_waypoints-waypoint_queue.qsize()
    print "Processed", waypoints_processed,"waypoints."
    # Guard against a zero denominator when nothing was processed.
    try:
        success_perc = round((success_count/waypoints_processed)*100)
    except ZeroDivisionError:
        success_perc = 0.0
    print "Succeeded: ", success_perc, "% of the time."
    print "Distance covered: ", round(success_distance,2), "m"
    print "Time spent on successful runs: ", round(success_time,2), "s"
    try:
        avg_speed = round(success_distance/success_time,2)
    except ZeroDivisionError:
        avg_speed = 0.0
    print "Avg Speed: ", avg_speed, "m/s"
    # Total wall-clock (sim) time since bug_algorithm started.
    total_time_elapsed = rospy.get_rostime().secs - total_time_start
    print "Total Time: ", round(total_time_elapsed,2), "s"
    print('SIGINT or CTRL-C received. Exiting.')
    exit(0)
def main( task=None ):
    """Initialize shared state and ROS plumbing, then idle until a status is set.

    ``task`` is accepted for interface compatibility but is currently unused.
    Exits with status 0 once a waypoint is reached ("Arrived!") and 1 on any
    other terminal status (e.g. a timeout).
    """
    global estimated_current_location
    global actual_current_location
    global current_dists
    global status_msg
    estimated_current_location = Location()
    actual_current_location = Location()
    current_dists = Dist()
    init_listener()
    signal(SIGINT, sigint_handler)
    while not rospy.is_shutdown():
        # Fix: sleep each pass — the original loop busy-waited at 100% CPU
        # while polling status_msg.
        rospy.sleep(0.1)
        if status_msg is not None:
            if status_msg == "Arrived!":
                sys.exit(0)
            else:
                sys.exit(1)
if __name__ == '__main__':
    # Script entry point: start the ROS node, then run main() until shutdown;
    # main()'s sys.exit status is propagated as the process exit code.
    rospy.init_node('Bug_Obstacle_Nav', anonymous=True)
    rospy.loginfo('Bug nav started.')
    sys.exit(main())
| StarcoderdataPython |
5167438 | import dataset
import models
import train
'''
FAUST = "../../Downloads/Mesh-Datasets/MyFaustDataset"
COMA = "../../Downloads/Mesh-Datasets/MyComaDataset"
SHREC14 = "../../Downloads/Mesh-Datasets/MyShrec14"
PARAMS_FILE = "../model_data/FAUST10.pt"
traindata = dataset.FaustDataset(FAUST, train=True, test=False)
traindata = dataset.FaustAugmented(FAUST, train=True, test=False)
testdata = dataset.FaustDataset(FAUST, train=False, test=True)
model = models.ChebnetClassifier(
param_conv_layers=[128,128,64,64],
D_t=traindata.downscale_matrices,
E_t=traindata.downscaled_edges,
num_classes = traindata.num_classes,
parameters_file=PARAMS_FILE)
#train network
train.train(
train_data=traindata,
classifier=model,
parameters_file=PARAMS_FILE,
epoch_number=0)
#compute accuracy
accuracy, confusion_matrix = train.evaluate(eval_data=testdata,classifier=model)
print(accuracy)
i = 20
x = traindata[i].pos
e = traindata[i].edge_index.t()
f = traindata[i].face.t()
y = traindata[i].y
t = 2
n = x.shape[0]
eigs_num = 100
import adversarial.carlini_wagner as cw
# targeted attack using C&W method
logger = cw.ValueLogger({"adversarial": lambda x:x.adversarial_loss()})
builder = cw.CWBuilder(search_iterations=1)
builder.set_classifier(model).set_mesh(x,e,f).set_target(t)
builder.set_distortion_function(cw.L2_distortion).set_perturbation_type("lowband", eigs_num=eigs_num)
builder.set_minimization_iterations(0).set_adversarial_coeff(0.1)
adex_cw = builder.build(usetqdm="standard")
# untargeted attack using FGSM
adex_it = pgd.FGSMBuilder().set_classifier(model).set_mesh(x,e,f).build()
'''
# built-in libraries
import os
# third party libraries
import matplotlib.pyplot as plt
import numpy as np
import tqdm
import torch
import torch.nn.functional as func
# repository modules
import models
import train
import adversarial.carlini_wagner as cw
import adversarial.pgd as pgd
import dataset
import utils
# Resolve repository-relative paths for datasets and saved model parameters.
REPO_ROOT = os.path.abspath(os.path.join(os.path.dirname(os.path.realpath('__file__')),".."))
FAUST = os.path.join(REPO_ROOT,"datasets/faust")
SHREC14 = os.path.join(REPO_ROOT,"datasets/shrec14")
SMAL = os.path.join(REPO_ROOT,"datasets/smal")
PARAMS_FILE = os.path.join(REPO_ROOT, "model_data/data.pt")
DEVICE = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
SRC_DIR = os.path.join(REPO_ROOT,"src")
# NOTE(review): duplicate assignment — SHREC14 was already set above.
SHREC14 = os.path.join(REPO_ROOT,"datasets/shrec14")
traindata = dataset.Shrec14Dataset(SHREC14,device=DEVICE, train=True, test=False)
testdata = dataset.Shrec14Dataset(SHREC14, device=DEVICE, train=False, test=True, transform_data=False)
# NOTE(review): mid-file imports; conventionally these belong at the top.
from torch import nn
from models.pointnet import SimplePointNet
# PointNet encoder of the autoencoder; output dim is twice the latent size
# (presumably mean + variance halves — confirm against SimplePointNet).
LATENT_SPACE = 128
NUM_POINTS = 7000
ENC = SimplePointNet(
    latent_dimensionality=LATENT_SPACE*2,
    convolutional_output_dim=512,
    conv_layer_sizes=[32, 128, 256],
    fc_layer_sizes=[512, 256, 128],
    transformer_positions=[0]).to(DEVICE)
# Small MLP classifier head on top of the latent code (10 classes).
CLA = nn.Sequential(nn.Linear(LATENT_SPACE, 64), nn.ReLU(), nn.Linear(64,10)).to(DEVICE)
# Total number of trainable encoder parameters (for reporting).
params = sum([np.prod(p.size()) for p in ENC.parameters()])
# Smoke-test the encoder on a single shape's vertex positions.
ENC(traindata[0].pos)
# NOTE(review): traindata/testdata are rebound here from SHREC14 to FAUST;
# the SHREC14 datasets loaded above are discarded.
traindata = dataset.FaustDataset(FAUST, train=True, test=False, transform_data=True)
testdata = dataset.FaustDataset(FAUST, train=False, test=True, transform_data=True)
model = models.ChebnetClassifier(
    param_conv_layers=[128,128,64,64],
    D_t = traindata.downscale_matrices,
    E_t = traindata.downscaled_edges,
    num_classes = traindata.num_classes,
    parameters_file=PARAMS_FILE)
model = model.to(torch.device("cpu"))
# Train the classifier (epoch_number=0 presumably only loads/evaluates —
# confirm against train.train).
train.train(
    train_data=traindata,
    classifier=model,
    parameters_file=PARAMS_FILE,
    epoch_number=0)
3570047 | import random, discord
from discord.ext import commands
class _2048(commands.Cog):
    """Discord cog implementing a reaction-controlled game of 2048.

    The board is a 4x4 list of lists of ints; 0 means an empty cell.
    Each go_* method merges/compacts the board in one direction and
    returns (moved, board) where `moved` is True if anything changed.
    """
    def __init__(self, bot):
        self.bot = bot
    def format_board(self, board):
        """Render the board as a Discord code block, one row per line."""
        h = []
        for row in board:
            # NOTE(review): ''.join(str(row)) is equivalent to str(row);
            # rows are shown in Python list syntax, e.g. [0, 2, 0, 4].
            h.append(''.join(str(row)))
        h = '\n'.join(h)
        return f"```\n{h}\n```"
    def go_up(self, board):
        """Merge equal neighbours upward, then slide tiles up; per column."""
        moved = False
        for x in range(0, 4):
            # Pass 1: merge each tile with the next non-zero tile below it.
            for y in range(0, 4):
                if board[y][x] != 0 and y < 3:
                    for yprime in range(y + 1, 4):
                        if board[yprime][x] != 0:
                            if board[yprime][x] == board[y][x]:
                                board[y][x] = 2 * board[y][x]
                                moved = True
                                board[yprime][x] = 0
                                break
                            else:
                                break
            # Pass 2: slide tiles up into empty cells.
            for y in range(0, 4):
                if board[y][x] == 0 and y < 3:
                    for yprime in range(y + 1, 4):
                        if board[yprime][x] != 0:
                            board[y][x] = board[yprime][x]
                            board[yprime][x] = 0
                            moved = True
                            break
        return moved, board
    def go_down(self, board):
        """Merge equal neighbours downward, then slide tiles down; per column."""
        moved = False
        for x in range(0, 4):
            for y in range(3, -1, -1):
                if board[y][x] != 0 and y > 0:
                    for yprime in range(y-1, -1, -1):
                        if board[yprime][x] != 0:
                            if board[yprime][x] == board[y][x]:
                                board[y][x] = board[y][x] * 2
                                moved = True
                                board[yprime][x] = 0
                                break
                            else:
                                break
            for y in range(3, -1, -1):
                if board[y][x] == 0 and y > 0:
                    for yprime in range(y-1, -1, -1):
                        if board[yprime][x] != 0:
                            board[y][x] = board[yprime][x]
                            board[yprime][x] = 0
                            moved = True
                            break
        return moved, board
    def go_right(self, board):
        """Merge equal neighbours rightward, then slide tiles right; per row."""
        moved = False
        for y in range(0, 4):
            for x in range(3, -1, -1):
                if board[y][x] != 0 and x > 0:
                    for xprime in range(x-1, -1, -1):
                        if board[y][xprime] != 0:
                            if board[y][xprime] == board[y][x]:
                                board[y][x] = 2 * board[y][x]
                                moved = True
                                board[y][xprime] = 0
                                break
                            else:
                                break
            for x in range(3, -1, -1):
                if board[y][x] == 0 and x > 0:
                    for xprime in range(x-1, -1, -1):
                        if board[y][xprime] != 0:
                            board[y][x] = board[y][xprime]
                            board[y][xprime] = 0
                            moved = True
                            break
        return moved, board
    def go_left(self, board):
        """Merge equal neighbours leftward, then slide tiles left; per row."""
        moved = False
        for y in range(0, 4):
            for x in range(0, 4):
                if board[y][x] != 0 and x < 3:
                    for xprime in range(x+1, 4):
                        if board[y][xprime] != 0:
                            if board[y][x] == board[y][xprime]:
                                board[y][x] = 2 * board[y][x]
                                moved = True
                                board[y][xprime] = 0
                                break
                            else:
                                break
            for x in range(0, 4):
                if board[y][x] == 0 and x < 3:
                    for xprime in range(x+1, 4):
                        if board[y][xprime] != 0:
                            board[y][x] = board[y][xprime]
                            board[y][xprime] = 0
                            moved = True
                            break
        return moved, board
    def add_number(self, board):
        """Place a new tile on a random empty cell: 4 with 10% chance, else 2."""
        while True:
            x = random.randint(0,3)
            y = random.randint(0,3)
            pickanumber = random.randint(0,9)
            if pickanumber < 1:
                num = 4
            else:
                num = 2
            if board[x][y] == 0:
                board[x][y] = num
                break
        return board
    def get_result(self, board):
        """Return True for a win (a 2048 tile), False for a loss
        (no empty cells and no adjacent equal tiles), or None (implicitly)
        while the game can continue.
        """
        zeroes = 0
        playsleft = False
        for x in range(len(board)):
            for y in range(len(board[x])):
                if board[x][y] == 2048:
                    return True
        for y in range(0, 4):
            zeroes = zeroes + board[y].count(0)
            if zeroes > 0:
                break
            for x in range(0,4):
                # Any horizontally or vertically adjacent equal pair means
                # a merge is still possible.
                if x < 3 and board[y][x+1] == board[y][x]:
                    playsleft = True
                    break
                if y < 3 and board[y+1][x] == board[y][x]:
                    playsleft = True
                    break
            if playsleft == True:
                break
        if zeroes == 0 and playsleft == False:
            return False
    def create_board(self, ):
        """Return a fresh 4x4 board seeded with two random tiles."""
        b = [[0 for _ in range(4)] for _ in range(4)]
        b = self.add_number(b)
        b = self.add_number(b)
        return b
    @commands.command('2048')
    async def _2048_(self, ctx):
        """`2048` command: run one interactive game driven by reactions."""
        b = self.create_board()
        e = discord.Embed(title='2048',description=self.format_board(b), color=discord.Color.blurple())
        msg = await ctx.send(embed=e)
        # Control reactions: right, up, down, left, stop.
        for emoji in ["➡️","⬆️", "⬇️", "⬅️", "⏹️"]:
            await msg.add_reaction(emoji)
        while True:
            e = discord.Embed(title='2048',description=self.format_board(b), color=discord.Color.blurple())
            await msg.edit(embed=e)
            # Only accept reactions from the command author on this message.
            reaction, user = await self.bot.wait_for(
                "reaction_add",
                check=lambda r, u: u == ctx.author
                and r.message == msg
                and str(r) in ["⬆️", "⬇️", "➡️", "⬅️", "⏹️"],
            )
            try:
                await msg.remove_reaction(str(reaction), user)
            except discord.Forbidden:
                # Lacking "Manage Messages"; the reaction simply stays.
                pass
            if str(reaction) == '⏹️':
                await ctx.send("Game ended")
                return
            elif str(reaction) == '⬆️':
                ans, b = self.go_up(b)
            elif str(reaction) == '⬇️':
                ans, b = self.go_down(b)
            elif str(reaction) == '➡️':
                ans, b = self.go_right(b)
            elif str(reaction) == '⬅️':
                ans, b = self.go_left(b)
            # Only spawn a new tile if the move actually changed the board.
            if ans:
                b = self.add_number(b)
            res = self.get_result(b)
            if res:
                e = discord.Embed(title='2048',description=self.format_board(b), color=discord.Color.blurple())
                await msg.edit(content='You won!!!', embed=e)
                return
            elif res == False:
                e = discord.Embed(title='2048',description=self.format_board(b), color=discord.Color.blurple())
                await msg.edit(content='You lost', embed=e)
                return
3395404 | import os,cv2
import scipy.io as scio
import numpy as np
from tqdm import tqdm
# from scipy.misc import imresize
# Target bounding size (height, width) images are scaled to fit inside.
standard_size = [720, 1280]
val_mat_dir = '/input0/train_mat'   # .mat files with head-point annotations
image_dir = '/input0/image/'        # matching .jpg images
save_label_path = 'train_label'     # output directory for density-map .npy files
# exist_ok=True keeps re-runs from crashing: the original os.mkdir raised
# FileExistsError when the output directory was already present.
os.makedirs(save_label_path, exist_ok=True)
def main():
    """Convert every .mat annotation file into a Gaussian density map.

    For each annotation file: load the head points, resize the grayscale
    image to fit inside `standard_size` (preserving aspect ratio), scale
    the point coordinates by the same (rounded) factors, and save the
    resulting density map as '<save_label_path>/<name>.npy'.
    """
    img_list=os.listdir(val_mat_dir)
    for idx in tqdm(range(len(img_list))):
        filename=img_list[idx].replace('.mat','')
        if not filename:
            continue
        i = idx
        # Leftover from the original MATLAB script this was ported from:
        # if (mod(idx,10)==0)
        # fprintf(1,'Processing %3d/%d files\n', idx, num_images);
        # end
        mat=scio.loadmat(os.path.join(val_mat_dir,filename+'.mat'))
        input_img_name=os.path.join(image_dir,filename+'.jpg')
        im = cv2.imread(input_img_name,cv2.IMREAD_GRAYSCALE)
        h,w=im.shape
        annPoints = mat['image_info']
        # Scale by height first; if the resulting width would overflow the
        # standard width, rescale by width instead.
        rate = standard_size[0]/h;
        rate_w = w*rate;
        if rate_w>standard_size[1]:
            rate = standard_size[1]/w;
        # Effective per-axis rates after the resize rounds to whole pixels.
        rate_h = float(int(h*rate))/h;
        rate_w = float(int(w*rate))/w;
        im = cv2.resize(im,(int(w*rate),int(h*rate)));
        # print(annPoints[0, 0][0])
        # image_info is a nested MATLAB struct; [0,0][0] unwraps the Nx2
        # point array (presumably (x, y) columns — confirm against the data).
        annPoints=annPoints[0,0][0]
        annPoints[:,0] = annPoints[:,0]*float(rate_w)
        annPoints[:,1] = annPoints[:,1]*float(rate_h)
        im_density=get_density_map_gaussian(im,annPoints)
        # print(im_density.shape,im_density,type(im_density))
        # np.savetxt("%s/%s.csv"%(save_label_path,filename), im_density)
        np.save('%s/%s.npy'%(save_label_path,filename),im_density)
import numpy as np
import math
def matlab_style_gauss2D(shape=(3, 3), sigma=0.5):
    """
    Build a normalized 2-D Gaussian kernel, replicating MATLAB's
    fspecial('gaussian', [shape], [sigma]).
    """
    half_rows = (shape[0] - 1.) / 2.
    half_cols = (shape[1] - 1.) / 2.
    # Open grid of coordinates centered on the kernel midpoint.
    rows, cols = np.ogrid[-half_rows:half_rows + 1, -half_cols:half_cols + 1]
    kernel = np.exp(-(cols * cols + rows * rows) / (2. * sigma * sigma))
    # Like MATLAB: zero out entries negligible relative to the peak value.
    kernel[kernel < np.finfo(kernel.dtype).eps * kernel.max()] = 0
    total = kernel.sum()
    if total != 0:
        kernel /= total
    return kernel
def get_density_map_gaussian(im, points):
    """Accumulate a 15x15 Gaussian (sigma=4) at each annotated head point.

    `im` only supplies the output shape; `points` is an Nx2 array of
    (x, y) coordinates. Kernels that would overlap the image border are
    rebuilt at the clipped size so the total mass stays on the image.
    The index arithmetic below is 1-based (ported from MATLAB) and is
    converted to 0-based only in the final slice.
    """
    im_density = np.zeros(im.shape)
    [h, w] = im_density.shape
    for j in range(0, len(points)):
        f_sz = 15
        sigma = 4.0
        # H = matlab.fspecial('Gaussian', [f_sz, f_sz], sigma)
        H = matlab_style_gauss2D([f_sz, f_sz], sigma)
        # Clamp the point into the image, 1-based.
        x = min(w, max(1, abs(int(math.floor(points[j, 0])))))
        y = min(h, max(1, abs(int(math.floor(points[j, 1])))))
        if x > w or y > h:
            continue
        # Intended kernel bounds (1-based, may fall outside the image).
        x1 = x - int(np.floor(f_sz / 2))
        y1 = y - int(np.floor(f_sz / 2))
        x2 = x + int(np.floor(f_sz / 2))
        y2 = y + int(np.floor(f_sz / 2))
        # df* record how far each bound was clipped at the border.
        dfx1 = 0
        dfy1 = 0
        dfx2 = 0
        dfy2 = 0
        change_H = False
        if x1 < 1:
            dfx1 = abs(x1) + 1
            x1 = 1
            change_H = True
        if y1 < 1:
            dfy1 = abs(y1) + 1
            y1 = 1
            change_H = True
        if x2 > w:
            dfx2 = x2 - w
            x2 = w
            change_H = True
        if y2 > h:
            dfy2 = y2 - h
            y2 = h
            change_H = True
        # Size of the surviving (clipped) kernel window.
        x1h = 1 + dfx1
        y1h = 1 + dfy1
        x2h = f_sz - dfx2
        y2h = f_sz - dfy2
        if change_H:
            # Rebuild the Gaussian at the clipped size (re-normalized).
            # H = matlab.fspecial('Gaussian', [double(y2h - y1h + 1), double(x2h - x1h + 1)], sigma)
            H = matlab_style_gauss2D([float(y2h - y1h + 1), float(x2h - x1h + 1)], sigma)
        im_density[y1-1: y2, x1-1: x2] = im_density[y1-1: y2, x1-1: x2] + H
    return im_density
# Script entry point: generate density-map labels for the whole dataset.
if __name__=='__main__':
    main()
9741076 | <reponame>simonsimon006/tensorflow-wavelets
import pickle
import matplotlib.pyplot as plt
# Training-history file to visualize (alternative runs kept for convenience).
history_file_path = r"..\trainHistoryCifar10CNN.txt"
# history_file_path = r"..\trainHistoryWaveletCifar10CNN.txt"
# history_file_path = r"..\trainHistoryWaveletCifarDb410CNN.txt"

# The history dict was pickled by a previous Keras-style training run.
with open(history_file_path, 'rb') as fh:
    history = pickle.load(fh)

# Draw the train and validation loss curves on a single axis.
for series in ('loss', 'val_loss'):
    plt.plot(history[series])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()
| StarcoderdataPython |
4846401 | <reponame>TaoYibo1866/webots_ros2
#!/usr/bin/env python
# Copyright 1996-2021 Cyberbotics Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Launch Webots Universal Robot simulation with MoveIt2."""
import os
import pathlib
import yaml
from launch.actions import IncludeLaunchDescription, LogInfo
from launch.launch_description_sources import PythonLaunchDescriptionSource
from launch import LaunchDescription
from launch_ros.actions import Node
from ament_index_python.packages import get_package_share_directory, get_packages_with_prefixes
PACKAGE_NAME = 'webots_ros2_universal_robot'
def generate_launch_description():
    """Build the ROS 2 launch description for the UR simulation with MoveIt2.

    When the `moveit` package is installed this launches RViz, the MoveIt2
    move_group node, and the Webots robot simulation; otherwise it emits a
    log message asking the user to install MoveIt.
    """
    launch_description_nodes = []
    package_dir = get_package_share_directory(PACKAGE_NAME)
    # Helpers reading configuration files from this package's resource dir.
    def load_file(filename):
        return pathlib.Path(os.path.join(package_dir, 'resource', filename)).read_text()
    def load_yaml(filename):
        return yaml.safe_load(load_file(filename))
    # Check if moveit is installed
    if 'moveit' in get_packages_with_prefixes():
        # Robot model configuration shared by RViz and move_group.
        description = {'robot_description': load_file('moveit_ur5e_description.urdf')}
        description_semantic = {'robot_description_semantic': load_file('moveit_ur5e.srdf')}
        description_kinematics = {'robot_description_kinematics': load_yaml('moveit_kinematics.yaml')}
        sim_time = {'use_sim_time': True}
        # RViz node for visualization.
        rviz_config_file = os.path.join(package_dir, 'resource', 'moveit_visualization.rviz')
        launch_description_nodes.append(
            Node(
                package='rviz2',
                executable='rviz2',
                name='rviz2',
                arguments=['-d', rviz_config_file],
                parameters=[
                    description,
                    description_semantic,
                    description_kinematics,
                    sim_time
                ],
            )
        )
        # MoveIt2 move_group node with its controller configuration.
        movegroup = {'move_group': load_yaml('moveit_movegroup.yaml')}
        moveit_controllers = {
            'moveit_controller_manager': 'moveit_simple_controller_manager/MoveItSimpleControllerManager',
            'moveit_simple_controller_manager': load_yaml('moveit_controllers.yaml')
        }
        launch_description_nodes.append(
            Node(
                package='moveit_ros_move_group',
                executable='move_group',
                output='screen',
                parameters=[
                    description,
                    description_semantic,
                    description_kinematics,
                    moveit_controllers,
                    movegroup,
                    sim_time
                ],
            )
        )
        # Webots simulation with the robot, delegated to robot_launch.py.
        launch_description_nodes.append(
            IncludeLaunchDescription(
                PythonLaunchDescriptionSource(os.path.join(package_dir, 'launch', 'robot_launch.py'))
            )
        )
    else:
        launch_description_nodes.append(LogInfo(msg='"moveit" package is not installed, \
please install it in order to run this demo.'))
    return LaunchDescription(launch_description_nodes)
| StarcoderdataPython |
345652 | <gh_stars>0
# Write programs that read a sequence of integer inputs and print
# a. The smallest and largest of the inputs.
# b. The number of even and odd inputs.
# c. Cumulative totals. For example, if the input is 1 7 2 9, the program should print
# 1 8 10 19.
# d. All adjacent duplicates. For example, if the input is 1 3 3 4 5 5 6 6 6 2, the
# program should print 3 5 6.
# Read integers until the user types Stop/stop, then report how many of
# the entered values were even and how many were odd.
numEven = 0
numOdd = 0
stop = False
while not stop:
    inputN = str(input("Enter a number:(Stop/stop to stop): "))
    if inputN == "Stop" or inputN == "stop":
        stop = True
    else:
        # int() also accepts negative numbers and surrounding whitespace,
        # which the original str.isdigit() test wrongly rejected.
        # Non-numeric input is still silently ignored.
        try:
            value = int(inputN)
        except ValueError:
            continue
        if value % 2 == 0:
            numEven += 1
        else:
            numOdd += 1
print("Number of even numbers:", numEven)
print("Number of odd numbers:", numOdd)
9797493 | <reponame>arianasatryan/IntrinsicAnalysis
from typing import Tuple, List
import os
import sys
from IntrinsicAnalysis.feature_extractors.utils import remove_non_letters, EXTERNAL_DIR
from sklearn.feature_extraction.text import TfidfVectorizer
from nltk.corpus import stopwords
# IDF cut-offs below which an n-gram counts as "common" (a low idf means
# it appears in many documents); one threshold per n-gram order.
UNIGRAM_IDF_THRESHOLD = 4.0
BIGRAM_IDF_THRESHOLD = 5.0
TRIGRAM_IDF_THRESHOLD = 6.0
def load_ngrams(n: int):
    """Load the idf table for n-grams of order `n` (1, 2 or 3).

    Each file line is '<ngram tokens...> <idf>'; returns {ngram: idf}.
    """
    if n not in [1, 2, 3]:
        # NOTE(review): ModuleNotFoundError is an odd choice (nothing is
        # imported here); kept for compatibility, ValueError would fit better.
        raise ModuleNotFoundError("Incorrect number, choose on of these: 1,2,3")
    table_files = ["unigrams_with_idf.txt", "bigrams_with_idf.txt", "trigrams_with_idf.txt"]
    path = os.path.join(EXTERNAL_DIR, table_files[n - 1])
    idf_by_ngram = {}
    with open(path, "r") as fh:
        for line in fh:
            parts = line.split()
            # Everything but the trailing idf value forms the n-gram key.
            idf_by_ngram[' '.join(parts[:-1])] = float(parts[-1])
    return idf_by_ngram
# Loaded once at import time; requires the three idf table files to exist
# under EXTERNAL_DIR. Despite the names, the values are idf scores.
unigrams_with_freqs = load_ngrams(1)
bigrams_with_freqs = load_ngrams(2)
trigrams_with_freqs = load_ngrams(3)
def extract_ngram_features(text: str, paragraph: List[List[Tuple[str, str, str, str]]], feature_names=None):
    """Run the requested n-gram extractors on one paragraph.

    `paragraph` is a list of sentences; each token is a tuple whose second
    element is the surface form. With feature_names=None every registered
    extractor in `n_grams_features` runs, in registry order.
    """
    surface_tokens = [token[1] for sentence in paragraph for token in sentence]
    par_text = remove_non_letters(' '.join(surface_tokens)).split(' ')
    if feature_names is None:
        extractors = list(n_grams_features.values())
    else:
        extractors = [n_grams_features[name] for name in feature_names]
    features = []
    for extractor in extractors:
        features.extend(extractor(text, par_text))
    return features
def _write_idf_table(documents, out_name, ngram_range):
    """Shared implementation for save_{uni,bi,tri}grams_with_idf.

    Fits a TF-IDF vectorizer (Armenian stop words removed) on the cleaned
    documents and writes one '<ngram> <idf>' line per feature into
    EXTERNAL_DIR/out_name — the format load_ngrams() reads back.
    """
    documents = [remove_non_letters(doc) for doc in documents]
    vectorizer = TfidfVectorizer(smooth_idf=False, use_idf=True,
                                 ngram_range=ngram_range,
                                 stop_words=stopwords.words('armenian'))
    vectorizer.fit_transform(documents)
    with open(os.path.join(EXTERNAL_DIR, out_name), 'w') as f:
        for word, idf in zip(vectorizer.get_feature_names(), vectorizer.idf_):
            f.write("{} {}\n".format(word, idf))

def save_unigrams_with_idf(documents: List[str]):
    """Fit unigram idf values on `documents` and persist them."""
    _write_idf_table(documents, "unigrams_with_idf.txt", (1, 1))

def save_bigrams_with_idf(documents: List[str]):
    """Fit bigram idf values on `documents` and persist them."""
    _write_idf_table(documents, "bigrams_with_idf.txt", (2, 2))

def save_trigrams_with_idf(documents: List[str]):
    """Fit trigram idf values on `documents` and persist them."""
    _write_idf_table(documents, "trigrams_with_idf.txt", (3, 3))
def get_bigrams(text: list):
    """Return the space-joined adjacent token pairs of `text`."""
    return [f"{first} {second}" for first, second in zip(text, text[1:])]
def get_trigrams(text: list):
    """Return the space-joined adjacent token triples of `text`."""
    return [f"{a} {b} {c}" for a, b, c in zip(text, text[1:], text[2:])]
def find_unigrams_with_low_idf(text: str, paragraph: list):
    """Binary indicator: [1.0] if the paragraph contains at least one
    unigram whose idf is below UNIGRAM_IDF_THRESHOLD, else [0.0].
    Unknown tokens are treated as maximally rare (sys.maxsize)."""
    has_common = any(
        unigrams_with_freqs.get(token, sys.maxsize) < UNIGRAM_IDF_THRESHOLD
        for token in paragraph
    )
    return [1.0] if has_common else [0.0]
def find_bigrams_with_low_idf(text: str, paragraph: list):
    """Binary indicator: [1.0] if any paragraph bigram has idf below
    BIGRAM_IDF_THRESHOLD, else [0.0]. Unknown bigrams count as rare."""
    found = any(
        bigrams_with_freqs.get(pair, sys.maxsize) < BIGRAM_IDF_THRESHOLD
        for pair in get_bigrams(paragraph)
    )
    return [1.0] if found else [0.0]
def find_trigrams_with_low_idf(text: str, paragraph: list):
    """Binary indicator: [1.0] if any paragraph trigram has idf below the
    trigram threshold, else [0.0].

    Bug fix: the original compared against BIGRAM_IDF_THRESHOLD (5.0)
    instead of TRIGRAM_IDF_THRESHOLD (6.0), unlike its uni/bi siblings —
    an evident copy-paste slip.
    """
    for trigram in get_trigrams(paragraph):
        if trigrams_with_freqs.get(trigram, sys.maxsize) < TRIGRAM_IDF_THRESHOLD:
            return [1.0]
    return [0.0]
def unigrams_with_low_idf(text: str, paragraph: list):
    """Fraction of paragraph tokens whose idf falls below
    UNIGRAM_IDF_THRESHOLD. Unknown tokens count as rare (never low)."""
    low_count = sum(
        1.0 for token in paragraph
        if unigrams_with_freqs.get(token, sys.maxsize) < UNIGRAM_IDF_THRESHOLD
    )
    return [low_count / len(paragraph)]
def bigrams_with_low_idf(text: str, paragraph: list):
    """Rate of paragraph bigrams whose idf is below BIGRAM_IDF_THRESHOLD."""
    pair_grams = get_bigrams(paragraph)
    low_count = 0.0
    for pair in pair_grams:
        if bigrams_with_freqs.get(pair, sys.maxsize) < BIGRAM_IDF_THRESHOLD:
            low_count += 1
    denominator = len(pair_grams) - 1
    # NOTE(review): denominator is (number of bigrams - 1), mirroring the
    # original exactly; dividing by len(pair_grams) may have been the intent.
    return [low_count / denominator if denominator != 0 else 0.0]
def trigrams_with_low_idf(text: str, paragraph: list):
    """Rate of paragraph trigrams whose idf falls below the trigram threshold.

    Bug fix: the original compared against BIGRAM_IDF_THRESHOLD; every other
    *_with_low_idf function uses the threshold of its own n-gram order.
    """
    trigrams = get_trigrams(paragraph)
    c = 0.0
    for trigram in trigrams:
        if trigrams_with_freqs.get(trigram, sys.maxsize) < TRIGRAM_IDF_THRESHOLD:
            c += 1
    # NOTE(review): denominator (len(trigrams) - 2) mirrors the original;
    # dividing by len(trigrams) may have been the intent.
    return [c / (len(trigrams) - 2) if len(trigrams) - 2 != 0 else 0.0]
# Registry mapping public feature names to extractor callables;
# extract_ngram_features dispatches through this table.
n_grams_features = {
    "low_idf_unigrams_occurrence": find_unigrams_with_low_idf,
    "low_idf_bigrams_occurrence": find_bigrams_with_low_idf,
    "low_idf_trigrams_occurrence": find_trigrams_with_low_idf,
    "low_idf_unigrams_rate": unigrams_with_low_idf,
    "low_idf_bigrams_rate": bigrams_with_low_idf,
    "low_idf_trigrams_rate": trigrams_with_low_idf
}
| StarcoderdataPython |
6503969 | import spacy
import pytextrank # noqa: F401
from math import sqrt
from operator import itemgetter
from .base_single_doc_model import SingleDocSummModel
from typing import Union, List
class TextRankModel(SingleDocSummModel):
    """Extractive single-document summarizer built on spaCy + PyTextRank."""
    # static variables
    model_name = "TextRank"
    is_extractive = True
    is_neural = False
    def __init__(self, num_sentences=1):
        """num_sentences bounds both the number of ranked phrases used and
        the number of sentences returned per summary."""
        super(TextRankModel, self).__init__(
            trained_domain="not_trained", max_input_length=None, max_output_length=None
        )
        self.num_sentences = num_sentences
        # load a spaCy model, depending on language, scale, etc.
        self.nlp = spacy.load("en_core_web_sm")
        # PyTextRank registers the "textrank" pipeline component on import.
        self.nlp.add_pipe("textrank", last=True)
    def summarize(
        self, corpus: Union[List[str], List[List[str]]], queries: List[str] = None
    ) -> List[str]:
        """Summarize each document in `corpus`; `queries` is validated but
        otherwise unused by this extractive model."""
        self.assert_summ_input_type(corpus, queries)
        return list(map(lambda x: " ".join(self.summarize_single(x)), corpus))
    def summarize_single(self, corpus) -> List[str]:
        """Return up to num_sentences sentences best covering the top phrases.

        Scores each sentence by the rank mass of top phrases it does NOT
        contain (smaller is better), then picks the lowest-scoring sentences.
        """
        doc = self.nlp(corpus)
        # [start token, end token, set of phrase ids present] per sentence.
        sent_bounds = [[s.start, s.end, set([])] for s in doc.sents]
        limit_phrases = self.num_sentences
        phrase_id = 0
        unit_vector = []
        for p in doc._.phrases:
            unit_vector.append(p.rank)
            # Mark every sentence that contains a chunk of this phrase.
            for chunk in p.chunks:
                for sent_start, sent_end, sent_vector in sent_bounds:
                    if chunk.start >= sent_start and chunk.end <= sent_end:
                        sent_vector.add(phrase_id)
                        break
            phrase_id += 1
            if phrase_id == limit_phrases:
                break
        # Normalize the phrase ranks so they sum to one.
        sum_ranks = sum(unit_vector)
        unit_vector = [rank / sum_ranks for rank in unit_vector]
        sent_rank = {}
        sent_id = 0
        for sent_start, sent_end, sent_vector in sent_bounds:
            sum_sq = 0.0
            # Distance of the sentence from the phrase "unit vector":
            # accumulate squared ranks of phrases the sentence misses.
            for phrase_id in range(len(unit_vector)):
                if phrase_id not in sent_vector:
                    sum_sq += unit_vector[phrase_id] ** 2.0
            sent_rank[sent_id] = sqrt(sum_sq)
            sent_id += 1
        # NOTE(review): result of this sorted() call is discarded (dead code);
        # the selection loop below re-sorts.
        sorted(sent_rank.items(), key=itemgetter(1))
        sent_text = {}
        sent_id = 0
        limit_sentences = self.num_sentences
        summary_sentences = []
        for sent in doc.sents:
            sent_text[sent_id] = sent.text
            sent_id += 1
        num_sent = 0
        # Take the sentences with the smallest distance scores.
        for sent_id, rank in sorted(sent_rank.items(), key=itemgetter(1)):
            summary_sentences.append(sent_text[sent_id])
            num_sent += 1
            if num_sent == limit_sentences:
                break
        return summary_sentences
    @classmethod
    def show_capability(cls):
        """Print a human-readable capability summary for this model."""
        basic_description = cls.generate_basic_description()
        more_details = (
            "A graphbased ranking model for text processing. Extractive sentence summarization. \n "
            "Strengths: \n - Fast with low memory usage \n - Allows for control of summary length \n "
            "Weaknesses: \n - Not as accurate as neural methods."
        )
        print(f"{basic_description} \n {'#'*20} \n {more_details}")
| StarcoderdataPython |
90532 | import numpy as np
class MAX_POOL_LAYER:
    """Max pooling that shrinks height and width by `factor`.

    Stride equals the window size (factor), so windows never overlap.
    Input layout is (N, D, H, W): batch, depth/channels, height, width.
    """
    def __init__(self, **params):
        # 'stride' doubles as the pooling window size; defaults to 2.
        self.factor = params.get('stride', 2)

    def forward(self, X):
        """Forward pass.

        X: array of shape (N, D, H, W); H and W must be divisible by factor.
        Returns (feature_map, 0) where feature_map has shape
        (N, D, H//factor, W//factor). The trailing 0 fills the cost slot
        expected by the surrounding framework.
        """
        k = self.factor
        N, D, H, W = X.shape
        # Keep what backward() needs to route gradients.
        self.cache = [X, k]
        # Reshape each k-by-k window onto its own axes, then reduce them.
        windows = X.reshape(N, D, H // k, k, W // k, k)
        self.feature_map = windows.max(axis=(3, 5))
        return self.feature_map, 0

    def backward(self, delta):
        """Backward pass.

        delta: gradients of shape (N, D, H//factor, W//factor) or a
        flattened 2-D equivalent (reshaped back automatically).
        Returns gradients w.r.t. the cached input, shape (N, D, H, W);
        each window's gradient flows to every position that attained the
        maximum (ties all receive it).
        """
        X, k = self.cache
        if len(delta.shape) != 4:  # flattened input from a dense layer
            delta = delta.reshape(self.feature_map.shape)
        # Upsample both the pooled maxima and the gradients back to (H, W).
        upsampled_max = np.repeat(np.repeat(self.feature_map, k, axis=2), k, axis=3)
        upsampled_delta = np.repeat(np.repeat(delta, k, axis=2), k, axis=3)
        # Gradient passes only where the input equaled its window maximum.
        self.delta_X = (upsampled_max == X) * upsampled_delta
        return self.delta_X
| StarcoderdataPython |
9624717 | """
Name : c8_07_pandas_read+csv_function.py
Book : Python for Finance (2nd ed.)
Publisher: Packt Publishing Ltd.
Author : <NAME>
Date : 6/6/2017
email : <EMAIL>
<EMAIL>
"""
import pandas as pd
# Fetch IBM daily price data and preview the first rows; the first CSV
# column is used as the (parsed datetime) index.
url='http://canisius.edu/~yany/data/ibm.csv'
ibm_prices = pd.read_csv(url, index_col=0, parse_dates=True)
print(ibm_prices.head())
| StarcoderdataPython |
6701295 | # Recursion Exercise 4
# Write a recursive function called reverse_string that takes a string as a parameter.
# Return the string in reverse order. Hint, the slice operator will be helpful when solving this problem.
# Expected Output
# If the function call is reverse_string("cat"), then the function would return tac
# If the function call is reverse_string("house"), then the function would return esuoh
# Hint
# The recursive pattern is to take the last character from the string and pass the string
# (minus the last character) to the function.
# 1st solution
def reverse_string(string):
    """Recursively return `string` in reverse order.

    Base case is len(string) <= 1: the original only terminated at
    len == 1, so an empty string recursed forever (RecursionError).
    """
    if len(string) <= 1:
        return string
    # Move the first character to the end of the reversed remainder.
    return reverse_string(string[1:]) + string[0]
# 2nd solution
def reverse_string(string):
    """Recursively return `string` in reverse order (peels the last char).

    Base case is len(string) <= 1: the original only terminated at
    len == 1, so an empty string recursed forever (RecursionError).
    Returning `string` itself also covers the single-character case.
    """
    if len(string) <= 1:
        return string
    return string[-1] + reverse_string(string[:-1])
# Quick manual check when the exercise is run as a script.
if __name__ == "__main__":
    print(reverse_string("cat"))
    print(reverse_string("house"))
| StarcoderdataPython |
3242210 | # This is here for loading old models
from tlkit.models.student_models import FCN4Reshaped
FCN5SkipCifar = FCN4Reshaped | StarcoderdataPython |
9784410 |
from find_files_in_folder import search_dirs
import pandas as pd
# Folder holding the per-run .pickle result files (OneDrive share).
runs_directory = "D:\\Igor\\Research_USF\\University of South Florida\\Mao, Wenbin - Igor\\Febio-Models\\Active-Models\\PAQ\\Gamma-5-2\\runs"
# Collect every pickle under the runs folder, then inspect the first one.
pickles_paths = search_dirs(runs_directory, ".pickle")
df = pd.read_pickle(pickles_paths[0])
print(df)
# for i, pp in enumerate(pickles_paths):
# print(pp)
# if i == 0:
# df = pd.read_pickle(pp)
# else:
# new_df = pd.read_pickle(pp)
# print("new df:")
# print(new_df[0:2])
# df = pd.concat_data([df, new_df]) | StarcoderdataPython |
355252 | <filename>python_native/01_collections/sorting.py
# Sorting
## Sorting with keys
# Sample (ICAO code, city) pairs, deliberately out of order.
airports = [
    ('MROC', 'San Jose, CR'),
    ('KLAS', 'Las Vegas, USA'),
    ('EDDM', 'Munich, DE'),
    ('LSZH', 'Zurich, CH'),
    ('VNLK', 'Lukla, NEP')
]
# Sort by ICAO code and keep the ordered pairs in a dict
# (dicts preserve insertion order since Python 3.7).
sorted_airports = dict(sorted(airports, key=lambda pair: pair[0]))
print(sorted_airports)
9645108 | <reponame>anuraagbaishya/uiautomator
import os
import hashlib
import time
import json
from json_rpc_error import JsonRPCError
try:
import urllib2
except ImportError:
import urllib.request as urllib2
try:
from httplib import HTTPException
except:
from http.client import HTTPException
try:
if os.name == 'nt':
import urllib3
except: # to fix python setup error on Windows.
pass
class JsonRPCMethod(object):
    """Callable proxy for one JSON-RPC 2.0 method on a remote server.

    Calling the instance posts a JSON-RPC request to `url` and returns the
    decoded "result" field. On Windows requests go through a shared
    urllib3 pool; elsewhere urllib2/urllib.request is used.
    """
    # Shared connection pool, created once at class-definition time on
    # Windows only; the bare except keeps import working without urllib3.
    if os.name == 'nt':
        try:
            pool = urllib3.PoolManager()
        except:
            pass
    def __init__(self, url, method, timeout=30):
        # `timeout` is in seconds and applies per request.
        self.url, self.method, self.timeout = url, method, timeout
    def __call__(self, *args, **kwargs):
        """Invoke the remote method with either positional OR keyword params.

        Raises JsonRPCError if the server reports an error; the "result"
        value is returned otherwise.
        """
        # JSON-RPC params must be a list or an object, never both.
        # NOTE(review): SyntaxError is an unusual exception for argument
        # validation (TypeError would fit); kept for compatibility.
        if args and kwargs:
            raise SyntaxError("Could not accept both *args and **kwargs as JSONRPC parameters.")
        data = {"jsonrpc": "2.0", "method": self.method, "id": self.id()}
        if args:
            data["params"] = args
        elif kwargs:
            data["params"] = kwargs
        jsonresult = {"result": ""}
        if os.name == "nt":
            res = self.pool.urlopen("POST",
                                    self.url,
                                    headers={"Content-Type": "application/json"},
                                    body=json.dumps(data).encode("utf-8"),
                                    timeout=self.timeout)
            jsonresult = json.loads(res.data.decode("utf-8"))
        else:
            result = None
            try:
                req = urllib2.Request(self.url,
                                      json.dumps(data).encode("utf-8"),
                                      {"Content-type": "application/json"})
                result = urllib2.urlopen(req, timeout=self.timeout)
                jsonresult = json.loads(result.read().decode("utf-8"))
            finally:
                if result is not None:
                    result.close()
        if "error" in jsonresult and jsonresult["error"]:
            # NOTE(review): assumes the error payload always carries
            # data.exceptionTypeName — a malformed error response would
            # raise KeyError here instead; confirm against the server.
            raise JsonRPCError(
                jsonresult["error"]["code"],
                "%s: %s" % (jsonresult["error"]["data"]["exceptionTypeName"], jsonresult["error"]["message"])
            )
        return jsonresult["result"]
    def id(self):
        """Generate a request id: md5 of method name plus current time."""
        m = hashlib.md5()
        m.update(("%s at %f" % (self.method, time.time())).encode("utf-8"))
        return m.hexdigest()
105363 | <reponame>rnui2k/vivisect
import vdb.extensions.i386 as v_ext_i386
import vdb.extensions.i386 as vdb_ext_i386
def vdbExtension(vdb, trace):
    """Register windbg-style memory-dump aliases and wire the i386 eflags
    command into the amd64 personality."""
    dump_aliases = (
        ('db', 'mem -F bytes'),
        ('dw', 'mem -F u_int_16'),
        ('dd', 'mem -F u_int_32'),
        ('dq', 'mem -F u_int_64'),
        ('dr', 'mem -F "Deref View"'),
        ('ds', 'mem -F "Symbols View"'),
    )
    for alias, expansion in dump_aliases:
        vdb.addCmdAlias(alias, expansion)
    vdb.registerCmdExtension(vdb_ext_i386.eflags, 'amd64')
| StarcoderdataPython |
5069362 | <reponame>sanja7s/MedRed
import pandas as pd
import numpy as np
from collections import defaultdict, Counter, OrderedDict
import re
import nltk
from nltk import word_tokenize
# THIS needs to be RUN once, for the first time
# nltk.download('punkt')
import string
"""
a bit low-level code to transform the AMT inputs to NER labels for DL
"""
class CADEC_labels():
def __init__(self):
self.fin = '../../data/AMT/cadec_labels.csv'
self.fout = '../../data/AMT/labels/NER_cadec_labels.csv'
self.fout2 = '../../data/AMT/labels/filtered_cadec_labels.csv'
def __parse_answers(self, df, column_name):
# maybe just do NLTK tokenize here
def parse_row(row):
row = [ elem.strip().strip(string.punctuation) for elem in row ]
return [ elem for elem in row if elem != '' ]
df[column_name] = df[column_name].apply(
#lambda row: row.split(';')
lambda row: re.split(';|,|/|\.',str(row) )
)
df[column_name] = df[column_name].apply(
lambda row: parse_row(row)
)
    def assign_tags_to_text(self, text, entity_type='DIS', entities=None):
        """Project the AMT entity strings onto token-level BIO tags.

        Returns (indices_TAGS, kept_entities, entity_missmatch_with_AMT,
        cnt_not_found) where indices_TAGS maps (token position, token) to
        'O', 'B-<entity_type>' or 'I-<entity_type>'.
        """
        # print(text)
        # sort the entities by size (length of tokens)
        # because we will prioritise longer among overlapping entities
        entities = sorted(entities, key=lambda x: len(x.split()), reverse=True)
        # this is to check how many entities we end up discarding in this way
        cnt_not_found = 0
        # text into tokens -- we removed punctuation in AMT, so here, too
        try:
            split_text = word_tokenize(text)
        except TypeError:
            # Non-string post body (e.g. NaN) — nothing to tag.
            print (text, entities)
            return {}, [], 0, 0
        # print(split_text)
        # let us create two indices for each token -- one is its position in the list
        # the other is the character position in the text
        # the first index is used to mark with the tags the entity tokens
        # the second index will be used to compare the output of str.search
        indices_TAGS = OrderedDict({ (i, token):'O' for i, token in enumerate( split_text ) })
        assert(len(indices_TAGS) == len(split_text))
        double_indices_TAGS_lst = OrderedDict()
        double_indices_TAGS = OrderedDict()
        # go through the split text tokens and search for all the indices where they are found
        # we need to assign to each token their right position using the double indices
        for (i,token) in indices_TAGS:
            try:
                all_ii_found = [ m.start() for m in re.finditer(re.escape(token), text) ]
                #print(all_ii_found)
                all_ii = frozenset(all_ii_found)
            except:
                all_ii = frozenset([])
            # we save for each token, its position among the tokens,
            # and the index of its firsct character in the original text
            double_indices_TAGS_lst[(i, all_ii, token)] = 'O'
        assert(len(double_indices_TAGS_lst) == len(split_text))
        # we now select from the list of possible indices for each token
        # the right one -- because we know its token's position,
        # i.e., the first index in the pair
        prev_ii = -1
        for (i, all_ii, token) in double_indices_TAGS_lst:
            all_ii_lst = sorted(list(all_ii))
            #print(tag, all_ii_lst)
            # No character match: synthesize a position just after the
            # previous token's.
            if all_ii_lst == []:
                this_ii = prev_ii+1
            # Otherwise take the first candidate past the previous token.
            for possible_ii in all_ii_lst:
                if possible_ii > prev_ii:
                    this_ii = possible_ii
                    break
            prev_ii = this_ii
            double_indices_TAGS[(i, this_ii, token)] = 'O'
        assert(len(double_indices_TAGS) == len(split_text))
        entity_missmatch_with_AMT = 0
        entity_missmatched = []
        kept_entities = []
        # go from the longest to the shortest entities
        for e in entities:
            if e == 'nan':
                continue
            found_i = -1
            # if they are found in the raw text
            if text.find(e) != -1:
                # get the index of its FIRST position
                #found_ii = text.find(e)
                found_ii_list = [ m.start() for m in re.finditer(re.escape(e), text) ]
                l = len(e.split())
                # now find the corresponding tokens
                for (i, ii, token) in double_indices_TAGS:
                    if ii in found_ii_list:
                        # mark the first token, i.e., the beginning
                        found_i = i
                        if indices_TAGS[(found_i, token)] == 'O':
                            indices_TAGS[(found_i, token)] = 'B-' + entity_type
                            kept_entities.append(e)
                            # find the rest of the tokens and mark them with inside tags
                            s = 1
                            while s < l:
                                if indices_TAGS[( found_i+s, split_text[found_i+s] )] == 'O':
                                    indices_TAGS[( found_i+s, split_text[found_i+s] )] = 'I-' + entity_type
                                else:
                                    # Token already tagged by a longer,
                                    # previously placed entity.
                                    print ('XXXXX UNSOLVED OVERLAPP XXXXX')
                                s += 1
                            break
                # this is to be solved -- how you clean AMT text and parse tokens here should be the same
                if found_i == -1:
                    entity_missmatch_with_AMT += 1
                    entity_missmatched.append(e)
                    continue
                #print (e, found_ii, found_i+s+1)
            else:
                #print ('NOT FOUND', e)
                cnt_not_found += 1
        # if entity_missmatch_with_AMT or cnt_not_found:
        #     print(entity_missmatch_with_AMT, cnt_not_found)
        #     print(entity_missmatched)
        # unkept_entities = set(entities).difference(set(kept_entities))
        # print(unkept_entities)
        return indices_TAGS, kept_entities, entity_missmatch_with_AMT, cnt_not_found
    def parse_all(self):
        """Convert the annotated CSV (self.fin) into CoNLL-style
        token<TAB>tag lines (self.fout) and a summary CSV (self.fout2).

        One token per output line; posts are separated by an empty
        tab-only line to match the CADEC corpus format.
        """
        self.df_in = pd.read_csv(self.fin)
        self.__parse_answers(self.df_in, 'golden.symptoms0')
        self.__parse_answers(self.df_in, 'golden.drugs0')
        df_out = self.df_in.copy()
        with open(self.fout, 'w') as f:
            # Running statistics across all posts.
            total_entities, total_missmatched_entities, total_unkept_entities = 0, 0.0, 0.0
            all_sym_kept, all_drug_kept = [], []
            for i, line in self.df_in.iterrows():
                syms = line['golden.symptoms0']
                drugs = line['golden.drugs0']
                text = line['body']
                # if i in range(439,459):
                #     print (i, text),
                #     print (syms)
                #     print (drugs)
                #print(i)
                # Tag disease/symptom spans first, then drug spans, on the same post.
                post_tags_sym, sym_kept, sym_missmatch_with_AMT, sym_unkept = self.assign_tags_to_text(text, entity_type='DIS', entities=syms)
                #print(sym_kept)
                post_tags_drug, drug_kept, drug_missmatch_with_AMT, drug_unkept = self.assign_tags_to_text(text, entity_type='DRUG', entities=drugs)
                # print(drug_kept)
                # Merge the two taggings: a DIS tag wins; otherwise fall back to the DRUG tag.
                post_tags = OrderedDict({ (index, token): sym_tag if sym_tag != 'O' else post_tags_drug[(index, token)] for (index, token), sym_tag in post_tags_sym.items() })
                all_sym_kept.append(';'.join(sym_kept))
                all_drug_kept.append(';'.join(drug_kept))
                total_entities += len(sym_kept) + len(drug_kept)
                total_missmatched_entities += sym_missmatch_with_AMT + drug_missmatch_with_AMT
                total_unkept_entities += sym_unkept + drug_unkept
                i_post_len = 0
                for (index, token), tag in post_tags.items():
                    if token != 'null':
                        i_post_len += 1
                        f.write(token + '\t' + tag + '\n')
                    # Very long posts get an extra separator every 300 written tokens.
                    if i_post_len >= 300:
                        i_post_len = 0
                        print ("For long posts ")
                        f.write('\t\n')
                # this is to match CADEC format, one post and then an empty line
                f.write('\t\n')
                # if i == 1:
                #     break
                #print (post_tags)
            print("Processed {} posts. Percent of entities not matched {:.2f}%, and percent of entities discounted {:.2f}%, of total {} accepted.".\
                format(i, total_missmatched_entities/total_entities*100, total_unkept_entities/total_entities*100, total_entities))
        df_out['new_sym'] = all_sym_kept
        df_out['new_drug'] = all_drug_kept
        df_out.to_csv(self.fout2, columns=['golden.symptoms0', 'new_sym', 'golden.drugs0', 'new_drug', 'body'])
    def split_train_test(self):
        """Split the token/tag file (self.fout) into contiguous
        60/20/20 train/dev/test files next to it.

        NOTE(review): the split is by token *lines*, not by posts, so a
        single post can straddle two splits -- confirm this is intended.
        """
        # might be able to skip this -- just do the merging later as done for words, anyway
        #self.remove_wrong_chars()
        all_labels = pd.read_csv(self.fout, sep='\t', header=None)
        N = len(all_labels)
        train = all_labels.iloc[:int(N*0.6)]
        #train.loc[len(train)] = ["-DOCEND-", 'O']
        dev = all_labels.iloc[int(N*0.6):int(N*0.80)]
        #dev.loc[len(dev)] = ["-DOCEND-", 'O']
        test = all_labels.iloc[int(N*0.80):]
        #test.loc[len(test)] = ["-DOCEND-", 'O']
        print (N, len(train), len(dev), len(test))
        assert (len(train)+len(test)+len(dev) == N)
        # Output is space-separated to match the downstream NER reader.
        train.to_csv(self.fout.replace('.csv', '_train.csv'), sep=' ', index=None, header=None)
        dev.to_csv(self.fout.replace('.csv', '_dev.csv'), sep=' ', index=None, header=None)
        test.to_csv(self.fout.replace('.csv', '_test.csv'), sep=' ', index=None, header=None)
    # not needed and not used after all
    def remove_wrong_chars(self):
        """Drop words/tags from self.fout whose characters or tags are not in the
        CADEC vocabulary files.

        NOTE(review): all filtering conditions below are commented out, so as
        written this only rewrites the file unchanged (to_delete stays empty).
        """
        cadec_chars, cadec_tags = [], []
        cadec_chars_file = '../data/NER/preprocessed/chars.txt'
        with open(cadec_chars_file, 'r') as f:
            for line in f:
                cadec_chars.append(line.strip())
        cadec_tags_file = '../data/NER/preprocessed/tags.txt'
        with open(cadec_tags_file, 'r') as f:
            for line in f:
                cadec_tags.append(line.strip())
        print ("Total cadec chars ", len(cadec_chars))
        cadec_set = set(cadec_chars)
        all_labels = pd.read_csv(self.fout, sep='\t', header=None)
        clean_labels = all_labels.copy()
        print (all_labels.head())
        to_delete = []
        for i, row in all_labels.iterrows():
            word_set = set(list(str(row.values[0])))
            # if word_set.difference(cadec_set):
            #     print (row.values, word_set.difference(cadec_set))
            #     to_delete.append(i)
            # if len(word_set) == 0:
            #     print ("~~~~~", i)
            #     to_delete.append(i)
            # if not (str(row.values[1]) in cadec_tags):
            #     print ("^^^^^^^^^^^^", i)
            #     to_delete.append(i)
        print (to_delete)
        clean_labels = clean_labels.drop(clean_labels.index[to_delete])
        print ("Dropped {} words out of total {}.".format( len(all_labels)-len(clean_labels), len(all_labels) ))
        clean_labels = clean_labels.reset_index(drop=True)
        clean_labels.to_csv(self.fout, sep='\t', header=None, index=None)
# Script entry: build the CoNLL-style label files from the annotated CSV.
# (CADEC_labels is presumably the class the methods above belong to -- defined earlier in this file.)
amt = CADEC_labels()
amt.parse_all()
# amt.remove_wrong_chars()
amt.split_train_test() | StarcoderdataPython |
9720736 | <filename>compliance_checker/runner.py
import io
import json
import os
import sys
import traceback
from collections import OrderedDict
from contextlib import contextmanager
from compliance_checker.suite import CheckSuite
# Py 3.4+ has contextlib.redirect_stdout to redirect stdout to a different
# stream, but use this decorated function in order to redirect output in
# previous versions
@contextmanager
def stdout_redirector(stream):
    """Temporarily route ``sys.stdout`` to *stream* for the duration of the block.

    The original stdout is restored even if the body raises.
    """
    saved = sys.stdout
    sys.stdout = stream
    try:
        yield
    finally:
        sys.stdout = saved
class ComplianceChecker(object):
    """
    Compliance Checker runner class.

    Ties together the entire compliance checker framework, is used from
    the command line or can be used via import.
    """

    # Consider using __init__ instead of so many classmethods
    @classmethod
    def run_checker(
        cls,
        ds_loc,
        checker_names,
        verbose,
        criteria,
        skip_checks=None,
        output_filename="-",
        output_format=None,
        options=None,
    ):
        """
        Static check runner.

        @param ds_loc Dataset location (url or file)
        @param checker_names List of string names to run, should match keys of checkers dict (empty list means run all)
        @param verbose Verbosity of the output (0, 1, 2)
        @param criteria Determines failure (lenient, normal, strict)
        @param output_filename Path to the file for output
        @param skip_checks Names of checks to skip
        @param output_format Format of the output(s); defaults to ["text"]
        @returns If the tests failed (based on the criteria)
        """
        # Use None as the default instead of a mutable list literal: a shared
        # default list would be mutated below ("Make sure output format is a
        # list") and leak state across calls.
        if output_format is None:
            output_format = ["text"]
        all_groups = []
        cs = CheckSuite(options=options or {})
        # using OrderedDict is important here to preserve the order
        # of multiple datasets which may be passed in
        score_dict = OrderedDict()

        if not isinstance(ds_loc, str):
            locs = ds_loc
        # if single dataset, put in list
        else:
            locs = [ds_loc]

        # Make sure output format is a list
        if isinstance(output_format, str):
            output_format = [output_format]

        for loc in locs:  # loop through each dataset and run specified checks
            ds = cs.load_dataset(loc)

            score_groups = cs.run(ds, skip_checks, *checker_names)
            for group in score_groups.values():
                all_groups.append(group[0])

            # TODO: consider wrapping in a proper context manager instead
            if hasattr(ds, "close"):
                ds.close()

            if not score_groups:
                raise ValueError(
                    "No checks found, please check the name of the checker(s) and that they are installed"
                )
            else:
                score_dict[loc] = score_groups

        # define a score limit to truncate the output to the strictness level
        # specified by the user
        if criteria == "normal":
            limit = 2
        elif criteria == "strict":
            limit = 1
        elif criteria == "lenient":
            limit = 3

        for out_fmt in output_format:
            if out_fmt == "text":
                if output_filename == "-":
                    cls.stdout_output(cs, score_dict, verbose, limit)
                # need to redirect output from stdout since print functions are
                # presently used to generate the standard report output
                else:
                    if len(output_format) > 1:
                        # Update file name if needed
                        output_filename = "{}.txt".format(
                            os.path.splitext(output_filename)[0]
                        )
                    with io.open(output_filename, "w", encoding="utf-8") as f:
                        with stdout_redirector(f):
                            cls.stdout_output(cs, score_dict, verbose, limit)
            elif out_fmt == "html":
                # Update file name if needed
                if len(output_format) > 1 and output_filename != "-":
                    output_filename = "{}.html".format(
                        os.path.splitext(output_filename)[0]
                    )
                cls.html_output(cs, score_dict, output_filename, ds_loc, limit)
            elif out_fmt in {"json", "json_new"}:
                # Update file name if needed
                if len(output_format) > 1 and output_filename != "-":
                    output_filename = "{}.json".format(
                        os.path.splitext(output_filename)[0]
                    )
                cls.json_output(cs, score_dict, output_filename, ds_loc, limit, out_fmt)
            else:
                raise TypeError("Invalid format %s" % out_fmt)

        errors_occurred = cls.check_errors(score_groups, verbose)
        return (
            all(cs.passtree(groups, limit) for groups in all_groups),
            errors_occurred,
        )

    @classmethod
    def stdout_output(cls, cs, score_dict, verbose, limit):
        """
        Calls output routine to display results in terminal, including scoring.
        Goes to verbose function if called by user.

        @param cs Compliance Checker Suite
        @param score_dict Dict with dataset name as key, list of results as
                          value
        @param verbose Integer value for verbosity level
        @param limit The degree of strictness, 1 being the strictest, and going up from there.
        """
        # Initialize so an empty score_dict returns None instead of raising
        # NameError on the final "return groups".
        groups = None
        for ds, score_groups in score_dict.items():
            for checker, rpair in score_groups.items():
                groups, errors = rpair
                score_list, points, out_of = cs.standard_output(
                    ds, limit, checker, groups
                )
                # send list of grouped result objects to stdout & reasoning_routine
                cs.standard_output_generation(
                    groups, limit, points, out_of, check=checker
                )
        return groups

    @classmethod
    def html_output(cls, cs, score_dict, output_filename, ds_loc, limit):
        """
        Generates rendered HTML output for the compliance score(s)

        @param cs Compliance Checker Suite
        @param score_groups List of results
        @param output_filename The file path to output to
        @param ds_loc List of source datasets
        @param limit The degree of strictness, 1 being the strictest, and going up from there.
        """
        groups = None  # avoid NameError when score_dict is empty
        checkers_html = []
        for ds, score_groups in score_dict.items():
            for checker, (groups, errors) in score_groups.items():
                checkers_html.append(cs.checker_html_output(checker, groups, ds, limit))

        html = cs.html_output(checkers_html)

        if output_filename == "-":
            print(html)
        else:
            with io.open(output_filename, "w", encoding="utf8") as f:
                f.write(html)

        return groups

    @classmethod
    def json_output(
        cls, cs, score_dict, output_filename, ds_loc, limit, output_type="json"
    ):
        """
        Generates JSON output for the compliance score(s)

        @param cs Compliance Checker Suite
        @param score_groups List of results
        @param output_filename The file path to output to
        @param ds_loc List of source datasets
        @param limit The degree of strictness, 1 being the strictest,
                     and going up from there.
        @param output_type Either 'json' or 'json_new'. json_new is the new
                           json output format that supports multiple datasets
        """
        groups = None  # avoid NameError when score_dict is empty
        results = {}

        # json output keys out at the top level by
        if len(score_dict) > 1 and output_type != "json_new":
            raise ValueError(
                "output_type must be set to 'json_new' if outputting multiple datasets to a single json file or stdout"
            )

        if output_type == "json":
            for ds, score_groups in score_dict.items():
                for checker, rpair in score_groups.items():
                    groups, errors = rpair
                    results[checker] = cs.dict_output(checker, groups, ds, limit)
        elif output_type == "json_new":
            for ds, score_groups in score_dict.items():
                for checker, rpair in score_groups.items():
                    groups, errors = rpair
                    results[ds] = {}
                    results[ds][checker] = cs.dict_output(checker, groups, ds, limit)

        json_results = json.dumps(results, indent=2, ensure_ascii=False)

        if output_filename == "-":
            print(json_results)
        else:
            with io.open(output_filename, "w", encoding="utf8") as f:
                f.write(json_results)

        return groups

    @classmethod
    def check_errors(cls, score_groups, verbose):
        """
        Reports any errors (exceptions) that occurred during checking to stderr.
        Goes to verbose function if called by user.

        @param score_groups List of results
        @param verbose Integer value for verbosity level
        @returns True if any checker recorded errors, False otherwise
        """
        errors_occurred = False
        for checker, rpair in score_groups.items():
            errors = rpair[-1]
            if len(errors):
                errors_occurred = True
                print(
                    "WARNING: The following exceptions occurred during the %s checker (possibly indicate compliance checker issues):"
                    % checker,
                    file=sys.stderr,
                )
                for check_name, epair in errors.items():
                    print(
                        "%s.%s: %s" % (checker, check_name, epair[0]), file=sys.stderr
                    )
                    if verbose > 0:
                        traceback.print_tb(
                            epair[1].tb_next.tb_next
                        )  # skip first two as they are noise from the running itself @TODO search for check_name
                        print(file=sys.stderr)

        return errors_occurred
| StarcoderdataPython |
8028008 | <filename>mscreen/autodocktools_prepare_py3k/AutoDockTools/Utilities24/prepare_ligand_vif.py
#!/usr/bin/env python
#
#
#
# $Header: /opt/cvs/python/packages/share1.5/AutoDockTools/Utilities24/prepare_ligand_vif.py,v 1.2 2012/01/31 17:57:37 rhuey Exp $
#
import os
from MolKit import Read
from string import split, strip
if __name__ == '__main__':
    import sys
    import getopt

    # NOTE(review): `split` and `strip` used below are the *function* forms
    # imported at the top via `from string import split, strip`. Those were
    # removed from the `string` module in Python 3, so despite the py3k-style
    # print() calls this script fails at import time under Python 3 --
    # confirm the intended interpreter, or switch to the str methods.

    def usage():
        "Print helpful, accurate usage statement to stdout."
        print("Usage: prepare_ligand_vif.py -l filename")
        print()
        print("    Description of command...")
        print("         -l     ligand_filename (.pdbqt format)")
        print("    Optional parameters:")
        print("        [-v]    verbose output")
        print("        [-o pdbqt_filename] (default output filename is ligand_filename_stem + '_L.pdbqt')")
        print("        [-P]    list of indicies of residues to write with LP dummy atoms '22,26,30,40,41,42' ")
        print("        [-S]    list of indicies of residues to write with LS dummy atoms '15,17' ")

    # process command arguments
    try:
        opt_list, args = getopt.getopt(sys.argv[1:], 'l:vo:P:S:h')
    except getopt.GetoptError as msg:
        print('prepare_ligand_vif.py: %s' %msg)
        usage()
        sys.exit(2)

    # initialize required parameters
    #-l: ligand
    ligand_filename = None
    # optional parameters
    verbose = None
    #-o outputfilename
    outputfilename = None
    #-P LP_atom_residues
    LP_atom_residues = None
    #-S LS_atom_residues
    LS_atom_residues = None

    #'l:vo:P:S:'
    for o, a in opt_list:
        #print "o=", o, " a=", a
        if o in ('-l', '--l'):
            ligand_filename = a
            if verbose: print('set ligand_filename to ', a)
            outputfilename = ligand_filename.split('.')[0] + '_LLL.pdbqt'
            if verbose: print('set outputfilename from ligand_filename to ', outputfilename)
        if o in ('-v', '--v'):
            verbose = True
            if verbose: print('set verbose to ', True)
        if o in ('-o', '--o'):
            outputfilename = a
            if verbose: print('set outputfilename to ', outputfilename)
        if o in ('-P', '--P'):
            LP_atom_residues = a
            # NOTE(review): a single residue index without a comma is left as a
            # string here, which later makes `int(resnum) in LP_atom_residues`
            # raise TypeError -- confirm whether single values are supported.
            if LP_atom_residues.find(',')>-1:
                LP_atom_residues = list(map(int, split(a, ',')))
            if verbose: print('set LP_atom_residues to ', LP_atom_residues)
        if o in ('-S', '--S'):
            LS_atom_residues = a
            if LS_atom_residues.find(',')>-1:
                LS_atom_residues = list(map(int, split(a, ',')))
            if verbose: print('set LS_atom_residues to ', LS_atom_residues)
        if o in ('-h', '--'):
            usage()
            sys.exit()

    # check input
    if not ligand_filename:
        print('prepare_ligand_vif: ligand filename must be specified.')
        usage()
        sys.exit()
    if not LP_atom_residues:
        print('prepare_ligand_vif: LP_atom_residues must be specified.')
        usage()
        sys.exit()
    if not LS_atom_residues:
        print('prepare_ligand_vif: LS_atom_residues must be specified.')
        usage()
        sys.exit()

    if verbose:
        print('LP_atom_residues=', LP_atom_residues)
        print('LS_atom_residues=', LS_atom_residues)
        print("reading ", ligand_filename)
    # process ligand
    #??check that ligand in pdbqt format??
    ext = os.path.splitext(ligand_filename)[1]
    assert ext=='.pdbqt'
    fptr = open(ligand_filename)
    liglines = fptr.readlines()
    fptr.close()
    if verbose: print('read ', len(liglines), ' lines from ', ligand_filename)
    optr = open(outputfilename, 'w')
    if verbose: print("writing ", outputfilename)
    # check whether already has ROOT/ENDROOT/TORSDOF
    i = 0
    has_root = 0
    if liglines[i]=='ROOT\n':
        optr.write('ROOT\n')
        has_root = 1
        i+=1
    else:
        optr.write('ROOT\n')
    for j in liglines[i:]:
        ll = split(j)
        # PDBQT fixed columns: atom name is cols 12-16, residue number cols 22-26.
        if j[0:4]in ['ATOM', 'HETA']:
            atname = strip(j[12:16])
            resnum = strip(j[22:26])
            if strip(atname)=='CA':
                optr.write(j)
                #@@ guard against duplicates?
                #if j[-4:].find("RP")<0:
                # Every CA atom gets an RP dummy line; LP/LS lines only for the
                # residue indices given on the command line.
                optr.write(j[:-4]+' RP\n')
                if verbose: print("wrote RP")
                resnum = strip(resnum)
                if int(resnum) in LP_atom_residues:
                    optr.write(j[:-4]+' LP\n')
                    if verbose: print(" LP ", resnum)
                if int(resnum) in LS_atom_residues:
                    optr.write(j[:-4]+' LS\n')
                    if verbose: print(" LS ", resnum)
            else:
                optr.write(j)
        else:
            optr.write(j)
    if not has_root:
        optr.write("ENDROOT\n")
        optr.write("TORSDOF 0 \n")
    optr.close()

# To execute this command type:
#prepare_ligand_vif -l 1vzf.pdbqt -P 22,26,30,40,41,42 -S 15,17 -o 1vzf_L.pdbqt
| StarcoderdataPython |
1925041 | <gh_stars>0
"""Dashboard Module for all html/dash components"""
| StarcoderdataPython |
def count_lines(fname):
    """Return the number of lines in the text file at *fname*."""
    total = 0
    with open(fname) as handle:
        for _ in handle:
            total += 1
    return total
def detokenize(tokens):
    """Rebuild surface text from parallel 'gloss' tokens and their 'after' spacing.

    Each gloss is followed by its trailing separator; surrounding whitespace
    of the final string is stripped.
    """
    pieces = [gloss + after for gloss, after in zip(tokens['gloss'], tokens['after'])]
    return ''.join(pieces).strip()
| StarcoderdataPython |
3504060 | #!/usr/bin/env python
# -*- coding:UTF-8 -*-
#
# shutdownevt.py
#
# Example of a generator that uses an event to shut down
import time
def follow(thefile, shutdown=None):
    """Yield lines appended to *thefile*, tail -f style.

    Starts from the current end of file. If *shutdown* (a threading.Event-like
    object) is supplied and set, the generator stops from the inside.
    Polls every 0.1s while no new data is available.
    """
    thefile.seek(0, 2)
    while not (shutdown and shutdown.isSet()):
        line = thefile.readline()
        if line:
            yield line
        else:
            time.sleep(0.1)
import threading
# Global flag used to stop the follow() generator from outside.
shutdown_event = threading.Event()
# NOTE(review): Python 2 print-statement syntax below -- this module as a
# whole cannot run under Python 3 as written.
def run():
    # Tail the access log until shutdown_event is set, echoing each line.
    lines = follow(open("run/foo/access-log"),shutdown_event)
    for line in lines:
        print line,
    print "Done"
# Run the above in a separate thread
t = threading.Thread(target=run)
t.start()

# Wait a while then shut down
# (demonstrates shutting down the generator from outside via the event)
time.sleep(60)
print "Shutting down"
shutdown_event.set()
| StarcoderdataPython |
6641518 | # -*- coding: utf-8 -*-
# from Programs import program as pr
import csv
import re
# import os
import sys
# yazma-okuma işlemleri için parametreler
csv.register_dialect("myDialect", delimiter='|', quoting=csv.QUOTE_NONE, skipinitialspace=True)
def tr_title(paramWord: str) -> str:
    """Title-case *paramWord* word by word, handling Turkish letter casing.

    Each word's first letter is uppercased with the Turkish mapping
    (i->İ, ı->I, ç->Ç, ş->Ş, ü->Ü, ğ->Ğ), the remainder is lowercased with
    the inverse mapping, then str.capitalize() handles non-Turkish letters.
    """
    to_upper = str.maketrans("iıçşüğ", "İIÇŞÜĞ")
    to_lower = str.maketrans("İIÇŞÜĞ", "iıçşüğ")
    titled = []
    for word in paramWord.split(" "):
        first = word[0].translate(to_upper)
        rest = word[1:].translate(to_lower)
        titled.append((first + rest).capitalize())
    return " ".join(titled).strip()
class BadCommandError(Exception):
    """Raised when Uyeler.silme receives a command other than 'sil' or 'update'."""
    pass
class Uyeler:
    """Gym member record persisted as a pipe-delimited row in uyeTablo.csv."""

    # Gym name shared by all members (class attribute).
    salonAdi = "<NAME>"

    # constructor
    def __init__(self, Id: str, ad: str, soyad: str, yas: str, cinsiyet: str, boy: str, kilo: str, telefon: str, emailadress: str,
                 dogumTarihi: str, program: str):
        self.Id = Id
        # Names are normalized with the Turkish-aware title-casing helper.
        self.ad = tr_title(ad)
        self.soyad = tr_title(soyad)
        # print(adSoyad.title(), type(adSoyad.title()))
        self.yas = yas
        self.cinsiyet = cinsiyet
        self.boy = boy
        self.kilo = kilo
        self.telefon = telefon  # int to str? #AYB
        self.emailadress = emailadress  # AYB
        self.dogumTarihi = dogumTarihi
        self.program = program  # AYB

    @staticmethod
    def kayit(Id: str, ad: str, soyad: str, yas: str, cinsiyet: str, boy: str, kilo: str, telefon: str, emailadress: str, dogumTarihi: str,
              program: str) -> object:
        """Create a new member unless one with this Id already exists; returns None on duplicate."""
        # yeni kayıt için daha önceki üyeler kontrol
        if Uyeler.arama(Id) is None:
            print(f"{Id} numaralı üye oluşturuluyor.")
            return Uyeler(Id, ad, soyad, yas, cinsiyet, boy, kilo, telefon, emailadress, dogumTarihi, program)
        else:
            print(f"{Id} numaralı üye zaten kayıtlı.")
            return None

    def serialize(self) -> list:  # üye bilgilerinin yazıya dökümü
        # Field order must match the CSV column order used everywhere else.
        return [str(self.Id), self.ad, self.soyad, self.yas, self.cinsiyet, self.boy, self.kilo,
                str(self.telefon), self.emailadress, str(self.dogumTarihi), self.program]

    def yazma(self):  # bir üyenin bilgilerinin dosyaya yazımı
        # Append this member as one row to the members file.
        liste = self.serialize()
        with open("uyeTablo.csv", mode='a', encoding="utf-8", newline='') as writeFile:
            writer = csv.writer(writeFile, 'myDialect')
            writer.writerow(liste)

    def yazmaAhmet(self):  # AYB
        # Same as yazma() but targets the scratch file deneme.csv.
        liste = self.serialize()
        print(liste)
        with open("deneme.csv", mode='a', encoding="utf-8", newline='') as writeFile:
            writer = csv.writer(writeFile, 'myDialect')
            writer.writerow(liste)

    @staticmethod
    def okuma() -> list:  # üye listesinin okunması #filenotfound except edildi
        # Read all member rows; create an empty file (and return []) if missing.
        liste = []
        try:
            with open("uyeTablo.csv", mode='r', encoding="utf-8") as readerFile:
                reader = csv.reader(readerFile, 'myDialect')
                for row in reader:
                    liste.append(row)
        except FileNotFoundError:
            file = open("uyeTablo.csv", mode='w', encoding="utf-8")
            file.close()
        finally:
            return liste

    @staticmethod
    def arama(aranan: str):  # TODO: sadece isimle arama yapılırsa aranan ot of index veriyor
        # id(tc kimlik no) veya isim ile arama yapılabilir
        # NOTE(review): if `aranan` is already alphanumeric *and* title-cased,
        # arananL stays empty and arananL[0] below raises IndexError (the TODO).
        arananL = []
        if not (aranan.isalnum() and aranan.istitle()):
            aranan = tr_title(aranan)
            temp = aranan.split(" ")
            arananL.append(' '.join(temp[0:-1]))
            arananL.append(temp[-1])
        for uye in Uyeler.okuma():
            if (arananL[0] in uye) or (arananL[1] in uye):
                return uye
        return None

    @staticmethod
    def silme(silinecekUye: str, command: str):
        """Üye silmek için method"""
        ind = None  # dosyanın sırasını bozmamak için indis değeri
        liste = Uyeler.okuma()
        if command == "sil":
            # Delete the first matching row and rewrite the whole file.
            for ind, uye in enumerate(liste):
                if silinecekUye in uye:
                    liste.pop(ind)
                    with open("uyeTablo.csv", mode='w', encoding="utf-8", newline='') as writeFile:
                        writer = csv.writer(writeFile, 'myDialect')
                        writer.writerows(liste)
                    print("Üye başarıyla silinmiştir.")
                    break
        elif command == "update":  # TODO: Yusuftaki dosyayı düzenle
            # For updates: pop the row but let the caller rewrite the file.
            for ind, uye in enumerate(liste):
                if silinecekUye in uye:
                    liste.pop(ind)
                    return ind, liste
            return None
        else:
            raise BadCommandError("Bad command: %s" %command)

    @staticmethod
    def guncelleme(guncelUye: list):
        """Üye güncellemek için method"""
        # NOTE(review): silme(..., "update") returns None when the member is
        # not found, which makes this tuple-unpack raise TypeError -- confirm.
        indis, liste = Uyeler.silme(str(guncelUye[0]), "update")
        if indis is not None:
            # * opertörüyle indis indis yazmadan constructor çalışır mı???
            yeniUye = Uyeler(guncelUye[0], guncelUye[1], guncelUye[2], guncelUye[3], guncelUye[4], guncelUye[5],
                             guncelUye[6], guncelUye[7], guncelUye[8], guncelUye[9], guncelUye[10])
            if yeniUye is not None:
                # Re-insert at the original position so file order is preserved.
                liste.insert(indis, yeniUye.serialize())
                # IDEA dosyayı baştan yazmak yerine yeni dosya oluşturmayı da deneyebilirsin
                print(liste)
                with open("uyeTablo.csv", mode='w', encoding="utf-8", newline='') as writeFile:
                    writer = csv.writer(writeFile, 'myDialect')
                    writer.writerows(liste)
                print("Üye başarıyla güncellenmiştir.")
        else:
            return f"Böyle bir üye yok!"

    """@property
    def Id(self):
        return self.Id"""

    """@Id.setter
    def Id(self, Id):
        # TODO: encapsulation yapılacak
        #if Id.isdecimal():
        self.Id = Id"""
# Ad-hoc manual test drive: create a few members, persist them, then exercise
# update/search/delete. Runs on import, so it has side effects on uyeTablo.csv.
uye = []
uye.append(Uyeler.kayit("1", "Yasin", "Işıktaş", "24", "Erkek", "205", "70", "05394670523", "<EMAIL>", "07.01.1997",
                        "program"))
if uye[0] is not None:
    uye[0].yazma()
uye.append(
    Uyeler.kayit("2", "Ahmet", "ikinci", "24", "Erkek", "178", "70", "05394670523", "<EMAIL>", "07.01.1997", "kalori"))
if uye[1] is not None:
    uye[1].yazma()
uye.append(
    Uyeler.kayit("3", "<NAME>", "birinci", "24", "Erkek", "184", "70", "05394670523", "<EMAIL>", "07.01.1997",
                 "program"))
if uye[2] is not None:
    uye[2].yazma()
uye.append(
    Uyeler.kayit("4", "rabia", "ertem", "24", "Kadın", "180", "70", "05394670523", "<EMAIL>", "07.01.1997", "program"))
if uye[3] is not None:
    uye[3].yazma()
#print(f"str: {uye[0]}")# TODO: __repr__ ve __str__ yazılacak
Uyeler.guncelleme(["2", "Ahmet", "ikinci", "22", "ERkek", "183", "85", "05394670523", "<EMAIL>", "27.07.1999",
                   "proGram"])
#Uyeler.arama("<NAME>")
#Uyeler.silme("2", "sil")
# Uyeler.okuma()
# print (locale.getdefaultlocale())
""" print(Uyeler.arama("rabia ertem"))
print(pr.yagsizKiloHesapla(1,180, 70))
print(pr.kaloriYakmaHesapla(1, 60))
print((pr.boyKiloendeks(170,70)))
print((pr.netyaghesaplama(1,70,180))) """
# Uyeler()
"""def convertToUye(asd): #AYB
    uye = asd.split('|')
    return Uyeler.kayit(uye[0], uye[1], uye[2], uye[3], uye[4], uye[5], uye[6], uye[7], uye[8], uye[9], uye[10])
def convertToUyeUp(asd): #AYB
    uye = asd.split('|')
    return uye
def convertFromUye(uye): #AYB
    return f"{uye[0]}|{uye[1]}|{uye[2]}|{uye[3]}|{uye[4]}|{uye[5]}|{uye[6]}|{uye[7]}|{uye[8]}|{uye[9]}|{uye[10]}"
islem = sys.argv[1] #AYB
if islem == 'w': #AYB
    a = convertToUye(sys.argv[2]).yazmaAhmet() #AYB
elif islem == 'r':
    #print(sys.argv[2])
    #aa = "<NAME>"
    a = Uyeler.arama(sys.argv[2])
    print(convertFromUye(a))
elif islem == 'g':
    a = convertToUye(sys.argv[2]).guncelleme()
    a = Uyeler.guncelleme(convertToUyeUp(sys.argv[2]))"""
| StarcoderdataPython |
1834286 | <reponame>prateekcom/django-phone-auth<gh_stars>0
import re
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
def get_username_regex():
    """Return the username pattern: 4-30 lowercase alphanumeric characters, anchored."""
    return r'^[a-z0-9]{4,30}$'
def validate_username(username):
    """Django validator: raise ValidationError unless *username* matches get_username_regex().

    Note: the message string below spans a backslash line-continuation, so the
    source indentation becomes part of the user-visible text.
    """
    if re.search(get_username_regex(), username) is None:
        raise ValidationError(
            _('Username should be alphanumeric, lowercase and should\
            contain atleast 4 and atmost 30 characters'),
            params={'username': username},
        )
| StarcoderdataPython |
6443452 | <reponame>ViciousCircle-Github/arithmetic_arranger<gh_stars>0
def arithmetic_arranger(problems, *args):
    """Arrange a list of addition/subtraction problems side by side.

    Each problem is a string like "32 + 698". Returns the vertically
    aligned problems joined into one string (rows separated by newlines,
    problems separated by four spaces), or an "Error: ..." message string
    for invalid input. Passing a truthy second argument appends a row
    with the computed answers.
    """
    if len(problems) > 5:
        return "Error: Too many problems."

    # Show answers only for a truthy flag; the old `if args:` also showed
    # answers for an explicit False argument.
    show_answers = bool(args and args[0])

    # One list of column strings per output row: operands, operator line,
    # dashes, and (optionally) answers.
    rows = [[], [], [], []]
    for problem in problems:
        parts = problem.split(" ")
        operand1, operator, operand2 = parts[0], parts[1], parts[2]

        # Tuple membership, not substring membership: `x in "-+"` would also
        # accept "" and "-+" as operators.
        if operator not in ("+", "-"):
            return "Error: Operator must be '+' or '-'."
        if len(operand1) > 4 or len(operand2) > 4:
            return "Error: Numbers cannot be more than four digits."
        try:
            left = int(operand1)
            right = int(operand2)
        except ValueError:
            return "Error: Numbers must only contain digits."

        # Column width: widest operand plus room for the operator and a space.
        width = max(len(operand1), len(operand2)) + 2
        rows[0].append(f"{operand1:>{width}}")
        rows[1].append(operator + f"{operand2:>{width - 1}}")
        rows[2].append("-" * width)
        if show_answers:
            result = left + right if operator == "+" else left - right
            rows[3].append(f"{result:>{width}}")

    gap = " " * 4
    lines = [gap.join(row) for row in rows[:3]]
    if show_answers:
        lines.append(gap.join(rows[3]))
    return "\n".join(lines)
| StarcoderdataPython |
9613744 | import attrs
from databutler.datana.generic.corpus.code import DatanaFunction
from databutler.datana.generic.corpus.processing.base_processor import DatanaFunctionProcessor
from databutler.utils import code as codeutils
class CodeNormalizer(DatanaFunctionProcessor):
    """Processor that normalizes the source-code formatting of a Datana function."""

    def _process(self, d_func: DatanaFunction) -> DatanaFunction:
        """
        Returns a new Datana function that contains the formatting-normalized code.
        """
        # Copy first so the input function is left untouched; only code_str changes.
        new_d_func = d_func.copy()
        new_d_func.code_str = codeutils.normalize_code(d_func.code_str)
        return new_d_func

    @classmethod
    def get_processor_name(cls) -> str:
        # Stable identifier used to register/look up this processor.
        return "code-normalizer"
| StarcoderdataPython |
6445892 | <gh_stars>10-100
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2016 uralbash <<EMAIL>>
#
# Distributed under terms of the MIT license.
from pyramid import testing
from pyramid_sacrud import CONFIG_RESOURCES
from pyramid_sacrud.views import home_view
class Foo:
    """Minimal placeholder resource class used as a dummy model in the tests below."""
    pass
class TestHome(object):
    """Unit tests for home_view with various CONFIG_RESOURCES settings."""

    def test_no_models(self):
        # Setting key absent entirely -> resources None.
        request = testing.DummyRequest()
        request.registry.settings = {}
        assert home_view(request) ==\
            {'dashboard_row_len': 3, 'resources': None}

    def test_empty_of_models(self):
        # Setting present but None -> resources None.
        request = testing.DummyRequest()
        request.registry.settings = {CONFIG_RESOURCES: None}
        assert home_view(request) ==\
            {'dashboard_row_len': 3, 'resources': None}

    def test_empty_list_of_models(self):
        # Empty list is passed through unchanged.
        request = testing.DummyRequest()
        request.registry.settings = {CONFIG_RESOURCES: []}
        assert home_view(request) ==\
            {'dashboard_row_len': 3, 'resources': []}

    def test_with_models(self):
        request = testing.DummyRequest()
        request.registry.settings = {CONFIG_RESOURCES: [("foo", Foo)]}
        assert home_view(request) ==\
            {'dashboard_row_len': 3, 'resources': [("foo", Foo)]}

    def test_unicode_group_name(self):
        # Group names may be non-ASCII; bytes/str/escaped forms must all round-trip.
        request = testing.DummyRequest()
        request.registry.settings = {CONFIG_RESOURCES: [("говядо", Foo)]}
        assert home_view(request) ==\
            {'dashboard_row_len': 3, 'resources': [("говядо", Foo)]}

        request.registry.settings = {CONFIG_RESOURCES: [(u"говядо", Foo)]}
        assert home_view(request) ==\
            {'dashboard_row_len': 3, 'resources': [(u"говядо", Foo)]}

        request.registry.settings = {CONFIG_RESOURCES: [
            (u'\u0433\u043e\u0432\u044f\u0434\u043e', Foo)
        ]}
        assert home_view(request) ==\
            {'dashboard_row_len': 3, 'resources': [(u"говядо", Foo)]}
class TestHomeFunc(object):
    """Functional smoke test: the sacrud home page must respond 200."""

    def test_200(self, testapp):
        testapp.get('/sacrud/', status=200)
        assert True
| StarcoderdataPython |
3347013 | #
# Copyright (c) 2020 Seagate Technology LLC and/or its Affiliates
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For any questions about this software or licensing,
# please email <EMAIL> or <EMAIL>.
import argparse
import subprocess
import logging
import re
import os
import json
from s3cipher.cortx_s3_cipher import CortxS3Cipher
from s3backgrounddelete.cortx_cluster_config import CipherInvalidToken
from cortx.utils.validator.v_pkg import PkgV
from cortx.utils.validator.v_service import ServiceV
# Module-wide logging setup; flip to DEBUG to see the shell commands executed below.
logging.basicConfig(level=logging.INFO)
# logging.basicConfig(level=logging.DEBUG)
log=logging.getLogger('=>')
class S3CortxSetup:
    # Path to the prerequisite-validation config consumed by the setup checks.
    __preqs_conf_file="/opt/seagate/cortx/s3/mini-prov/s3setup_prereqs.json"

    def __init__(self):
        """Instantiate S3CortxSetup."""
    def delete_background_delete_account(self, ldappasswd: str, keylen: int, key: str, s3background_cofig:str):
        """Delete s3 account which was used by s3background delete.

        Searches LDAP for the s3-background-delete-svc account, derives its
        access key (via s3cipher, falling back to the background-delete config
        file), deletes the access-key entry and then the account entry.
        Returns True on success, False otherwise.

        NOTE(review): `ldappasswd` is interpolated into shell=True command
        lines -- it appears in process listings and is shell-injectable;
        consider passing credentials another way.
        """
        cmd = 'ldapsearch -b "o=s3-background-delete-svc,ou=accounts,dc=s3,dc=seagate,dc=com" -x -w ' + ldappasswd + ' -D "cn=sgiamadmin,dc=seagate,dc=com" -H ldap://'
        output,error = subprocess.Popen(cmd, universal_newlines=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
        log.debug("\ncmd:{0},\noutput:{1},\nerror:{2}".format(cmd, output, error))
        # Strip all whitespace so the multi-line ldapsearch result can be matched.
        output = re.sub(r"[\n\t\s]*","",output)
        if "result:32Nosuchobjectmatched" in output:
          print("No s3-background-delete-svc account found")
          return False
        # Delete s3background delete account.
        s3_cipher = CortxS3Cipher(None, True, keylen, key)
        access_key = ""
        try:
          access_key = s3_cipher.generate_key()
        except CipherInvalidToken as err:
          log.debug("Cipher generate key failed with error : {0}, trying from flat file : {1}".format(err, s3background_cofig))
          # NOTE(review): in the command below, `cut -d " " -f 5` is written as
          # adjacent Python string literals, so the space delimiter argument is
          # lost ("cut -d  -f 5"), and the final sed expression is missing its
          # closing '/'. The resulting access_key likely also keeps a trailing
          # newline from communicate(). Confirm this fallback path ever worked.
          cmd = "awk '/background_account_access_key/ {print}' "+ s3background_cofig + " | cut -d " " -f 5 | sed -e 's/^\"//' -e 's/\"$//"
          access_key,error = subprocess.Popen(cmd, universal_newlines=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
          log.debug("\ncmd:{0},\noutput:{1},\nerror:{2}".format(cmd, access_key, error))
        # Remove the access-key entry first, then the account subtree.
        cmd = "ldapdelete -x -w " + ldappasswd + " -r \"ak=" + access_key + ",ou=accesskeys,dc=s3,dc=seagate,dc=com\" -D \"cn=sgiamadmin,dc=seagate,dc=com\" -H ldapi:///"
        output,error = subprocess.Popen(cmd, universal_newlines=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
        log.debug("\ncmd:{0},\noutput:{1},\nerror:{2}".format(cmd, output, error))
        cmd = 'ldapdelete -x -w ' + ldappasswd + ' -r "o=s3-background-delete-svc,ou=accounts,dc=s3,dc=seagate,dc=com" -D "cn=sgiamadmin,dc=seagate,dc=com" -H ldapi:///'
        output,error = subprocess.Popen(cmd, universal_newlines=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
        log.debug("\ncmd:{0},\noutput:{1},\nerror:{2}".format(cmd, output, error))
        if not error:
          print ("Deleted s3backgrounddelete account successfully...")
          return True
        print ("Delete s3backgrounddelete account failed with: {}".format(error))
        return False
def accounts_cleanup(self, ldappasswd, s3background_cofig:str = "/opt/seagate/cortx/s3/s3backgrounddelete/config.yaml"):
    """Clean up s3 accounts.

    Deletes the s3 background-delete service account (and its access key)
    from LDAP via delete_background_delete_account.

    :param ldappasswd: LDAP admin password used for the delete operations.
    :param s3background_cofig: path to the s3backgrounddelete YAML config,
        used as a fallback source for the access key.
        NOTE(review): the parameter name looks like a typo of "config", but
        renaming it would break keyword callers, so it is left as-is.
    :returns: True on success, False otherwise.
    """
    # 22 / "s3backgroundaccesskey" are the cipher key length and constant key
    # used to regenerate the account's access key -- TODO confirm against
    # delete_background_delete_account's signature.
    rc1 = self.delete_background_delete_account(ldappasswd, 22, "s3backgroundaccesskey", s3background_cofig)
    return rc1
def dependencies_cleanup(self):
    """Clean up configs.

    Moves every S3/auth configuration file aside to a ``.bak`` backup using
    ``mv -f`` (a missing source file is not fatal to the overall flow and an
    existing backup is overwritten).

    :returns: True. Failures of individual ``mv`` calls are only logged.
    """
    log.debug("removing s3 dependencies")
    # The original code repeated the same Popen/mv/debug triplet five times;
    # a file list plus a single loop keeps the commands and their order
    # identical while removing the duplication.
    config_files = [
        "/opt/seagate/cortx/auth/resources/authserver.properties",
        "/opt/seagate/cortx/s3/s3backgrounddelete/config.yaml",
        "/opt/seagate/cortx/auth/resources/keystore.properties",
        "/opt/seagate/cortx/s3/conf/s3config.yaml",
        "/opt/seagate/cortx/s3/conf/s3_confstore.json",
    ]
    for config_file in config_files:
        cmd = "mv -f {0} {0}.bak".format(config_file)
        output, error = subprocess.Popen(cmd, universal_newlines=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
        log.debug("\ncmd:{0},\noutput:{1},\nerror:{2}".format(cmd, output, error))
    return True
def validate_pre_requisites(self, rpms: list = None, pip3s: list = None, services: list = None):
    """Validate that required rpms/pip3 packages are installed and services run.

    Each argument is an optional list of names; ``None`` (or an empty list)
    skips that category of check. Validation is delegated to the cortx-utils
    ``PkgV``/``ServiceV`` validators, which raise on failure.

    :returns: True when all requested validations pass, False otherwise.
    """
    try:
        if pip3s:
            PkgV().validate('pip3s', pip3s)
        if services:
            ServiceV().validate('isrunning', services)
        if rpms:
            PkgV().validate('rpms', rpms)
    except Exception as e:
        # NOTE(review): the message always cites self.__preqs_conf_file even
        # when the lists came from elsewhere -- confirm that is intended.
        print(f"{e}, config:{self.__preqs_conf_file}")
        return False
    return True
def run(self):
    """Parse CLI arguments and dispatch S3 mini-provisioner actions.

    Supports positional actions ('post_install', 'cleanup') plus the
    --cleanup/--ldappasswd/--validateprerequisites option flow. Exits the
    process directly: 0 on success, 1 on action failure, -2 (status 254)
    on invalid input.
    """
    parser = argparse.ArgumentParser(description='Cortx S3 Setup')
    # parser.add_argument("post_install", help='Perform S3setup mini-provisioner post_install actions', action="store_true", default=False)
    parser.add_argument("action", type=str, help='Perform S3setup mini-provisioner actions',nargs='*', choices=['post_install', 'cleanup' ])
    parser.add_argument("--cleanup", help='Cleanup S3 accounts and dependencies. Valid values: all/accounts/dependencies')
    # Future functionalities to be added here.
    parser.add_argument("--ldappasswd", help='ldap password, needed for --cleanup')
    parser.add_argument("--validateprerequisites", help='validate prerequisites for mini-provisioner setup', action="store_true")
    parser.add_argument("--preqs_conf_file", help='optional conf file location used with --validateprerequisites')
    parser.add_argument("--config",
                        help='config file url, check cortx-py-utils::confstore for supported formats.',
                        type=str)
    args = parser.parse_args()
    # --cleanup flow: requires --ldappasswd; dispatches to the matching
    # cleanup helper and exits with "not rc" so True maps to status 0.
    if args.cleanup != None:
        if args.ldappasswd == None:
            print("Invalid input, provide --ldappasswd for cleanup")
            exit (-2)
        if args.cleanup == "accounts":
            if args.ldappasswd:
                rc = self.accounts_cleanup(args.ldappasswd)
                exit (not rc)
        elif args.cleanup == "dependencies":
            rc = self.dependencies_cleanup()
            exit (not rc)
        elif args.cleanup == "all":
            if args.ldappasswd:
                rc1 = self.accounts_cleanup(args.ldappasswd)
                rc2 = self.dependencies_cleanup()
                # Both cleanups must succeed for a zero exit status.
                exit (not (rc1 & rc2))
        else:
            print("Invalid input for cleanup {}. Valid values: all/accounts/dependencies".format(args.cleanup))
            exit (-2)
    # Prerequisite validation runs for --validateprerequisites or the
    # 'post_install' action; reads a JSON file listing rpms/services/pip3s.
    if args.validateprerequisites or "post_install" in args.action:
        if args.preqs_conf_file:
            self.__preqs_conf_file = args.preqs_conf_file
        if not os.path.isfile(self.__preqs_conf_file):
            print(f"preqs config file {self.__preqs_conf_file} not found")
            exit (-2)
        try:
            with open(self.__preqs_conf_file) as preqs_conf:
                preqs_conf_json = json.load(preqs_conf)
        except Exception as e:
            print(f"open() or json.load() failed: {e}")
            exit (-2)
        rc = self.validate_pre_requisites(rpms=preqs_conf_json['rpms'], services=preqs_conf_json['services'], pip3s=preqs_conf_json['pip3s'])
        exit(not rc)
| StarcoderdataPython |
9635250 | <reponame>mainulhossain/biowl<filename>app/biowl/libraries/fastqc/adapter.py
import os
from os import path
from pathlib import Path
from ...exechelper import func_exec_run
from ...fileop import PosixFileSystem
from ....util import Utility
fastqc = path.join(path.abspath(path.dirname(__file__)), path.join('lib', 'fastqc'))
def run_fastqc(*args, **kwargs):
    """Run FastQC on an input file and return the generated HTML report path.

    Arguments may be given positionally or as keywords:
      data   -- input sequence file (required)
      outdir -- output directory (optional; defaults to the input's directory)
    Remaining positional arguments are forwarded to the fastqc binary.

    :returns: report path with the filesystem root stripped.
    :raises ValueError: if ``data`` is missing or FastQC produced no report.
    """
    paramindex = 0
    if 'data' in kwargs.keys():
        data = kwargs['data']
    else:
        if len(args) == paramindex:
            raise ValueError("Argument missing error in FastQC.")
        data = args[paramindex]
        paramindex += 1
    data = Utility.get_normalized_path(data)
    # Bug fix: 'outdir' was previously left unbound (NameError) when it was
    # passed neither as a keyword nor positionally; default to None so the
    # fallback to the input file's directory is taken instead.
    outdir = None
    if 'outdir' in kwargs.keys():
        outdir = kwargs['outdir']
    else:
        if len(args) > paramindex:
            outdir = args[paramindex]
            paramindex += 1
    if outdir:
        outdir = Utility.get_normalized_path(outdir)
    else:
        outdir = path.dirname(data)
    if not os.path.exists(outdir):
        os.makedirs(outdir)
    cmdargs = [data, "--outdir=" + outdir]
    # NOTE(review): args[2:] assumes both 'data' and 'outdir' arrived
    # positionally; extra flags are dropped when either came via kwargs.
    # Kept as-is to preserve existing behaviour (consider args[paramindex:]).
    for arg in args[2:]:
        cmdargs.append(arg)
    # FastQC names its report <stem>_fastqc.html; remove a stale copy first
    # so the existence check below reflects this run only.
    outpath = Path(data).stem + "_fastqc.html"
    outpath = os.path.join(outdir, os.path.basename(outpath))
    if os.path.exists(outpath):
        os.remove(outpath)
    _, err = func_exec_run(fastqc, *cmdargs)
    fs = PosixFileSystem(Utility.get_rootdir(2))
    stripped_path = fs.strip_root(outpath)
    if not os.path.exists(outpath):
        raise ValueError("FastQC could not generate the file " + stripped_path + " due to error " + err)
    return stripped_path
| StarcoderdataPython |
11344388 | from django import forms
class InputForm(forms.Form):
    """Form collecting non-negative numeric features from the user.

    NOTE(review): the field names (age, sex, cp, thalach, exang, oldpeak,
    slope, ca) look like UCI heart-disease dataset columns feeding a
    prediction model -- confirm against the view that consumes this form.
    Field declaration order determines rendering order; do not reorder.
    """
    age_v = forms.DecimalField(min_value=0)
    sex_v = forms.DecimalField(min_value=0)
    cp_v = forms.DecimalField(min_value=0)
    thalach_v = forms.DecimalField(min_value=0)
    exang_v = forms.DecimalField(min_value=0)
    oldpeak_v = forms.DecimalField(min_value=0)
    slope_v = forms.DecimalField(min_value=0)
ca_v = forms.DecimalField(min_value=0) | StarcoderdataPython |
125855 | <filename>reus/transfermarkt/tm_team_transfers.py
from ..util import get_page_soup
from .util import tm_format_currency
import pandas as pd
def tm_team_transfers(club, season, position_group='All', main_position='All', window='All', currency='EUR'):
    """
    Extracts transfer information (arrivals and departures) for a club's
    season from transfermarkt, including fee, market value and counterpart
    club/league details.

    Parameters:
    club (string): club name
    season (string or int): year at start of season
    position_group (string): positional group
    main_position (string): main position
    window (string): transfer window
    currency (string): desired currency to return for values

    Returns:
    list: team transfers (one dict per transfer)
    """
    # Validate variables
    assert position_group in ['All', 'Goalkeepers', 'Defenders', 'Midfielders', 'Strikers'], 'Select a valid position group'
    assert main_position in ['All', 'Goalkeeper', 'Sweeper', 'Centre-Back', 'Left-Back', 'Right-Back',
                             'Defensive Midfield', 'Central Midfield', 'Right Midfield', 'Left Midfield', 'Attacking Midfield',
                             'Left Winger', 'Right Winger', 'Second Striker', 'Centre-Forward'], 'Select a valid main position'
    assert window in ['All', 'Summer', 'Winter'], 'Select a valid transfer window'
    # Lookup team name: translation table maps names from several data
    # providers to the transfermarkt id/slug.
    df = pd.read_csv('https://raw.githubusercontent.com/ian-shepherd/reus_data/main/raw-data/team_translations.csv', keep_default_na=False)
    df = df[(df.fbref_name==club) | (df.transfermarkt_name==club) | \
            (df.transfermarkt_link==club) | (df.fcpython==club) | \
            (df.fivethirtyeight==club)]
    season = str(season)
    # Determine domain (each TLD reports values in its local currency)
    match currency:
        case 'EUR':
            domain = 'https://www.transfermarkt.com'
            signed_currency = '€'
        case 'GBP':
            domain = 'https://www.transfermarkt.co.uk'
            signed_currency = '£'
        case 'USD':
            domain = 'https://www.transfermarkt.us'
            signed_currency = '$'
    # Determine position group subdirectory (transfermarkt's German slugs)
    match position_group:
        case 'All':
            pos_group_subdir = ''
        case 'Goalkeepers':
            pos_group_subdir = 'Torwart'
        case 'Defenders':
            pos_group_subdir = 'Abwehr'
        case 'Midfielders':
            pos_group_subdir = 'Mittelfeld'
        case 'Strikers':
            pos_group_subdir = 'Sturm'
    # Determine position subdirectory (transfermarkt's numeric position ids)
    match main_position:
        case 'All':
            pos_subdir = '0'
        case 'Goalkeeper':
            pos_subdir = '1'
        case 'Sweeper':
            pos_subdir = '2'
        case 'Centre-Back':
            pos_subdir = '3'
        case 'Left-Back':
            pos_subdir = '4'
        case 'Right-Back':
            pos_subdir = '5'
        case 'Defensive Midfield':
            pos_subdir = '6'
        case 'Central Midfield':
            pos_subdir = '7'
        case 'Right Midfield':
            pos_subdir = '8'
        case 'Left Midfield':
            pos_subdir = '9'
        case 'Attacking Midfield':
            pos_subdir = '10'
        case 'Left Winger':
            pos_subdir = '11'
        case 'Right Winger':
            pos_subdir = '12'
        case 'Second Striker':
            pos_subdir = '13'
        case 'Centre-Forward':
            pos_subdir = '14'
    # Determine transfer window subdirectory (s = summer, w = winter)
    match window:
        case 'All':
            window_subdir = ''
        case 'Summer':
            window_subdir = 's'
        case 'Winter':
            window_subdir = 'w'
    # Generate url; IndexError means the club lookup above matched no row.
    # NOTE(review): exit() inside a library function kills the caller's
    # process -- raising an exception would be friendlier.
    try:
        subdir = '/'.join(("saison_id", season, "pos", pos_group_subdir, "detailpos", pos_subdir, "w_s", window_subdir, "plus/1#zugaenge"))
        page = '/'.join((domain, df.transfermarkt_link.iloc[0], "transfers/verein", str(df.transfermarkt.iloc[0]), subdir))
    except IndexError:
        print('This team does not exist, please confirm spelling')
        exit()
    pageSoup = get_page_soup(page)
    # Find table objects (arrivals and/or departures)
    tables = pageSoup.find_all('table', {'class' : 'items'})
    # Error handling for no transfers or non-conducive position combinations
    assert len(tables) > 0, 'Confirm that you have entered a valid combination of positions and that there are transfers'
    # Two tables -> both directions present; one table -> figure out which
    # direction is empty from the heading above the "empty" placeholder.
    if len(tables) == 2:
        table_arrivals, table_departures = tables
    elif len(tables) == 1:
        if pageSoup.find('span', {'class' : 'empty'}).find_previous('h2').text.strip() == 'Arrivals':
            table_arrivals = None
            table_departures = tables[0]
        elif pageSoup.find('span', {'class' : 'empty'}).find_previous('h2').text.strip() == 'Departures':
            table_arrivals = tables[0]
            table_departures = None
    # Generate empty list
    mylist = []
    # iterate over arrivals and departures
    for table in [table_arrivals, table_departures]:
        # error handling for no transfers (table is None)
        try:
            tbody = table.find('tbody')
        except AttributeError:
            continue
        # determine transfer direction
        if table == table_arrivals:
            direction = 'arrival'
        else:
            direction = 'departure'
        # find rows
        rows = tbody.find_all('tr')
        # iterate through each transfer and store attributes
        for row in rows:
            # check if valid row: rows without a 'class' attribute raise
            # KeyError and are skipped.
            # NOTE(review): row_ is assigned but never used -- the odd/even
            # comparison currently has no effect.
            try:
                row_ = row['class'] not in ['odd', 'even']
            except KeyError:
                continue
            # row classes
            hauptlink_class = row.find_all('td', {'class' : 'hauptlink'})
            signing_class = row.find_all('img', {'class' : 'flaggenrahmen'})[-1]
            # extract basic info
            url = row.find('a', href=True)['href']
            name = row.find('img')['alt']
            pos = row.find('td', {'class' : 'hauptlink'}).find_next('td').text.strip()
            age = row.find('td', {'class' : 'zentriert'}).text.strip()
            nation = row.find_all('td', {'class' : 'zentriert'})[1].find('img')['alt']
            # transfer info
            mv = row.find('td', {'class' : 'rechts'}).text.strip()
            transfer_club = hauptlink_class[1].text.strip()
            try:
                transfer_club_url = hauptlink_class[1].find('a', href=True)['href']
            except TypeError:
                transfer_club_url = None
            transfer_league = signing_class.find_next('a', href=True).text
            transfer_league_url = signing_class.find_next('a', href=True)['href']
            transfer_country = signing_class['alt']
            signed_value = hauptlink_class[-1].text
            # value cleaning: classify the transfer type from the fee text
            # and normalize the fee string for tm_format_currency.
            if "End of loan" in signed_value:
                transfer_type = 'End of loan'
                signed_value = '0'
            elif 'Loan' in signed_value:
                transfer_type = 'Loan'
                signed_value = signed_value.replace('Loan', '').replace('fee:', '').strip()
            elif 'loan transfer' in signed_value:
                transfer_type = 'Loan'
                signed_value = '0'
            elif 'free transfer' in signed_value:
                transfer_type = 'free transfer'
                signed_value = '0'
            else:
                signed_value = signed_value.replace('-','0')
                transfer_type = 'Transfer'
            # generate dictionary for each player
            mydict = {'direction' : direction,
                      'transfer_type' : transfer_type,
                      'name' : name,
                      'url' : url,
                      'position' : pos,
                      'age' : age,
                      'nation' : nation,
                      'transfer_club' : transfer_club,
                      'transfer_club_url' : transfer_club_url,
                      'transfer_league' : transfer_league,
                      'transfer_league_url' : transfer_league_url,
                      'transfer_country' : transfer_country,
                      'currency' : signed_currency,
                      'fee' : tm_format_currency(signed_value),
                      'market_value' : tm_format_currency(mv)}
            # append dictionary to list
            mylist.append(mydict)
return mylist | StarcoderdataPython |
195107 | <filename>py-misc/evaluate_categories.py
import json
import pprint
import argparse
from sklearn.metrics import log_loss
class CategoryResult:
    """One category-prediction record parsed from the results file.

    Exposes the raw record (``data_dict``), its language code and its
    per-category score mapping; holds a PrettyPrinter for __str__.
    """

    def __init__(self, data_dict, pretty_print):
        """Keep the raw record and cache the fields used during evaluation."""
        self.__pretty_print = pretty_print
        self.data_dict = data_dict
        self.lang_code = data_dict['lang_code']
        self.category_scores = data_dict['category']

    def __repr__(self):
        """Concise form: language code followed by the score mapping."""
        return '{} {}'.format(self.lang_code, self.category_scores)

    def __str__(self):
        """Pretty-printed dump of the entire record."""
        return self.__pretty_print.pformat(self.data_dict)
def label_from_fasttext(label):
    """Convert a fastText label to a category name.

    fastText labels look like ``__label__Some_Category``; this strips the
    prefix and restores the spaces fastText encodes as underscores.
    """
    # Replaces the previous magic number 9 with len(prefix); the slice is
    # unconditional, matching the original behaviour exactly.
    prefix = '__label__'
    return label[len(prefix):].replace('_', ' ')
def get_log_loss(prediction_dict, category_gt):
    """Log loss between predicted scores and a one-hot ground truth.

    The ground-truth vector has 1.0 at *category_gt* and 0 elsewhere; both
    vectors follow the iteration order of *prediction_dict*. If the ground
    truth is not among the predicted categories the truth vector is all
    zeros, as before.
    """
    y_pred = []
    y_true = []
    for category_name, score in prediction_dict.items():
        y_pred.append(score)
        y_true.append(1.0 if category_name == category_gt else 0)
    return log_loss(y_true, y_pred)
def get_fake_log_loss(num):
    """Baseline log loss for an all-zero prediction over *num* categories.

    Used as the penalty assigned to records whose language does not match
    the evaluated one. Requires num >= 1 (index 0 is set to 1.0).
    """
    y_pred = [0.0] * num
    y_true = [0.0] * num
    y_true[0] = 1.0
    return log_loss(y_true, y_pred)
# Maps ground-truth labels to the (apparently truncated) spellings used by
# the prediction output -- NOTE(review): the clipped values look like a
# data artifact; confirm against the results files.
categoryTransformDict = {
    'Offers & Promotions': 'Offers & Promotion',
    'Foreign Language Learning': 'Foreign Language Learnin'
}
# Template paths: per-language predictions and fastText validation labels.
results_name = '../../outputs/out_cat_{}.txt'
label_name = '../../fastText/data/{}.val'
if __name__ == '__main__':
    # Full category universe; every prediction record is padded so all
    # categories are present with a 0.0 score.
    categoryList = []
    with open('categories.list', 'r') as f:
        for line in f.readlines():
            categoryList.append(line[:-1])
    parser = argparse.ArgumentParser()
    parser.add_argument('--language', default='en')
    args = parser.parse_args()
    pp = pprint.PrettyPrinter(indent=4)
    # Load one CategoryResult per JSON line of the predictions file.
    categoryResults = []
    with open(results_name.format(args.language), 'r') as f:
        for line in f.readlines():
            data_dict = json.loads(line)
            for category in categoryList:
                if category not in data_dict['category'].keys():
                    data_dict['category'][category] = 0.0
            categoryResult = CategoryResult(data_dict, pp)
            categoryResults.append(categoryResult)
    # Ground truth: first token of each validation line is the fastText label.
    categoryGroundTruths = []
    with open(label_name.format(args.language), 'r') as f:
        for line in f.readlines():
            words = line.split(' ')
            category = label_from_fasttext(words[0])
            categoryGroundTruths.append(category)
    category_num = len(categoryResults[0].category_scores)
    pred_num = len(categoryResults)
    # Penalty loss used for records whose detected language mismatches.
    fake_loss = get_fake_log_loss(category_num)
    success_num = 0
    log_loss_mean = 0.0
    for categoryResult, categoryGroundTruth in zip(categoryResults, categoryGroundTruths):
        if categoryGroundTruth in categoryTransformDict.keys():
            categoryGroundTruth = categoryTransformDict[categoryGroundTruth]
        scores = categoryResult.category_scores
        categoryNow = ''
        if categoryResult.lang_code == args.language:
            log_loss_value = get_log_loss(scores, categoryGroundTruth)
            if len(scores) > 0:
                # Top-1 prediction = highest-scoring category.
                categoryNow = max(scores, key=scores.get)
        else:
            log_loss_value = fake_loss
        if categoryNow == categoryGroundTruth:
            success_num += 1
        log_loss_mean += log_loss_value / pred_num
    precision_at_1 = success_num / pred_num
    print(f'Precision-at-1: {precision_at_1:.6f}')
    print(f'Log Loss Score: {log_loss_mean:.6f}')
| StarcoderdataPython |
1718264 | <filename>pkg/binary_tree.py
# Given the root to a binary tree, implement serialize(root), which serializes the tree into a string, and deserialize(s), which deserializes the string back into the tree.
# For example, given the following Node class
# class Node:
# def __init__(self, val, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
#
# The following test should pass:
#
#node = Node('root', Node('left', Node('left.left')), Node('right'))
#assert deserialize(serialize(node)).left.left.val == 'left.left'
def serialize(root):
    """Serialize a binary tree to a ' | '-delimited pre-order string.

    ``None`` children are encoded as empty fields so the shape can be
    reconstructed by ``deserialize``.
    """
    if root is None:
        return ""
    # Bug fix: the original built this string into a local variable and fell
    # through, returning None (which also made recursive calls embed "None").
    return f"{root.val} | {serialize(root.left)} | {serialize(root.right)}"
def deserialize(serialized):
    """Rebuild a binary tree from a ' | '-delimited pre-order string."""
    return buildNodes(serialized.split(' | '))


def buildNodes(elements):
    """Consume *elements* in pre-order ('' marks a missing child) and build a tree."""
    if not elements:
        return None
    # pop(0) keeps consumption strictly left-to-right across the recursion.
    val = elements.pop(0)
    if not val:
        return None
    node = Node(val)
    node.left = buildNodes(elements)
    node.right = buildNodes(elements)
    return node


class Node:
    """Binary-tree node holding a value and two child links."""

    def __init__(self, val, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
# The following test should pass:
# (module-level round-trip smoke test; it relies on serialize() actually
# returning its encoded string)
node = Node('root', Node('left', Node('left.left')), Node('right'))
assert deserialize(serialize(node)).left.left.val == 'left.left'
| StarcoderdataPython |
6544154 | import sys, os, inspect, glob, datetime, time, shutil
import timeit
start = timeit.default_timer()
# NOTE: this is Python 2 code (print statements below).
# Make the parent of this script's directory importable so the sibling
# imageryObject module can be found.
cmd_folder = os.path.dirname(os.path.realpath(os.path.abspath(os.path.split(inspect.getfile( inspect.currentframe() ))[0])))
if cmd_folder not in sys.path:
    sys.path.insert(0, cmd_folder)
import imageryObject
# UNC paths to the imagery share: vendor drop folder, dated log and the
# "current" log that gets overwritten each run.
baseFolder = r"\\ddhprdcifs\DDH-PRD\ddhfiles\internal"
inputFolder = r"\\ddhprdcifs\DDH-PRD\ddhfiles\internal\imagerysource\Vendor_Distributions"
outLogFile = r"\\ddhprdcifs\DDH-PRD\ddhfiles\internal\imagerysource\Logs\Imagery_Log_%s.csv" % datetime.date.today().strftime("%Y_%m_%d")
finalLogFile = r"\\ddhprdcifs\DDH-PRD\ddhfiles\internal\imagerysource\Logs\CURRENT_Imagery_Log.csv"
# Upload step is disabled by default.
performUpload = False
log = open(outLogFile, 'wb')
log.write("Sensor,ZipFile,SourceExists,SourceLocation,ServiceExists,ServiceSource,ServiceSourceOID,SourceService\n")
'''prints the time along with the message'''
def tPrint(s):
    # Python 2 print statement; emits "HH:MM:SS<tab>message".
    print"%s\t%s" % (time.strftime("%H:%M:%S"), s)
# Collect [zipname, sensor] pairs, where sensor is the folder name;
# some sensors are excluded from processing.
allFiles = []
for dirName, subdirList, fileList in os.walk(inputFolder):
    #Run through and copy all the files in the fileList
    sensor = os.path.basename(dirName)
    for f in fileList:
        if f[-4:] == ".zip" and not sensor in ['SPOT5','DRONE','DEM','AERIAL']:
            allFiles.append([f, sensor])
#allFiles = [['055997346020_01.zip', 'WV02']]
# Process each zip: extract/tile if the source is missing, build outlines,
# optionally upload, and append the status row to the CSV log.
for s in allFiles:
    inputZip = "%s/%s/%s" % (inputFolder, s[1], s[0])
    xx = imageryObject.imageryZip(inputZip, s[1])
    print xx.statusUpdate
    if xx.sourceExists == False:
        print "*****Extracting and Tiling*****"
        xx.extractandTile()
    #Get outlines of source imagery and create output GEOJSON
    if xx.sourceExists:
        xx.createImageOutlines()
    if xx.serviceExists == False and performUpload:
        print "*****Uploading Source*****"
        xx.uploadSourceData()
    #xx.finalProcessing()
    log.write(xx.statusUpdate)
    log.write("\n")
log.close()
shutil.copyfile(outLogFile, finalLogFile)
# NOTE(review): "Finsihed" is a typo in the runtime string; left unchanged
# here since this edit only adds comments.
print ("Finsihed Log")
stop = timeit.default_timer()
print stop - start
#synchronize the derived RGB
#fileGDB = imageryObject.ImageryGDB()
#fileGDB.updateFields()
#fileGDB.applyStretch()
| StarcoderdataPython |
12865651 | import tide_constituents as tc
from py_noaa import coops
import pandas as pd
import numpy as np
import tappy
# Window to evaluate and the step size (days) for the chunked loop below.
start = '20180201'
end = '20180228'
interval = 1
start = pd.to_datetime(start)
end = pd.to_datetime(end)
d = start
# w: observed water levels, p: predictions; t and r are never used.
w, t, p, r = [], [], [], []
# Walk the window in `interval`-day chunks, fetching observations and a
# tidal-constituent prediction per chunk.
while d < end:
    start_ = d
    end_ = start_ + pd.DateOffset(interval)
    end_ = end_ if end_ < end else end
    water_level, tide = tc.get_water_levels(start_.strftime('%Y%m%d'),
                                            end_.strftime('%Y%m%d'),
                                            -88.2, 30.4)
    water_level = water_level.water_level.astype('float')
    # Z0 is the mean-sea-level offset constituent when present.
    prediction = 0.0 if 'Z0' not in list(tide.speed_dict.keys()) else tide.speed_dict['Z0']
    # NOTE(review): sum_signals is not defined or imported in this file --
    # presumably a tappy helper; as written this line raises NameError.
    prediction += sum_signals(tide.key_list, tide.dates, tide.speed_dict, tide.r, tide.phase)
    # residual is computed but never used.
    residual = water_level - prediction
    w.append(water_level)
    p.append(prediction)
    d = end_
# Combine chunked observations/predictions into one frame.
water_level = pd.concat(w).to_frame()
water_level.columns = ['observation']
water_level['prediction'] = np.hstack(p)
# Monthly min/max of the 30-sample rolling extrema of demeaned predicted
# water levels for the full year.
data = tc.get_tides('20180101', '20181231', -88.2, 30.4)
wl = data.predicted_wl.copy()
grouped = wl.groupby(pd.Grouper(freq='M'))
def f(group):
    # Per-month frame of the raw series and the series minus its monthly mean.
    return pd.DataFrame({'original': group, 'demeaned': group - group.mean()})
wl_demeaned = grouped.apply(f)
min_month = wl_demeaned.rolling(30).min().groupby(pd.Grouper(freq='M')).last()
max_month = wl_demeaned.rolling(30).max().groupby(pd.Grouper(freq='M')).last()
monthly_minmax = min_month.copy()
monthly_minmax['high'] = max_month['demeaned']
monthly_minmax = monthly_minmax[['demeaned', 'high']]
monthly_minmax.columns = ['low', 'high']
monthly_minmax['range'] = monthly_minmax.high - monthly_minmax.low
monthly_minmax.sort_values('range') | StarcoderdataPython |
4993020 | import tkinter
class MinhaGUI:
    """Minimal Tkinter window displaying two static labels.

    Note: the constructor blocks in tkinter.mainloop() until the window
    is closed. (Comments translated from Portuguese.)
    """
    def __init__(self):
        # Create the main window
        self.main_window = tkinter.Tk()
        # Create the labels
        self.label1 = tkinter.Label(self.main_window, text='Curso Python Progressivo!' )
        self.label2 = tkinter.Label(self.main_window, text='www.pythonprogressivo.net' )
        # Display the labels
        self.label1.pack(side='top')
        self.label2.pack(side='bottom')
        # Enter the Tkinter event loop (blocks until the window closes)
        tkinter.mainloop()
minha_gui = MinhaGUI() | StarcoderdataPython |
class Node:
    """Base class for LDAP-style filter nodes.

    ``value`` holds the comparison payload (or the child list for logic
    nodes) and ``name`` is the attribute key looked up in ``params``.
    """

    def __init__(self, value=None, name=""):
        self.value = value
        self.name = name

    def match(self, params):
        """Return whether *params* satisfies this node; subclasses implement."""
        raise NotImplementedError


class LogicNode(Node):
    """A node whose value is a list of child nodes to combine."""

    def __init__(self):
        super().__init__([])


class AndNode(LogicNode):
    """Matches when every child matches (vacuously true when empty)."""

    def match(self, params):
        return all(child.match(params) for child in self.value)


class OrNode(LogicNode):
    """Matches when at least one child matches (false when empty)."""

    def match(self, params):
        return any(child.match(params) for child in self.value)


class NotNode(LogicNode):
    """Matches when at least one child does NOT match (false when empty)."""

    def match(self, params):
        return any(not child.match(params) for child in self.value)


class EqNode(Node):
    """Equality test; a list/tuple attribute matches if it contains the value."""

    def match(self, params):
        if self.name not in params:
            return False
        attr = params[self.name]
        if isinstance(attr, (list, tuple)):
            return self.value in attr
        return attr == self.value


class LteNode(Node):
    """Matches when the attribute is <= this node's value."""

    def match(self, params):
        if self.name not in params:
            return False
        return params[self.name] <= self.value


class GteNode(Node):
    """Matches when the attribute is >= this node's value."""

    def match(self, params):
        if self.name not in params:
            return False
        return params[self.name] >= self.value


class ApproxNode(Node):
    """Substring containment; a list/tuple attribute matches on any element."""

    def match(self, params):
        if self.name not in params:
            return False
        attr = params[self.name]
        if isinstance(attr, (list, tuple)):
            return any(self.value in element for element in attr)
        return self.value in attr


class PresentNode(Node):
    """Matches when the attribute key is present at all."""

    def match(self, params):
        return self.name in params


class SubstringNode(Node):
    """Pattern test; ``value`` is a compiled-regex-like object with .match()."""

    def match(self, params):
        if self.name not in params:
            return False
        attr = params[self.name]
        if isinstance(attr, (list, tuple)):
            return any(self.value.match(element) for element in attr)
        # Preserve the original contract: for a scalar attribute the raw
        # match result is returned (which may be None rather than False).
        return self.value.match(attr)


class AllNode(Node):
    """Always matches."""

    def match(self, params):
        return True


class NoneNode(Node):
    """Never matches."""

    def match(self, params):
        return False
| StarcoderdataPython |
4896519 | #!/usr/bin/env python3
#
# Copyright 2022 The Dawn Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import sys
import os
from dir_paths import node_dir
# Temporarily prepend node_dir to sys.path so node.py's GetBinaryPath can be
# imported; the original path is restored even if the import fails.
try:
    old_sys_path = sys.path
    sys.path = [node_dir] + sys.path
    from node import GetBinaryPath as get_node_binary_path
finally:
    sys.path = old_sys_path

# Path to the TypeScript compiler bundled under node_modules.
tsc = os.path.join(node_dir, 'node_modules', 'typescript', 'lib', 'tsc.js')
def run_tsc_ignore_errors(args):
    """Invoke the bundled tsc with *args* and return its stdout (as bytes).

    Typecheck errors land on stdout and are deliberately ignored; anything
    on stderr means tsc itself failed, which raises RuntimeError.
    """
    cmd = [get_node_binary_path(), tsc] + args
    proc = subprocess.Popen(cmd,
                            cwd=os.getcwd(),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    # Typecheck errors go in stdout, not stderr. If we see something in stderr, raise an error.
    if stderr:
        raise RuntimeError('tsc \'%s\' failed\n%s' % (' '.join(cmd), stderr))
    return stdout
# Script entry point: forward the CLI arguments (minus the program name)
# straight to tsc.
if __name__ == '__main__':
    run_tsc_ignore_errors(sys.argv[1:])
| StarcoderdataPython |
1939240 | # Este es mi primer programa
print('Hello, world!');
# print("HOla")
'''
Se va a imprimir el valor de la variable cantidad
utilizando al comando print
'''
# (The triple-quoted block above is a Spanish note -- "the value of the
# variable cantidad will be printed using the print command". It is a bare
# string literal, evaluated and discarded at runtime, so it is left intact.)
cantidad = 25
print('Mi variable cantidad contiene el valor ', cantidad)
| StarcoderdataPython |
1995342 | <gh_stars>1-10
import os
from lxml import etree
path = "origin" # folder containing the downloaded XML files
files = os.listdir(path) # all file names inside the folder
files = filter(lambda x: x.endswith('.xml'), files)
for file in files: # iterate over the XML files
    tree = etree.parse(path + '/' + file)
    # Bilibili danmaku format: each <d> element is one comment; the 'p'
    # attribute is a comma-separated metadata tuple, the text is the comment.
    for element in tree.xpath('//i//d'):
        info = element.xpath('./@p')[0].split(',')
        text = element.xpath('./text()')[0]
        # print(info, text)
        # Append each comment as one line; the file is reopened per comment.
        with open('danmaku.txt', 'a+', encoding='utf-8') as f:
| StarcoderdataPython |
1763020 | <reponame>rinha79/discordpy-startup<gh_stars>0
import discord
from discord.ext import commands
import os
import traceback
import datetime
# Bot responding to the 'r!!' command prefix; token comes from the env.
bot = commands.Bot(command_prefix='r!!')
token = os.environ['DISCORD_BOT_TOKEN']
@bot.event
async def on_ready():
    # Announce startup in a hard-coded channel.
    # NOTE(review): sending an empty string is rejected by the Discord API
    # (HTTPException) -- presumably a startup message was meant here; confirm.
    channel = bot.get_channel(696922604660850740)
    await channel.send("")
@bot.event
async def on_command_error(ctx, error):
    # Report the full traceback of the underlying error back to the channel.
    orig_error = getattr(error, "original", error)
    error_msg = ''.join(traceback.TracebackException.from_exception(orig_error).format())
    await ctx.send(error_msg)
@bot.command()
async def test(ctx, arg):
    # Echo command: replies with its argument.
    await ctx.send(arg)
@bot.command()
async def joined(ctx, *, member: discord.Member):
    # Report when the given member joined the guild.
    await ctx.send('{0} joined on {0.joined_at}'.format(member))
@bot.event
async def on_message(message):
    # Log messages from the hard-coded channel to stdout, then let command
    # processing continue (required when on_message is overridden).
    if message.channel.id == 696922604660850740:
        is_bot = " [BOT]"if message.author.bot else ""
        # NOTE(review): hour/mnin are computed but never used.
        date = datetime.datetime.now()
        hour = date.hour
        mnin = date.minute
        print("{0.author}{1} ({0.author.id})\n{0.content}".format(message,is_bot))
    await bot.process_commands(message)
| StarcoderdataPython |
9633636 | <filename>tests/test_e2e_mw.py
import pytest
import kubernetes.client as k8s_client
import kubernetes.config as k8s_config
import sys
from .utils import namespace_handling, kopf_runner, NAMESPACE, DEFAULT_WAIT_TIME
import os
import time
sys.path.insert(0, os.path.abspath(os.path.join(
os.path.dirname(__file__), '../ur_operator')))
import ur_operator.uptimerobot as uptimerobot
from ur_operator.crds.maintenance_window import MaintenanceWindowV1Beta1, MaintenanceWindowType
from ur_operator.k8s import K8s
# Module-level clients shared by all tests: project K8s wrapper, raw
# kubernetes client, and the UptimeRobot API handle.
k8s = K8s()
k8s_config.load_kube_config()
core_api = k8s_client.CoreV1Api()
uptime_robot = uptimerobot.create_uptimerobot_api()
def create_k8s_ur_mw(namespace, name, wait_for_seconds=DEFAULT_WAIT_TIME, **spec):
    # Create a MaintenanceWindow CR, then wait for the operator to react.
    k8s.create_k8s_crd_obj(MaintenanceWindowV1Beta1, namespace, name, **spec)
    time.sleep(wait_for_seconds)
def update_k8s_ur_mw(namespace, name, wait_for_seconds=DEFAULT_WAIT_TIME, **spec):
    # Patch an existing MaintenanceWindow CR, then wait for reconciliation.
    k8s.update_k8s_crd_obj(MaintenanceWindowV1Beta1, namespace, name, **spec)
    time.sleep(wait_for_seconds)
def delete_k8s_ur_mw(namespace, name, wait_for_seconds=DEFAULT_WAIT_TIME):
    # Delete the MaintenanceWindow CR, then wait for reconciliation.
    k8s.delete_k8s_crd_obj(MaintenanceWindowV1Beta1, namespace, name)
    time.sleep(wait_for_seconds)
class TestDefaultOperator:
    """End-to-end tests: creating/updating/deleting MaintenanceWindow CRs in
    Kubernetes must be mirrored as maintenance windows in UptimeRobot.

    Both fixtures come from tests.utils: kopf_runner starts the operator,
    namespace_handling creates/tears down the test namespace.
    """
    def test_create_once_mw(self, kopf_runner, namespace_handling):
        # ONCE windows take an absolute epoch start time (as a string).
        name = 'foo'
        mw_type = MaintenanceWindowType.ONCE
        start_time = str(int(time.time()) + 60)
        duration = 30
        create_k8s_ur_mw(NAMESPACE, name, type=mw_type.name, startTime=start_time, duration=duration)
        mws = uptime_robot.get_m_window()['mwindows']
        assert len(mws) == 1
        assert mws[0]['friendly_name'] == name
        assert mws[0]['type'] == mw_type.value
        assert mws[0]['start_time'] == int(start_time)
        assert mws[0]['duration'] == duration
    def test_create_daily_mw(self, kopf_runner, namespace_handling):
        # DAILY windows use an HH:MM start time, echoed back as a string.
        name = 'foo'
        mw_type = MaintenanceWindowType.DAILY
        start_time = '06:30'
        duration = 30
        create_k8s_ur_mw(NAMESPACE, name, type=mw_type.name, startTime=start_time, duration=duration)
        mws = uptime_robot.get_m_window()['mwindows']
        assert len(mws) == 1
        assert mws[0]['friendly_name'] == name
        assert mws[0]['type'] == mw_type.value
        assert mws[0]['start_time'] == start_time
        assert mws[0]['duration'] == duration
    def test_create_weekly_mw(self, kopf_runner, namespace_handling):
        # WEEKLY windows: 'value' is a dash-separated weekday list in the CR,
        # stored comma-separated by UptimeRobot.
        name = 'foo'
        mw_type = MaintenanceWindowType.WEEKLY
        start_time = '06:30'
        duration = 30
        value = '2-4-5'
        create_k8s_ur_mw(NAMESPACE, name, type=mw_type.name, startTime=start_time, duration=duration, value=value)
        mws = uptime_robot.get_m_window()['mwindows']
        assert len(mws) == 1
        assert mws[0]['friendly_name'] == name
        assert mws[0]['type'] == mw_type.value
        assert mws[0]['start_time'] == start_time
        assert mws[0]['duration'] == duration
        assert mws[0]['value'] == value.replace('-', ',')
    def test_create_monthly_mw(self, kopf_runner, namespace_handling):
        # MONTHLY windows: 'value' lists days of the month.
        name = 'foo'
        mw_type = MaintenanceWindowType.MONTHLY
        start_time = '06:30'
        duration = 30
        value = '1-11-21-31'
        create_k8s_ur_mw(NAMESPACE, name, type=mw_type.name, startTime=start_time, duration=duration, value=value)
        mws = uptime_robot.get_m_window()['mwindows']
        assert len(mws) == 1
        assert mws[0]['friendly_name'] == name
        assert mws[0]['type'] == mw_type.value
        assert mws[0]['start_time'] == start_time
        assert mws[0]['duration'] == duration
        assert mws[0]['value'] == value.replace('-', ',')
    def test_create_mw_with_friendly_name(self, kopf_runner, namespace_handling):
        # An explicit friendlyName overrides the CR name in UptimeRobot.
        name = 'foo'
        friendly_name = 'bar'
        mw_type = MaintenanceWindowType.ONCE
        start_time = str(int(time.time()) + 60)
        duration = 30
        create_k8s_ur_mw(NAMESPACE, name, type=mw_type.name, startTime=start_time, duration=duration, friendlyName=friendly_name)
        mws = uptime_robot.get_m_window()['mwindows']
        assert len(mws) == 1
        assert mws[0]['friendly_name'] == friendly_name
    def test_update_mw(self, kopf_runner, namespace_handling):
        # NOTE(review): new_duration equals duration (30), so the duration
        # assertion after the update does not prove the field was changed.
        name = 'foo'
        new_name = 'bar'
        mw_type = MaintenanceWindowType.ONCE
        start_time = str(int(time.time()) + 60)
        duration = 30
        new_duration = 30
        create_k8s_ur_mw(NAMESPACE, name, type=mw_type.name, startTime=start_time, duration=duration)
        mws = uptime_robot.get_m_window()['mwindows']
        assert len(mws) == 1
        assert mws[0]['friendly_name'] == name
        assert mws[0]['duration'] == duration
        update_k8s_ur_mw(NAMESPACE, name, friendlyName=new_name, duration=new_duration)
        mws = uptime_robot.get_m_window()['mwindows']
        assert len(mws) == 1
        assert mws[0]['friendly_name'] == new_name
        assert mws[0]['duration'] == new_duration
    def test_update_mw_change_type(self, kopf_runner, namespace_handling):
        # Changing the window type also changes the start-time format
        # (epoch int for ONCE vs HH:MM string for DAILY).
        name = 'foo'
        mw_type = MaintenanceWindowType.ONCE
        new_mw_type = MaintenanceWindowType.DAILY
        start_time = str(int(time.time()) + 60)
        new_start_time = '10:00'
        duration = 30
        create_k8s_ur_mw(NAMESPACE, name, type=mw_type.name, startTime=start_time, duration=duration)
        mws = uptime_robot.get_m_window()['mwindows']
        assert len(mws) == 1
        assert mws[0]['type'] == mw_type.value
        assert mws[0]['start_time'] == int(start_time)
        update_k8s_ur_mw(NAMESPACE, name, type=new_mw_type.name, startTime=new_start_time)
        mws = uptime_robot.get_m_window()['mwindows']
        assert len(mws) == 1
        assert mws[0]['type'] == new_mw_type.value
        assert mws[0]['start_time'] == new_start_time
    def test_delete_mw(self, kopf_runner, namespace_handling):
        # Deleting the CR must remove the maintenance window upstream.
        name = 'foo'
        mw_type = MaintenanceWindowType.ONCE
        start_time = str(int(time.time()) + 60)
        duration = 30
        create_k8s_ur_mw(NAMESPACE, name, type=mw_type.name, startTime=start_time, duration=duration)
        mws = uptime_robot.get_m_window()['mwindows']
        assert len(mws) == 1
        delete_k8s_ur_mw(NAMESPACE, name)
        mws = uptime_robot.get_m_window()['mwindows']
        assert len(mws) == 0
| StarcoderdataPython |
6410573 | # Copyright 2017 Confluent Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from six import iteritems
class NodeNotPresentError(Exception):
    """Raised when removing a node that is not in the container."""
    pass
class InsufficientResourcesError(Exception):
    """Raised when a cluster spec requests more nodes than are available."""
    pass
class NodeContainer(object):
    """Groups nodes by operating system and supports spec-based allocation.

    Node objects must implement at least an ``operating_system`` property.
    """

    def __init__(self, nodes=None):
        """
        Create a NodeContainer with the given nodes.

        :param nodes: A collection of node objects to add, or None to add nothing.
        """
        # Maps operating-system name -> list of nodes with that OS.
        self.os_to_nodes = {}
        if nodes is not None:
            for node in nodes:
                self.os_to_nodes.setdefault(node.operating_system, []).append(node)

    def size(self):
        """Return the total number of nodes in the container."""
        return sum(len(val) for val in self.os_to_nodes.values())

    def __len__(self):
        return self.size()

    def __iter__(self):
        return self.elements()

    def elements(self, operating_system=None):
        """
        Yield the elements in this container.

        :param operating_system: If this is non-None, we will iterate only over
            elements which have this operating system.
        """
        if operating_system is None:
            for node_list in self.os_to_nodes.values():
                for node in node_list:
                    yield node
        else:
            for node in self.os_to_nodes.get(operating_system, []):
                yield node

    def add_node(self, node):
        """
        Add a node to this collection.

        :param node: The node to add.
        """
        self.os_to_nodes.setdefault(node.operating_system, []).append(node)

    def add_nodes(self, nodes):
        """
        Add a collection of nodes to this collection.

        :param nodes: The nodes to add.
        """
        for node in nodes:
            self.add_node(node)

    def remove_node(self, node):
        """
        Removes a node from this collection.

        :param node: The node to remove.
        :returns: The node which has been removed.
        :throws NodeNotPresentError: If the node is not in the collection.
        """
        try:
            self.os_to_nodes.get(node.operating_system, []).remove(node)
        except ValueError:
            raise NodeNotPresentError
        # Fix: list.remove() returns None, but the documented contract is to
        # return the removed node, so return it explicitly.
        return node

    def remove_nodes(self, nodes):
        """
        Remove a collection of nodes from this collection.

        :param nodes: The nodes to remove.
        """
        for node in nodes:
            self.remove_node(node)

    def remove_spec(self, cluster_spec):
        """
        Remove nodes matching a ClusterSpec from this NodeContainer.

        :param cluster_spec: The cluster spec. This will not be modified.
        :returns: A list of the nodes that were removed.
        :throws InsufficientResourcesError: If there are not enough nodes in the
            NodeContainer. Nothing will be removed unless enough are available.
        """
        msg = self.attempt_remove_spec(cluster_spec)
        if len(msg) > 0:
            raise InsufficientResourcesError("Not enough nodes available to allocate. " + msg)
        removed = []
        # six.iteritems replaced with the native dict.items() (Python 3).
        for os, node_specs in cluster_spec.nodes.os_to_nodes.items():
            num_nodes = len(node_specs)
            avail_nodes = self.os_to_nodes.get(os, [])
            for i in range(0, num_nodes):
                removed.append(avail_nodes.pop(0))
        return removed

    def can_remove_spec(self, cluster_spec):
        """
        Determine if we can remove nodes matching a ClusterSpec from this
        NodeContainer. This container will not be modified.

        :param cluster_spec: The cluster spec. This will not be modified.
        :returns: True if we could remove the nodes; false otherwise
        """
        msg = self.attempt_remove_spec(cluster_spec)
        return len(msg) == 0

    def attempt_remove_spec(self, cluster_spec):
        """
        Attempt to remove a cluster_spec from this node container.

        :param cluster_spec: The cluster spec. This will not be modified.
        :returns: An empty string if we can remove the nodes;
                  an error string otherwise.
        """
        # Collect one message per under-provisioned OS; the original
        # concatenated them without any separator.
        problems = []
        for os, node_specs in cluster_spec.nodes.os_to_nodes.items():
            num_nodes = len(node_specs)
            avail_nodes = len(self.os_to_nodes.get(os, []))
            if avail_nodes < num_nodes:
                problems.append("%s nodes requested: %d. %s nodes available: %d" %
                                (os, num_nodes, os, avail_nodes))
        return "; ".join(problems)

    def clone(self):
        """
        Returns a deep copy of this object.
        """
        container = NodeContainer()
        for operating_system, nodes in self.os_to_nodes.items():
            for node in nodes:
                container.os_to_nodes.setdefault(operating_system, []).append(node)
        return container
| StarcoderdataPython |
6543766 | <gh_stars>10-100
from celery import Celery
from contentcuration.utils.celery.tasks import CeleryTask
class CeleryApp(Celery):
    """Celery application that wires in project-specific task and result classes."""

    # Custom task base class used for every task created by this app.
    task_cls = CeleryTask
    # Dotted path to the result class; resolved lazily in on_init.
    result_cls = 'contentcuration.utils.celery.tasks:CeleryAsyncResult'
    # Cache for the subclassed result type, populated in on_init.
    _result_cls = None

    def on_init(self):
        """
        Use init call back to set our own result class. Celery doesn't yet have an easier way
        to customize this class specifically
        """
        self._result_cls = self.subclass_with_self(self.result_cls)

    @property
    def AsyncResult(self):
        # Expose the customised result class via the standard Celery attribute name.
        return self._result_cls
| StarcoderdataPython |
233192 | <gh_stars>10-100
from os import walk
def getFileName(name):
    """Collect the module names imported by the given Python source file.

    Scans every line that contains the word ``import``.  For a
    ``from X import Y`` line the package ``X`` is recorded; for a plain
    ``import X`` line the last whitespace-separated token is recorded
    (comma-separated imports are split up later by ``addin``).

    :param name: path of the file to scan
    :return: list of module-name tokens found in the file
    """
    # Fix: use a context manager -- the original opened the file in 'r+'
    # mode and never closed it, leaking the handle.
    with open(name, 'r') as source:
        lines = source.read().split("\n")
    data = []
    for line in lines:
        # NOTE(review): this also matches lines that merely contain the word
        # "import" (e.g. inside strings or comments) -- behaviour kept from
        # the original.
        if 'import' in line:
            parts = line.split(' ')
            if 'from' in line:
                # "from X import Y" -> record the package name X.
                data.append(parts[1])
            else:
                # "import X" -> record the last token on the line.
                data.append(parts[-1])
    print(data)
    return data
# Collect the file names in the current directory only -- the loop breaks
# after the first os.walk() result, so subdirectories are not scanned.
f = []
for (dirpath, dirnames, filenames) in walk("."):
    f.extend(filenames)
    break
print(f)
# Accumulates every module name seen across all scanned files.
modulesName = []
# Report file listing each scanned file and its imported modules.
fileWrite = open('yourmodules.txt', 'w+')
def addin(data):
    """Split comma-separated module tokens and record each module name.

    Appends every module name to the global ``modulesName`` list and writes
    it to the global report file ``fileWrite``.

    :param data: list of module-name tokens as returned by ``getFileName``
    """
    for i in range(0, len(data)):
        if ',' in data[i]:
            # Token such as "os," or "a,b" -- split on commas first.
            temp = data[i].split(',')
            # NOTE(review): the inner loop reuses the name i, shadowing the
            # outer index -- harmless with range() but kept from the original.
            for i in range(0, len(temp)):
                modulesName.append(temp[i])
                fileWrite.write(temp[i] + '\n')
        else:
            modulesName.append(data[i])
            fileWrite.write(data[i] + '\n')
for i in range(0,len(f)):
try:
print(f[i])
fileWrite.write(f[i]+'\n')
data=getFileName(f[i])
addin(data)
fileWrite.write('\n')
except:
fileWrite.write(f[i]+'\n\n')
print('Cannot Process File !!!')
pass
fileWrite.close()
#if file name is present then do not get fro modules
| StarcoderdataPython |
6450113 | from .models import Member
from django import forms
class MemberForm(forms.Form):
    """Form collecting a member's contact details.

    NOTE(review): field labels and max_length values presumably mirror the
    Member model imported above -- confirm against the model definition.
    """
    first_name = forms.CharField(label='First Name', max_length=50, required=True)
    surname = forms.CharField(label='Surname', max_length=50, required=True)
    email = forms.EmailField(label='Email', max_length=100, required=True)
    # Address is the only optional field.
    address = forms.CharField(label='Address', max_length=150, required=False)
| StarcoderdataPython |
3595153 | #!/usr/bin/env python3.8
from account import Account
from credentials import Credentials
def create_account(account_user_name, account_password):
    """
    Function to create a new account.

    :param account_user_name: username for the new account
    :param account_password: password for the new account
    :return: the newly created Account instance (not yet saved)
    """
    new_account = Account(account_user_name, account_password)
    return new_account
def save_account(account):
    '''
    Function to save the given account via its own save method.
    '''
    account.save_this_account()
def delete_accounts(account):
    """
    Function to delete the given account.
    """
    account.delete_account()
def create_credentials(social_media, user_name, password):
    """
    Function to create a new credential.

    :param social_media: name of the social-media service
    :param user_name: username for that service
    :param password: password for that service
    :return: the newly created Credentials instance (not yet saved)
    """
    new_credentials = Credentials(social_media, user_name, password)
    return new_credentials
def save_credentials(credentials):
    """
    Function to save the given credentials via their own save method.
    """
    credentials.save_this_credentials()
def display_all_credential():
    """
    Function that returns all saved credentials.

    :return: the collection held by Credentials.display_credentials()
    """
    return Credentials.display_credentials()
def password_creator():
    """
    Function that generates a random password for the user.

    :return: the generated password string
    """
    return Credentials.password_generate()
def delete_credential(credentials):
    """
    Function to delete the given credentials.
    """
    credentials.delete_credentials()
def find_credential(social_media):
    """
    Function that searches the saved credentials by social-media name and
    returns the matching Credentials instance.
    """
    return Credentials.find_credentials(social_media)
def credential_exists(social_media):
    """
    Function that checks whether credentials exist for the given
    social-media name and returns True or False.
    """
    return Credentials.credentials_exist(social_media)
def _obtain_password():
    """Interactively obtain a password: typed by the user or randomly generated.

    Loops until a password has actually been chosen.  The original version
    could fall through on invalid input and later crash with a NameError on
    an unbound password variable; it also contained dataset-redacted
    ``<PASSWORD>`` tokens that have been reconstructed here.

    :return: the chosen password string
    """
    while True:
        print("-Type your password: TP\n-Generate random Password: RP \n")
        option = input().lower().strip()
        if option == 'tp':
            return input("Enter password of your choice\n")
        elif option == 'rp':
            generated = password_creator()
            print(f"random password is {generated} \n enter CH to use it or \n enter TP to type password of your choice")
            choice = input().lower().strip()
            if choice == 'ch':
                return generated
            elif choice == 'tp':
                return input("Enter password of your choice\n")
            print("Invalid password please try again")
        else:
            print("Invalid password please try again")


def main():
    """Command-line entry point for the password locker.

    Handles sign-up (or the stub login flow), then loops over the
    credential-management menu until the user exits.
    """
    print('=' * 80)
    print('*' * 80)
    print('-' * 80)
    print("          PASSWORD LOCKER (enjoy your safety)")
    print('-' * 80)
    print('*' * 80)
    print('=' * 80)
    print("\n What is your name")
    name = input()
    print(f"\n Hello {name},Good to see you. Use the following short codes to:")
    print('\n')
    print("-Create New account: CNA\n-Login to your account: LG \n")
    short_code = input("").lower().strip()

    if short_code == "cna":
        print('\n')
        print("Sign Up")
        print('-' * 40)
        account_user_name = input("username:")
        print('\n')
        account_password = _obtain_password()
        # Create and persist the new account.
        save_account(create_account(account_user_name, account_password))
        print('\n')
        print("*" * 65)
        print(f"Thanks {account_user_name}, Your account has been created succesfully!")
        print(f"Your Username is: {account_user_name}")
        print(f"Your password is: {account_password}")
        print("*" * 65)
        print('\n')
    elif short_code == "lg":
        username = input("username: ")
        password = input("password: ")
        print("\n")
        # NOTE(review): as in the original, no account lookup is performed
        # here -- login always reports that no account exists and exits.
        print("You don't seem to have a signed up account,\n please try to start again and sign up. \n Byee ****")
        print("*" * 65)
        print("\n")
        import sys
        sys.exit()
    else:
        print("Use specified short Codes!!!!\n Byeee****")
        print("\n")
        import sys
        sys.exit()

    while True:
        print("Use these short codes:")
        print('-' * 40)
        print(" -Save already existing credentials: SC\n -Create new credentials:CC\n -Dispaly credentials: DC\n -Find a credential saved credentials using social media name : FC\n -Delete credential: RC\n -Exit the application:EX\n")
        short_code = input().lower().strip()

        if short_code == 'sc':
            print("Enter social media name")
            social_media = input()
            print(f"Enter {social_media} username")
            user_name = input()
            print(f"Enter {social_media} password")
            password = input()
            # Create and save the supplied credentials.
            save_credentials(create_credentials(social_media, user_name, password))
            print('\n')
            print("*" * 65)
            print(f"Thefollowing credentails are saved:\n Social media name: {social_media}\n Username {user_name}\n Password {password}\n")
            print("*" * 65)
        elif short_code == 'cc':
            print("Create social media name")
            social_media = input()
            print(f"Create {social_media} username")
            user_name = input()
            print(f"Create password for {social_media}")
            password = _obtain_password()
            save_credentials(create_credentials(social_media, user_name, password))
            print('\n')
            print("*" * 65)
            print(f"Thefollowing credentails are created:\n Social media name: {social_media}\n Username {user_name}\n Password {password}")
            print("*" * 65)
        elif short_code == 'dc':
            if display_all_credential():
                print("Here is a list of all your credentials")
                print('-' * 40)
                print('\n')
                for credentials in display_all_credential():
                    print(f"Social media: {credentials.social_media} username: {credentials.user_name} password: {credentials.password}")
                print("*" * 70)
                print('\n')
            else:
                print('\n')
                print("You dont seem to have any contacts saved yet")
                print('\n')
        elif short_code == 'fc':
            print("Enter the social media name you want to search for")
            print('\n')
            search_name = input()
            if credential_exists(search_name):
                search_social_media = find_credential(search_name)
                print(f"username:{search_social_media.user_name}\n password: {search_social_media.password} ")
            else:
                print("That social media does not exist")
            print('*' * 50)
            print('\n')
        elif short_code == 'rc':
            print('\n')
            print("Enter the name of social media you want to delete it's credentials")
            print('\n')
            search_name = input()
            if credential_exists(search_name):
                search_social_media = find_credential(search_name)
                search_social_media.delete_credentials()
                print(f"{search_social_media.social_media} credentials were succefully deleted")
                print('*' * 50)
                print('\n')
            else:
                print("That social media does not exist")
                print('*' * 50)
                print('\n')
        elif short_code == "ex":
            print("Bye see you next time **")
            break
        else:
            print("I really didn't get that. Please use the short codes")


if __name__ == '__main__':
    main()
| StarcoderdataPython |
368686 | <reponame>VincentDehaye/recommender-system-liu<gh_stars>0
"""
Purpose: Retrieve users from table in database
"""
from Product.Database.DBConn import User, Rating
from Product.Database.DatabaseManager.Retrieve.Retrieve import Retrieve
from sqlalchemy import desc
class RetrieveUser(Retrieve):
    """
    Author:<NAME>
    Date: 2017-11-14
    Last update: 2017-11-14
    Purpose: Retrieve users from table in database
    """

    def retrieve_all_users(self):
        """
        Purpose: retrieve all users from the user table.

        :return users: list of User instances
        """
        users = self.session.query(User).all()
        self.session.close()
        return users

    def check_if_user_in_rating(self, user_id):
        """
        Purpose: return the first Rating row for the given user, or None.

        NOTE(review): despite the name, this returns a Rating object (or
        None), not a boolean, and the session is left open -- see the
        outstanding TODOs before changing callers.
        """
        # TODO Check so that rating is not null, return boolean instead of object
        # TODO write unit test for this method
        return self.session.query(Rating).filter_by(user_id=user_id).first()

    def retrieve_largest_user_id(self):
        """
        Purpose: get the id of the user with the highest id.

        :return: the largest user id, or None when the user table is empty
        """
        user = self.session.query(User).order_by(desc(User.id)).limit(1).first()
        self.session.close()
        # Fix: the original dereferenced user.id unconditionally and raised
        # AttributeError when the table was empty.
        return user.id if user is not None else None
| StarcoderdataPython |
3578737 | <reponame>erelsgl/family-fair-allocation
#!python3
"""
Defines various useful fairness criteria to use with fair allocation algorithms.
"""
from abc import ABC, abstractmethod # Abstract Base Class
from agents import Agent, BinaryAgent
import math
class FairnessCriterion(ABC):
    """
    A fairness criterion for a general instance is a function
    that takes an agent and an allocation, and returns True iff
    the allocation is fair for the agent.

    A fairness criterion for a binary instance is an integer function s(r),
    where r is the number of goods that an agent values at 1,
    and s is the number of goods that this agent should receive in order to satisfy the criterion.
    """

    def __init__(self, name: str, abbreviation: str):
        # Human-readable name and short label used when reporting results.
        self.name = name
        self.abbreviation = abbreviation

    @abstractmethod
    def target_value_for_agent(self, agent: Agent) -> int:
        """
        :param agent: an Agent object.
        :return: The value that this agent should get in order to satisfy the fairness criterion.
        """

    @abstractmethod
    def target_value_for_binary(self, total_value: int) -> int:
        """
        :param total_value: The total value of all goods, in the eyes of a particular agent.
        :return: The value that this agent should get in order to satisfy the fairness criterion.
        Relevant mainly for binary instances.
        """

    def is_fair_for(self, agent: Agent, own_bundle: set, all_bundles: list) -> bool:
        """
        The default fairness calculation - checks if the bundle's value is above the agent's target value.
        Relevant for fairness criteria that ignore the other bundles, e.g. proportionality or MMS.

        :param agent: An agent in some family.
        :param own_bundle: The bundle allocated to the agent's family.
        :param all_bundles: The list of bundles allocated to all families (a list of sets).
        :return: True iff the agent finds the allocation fair, according to the fairness criterion.
        """
        return agent.value(own_bundle) >= self.target_value_for_agent(agent)
class OneOfBestC(FairnessCriterion):
    """
    The "1 out of best c" fairness criterion: an agent is satisfied by
    receiving at least the value of one of its c highest-valued goods.

    >>> criterion=OneOfBestC(3)
    >>> [criterion.target_value_for_binary(r) for r in range(10)]
    [0, 0, 0, 1, 1, 1, 1, 1, 1, 1]
    """

    def __init__(self, c: int):
        name = "one-of-best-{}".format(c)
        abbreviation = "1-of-best-{}".format(c)
        super().__init__(name, abbreviation)
        self.c = c

    def target_value_for_agent(self, agent: Agent) -> int:
        # The agent should receive at least the value of its c-th best good.
        return agent.value_of_cth_best_good(self.c)

    def target_value_for_binary(self, total_value: int) -> int:
        # With binary valuations the target is one good, reachable only
        # when the agent values at least c goods.
        if total_value >= self.c:
            return 1
        return 0
class MaximinShareOneOfC(FairnessCriterion):
    """
    Returns the fairness criterion "1 of c maximin-share", optionally as a
    multiplicative approximation of it.

    >>> criterion=MaximinShareOneOfC(3)
    >>> [criterion.target_value_for_binary(r) for r in range(10)]
    [0, 0, 0, 1, 1, 1, 2, 2, 2, 3]
    """
    def __init__(self, c: int, approximation_factor: float = 1):
        # Name the criterion differently when it is only an approximation.
        if approximation_factor == 1:
            name = "1-out-of-{}-maximin-share".format(c)
            abbreviation = "1-of-{}-MMS".format(c)
        else:
            name = "{}-fraction 1-out-of-{}-maximin-share".format(approximation_factor, c)
            abbreviation = "{}-fraction 1-of-{}-MMS".format(approximation_factor, c)
        super().__init__(name, abbreviation)
        self.c = c
        self.approximation_factor = approximation_factor

    def target_value_for_agent(self, agent: Agent) -> int:
        return agent.value_1_of_c_MMS(c=self.c, approximation_factor=self.approximation_factor)

    def target_value_for_binary(self, total_value: int) -> int:
        # In the binary case the 1-of-c MMS equals floor(total/c) goods,
        # scaled by the approximation factor.
        return math.floor(total_value / self.c) * self.approximation_factor
class EnvyFreeExceptC(FairnessCriterion):
    """
    Returns the fairness criterion "EFc" (envy-free except c goods).
    Currently, only c=1 is supported.
    """
    def __init__(self, c: int):
        super().__init__("envy-free-except-{}".format(c), "EF{}".format(c))
        self.c = c

    def target_value_for_agent(self, agent: Agent) -> int:
        # Envy-freeness is relative to the other bundles, so a single
        # target value is meaningless for this criterion.
        raise ValueError("target value is not relevant for envy-freeness concepts")

    def target_value_for_binary(self, total_value: int) -> int:
        raise ValueError("target value is not relevant for envy-freeness concepts")

    def is_fair_for(self, agent: Agent, own_bundle: set, all_bundles: list) -> bool:
        # Delegate the EFc check to the agent, comparing against all bundles.
        return agent.is_EFc(own_bundle, all_bundles, self.c)
class ProportionalExceptC(FairnessCriterion):
    """
    The "proportional except c" criterion (PROP-c): the agent's value
    should be at least 1/n times the value of the set of all goods minus
    the c best goods.

    >>> criterion=ProportionalExceptC(num_of_agents=2, c=1)
    >>> [criterion.target_value_for_binary(r) for r in range(10)]
    [0, 0, 1, 1, 2, 2, 3, 3, 4, 4]
    """

    def __init__(self, num_of_agents: int, c: int):
        super().__init__(
            "proportionality-except-{}".format(c), "PROP-{}".format(c)
        )
        self.num_of_agents = num_of_agents
        self.c = c

    def target_value_for_agent(self, agent: Agent) -> int:
        return agent.value_proportional_except_c(
            num_of_agents=self.num_of_agents, c=self.c
        )

    def target_value_for_binary(self, total_value: int) -> int:
        # Discard the c best goods, take a 1/n share, and never go below 0.
        share = math.ceil((total_value - self.c) / self.num_of_agents)
        return max(0, share)
if __name__ == "__main__":
    # Run the doctests embedded in the class docstrings above.
    import doctest
    (failures, tests) = doctest.testmod(report=True)
    print("{} failures, {} tests".format(failures, tests))
| StarcoderdataPython |
6560560 | <gh_stars>0
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404
from django.views.generic import DetailView
from django_filters.rest_framework import DjangoFilterBackend
from guardian.mixins import (
LoginRequiredMixin,
PermissionListMixin,
PermissionRequiredMixin as ObjectPermissionRequiredMixin,
)
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.exceptions import ValidationError
from rest_framework.mixins import (
CreateModelMixin,
ListModelMixin,
RetrieveModelMixin,
)
from rest_framework.permissions import DjangoObjectPermissions
from rest_framework.response import Response
from rest_framework.settings import api_settings
from rest_framework.viewsets import GenericViewSet, ReadOnlyModelViewSet
from rest_framework_csv.renderers import PaginatedCSVRenderer
from rest_framework_guardian.filters import ObjectPermissionsFilter
from grandchallenge.algorithms.tasks import create_algorithm_jobs_for_session
from grandchallenge.archives.tasks import add_images_to_archive
from grandchallenge.cases.models import (
Image,
ImageFile,
RawImageFile,
RawImageUploadSession,
)
from grandchallenge.cases.serializers import (
HyperlinkedImageSerializer,
RawImageFileSerializer,
RawImageUploadSessionPatchSerializer,
RawImageUploadSessionSerializer,
)
from grandchallenge.core.permissions.rest_framework import (
DjangoObjectOnlyWithCustomPostPermissions,
)
from grandchallenge.datatables.views import Column, PaginatedTableListView
from grandchallenge.jqfileupload.widgets.uploader import StagedAjaxFile
from grandchallenge.reader_studies.tasks import (
add_image_to_answer,
add_images_to_reader_study,
)
from grandchallenge.subdomains.utils import reverse_lazy
class RawImageUploadSessionList(
    LoginRequiredMixin, PermissionListMixin, PaginatedTableListView,
):
    """Paginated table of upload sessions, filtered to those the user may view."""

    model = RawImageUploadSession
    # Object-level "view" permission name, derived from the model meta.
    permission_required = f"{RawImageUploadSession._meta.app_label}.view_{RawImageUploadSession._meta.model_name}"
    login_url = reverse_lazy("userena_signin")
    row_template = "cases/rawimageuploadsession_row.html"
    search_fields = [
        "pk",
    ]
    columns = [
        Column(title="ID", sort_field="pk"),
        Column(title="Created", sort_field="created"),
        Column(title="Status", sort_field="status"),
    ]
    # Index into `columns`: sort by "Created" by default.
    default_sort_column = 1
class RawImageUploadSessionDetail(
    LoginRequiredMixin, ObjectPermissionRequiredMixin, DetailView
):
    """Detail page for one upload session; requires object-level view permission."""

    model = RawImageUploadSession
    permission_required = f"{RawImageUploadSession._meta.app_label}.view_{RawImageUploadSession._meta.model_name}"
    # Return 403 rather than redirecting when permission is denied.
    raise_exception = True
    login_url = reverse_lazy("userena_signin")
class OSDImageDetail(
    LoginRequiredMixin, ObjectPermissionRequiredMixin, DetailView
):
    """Image detail view for OpenSeadragon; requires the image to have a DZI file."""

    model = Image
    permission_required = (
        f"{Image._meta.app_label}.view_{Image._meta.model_name}"
    )
    # Return 403 rather than redirecting when permission is denied.
    raise_exception = True
    login_url = reverse_lazy("userena_signin")

    def get_context_data(self, **kwargs):
        """Add the image's DZI file URL to the context; 404 if there is no DZI."""
        context = super().get_context_data(**kwargs)
        try:
            dzi = self.object.files.get(image_type=ImageFile.IMAGE_TYPE_DZI)
        except ObjectDoesNotExist:
            # No deep-zoom representation exists for this image.
            raise Http404
        context.update({"dzi_url": dzi.file.url})
        return context
class ImageViewSet(ReadOnlyModelViewSet):
    """Read-only API endpoint for images the requesting user may view."""

    serializer_class = HyperlinkedImageSerializer
    # Prefetch the related objects the serializer touches to avoid N+1 queries.
    queryset = Image.objects.all().prefetch_related(
        "files",
        "archive_set",
        "componentinterfacevalue_set__algorithms_jobs_as_input",
        "readerstudies",
    )
    permission_classes = (DjangoObjectPermissions,)
    # Query-parameter filtering plus per-object permission filtering.
    filter_backends = (
        DjangoFilterBackend,
        ObjectPermissionsFilter,
    )
    filterset_fields = (
        "study",
        "origin",
        "archive",
    )
    # Also expose results as paginated CSV, in addition to the defaults.
    renderer_classes = (
        *api_settings.DEFAULT_RENDERER_CLASSES,
        PaginatedCSVRenderer,
    )
class RawImageUploadSessionViewSet(
    CreateModelMixin, RetrieveModelMixin, ListModelMixin, GenericViewSet
):
    """API endpoint for creating, listing and processing raw-image upload sessions."""

    queryset = RawImageUploadSession.objects.all()
    permission_classes = [DjangoObjectOnlyWithCustomPostPermissions]
    # Only return sessions the requesting user has object permissions on.
    filter_backends = [ObjectPermissionsFilter]

    def perform_create(self, serializer):
        # Record the requesting user as the session's creator.
        serializer.save(creator=self.request.user)

    def get_serializer_class(self):
        # PATCH (used by process_images below) takes a reduced serializer.
        if self.request.method == "PATCH":
            return RawImageUploadSessionPatchSerializer
        else:
            return RawImageUploadSessionSerializer

    def validate_staged_files(self, *, staged_files):
        """Check that every staged file is present, uniquely named and within limits.

        :param staged_files: RawImageFile queryset/iterable for the session.
        :raises ValidationError: if any file is unstaged or missing, if
            filenames collide, or if the combined size exceeds
            ``settings.UPLOAD_SESSION_MAX_BYTES``.
        """
        file_ids = [f.staged_file_id for f in staged_files]
        if any(f_id is None for f_id in file_ids):
            raise ValidationError("File has not been staged")

        files = [StagedAjaxFile(f_id) for f_id in file_ids]
        if not all(s.exists for s in files):
            raise ValidationError("File does not exist")

        if len({f.name for f in files}) != len(files):
            raise ValidationError("Filenames must be unique")

        if sum([f.size for f in files]) > settings.UPLOAD_SESSION_MAX_BYTES:
            raise ValidationError(
                "Total size of all files exceeds the upload limit"
            )

    @action(detail=True, methods=["patch"])
    def process_images(self, request, pk=None):
        """Queue image processing for a pending upload session.

        Validates the staged files, then starts processing with a linked
        follow-up task derived from the patch payload (algorithm, archive,
        reader study or answer). Responds 200 when queued, 400 otherwise.
        """
        upload_session: RawImageUploadSession = self.get_object()

        serializer = self.get_serializer(
            upload_session, data=request.data, partial=True
        )

        if serializer.is_valid():
            try:
                self.validate_staged_files(
                    staged_files=upload_session.rawimagefile_set.all()
                )
            except ValidationError as e:
                return Response(str(e), status=status.HTTP_400_BAD_REQUEST)

            # Only sessions that are still pending and whose files have not
            # already been consumed by an earlier run may be processed.
            if (
                upload_session.status == upload_session.PENDING
                and not upload_session.rawimagefile_set.filter(
                    consumed=True
                ).exists()
            ):
                upload_session.process_images(
                    linked_task=self.get_linked_task(
                        validated_data=serializer.validated_data
                    )
                )
                return Response(
                    "Image processing job queued.", status=status.HTTP_200_OK
                )
            else:
                return Response(
                    "Image processing job could not be queued.",
                    status=status.HTTP_400_BAD_REQUEST,
                )
        else:
            return Response(
                serializer.errors, status=status.HTTP_400_BAD_REQUEST
            )

    def get_linked_task(self, *, validated_data):
        """Build the follow-up Celery task signature for the session's target.

        Exactly one of ``algorithm`` / ``archive`` / ``reader_study`` /
        ``answer`` must be present in ``validated_data``.

        :raises RuntimeError: if none of the expected keys is present.
        """
        if "algorithm" in validated_data:
            return create_algorithm_jobs_for_session.signature(
                kwargs={
                    "algorithm_image_pk": validated_data[
                        "algorithm"
                    ].latest_ready_image.pk
                },
                immutable=True,
            )
        elif "archive" in validated_data:
            return add_images_to_archive.signature(
                kwargs={"archive_pk": validated_data["archive"].pk},
                immutable=True,
            )
        elif "reader_study" in validated_data:
            return add_images_to_reader_study.signature(
                kwargs={"reader_study_pk": validated_data["reader_study"].pk},
                immutable=True,
            )
        elif "answer" in validated_data:
            return add_image_to_answer.signature(
                kwargs={"answer_pk": validated_data["answer"].pk},
                immutable=True,
            )
        else:
            raise RuntimeError(
                "Algorithm image, archive or reader study must be set"
            )
class RawImageFileViewSet(
    CreateModelMixin, RetrieveModelMixin, ListModelMixin, GenericViewSet
):
    """API endpoint for creating, retrieving and listing raw image files."""

    serializer_class = RawImageFileSerializer
    queryset = RawImageFile.objects.all()
    permission_classes = [DjangoObjectOnlyWithCustomPostPermissions]
    # Only return files the requesting user has object permissions on.
    filter_backends = [ObjectPermissionsFilter]
| StarcoderdataPython |
9679723 | import numpy as np
import statsmodels as sm
import math
import matplotlib.pyplot as plt
from scipy.integrate import quad
import sys
import os
import logging
from brd_mod.brdstats import *
from brd_mod.brdecon import *
def meter_to_mi(x):
    """Convert a length from metres to miles.

    Uses the standard conversion factor 1 m == 0.000621371 mi; works
    elementwise on numpy arrays as well, since it is pure arithmetic.

    :param x: length in metres
    :return: length in miles
    """
    miles_per_metre = 0.000621371
    return miles_per_metre * x
def deg_to_rad(x):
    """Convert an angle from degrees to radians.

    Pure arithmetic, so it also works elementwise on numpy arrays.

    :param x: angle in degrees
    :return: angle in radians
    """
    return math.pi * x / 180
def haversine_distance(t_a, n_a, t_b, n_b):
    """Great-circle distance in miles between two latitude/longitude points.

    Uses the haversine formula on a spherical Earth.

    :param t_a: latitude of point A in degrees
    :param n_a: longitude of point A in degrees
    :param t_b: latitude of point B in degrees
    :param n_b: longitude of point B in degrees
    :return: distance in miles
    """
    lat_a = deg_to_rad(t_a)
    lon_a = deg_to_rad(n_a)
    lat_b = deg_to_rad(t_b)
    lon_b = deg_to_rad(n_b)
    # Earth radius in metres (spherical approximation used by this module).
    R = 6373000
    delta_lat = lat_b - lat_a
    delta_lon = lon_b - lon_a
    a_var = np.sin(delta_lat / 2) ** 2 + np.cos(lat_a) \
        * np.cos(lat_b) * np.sin(delta_lon / 2) ** 2
    c_var = 2 * (np.arctan2(math.sqrt(a_var), math.sqrt(1 - a_var)))
    # Fix: the original return line had dataset junk fused onto it,
    # which made the statement syntactically invalid.
    return meter_to_mi(R * c_var)
3429100 | import datetime
import Queue
from abc import ABCMeta, abstractmethod
from event import FillEvent, OrderEvent
from event import events
class ExecutionHandler(object):
    """Abstract base class for order-execution handlers.

    Subclasses receive OrderEvents via execute_order and are expected to
    place corresponding FillEvents on the event queue.
    """
    # NOTE(review): Python-2 style metaclass declaration; under Python 3
    # this assignment has no effect, so abstractness is advisory only.
    __metaclass__ = ABCMeta

    def __init__(self, events, commission):
        """
        :param events: event queue onto which FillEvents are placed
        :param commission: commission applied to each fill
        """
        self.events = events
        self.commission = commission

    @abstractmethod
    def execute_order(self, event):
        """Execute the given OrderEvent and enqueue the resulting FillEvent."""
        # Fix: the original raised NotImplemented, which is a constant (not
        # an exception class) and itself triggers a TypeError when raised.
        raise NotImplementedError('Should implement execute_order()')
class SimulatedExecutionHandler(ExecutionHandler):
    """Naive simulated execution handler: fills every order immediately.

    Ignores latency, slippage and partial fills; uses the module-level
    ``events`` queue (imported from ``event``) and a fixed commission.
    """

    def __init__(self, commission):
        # The module-level `events` queue is passed to the base class.
        super(SimulatedExecutionHandler, self).__init__(events, commission)

    def execute_order(self, orderevent):
        """Convert an OrderEvent directly into a FillEvent on the queue.

        :param orderevent: the order to fill; ``orderevent.live`` selects
            wall-clock UTC time, otherwise the order's own ``dt`` is used.
        """
        if orderevent.live:
            time = datetime.datetime.utcnow()
        else:
            time = orderevent.dt
        fill_event = FillEvent(timeindex = time,
                               symbol = orderevent.symbol,
                               exchange = 'BLUE SEA',  # placeholder exchange name for the simulation
                               quantity_l = orderevent.quantity_l,
                               quantity_s = orderevent.quantity_s,
                               signal_type = orderevent.signal_type,
                               direction = orderevent.direction,
                               price = orderevent.price,
                               commission = self.commission)
        self.events.put(fill_event)
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.