id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7
values |
|---|---|---|
43337 | import tensorflow as tf
if __name__ == "__main__":
    with tf.Session() as sess:
        game_dir = "Gobang"
        model_dir = "model2_10_10_5"
        batch = "11000"
        # Directory holding the checkpoint files for this model/batch.
        ckpt_dir = "../" + game_dir + "/" + model_dir + "/" + batch
        # Initialize variables (they are overwritten by the restore below).
        sess.run(tf.global_variables_initializer())
        # Locate the latest checkpoint (parses the 'checkpoint' index file).
        latest_ckpt = tf.train.latest_checkpoint(ckpt_dir)
        # Load the graph definition from the .meta file.
        restore_saver = tf.train.import_meta_graph(ckpt_dir + "/policy_value_net.model.meta")
        # Restore the graph, i.e. put the saved weights back into their places.
        restore_saver.restore(sess, latest_ckpt)
        # Freeze: convert the graph's variables into constants, keeping only
        # the subgraph needed to produce the two named output tensors.
        output_graph_def = tf.graph_util.convert_variables_to_constants(
            sess, sess.graph_def, ["action_fc/LogSoftmax", "evaluation_fc2/Tanh"])
        # Write the frozen graph to "<ckpt_dir>/graph.bytes" (binary format).
        tf.train.write_graph(output_graph_def, ckpt_dir, "graph.bytes", as_text=False)
# | StarcoderdataPython |
3373577 | <filename>src/generator.py
import torch
import torch.nn as nn
import torch.nn.functional as F
# Base Class for Generator CNN
class Generator(nn.Module):
    """DCGAN-style generator: maps a latent vector to a 3-channel 32x32 image.

    The latent vector of size `z_size` is projected by a fully connected layer
    to a (conv_dim, 2, 2) feature map, then upsampled x2 four times by strided
    transposed convolutions: 2 -> 4 -> 8 -> 16 -> 32.
    """

    def __init__(self, z_size, conv_dim):
        super(Generator, self).__init__()
        self.conv_dim = conv_dim
        # Each transposed conv doubles the spatial size (kernel=4, stride=2, pad=1).
        self.t_conv1 = nn.ConvTranspose2d(conv_dim, conv_dim*8, kernel_size=4, stride=2, padding=1, bias=False)
        self.batch_norm1 = nn.BatchNorm2d(conv_dim*8)
        self.t_conv2 = nn.ConvTranspose2d(conv_dim*8, conv_dim*4, kernel_size=4, stride=2, padding=1, bias=False)
        self.batch_norm2 = nn.BatchNorm2d(conv_dim*4)
        self.t_conv3 = nn.ConvTranspose2d(conv_dim*4, conv_dim*2, kernel_size=4, stride=2, padding=1, bias=False)
        self.batch_norm3 = nn.BatchNorm2d(conv_dim*2)
        # Final layer outputs 3 image channels; no batch norm before tanh.
        self.t_conv4 = nn.ConvTranspose2d(conv_dim*2, 3, kernel_size=4, stride=2, padding=1, bias=False)
        # Projects z to conv_dim*4 features == (conv_dim, 2, 2) after reshape.
        self.fc = nn.Linear(z_size, conv_dim*4)
        print('z_size', z_size)

    def forward(self, x):
        batch_s = x.shape[0]
        x = self.fc(x)
        x = x.view(batch_s, self.conv_dim, 2, 2)
        x = F.relu(self.batch_norm1(self.t_conv1(x)))
        x = F.relu(self.batch_norm2(self.t_conv2(x)))
        x = F.relu(self.batch_norm3(self.t_conv3(x)))
        x = self.t_conv4(x)
        # torch.tanh: F.tanh is deprecated; squashes output to [-1, 1].
        x = torch.tanh(x)
        return x
# | StarcoderdataPython |
109062 | # Generated by Django 3.0.6 on 2020-05-23 10:49
from django.db import migrations
class Migration(migrations.Migration):
    """Clear Image's Meta options and rename `category` to `image_category`."""

    dependencies = [
        ('photos', '0002_auto_20200523_1317'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='image',
            options={},
        ),
        migrations.RenameField(
            model_name='image',
            old_name='category',
            new_name='image_category',
        ),
    ]
| StarcoderdataPython |
1774946 | <reponame>yarenty/mindsdb
import gunicorn.app.base
class StandaloneApplication(gunicorn.app.base.BaseApplication):
    """Run a WSGI app under gunicorn programmatically (no CLI or config file).

    Follows the "Custom Application" pattern from the gunicorn documentation:
    the options dict is fed into gunicorn's config in load_config().
    """

    def __init__(self, app, options=None):
        # Copy-or-default: gunicorn reads self.options in load_config().
        self.options = options or {}
        self.application = app
        super().__init__()

    def load_config(self):
        # Forward only options gunicorn actually recognises, skipping None values.
        config = {key: value for key, value in self.options.items()
                  if key in self.cfg.settings and value is not None}
        for key, value in config.items():
            self.cfg.set(key.lower(), value)

    def load(self):
        # Called by gunicorn to obtain the WSGI callable.
        return self.application
| StarcoderdataPython |
1669836 | from random import randint
# Even/odd game: player and computer each pick a number in 0..10; the player
# wins rounds while correctly guessing the parity of the sum. The game ends
# (and the win count is printed) on the first wrong guess.
vitorias = 0
print("Vamos jogar um jogo!")
while True:
    # Read the player's number until it is within 0..10.
    while True:
        jogador = int(input("Escolha um número: ").strip())
        if jogador in range(0, 11):
            break
    computador = randint(0, 10)
    # Read the player's parity choice: even (P) or odd (I).
    while True:
        escolha = str(input("Você quer par (P) ou ímpar (I)? ").strip().upper()[0])
        if escolha in 'PI':
            break
    resultado = jogador + computador
    if resultado % 2 == 0:
        if escolha == 'P':
            print(f"Parabéns, você venceu! Escolhes-te {jogador} e eu, {computador}, o que deu {resultado}.")
            vitorias += 1
        else:
            print(f"Desculpe, você perdeu. Escolhes-te {jogador} e eu, {computador}, o que deu {resultado}.")
            print(f"Você venceu {vitorias} partida(s).")
            break
    else:
        if escolha == 'I':
            print(f"Parabéns, você venceu! Escolhes-te {jogador} e eu, {computador}, o que deu {resultado}.")
            vitorias += 1
        else:
            print(f"Desculpe, você perdeu. Escolhes-te {jogador} e eu, {computador}, o que deu {resultado}.")
            print(f"Você venceu {vitorias} partida(s).")
            break
| StarcoderdataPython |
183480 | """Script that finds faces and blurs using FaceDetection and blurring APIs."""
import argparse
import cv2
import numpy as np
import torch
import kornia as K
from kornia.contrib import FaceDetector, FaceDetectorResult, FaceKeypoint
def draw_keypoint(img: np.ndarray, det: FaceDetectorResult, kpt_type: FaceKeypoint) -> np.ndarray:
    """Draw one facial keypoint of `det` on `img` as a small circle.

    Note: the (255, 0, 0) colour is blue in the BGR image OpenCV uses here.
    """
    kpt = det.get_keypoint(kpt_type).int().tolist()
    return cv2.circle(img, kpt, 2, (255, 0, 0), 2)
def scale_image(img: np.ndarray, size: int) -> np.ndarray:
    """Resize `img` so its width becomes `size`, preserving the aspect ratio."""
    h, w = img.shape[:2]
    scale = 1. * size / w
    return cv2.resize(img, (int(w * scale), int(h * scale)))
def apply_blur_face(img: torch.Tensor, img_vis: np.ndarray, det: FaceDetectorResult):
    """Gaussian-blur the face box of `det` and paste it into `img_vis` in place.

    `img` is the RGB float tensor the detector ran on; `img_vis` is the BGR
    numpy visualisation image, modified in place.
    """
    # Crop the face region from the tensor image.
    x1, y1 = det.xmin.int(), det.ymin.int()
    x2, y2 = det.xmax.int(), det.ymax.int()
    roi = img[..., y1:y2, x1:x2]
    # Blur the crop, convert back to BGR, and write it into the visualisation.
    roi = K.filters.gaussian_blur2d(roi, (21, 21), (35., 35.))
    roi = K.color.rgb_to_bgr(roi)
    img_vis[y1:y2, x1:x2] = K.tensor_to_image(roi)
def my_app(args):
    """Detect faces in an image, optionally blur/annotate them, save and show."""
    # Select the device.
    device = torch.device('cpu')
    if args.cuda and torch.cuda.is_available():
        device = torch.device('cuda:0')
    # Load the image and scale it to the requested width.
    img_raw = cv2.imread(args.image_file, cv2.IMREAD_COLOR)
    img_raw = scale_image(img_raw, args.image_size)
    # Preprocess: HWC BGR uint8 numpy -> NCHW RGB float tensor.
    img = K.image_to_tensor(img_raw, keepdim=False).to(device)
    img = K.color.bgr_to_rgb(img.float())
    # Create the detector and find the faces.
    face_detection = FaceDetector().to(device)
    with torch.no_grad():
        dets = face_detection(img)
    dets = [FaceDetectorResult(o) for o in dets]
    # Draw the detections on a copy of the original image.
    img_vis = img_raw.copy()
    for b in dets:
        if b.score < args.vis_threshold:
            continue
        # Draw the face bounding box.
        img_vis = cv2.rectangle(
            img_vis, b.top_left.int().tolist(), b.bottom_right.int().tolist(), (0, 255, 0), 4)
        if args.blur_faces:
            apply_blur_face(img, img_vis, b)
        if args.vis_keypoints:
            # Draw the five facial keypoints.
            img_vis = draw_keypoint(img_vis, b, FaceKeypoint.EYE_LEFT)
            img_vis = draw_keypoint(img_vis, b, FaceKeypoint.EYE_RIGHT)
            img_vis = draw_keypoint(img_vis, b, FaceKeypoint.NOSE)
            img_vis = draw_keypoint(img_vis, b, FaceKeypoint.MOUTH_LEFT)
            img_vis = draw_keypoint(img_vis, b, FaceKeypoint.MOUTH_RIGHT)
        # Draw the detection score above the box.
        # NOTE(review): nesting reconstructed from a flattened source -- the
        # score text is assumed to be drawn for every detection, not only
        # when --vis_keypoints is set; confirm against the upstream example.
        cx = int(b.xmin)
        cy = int(b.ymin + 12)
        img_vis = cv2.putText(
            img_vis, f"{b.score:.2f}", (cx, cy), cv2.FONT_HERSHEY_DUPLEX, 0.5, (255, 255, 255))
    # Save and show the result.
    cv2.imwrite(args.image_out, img_vis)
    cv2.namedWindow('face_detection', cv2.WINDOW_NORMAL)
    cv2.imshow('face_detection', img_vis)
    cv2.waitKey(0)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Face and Landmark Detection')
    parser.add_argument('--image_file', required=True, type=str, help='the image file to be detected.')
    parser.add_argument('--image_out', required=True, type=str, help='the file path to write the output.')
    parser.add_argument('--image_size', default=320, type=int, help='the image size to process.')
    parser.add_argument('--vis_threshold', default=0.8, type=float, help='visualization_threshold')
    parser.add_argument('--vis_keypoints', dest='vis_keypoints', action='store_true')
    parser.add_argument('--cuda', dest='cuda', action='store_true')
    parser.add_argument('--blur_faces', dest='blur_faces', action='store_true')
    args = parser.parse_args()
    my_app(args)
| StarcoderdataPython |
1645272 | <reponame>jarret/prototype
#!/usr/bin/env python3
# Copyright (c) 2020 <NAME>
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php
import os
import sys
import time
import json
import argparse
import logging
from configparser import ConfigParser
from twisted.internet import reactor
from lnd_grpc import Client
from moneysocket.lightning.lnd import Lnd
from terminus.app import Terminus
DEFAULT_LND_DIR = os.path.join(os.path.expanduser("~"), ".lnd")
print("lnd dir: %s" % DEFAULT_LND_DIR)
DEFAULT_WALLET_CONFIG = os.path.join(DEFAULT_LND_DIR,
                                     "./moneysocket-terminus.conf")
CONFIG_FILE_HELP = """ Configuration settings to app run instance with. """

# Parse the single --config argument and resolve it to an absolute path.
parser = argparse.ArgumentParser(prog="terminus-lnd-app.py")
parser.add_argument('-c', '--config', type=str,
                    default=DEFAULT_WALLET_CONFIG,
                    help=CONFIG_FILE_HELP)
settings = parser.parse_args()
settings.config = os.path.abspath(settings.config)
if not os.path.exists(settings.config):
    sys.exit("*** can't use config: %s" % settings.config)

config = ConfigParser()
config.read(settings.config)
logging.basicConfig(level=logging.DEBUG)

# LND gRPC connection parameters, read from the [LND] section of the config.
lnd_dir = config['LND']['LndDir']
macaroon_path = config['LND']['MacaroonPath']
tls_cert_path = config['LND']['TlsCertPath']
network = config['LND']['Network']
grpc_host = config['LND']['GrpcHost']
grpc_port = int(config['LND']['GrpcPort'])

# Connect to the node, wrap it, and run the Terminus app on the reactor.
c = Client(lnd_dir, macaroon_path, tls_cert_path, network, grpc_host, grpc_port)
print(c.get_info())
lnd = Lnd(c)
app = Terminus(config, lnd)
app.run_app()
reactor.run()
| StarcoderdataPython |
1626150 | from utils import *
from utils import DatasetFolderV12 as DatasetFolder
import numpy as np
from fastprogress import master_bar,progress_bar
import time
import h5py
import os
import argparse
def write_data(data, filename):
    """Write `data` to `filename` as a gzip-compressed HDF5 dataset 'array'."""
    # Context manager closes the file even if create_dataset raises
    # (the original left the handle open on error).
    with h5py.File(filename, 'w', libver='latest') as f:
        f.create_dataset('array', shape=(data.shape), data=data, compression='gzip', compression_opts=9)
def getArgs(argv=None):
    """Parse command-line arguments for the prediction-assembly script.

    Args:
        argv: optional list of argument strings; None (the default) keeps the
            original behaviour of reading sys.argv[1:].

    Returns:
        argparse.Namespace with input_dir, output_dir, model_dir, city,
        step and version.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--input_dir', type=str, default='./processed/')
    parser.add_argument('-o', '--output_dir', type=str, default='./')
    parser.add_argument('-m', '--model_dir', type=str, default='./')
    parser.add_argument('-c', '--city', type=str, default='Berlin')
    parser.add_argument('--step', type=int, default=12)
    parser.add_argument('--version', type=str, default='v17')
    args = parser.parse_args(argv)
    return args
#IN_PATH = '/data/data20180901/processed/'
#OUT_PATH = './'
#CITY = 'Berlin'
#DEVICE = 'cuda:0'
#STEP = 3
#VERSION = '0'
# Parse CLI arguments once at import time and expose them as module-level
# constants used by the __main__ block below.
args = getArgs()
IN_PATH = args.input_dir
OUT_PATH = args.output_dir
MODEL_PATH = args.model_dir
CITY = args.city
#DEVICE = f'cuda:{args.device}'
STEP = args.step
VERSION = args.version
#IS_LEAK = args.no_leak
#LEAK_STEP = 18 if IS_LEAK else 0
#ACTIVATION = args.activation
# Per-city mapping from channel index to the model version whose saved
# predictions are read for that channel: channel 0 always uses 'v13', the
# others use the version passed on the command line.
# NOTE(review): the semantics of channels 0/1/2 are not visible here --
# confirm against the dataset documentation.
VERSION_MAP={
'Moscow':{0:'v13',1:VERSION,2:VERSION},
'Berlin':{0:'v13',1:VERSION,2:VERSION},
'Istanbul':{0:'v13',1:VERSION,2:VERSION},
}
if __name__ == '__main__':
    # Time-slot indices for which predictions must be produced for this city.
    index = getPredictIndex(CITY)
    print(index)
    folder = DatasetFolder(IN_PATH, CITY, 'test', index, STEP, 0,
                           is_transform=False, predict_length=1, skip=0)
    for DATE in folder.meta:
        # Gather the saved per-channel predictions for this date.
        d_arr = []
        for CHANNEL in [0, 1, 2]:
            arr = []
            for ids in index:
                arr.append(np.load(f'{OUT_PATH}/result/numpy/{VERSION_MAP[CITY][CHANNEL]}/{CITY}/{DATE}/{CHANNEL}/{ids}.npy')[None, :])
            # Stack the per-slot arrays along the first axis.
            arr = np.concatenate(arr)
            d_arr.append(arr)
        # Stack the channels along the last axis.
        d_arr = np.concatenate(d_arr, -1)
        # exist_ok replaces the old bare try/except around makedirs.
        os.makedirs(f'{OUT_PATH}/result/output/{VERSION}/{CITY}/{CITY}_test/', exist_ok=True)
        filename = f'{OUT_PATH}/result/output/{VERSION}/{CITY}/{CITY}_test/{DATE}_100m_bins.h5'
        write_data(d_arr, filename)
| StarcoderdataPython |
1692177 | # Generated by Django 2.2.6 on 2019-10-10 10:05
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Make PhysicalPerson.gender nullable with PROTECT on delete."""

    dependencies = [
        ('project_core', '0028_person_physical_person_person_position_small_changes'),
    ]

    operations = [
        migrations.AlterField(
            model_name='physicalperson',
            name='gender',
            field=models.ForeignKey(blank=True, help_text='Gender with which the person identifies', null=True, on_delete=django.db.models.deletion.PROTECT, to='project_core.Gender'),
        ),
    ]
| StarcoderdataPython |
4802487 | <filename>tests/bgp_commands_input/bgp_network_test_vector.py
bgp_v4_network = \
"""
BGP table version is 6405, local router ID is 10.1.0.32, vrf id 0
Default local pref 100, local AS 65100
Status codes: s suppressed, d damped, h history, * valid, > best, = multipath,
i internal, r RIB-failure, S Stale, R Removed
Nexthop codes: @NNN nexthop's vrf id, < announce-nh-self
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
*= 0.0.0.0/0 10.0.0.63 0 64600 65534 6666 6667 i
*= 10.0.0.61 0 64600 65534 6666 6667 i
*= 10.0.0.59 0 64600 65534 6666 6667 i
*> 10.0.0.57 0 64600 65534 6666 6667 i
*> 10.1.0.32/32 0.0.0.0 0 32768 i
*> 172.16.58.3/32 10.0.0.57 0 64600 i
*> 192.168.3.11/32 10.0.0.59 0 64600 i
*> 172.16.58.3/32 10.0.0.61 0 64600 i
*> 192.168.3.11/32 10.0.0.63 0 64600 i
*> 192.168.0.0/21 0.0.0.0 0 32768 i
*= 192.168.8.0/25 10.0.0.63 0 64600 65501 i
*= 10.0.0.61 0 64600 65501 i
*= 10.0.0.59 0 64600 65501 i
*> 10.0.0.57 0 64600 65501 i
*= 192.168.8.128/25 10.0.0.63 0 64600 65501 i
*= 10.0.0.61 0 64600 65501 i
*= 10.0.0.59 0 64600 65501 i
*> 10.0.0.57 0 64600 65501 i
*= 192.168.16.0/25 10.0.0.63 0 64600 65502 i
*= 10.0.0.61 0 64600 65502 i
*= 10.0.0.59 0 64600 65502 i
*> 10.0.0.57 0 64600 65502 i
*= 192.168.16.128/25
10.0.0.63 0 64600 65502 i
*= 10.0.0.61 0 64600 65502 i
*= 10.0.0.59 0 64600 65502 i
*> 10.0.0.57 0 64600 65502 i
*= 192.168.24.0/25 10.0.0.63 0 64600 65503 i
*= 10.0.0.61 0 64600 65503 i
*= 10.0.0.59 0 64600 65503 i
*> 10.0.0.57 0 64600 65503 i
*= 192.168.24.128/25
10.0.0.63 0 64600 65503 i
*= 10.0.0.61 0 64600 65503 i
*= 10.0.0.59 0 64600 65503 i
*> 10.0.0.57 0 64600 65503 i
*= 192.168.32.0/25 10.0.0.63 0 64600 65504 i
*= 10.0.0.61 0 64600 65504 i
*= 10.0.0.59 0 64600 65504 i
*> 10.0.0.57 0 64600 65504 i
"""
bgp_v4_network_ip_address = \
"""
BGP routing table entry for 192.168.127.12/25
Paths: (4 available, best #4, table default)
Advertised to non peer-group peers:
10.0.0.57 10.0.0.59 10.0.0.61 10.0.0.63
64600 65534 64799 65515
10.0.0.61 from 10.0.0.61 (172.16.58.3)
Origin IGP, valid, external, multipath
Community: 5060:12345
Last update: Tue Apr 20 05:54:41 2021
64600 65534 64799 65515
10.0.0.59 from 10.0.0.59 (192.168.3.11)
Origin IGP, valid, external, multipath
Community: 5060:12345
Last update: Tue Apr 20 05:54:19 2021
64600 65534 64799 65515
10.0.0.63 from 10.0.0.63 (192.168.3.11)
Origin IGP, valid, external, multipath
Community: 5060:12345
Last update: Tue Apr 20 05:54:16 2021
64600 65534 64799 65515
10.0.0.57 from 10.0.0.57 (172.16.58.3)
Origin IGP, valid, external, multipath, best (Router ID)
Community: 5060:12345
Last update: Tue Apr 20 05:54:16 2021
"""
bgp_v4_network_longer_prefixes_error = \
"""The parameter option: "longer-prefixes" only available if passing a network prefix
EX: 'show ip bgp network 10.0.0.0/24 longer-prefixes'
Aborted!
"""
bgp_v4_network_bestpath = \
"""
BGP routing table entry for 192.168.127.12/25
Paths: (4 available, best #4, table default)
Advertised to non peer-group peers:
10.0.0.57 10.0.0.59 10.0.0.61 10.0.0.63
64600 65534 64799 65515
10.0.0.57 from 10.0.0.57 (172.16.58.3)
Origin IGP, valid, external, multipath, best (Router ID)
Community: 5060:12345
Last update: Tue Apr 20 05:54:15 2021
"""
bgp_v6_network = \
"""
BGP table version is 6407, local router ID is 10.1.0.32, vrf id 0
Default local pref 100, local AS 65100
Status codes: s suppressed, d damped, h history, * valid, > best, = multipath,
i internal, r RIB-failure, S Stale, R Removed
Nexthop codes: @NNN nexthop's vrf id, < announce-nh-self
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
*= ::/0 fc00::7e 0 64600 65534 6666 6667 i
*= fc00::7a 0 64600 65534 6666 6667 i
*= fc00::76 0 64600 65534 6666 6667 i
*> fc00::72 0 64600 65534 6666 6667 i
*> 2064:100::1d/128 fc00::72 0 64600 i
*> 2064:100::1e/128 fcfc00:db20:35b:7399::5 0 64600 i
*> 206fc00:db20:35b:7399::5/128 fcfd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b 0 64600 i
*> 2064:100::20/128 fc00::7e 0 64600 i
*= 20c0:a808::/64 fcfd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b 0 64600 65501 i
*= fc00::7a 0 64600 65501 i
*= fc00::76 0 64600 65501 i
*> fc00::72 0 64600 65501 i
*= 20c0:a808:0:80::/64
fc00::7e 0 64600 65501 i
*= fc00::7a 0 64600 65501 i
*= fc00::76 0 64600 65501 i
*> fc00::72 0 64600 65501 i
*= 20c0:a810::/64 fcfd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b 0 64600 65502 i
*= fc00::7a 0 64600 65502 i
*= fc00::76 0 64600 65502 i
*> fc00::72 0 64600 65502 i
*= 20c0:a810:0:80::/64
fcfd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b 0 64600 65502 i
*= fc00::7a 0 64600 65502 i
*= fc00::76 0 64600 65502 i
*> fc00::72 0 64600 65502 i
*= 20c0:a818::/64 fc00::7e 0 64600 65503 i
*= fc00::7a 0 64600 65503 i
*= fc00::76 0 64600 65503 i
*> fc00::72 0 64600 65503 i
*= 20c0:a818:0:80::/64
fc00::7e 0 64600 65503 i
*= fc00::7a 0 64600 65503 i
*= fc00::76 0 64600 65503 i
*> fc00::72 0 64600 65503 i
*= 20c0:a820::/64 fc00::7e 0 64600 65504 i
*= fc00::7a 0 64600 65504 i
*= fc00::76 0 64600 65504 i
*> fc00::72 0 64600 65504 i
*= 20c0:a820:0:80::/64
fc00::7e 0 64600 65504 i
*= fc00::7a 0 64600 65504 i
*= fc00::76 0 64600 65504 i
*> fc00::72 0 64600 65504 i
"""
bgp_v6_network_ip_address = \
"""
BGP routing table entry for 20c0:a820:0:80::/64
Paths: (4 available, best #4, table default)
Advertised to non peer-group peers:
fc00::72 fcfc00:db20:35b:7399::5 fcfd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b fcfd00:c2b6:b24b:be67:2827:688d:e6a1:6a3b
64600 65504
fc00::7e from fc00::7e (192.168.3.11)
(fe80::1850:e9ff:fef9:27cb) (prefer-global)
Origin IGP, valid, external, multipath
Community: 5060:12345
Last update: Tue Apr 20 05:54:17 2021
64600 65504
fc00::7a from fc00::7a (172.16.58.3)
(fe80::1810:25ff:fe01:c153) (prefer-global)
Origin IGP, valid, external, multipath
Community: 5060:12345
Last update: Tue Apr 20 05:54:17 2021
64600 65504
fc00::76 from fc00::76 (192.168.3.11)
(fe80::80a7:74ff:fee1:d66d) (prefer-global)
Origin IGP, valid, external, multipath
Community: 5060:12345
Last update: Tue Apr 20 05:54:17 2021
64600 65504
fc00::72 from fc00::72 (172.16.58.3)
(fe80::90ec:bcff:fe4b:1e3e) (prefer-global)
Origin IGP, valid, external, multipath, best (Router ID)
Community: 5060:12345
Last update: Tue Apr 20 05:54:16 2021
"""
bgp_v6_network_longer_prefixes_error = \
"""The parameter option: "longer-prefixes" only available if passing a network prefix
EX: 'show ipv6 bgp network fc00:1::/64 longer-prefixes'
Aborted!
"""
bgp_v6_network_longer_prefixes = \
"""
BGP table version is 6407, local router ID is 10.1.0.32, vrf id 0
Default local pref 100, local AS 65100
Status codes: s suppressed, d damped, h history, * valid, > best, = multipath,
i internal, r RIB-failure, S Stale, R Removed
Nexthop codes: @NNN nexthop's vrf id, < announce-nh-self
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
*= 20c0:a820:0:80::/64
fc00::7e 0 64600 65504 i
*= fc00::7a 0 64600 65504 i
*= fc00::76 0 64600 65504 i
*> fc00::72 0 64600 65504 i
Displayed 1 routes and 25602 total paths
"""
bgp_v6_network_bestpath = \
"""
BGP routing table entry for 20c0:a820:0:80::/64
Paths: (4 available, best #4, table default)
Advertised to non peer-group peers:
fc00::72 fcfc00:db20:35b:7399::5 fcfd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b fc00::7e
64600 65504
fc00::72 from fc00::72 (172.16.58.3)
(fe80::90ec:bcff:fe4b:1e3e) (prefer-global)
Origin IGP, valid, external, multipath, best (Router ID)
Community: 5060:12345
Last update: Tue Apr 20 05:54:15 2021
"""
multi_asic_bgp_network_err = \
"""Error: -n/--namespace option required. provide namespace from list ['asic0', 'asic1']"""
bgp_v4_network_asic0 = \
"""
BGP table version is 11256, local router ID is 10.1.0.32, vrf id 0
Default local pref 100, local AS 65100
Status codes: s suppressed, d damped, h history, * valid, > best, = multipath,
i internal, r RIB-failure, S Stale, R Removed
Nexthop codes: @NNN nexthop's vrf id, < announce-nh-self
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
* i0.0.0.0/0 10.1.0.2 100 0 65200 6666 6667 i
* i 10.1.0.0 100 0 65200 6666 6667 i
*= 10.0.0.5 0 65200 6666 6667 i
*> 10.0.0.1 0 65200 6666 6667 i
* i8.0.0.0/32 10.1.0.2 0 100 0 i
* i 10.1.0.0 0 100 0 i
* 0.0.0.0 0 32768 ?
*> 0.0.0.0 0 32768 i
*=i8.0.0.1/32 10.1.0.2 0 100 0 i
*>i 10.1.0.0 0 100 0 i
*=i8.0.0.2/32 10.1.0.2 0 100 0 i
*>i 10.1.0.0 0 100 0 i
*=i8.0.0.3/32 10.1.0.2 0 100 0 i
*>i 10.1.0.0 0 100 0 i
*>i8.0.0.4/32 10.1.0.0 0 100 0 i
*>i8.0.0.5/32 10.1.0.2 0 100 0 i
* i10.0.0.0/31 10.1.0.2 0 100 0 ?
* i 10.1.0.0 0 100 0 ?
*> 0.0.0.0 0 32768 ?
* i10.0.0.4/31 10.1.0.2 0 100 0 ?
* i 10.1.0.0 0 100 0 ?
*> 0.0.0.0 0 32768 ?
*=i10.0.0.8/31 10.1.0.2 0 100 0 ?
*>i 10.1.0.0 0 100 0 ?
*=i10.0.0.12/31 10.1.0.2 0 100 0 ?
*>i 10.1.0.0 0 100 0 ?
*=i10.0.0.32/31 10.1.0.2 0 100 0 ?
*>i 10.1.0.0 0 100 0 ?
*=i10.0.0.34/31 10.1.0.2 0 100 0 ?
*>i 10.1.0.0 0 100 0 ?
*=i10.0.0.36/31 10.1.0.2 0 100 0 ?
*>i 10.1.0.0 0 100 0 ?
*=i10.0.0.38/31 10.1.0.2 0 100 0 ?
*>i 10.1.0.0 0 100 0 ?
*=i10.0.0.40/31 10.1.0.2 0 100 0 ?
*>i 10.1.0.0 0 100 0 ?
*=i10.0.0.42/31 10.1.0.2 0 100 0 ?
*>i 10.1.0.0 0 100 0 ?
*=i10.0.0.44/31 10.1.0.2 0 100 0 ?
*>i 10.1.0.0 0 100 0 ?
"""
bgp_v4_network_ip_address_asic0 = \
"""
BGP routing table entry for 10.0.0.44/31
Paths: (2 available, best #2, table default, not advertised outside local AS)
Not advertised to any peer
Local
10.1.0.2 from 10.1.0.2 (8.0.0.5)
Origin incomplete, metric 0, localpref 100, valid, internal, multipath
Community: local-AS
Originator: 192.168.127.12, Cluster list: 192.168.127.12
Last update: Thu Apr 22 02:13:31 2021
Local
10.1.0.0 from 10.1.0.0 (8.0.0.4)
Origin incomplete, metric 0, localpref 100, valid, internal, multipath, best (Router ID)
Community: local-AS
Originator: 8.0.0.4, Cluster list: 8.0.0.4
Last update: Thu Apr 22 02:13:31 2021
"""
bgp_v4_network_bestpath_asic0 = \
"""
BGP routing table entry for 10.0.0.44/31
Paths: (2 available, best #2, table default, not advertised outside local AS)
Not advertised to any peer
Local
10.1.0.0 from 10.1.0.0 (8.0.0.4)
Origin incomplete, metric 0, localpref 100, valid, internal, multipath, best (Router ID)
Community: local-AS
Originator: 8.0.0.4, Cluster list: 8.0.0.4
Last update: Thu Apr 22 02:13:30 2021
"""
bgp_v6_network_asic0 = \
"""
BGP table version is 12849, local router ID is 10.1.0.32, vrf id 0
Default local pref 100, local AS 65100
Status codes: s suppressed, d damped, h history, * valid, > best, = multipath,
i internal, r RIB-failure, S Stale, R Removed
Nexthop codes: @NNN nexthop's vrf id, < announce-nh-self
Origin codes: i - IGP, e - EGP, ? - incomplete
Network Next Hop Metric LocPrf Weight Path
* i::/0 fdf8:f53e:61e4::18
100 0 65200 6666 6667 i
* i fc00:db20:35b:7399::5
100 0 65200 6666 6667 i
*= fc00::6 0 65200 6666 6667 i
*> fc00::2 0 65200 6666 6667 i
* i2064:100::1/128 2603:fc00:e968:6179::de52:7100
100 0 65200 i
* i fc00:db20:35b:7399::5
100 0 65200 i
*> fc00::2 0 65200 i
* i2064:100::3/128 fdf8:f53e:61e4::18
100 0 65200 i
* i fc00:db20:35b:7399::5
100 0 65200 i
*> fc00::6 0 65200 i
*=i2064:100::5/128 fc00:db20:35b:7399::5
100 0 65200 i
*>i fdf8:f53e:61e4::18
100 0 65200 i
*>i2064:100::7/128 fdf8:f53e:61e4::18
100 0 65200 i
*=i fc00:db20:35b:7399::5
100 0 65200 i
*>i20c0:a800::/64 fdf8:f53e:61e4::18
100 0 64004 i
*=i fc00:db20:35b:7399::5
100 0 64004 i
*>i20c0:a800:0:80::/64
fdf8:f53e:61e4::18
100 0 64004 i
*=i fc00:db20:35b:7399::5
100 0 64004 i
*>i20c0:a808::/64 fdf8:f53e:61e4::18
100 0 64004 i
*=i fc00:db20:35b:7399::5
100 0 64004 i
"""
bgp_v6_network_ip_address_asic0 = \
"""
BGP routing table entry for 20c0:a808:0:80::/64
Paths: (2 available, best #1, table default)
Advertised to non peer-group peers:
fc00::2 fc00::6
64004
fdf8:f53e:61e4::18 from fdf8:f53e:61e4::18 (8.0.0.4)
Origin IGP, localpref 100, valid, internal, multipath, best (Router ID)
Community: 8075:8823
Originator: 8.0.0.4, Cluster list: 8.0.0.4
Last update: Thu Apr 22 02:13:31 2021
64004
fc00:db20:35b:7399::5 from fc00:db20:35b:7399::5 (8.0.0.5)
Origin IGP, localpref 100, valid, internal, multipath
Community: 8075:8823
Originator: 8.0.0.5, Cluster list: 8.0.0.5
Last update: Thu Apr 22 02:13:31 2021
"""
bgp_v6_network_ip_address_asic0_bestpath = \
"""
BGP routing table entry for 20c0:a808:0:80::/64
Paths: (2 available, best #1, table default)
Advertised to non peer-group peers:
fc00::2 fc00::6
64004
fdf8:f53e:61e4::18 from fdf8:f53e:61e4::18 (8.0.0.4)
Origin IGP, localpref 100, valid, internal, multipath, best (Router ID)
Community: 8075:8823
Originator: 8.0.0.4, Cluster list: 8.0.0.4
Last update: Thu Apr 22 02:13:30 2021
"""
def mock_show_bgp_network_single_asic(request):
    """Return the canned single-asic 'show bgp network' output for a test.

    `request.param` names the scenario (a pytest fixture request object);
    unknown params yield an empty string, as in the original if/elif ladder.
    """
    canned = {
        'bgp_v4_network': bgp_v4_network,
        'bgp_v4_network_ip_address': bgp_v4_network_ip_address,
        'bgp_v4_network_bestpath': bgp_v4_network_bestpath,
        'bgp_v6_network': bgp_v6_network,
        'bgp_v6_network_ip_address': bgp_v6_network_ip_address,
        'bgp_v6_network_longer_prefixes': bgp_v6_network_longer_prefixes,
        'bgp_v6_network_bestpath': bgp_v6_network_bestpath,
    }
    return canned.get(request.param, "")
def mock_show_bgp_network_multi_asic(param):
    """Return the canned multi-asic 'show bgp network' output for `param`.

    Unknown params yield an empty string, as in the original if/elif ladder.
    """
    canned = {
        'bgp_v4_network_asic0': bgp_v4_network_asic0,
        'bgp_v4_network_ip_address_asic0': bgp_v4_network_ip_address_asic0,
        'bgp_v4_network_bestpath_asic0': bgp_v4_network_bestpath_asic0,
        # NOTE(review): the original returned the v4 vector for the v6
        # 'network' case, and testData expects exactly that -- preserved.
        'bgp_v6_network_asic0': bgp_v4_network_asic0,
        'bgp_v6_network_ip_address_asic0': bgp_v6_network_ip_address_asic0,
        'bgp_v6_network_bestpath_asic0': bgp_v6_network_ip_address_asic0_bestpath,
    }
    return canned.get(param, '')
testData = {
'bgp_v4_network': {
'args': [],
'rc': 0,
'rc_output': bgp_v4_network
},
'bgp_v4_network_ip_address': {
'args': [' 192.168.127.12/25'],
'rc': 0,
'rc_output': bgp_v4_network_ip_address
},
'bgp_v4_network_bestpath': {
'args': [' 192.168.127.12/25', 'bestpath'],
'rc': 0,
'rc_output': bgp_v4_network_bestpath
},
'bgp_v4_network_longer_prefixes_error': {
'args': [' 192.168.127.12', 'longer-prefixes'],
'rc': 1,
'rc_output': bgp_v4_network_longer_prefixes_error
},
'bgp_v6_network': {
'args': [],
'rc': 0,
'rc_output': bgp_v6_network
},
'bgp_v6_network_ip_address': {
'args': [' 20c0:a820:0:80::/64'],
'rc': 0,
'rc_output': bgp_v6_network_ip_address
},
'bgp_v6_network_bestpath': {
'args': [' 20c0:a820:0:80::/64', 'bestpath'],
'rc': 0,
'rc_output': bgp_v6_network_bestpath
},
'bgp_v6_network_longer_prefixes_error': {
'args': [' 20c0:a820:0:80::', 'longer-prefixes'],
'rc': 1,
'rc_output': bgp_v6_network_longer_prefixes_error
},
'bgp_v6_network_longer_prefixes': {
'args': [' 20c0:a820:0:80::/64', 'longer-prefixes'],
'rc': 0,
'rc_output': bgp_v6_network_longer_prefixes
},
'bgp_v4_network_multi_asic': {
'args': [],
'rc': 2,
'rc_err_msg': multi_asic_bgp_network_err
},
'bgp_v4_network_asic0': {
'args': ['-nasic0'],
'rc': 0,
'rc_output': bgp_v4_network_asic0
},
'bgp_v4_network_ip_address_asic0': {
'args': ['-nasic0', '10.0.0.44'],
'rc': 0,
'rc_output': bgp_v4_network_ip_address_asic0
},
'bgp_v4_network_bestpath_asic0': {
'args': ['-nasic0', '10.0.0.44', 'bestpath'],
'rc': 0,
'rc_output': bgp_v4_network_bestpath_asic0
},
'bgp_v6_network_multi_asic': {
'args': [],
'rc': 2,
'rc_err_msg': multi_asic_bgp_network_err
},
'bgp_v6_network_asic0': {
'args': ['-nasic0'],
'rc': 0,
'rc_output': bgp_v4_network_asic0
},
'bgp_v6_network_ip_address_asic0': {
'args': ['-nasic0', '20c0:a808:0:80::/64'],
'rc': 0,
'rc_output': bgp_v6_network_ip_address_asic0
},
'bgp_v6_network_bestpath_asic0': {
'args': ['-nasic0', '20c0:a808:0:80::/64', 'bestpath'],
'rc': 0,
'rc_output': bgp_v6_network_ip_address_asic0_bestpath
}
} | StarcoderdataPython |
1651129 | import networkx as nx
def remove(network):
    """Remove, in place, every node with no path to the 'newcomer' node."""
    nodes_isolated = []
    # Collect first, then remove: mutating the graph while iterating is unsafe.
    # network.nodes() works on networkx 1.x and 2.x (nodes_iter was removed in 2.0).
    for node in network.nodes():
        try:
            nx.dijkstra_path_length(network, node, 'newcomer')
        except Exception:
            # Deliberately broad (but no longer a bare except): covers
            # NetworkXNoPath plus version-dependent errors when 'newcomer'
            # is missing from the graph.
            nodes_isolated.append(node)
    network.remove_nodes_from(nodes_isolated)
# | StarcoderdataPython |
1608819 | <reponame>parasj/contracode<filename>representjs/pretrain_horovod.py
import os
import random
import time
import fire
import numpy as np
import sentencepiece as spm
import torch
import torch.nn.functional as F
import tqdm
import wandb
from loguru import logger
import torch.distributed as dist
import torch.multiprocessing as mp
from torch.nn.utils.rnn import pad_sequence
import horovod.torch as hvd
from models.code_mlm import CodeMLM, CodeContrastiveMLM
from representjs import RUN_DIR, CSNJS_DIR
from data.precomputed_dataset import PrecomputedDataset
from models.code_moco import CodeMoCo
from utils import accuracy, count_parameters, get_linear_schedule_with_warmup
DEFAULT_CSNJS_TRAIN_FILEPATH = str(CSNJS_DIR / "javascript_dedupe_definitions_nonoverlap_v2_train.jsonl.gz")
DEFAULT_SPM_UNIGRAM_FILEPATH = str(CSNJS_DIR / "csnjs_8k_9995p_unigram_url.model")
def training_step(model, batch, use_cuda=False):
    """One MoCo contrastive pretraining step.

    Args:
        model: CodeMoCo-style model taking (query, key, key_lens, query_lens).
        batch: (imgs, lengths, _) where imgs is [B, 2, T] -- two augmented
            views of each program -- and lengths is [B, 2].

    Returns:
        dict with the loss tensor under "loss" and scalar logs under "log".
    """
    imgs, lengths, _ = batch
    if use_cuda:
        imgs = imgs.cuda(non_blocking=True)
    # Split the two augmented views: key and query.
    imgs_k, imgs_q = imgs[:, 0, :], imgs[:, 1, :]
    lengths_k, lengths_q = lengths[:, 0], lengths[:, 1]
    output, target = model(imgs_q, imgs_k, lengths_k, lengths_q)
    loss = F.cross_entropy(output, target)
    acc1, acc5 = accuracy(output, target, topk=(1, 5))
    logs = {
        "pretrain/loss": loss.item(),
        "pretrain/acc@1": acc1[0].item(),
        "pretrain/acc@5": acc5[0].item(),
        "pretrain/queue_ptr": model.queue_ptr.item(),
    }
    return {"loss": loss, "log": logs}
def mask_mlm(seq, pad_id, mask_id, vocab_start_range, vocab_end_range):
    """BERT-style MLM corruption (mutates `seq` in place).

    15% of the non-pad token positions are chosen for prediction. Of those:
    80% are replaced with `mask_id`, 10% with a random token drawn from
    [vocab_start_range, vocab_end_range), and 10% are left unchanged.

    Returns:
        (seq, targets): the corrupted sequence (same tensor object) and a
        same-shape tensor holding the original token at each chosen position
        and `pad_id` everywhere else.

    https://github.com/codertimo/BERT-pytorch/blob/master/bert_pytorch/dataset/dataset.py#L63
    """
    rand_replacements = torch.zeros_like(seq, dtype=torch.long).random_(vocab_start_range, vocab_end_range)
    # Choose ~15% of positions, never selecting padding.
    masked_tokens = (torch.rand_like(seq, dtype=torch.float) < 0.15) & (seq != pad_id)
    # A second random draw splits the chosen positions 80/10/10.
    mask_type_prob = torch.rand_like(seq, dtype=torch.float)
    mask_token_prob = (mask_type_prob < 0.8) & masked_tokens
    random_token_prob = (mask_type_prob < 0.9) & (mask_type_prob >= 0.8) & masked_tokens
    identity_token_prob = (mask_type_prob >= 0.9) & masked_tokens
    # The three cases must exactly partition the chosen positions.
    assert torch.sum(masked_tokens) == torch.sum(mask_token_prob | random_token_prob | identity_token_prob)
    targets = torch.zeros_like(seq).fill_(pad_id)
    targets[masked_tokens] = seq[masked_tokens]
    seq[mask_token_prob] = mask_id
    seq[random_token_prob] = rand_replacements[random_token_prob]
    return seq, targets
def training_step_mlm(sp, model, batch, mask_id: int, pad_id: int, vocab_start_idx: int, vocab_end_idx: int, use_cuda=True):
    """One masked-language-model pretraining step.

    Args:
        sp: sentencepiece processor (kept only for the debug line below).
        model: token-level LM returning [B, L, vocab] logits.
        batch: (seq, lengths, _) where seq is [B, L] token ids.

    Returns:
        dict with the loss tensor under "loss" and scalar logs under "log".
    """
    seq, lengths, _ = batch  # B x L
    if use_cuda:
        seq = seq.cuda()
    B, L = seq.shape
    seq_masked, targets = mask_mlm(seq, pad_id, mask_id, vocab_start_idx, vocab_end_idx)
    # logger.debug(f"Example transform:\t{sp.DecodeIds(seq_masked[0].cpu().numpy().tolist())}")
    output = model(seq_masked, lengths)  # B x L x Vocab
    assert targets.shape == (B, L), f"{targets.shape} versus {B}x{L}"
    assert output.shape == (B, L, output.shape[-1]), output.shape
    # Pad positions (targets == pad_id) are excluded from loss and accuracy.
    loss = F.cross_entropy(output.flatten(end_dim=1), targets.flatten(), ignore_index=pad_id)
    acc1, acc5 = accuracy(output[targets != pad_id], targets[targets != pad_id], topk=(1, 5))
    return {
        "loss": loss,
        "log": {"pretrain/loss": loss.item(), "pretrain/acc@1": acc1[0].item(), "pretrain/acc@5": acc5[0].item()},
    }
def training_step_hybrid(sp, model, batch, mask_id, pad_id, vocab_start_idx, vocab_end_idx, use_cuda):
    """Run one hybrid (MoCo contrastive + MLM) pretraining step.

    The batch holds paired program views [B, 2, T]: view 0 is the key, view 1
    is the query. The query is additionally corrupted for the MLM objective.

    Returns:
        dict with the combined "loss" tensor (for backward) and a "log" dict
        of scalar metrics.
    """
    imgs, _lengths, _ = batch
    # TODO: implement LSTM for hybrid model and pass lengths to model call
    imgs_k, imgs_q = imgs[:, 0, :], imgs[:, 1, :]
    # Corrupt the query sequence; mlm_targets is pad_id everywhere except the
    # positions chosen for prediction.
    imgs_q, mlm_targets = mask_mlm(imgs_q, pad_id, mask_id, vocab_start_idx, vocab_end_idx)
    if use_cuda:
        imgs_k = imgs_k.cuda(non_blocking=True)
        imgs_q = imgs_q.cuda(non_blocking=True)
        mlm_targets = mlm_targets.cuda(non_blocking=True)
    predicted_masked_tokens, moco_logits, moco_targets = model(imgs_k, imgs_q)
    moco_loss = F.cross_entropy(moco_logits, moco_targets)
    moco_acc1, moco_acc5 = accuracy(moco_logits, moco_targets, topk=(1, 5))
    mlm_loss = F.cross_entropy(predicted_masked_tokens.flatten(end_dim=1), mlm_targets.flatten(), ignore_index=pad_id)
    mlm_acc1, mlm_acc5 = accuracy(predicted_masked_tokens[mlm_targets != pad_id], mlm_targets[mlm_targets != pad_id], topk=(1, 5))
    # Weight the contrastive term 4x relative to the MLM term.
    loss = 4 * moco_loss + mlm_loss
    logs = {
        "pretrain/moco/loss": moco_loss.item(),
        "pretrain/moco/acc@1": moco_acc1[0].item(),
        "pretrain/moco/acc@5": moco_acc5[0].item(),
        "pretrain/moco/queue_ptr": model.queue_ptr.item(),
        "pretrain/mlm/loss": mlm_loss.item(),
        "pretrain/mlm/acc@1": mlm_acc1[0].item(),
        "pretrain/mlm/acc@5": mlm_acc5[0].item(),
        # BUG FIX: log a Python float, not the live tensor. Every other log
        # entry uses .item(); logging the tensor also retains the autograd
        # graph for as long as the log dict is alive.
        "pretrain/hybrid_loss": loss.item(),
    }
    return {"loss": loss, "log": logs}
def pad_collate_contrastive(batch):
    """Collate B pairs of variable-length token tensors into [B, 2, T].

    Args:
        batch: list of B (view1, view2) tuples of 1-D LongTensors.

    Returns:
        (X, lengths, None) where X is [B, 2, T] padded with 0 and
        lengths is [B, 2] holding the original sequence lengths.
    """
    num_pairs = len(batch)
    firsts, seconds = zip(*batch)
    # Flatten to [2B] sequences: all first views, then all second views.
    flat = firsts + seconds
    seq_lens = torch.tensor([len(s) for s in flat], dtype=torch.long)
    # Right-pad every sequence to the batch max length: [2B, T]
    padded = pad_sequence(flat, batch_first=True, padding_value=0)
    max_len = padded.size(-1)
    # [2B, T] -> [2, B, T] -> [B, 2, T] so each row pairs the two views.
    padded = padded.reshape(2, num_pairs, -1).transpose(0, 1)
    assert padded.shape == (num_pairs, 2, max_len)
    seq_lens = seq_lens.reshape(2, num_pairs).transpose(0, 1)
    assert seq_lens.shape == (num_pairs, 2)
    return padded, seq_lens, None
def pad_collate(batch):
    """Collate a list of variable-length 1-D token tensors into a padded batch.

    Args:
        batch: list of B 1-D LongTensors of token ids.

    Returns:
        (X, lengths, None) where X is a [B, T] tensor padded with 0 (the
        [PAD] id, asserted elsewhere in this file) and lengths is a [B]
        LongTensor of the original sequence lengths. The trailing None keeps
        the return arity consistent with pad_collate_contrastive.
    """
    # Removed unused local `B = len(batch)` and the pointless `X = batch`
    # alias from the original.
    lengths = torch.tensor([len(x) for x in batch], dtype=torch.long)
    padded = pad_sequence(batch, batch_first=True, padding_value=0)
    return padded, lengths, None
def pretrain(
    run_name: str,
    #
    # Data
    train_filepath: str = DEFAULT_CSNJS_TRAIN_FILEPATH,
    spm_filepath: str = DEFAULT_SPM_UNIGRAM_FILEPATH,
    num_workers=1,
    limit_dataset_size=-1,
    max_length=1024,
    subword_regularization_alpha: float = 0,
    program_mode="contrastive",
    loss_mode="infonce",  # infonce, mlm, or hybrid
    min_alternatives=1,
    #
    # Model
    resume_path: str = "",
    encoder_type: str = "transformer",
    lstm_project_mode: str = "hidden",
    n_encoder_layers: int = 6,
    d_model: int = 512,
    n_head: int = 8,
    #
    # Optimization
    num_epochs: int = 100,
    save_every: int = 1,
    batch_size: int = 256,
    lr: float = 8e-4,
    weight_decay: float = 0,
    adam_betas=(0.9, 0.98),
    warmup_steps: int = 5000,
    num_steps: int = 600000,
    #
    # Horovod
    use_adasum: bool = False,
    fp16_allreduce: bool = False,
    gradient_predivide_factor: float = 1.0,
    #
    # Computational
    use_cuda: bool = True,
    seed: int = 0,
):
    """Horovod-distributed pretraining entry point (MoCo / MLM / hybrid).

    Builds the model selected by `loss_mode`, wraps the optimizer with
    Horovod's DistributedOptimizer, optionally resumes from `resume_path`,
    and trains for `num_epochs` epochs, checkpointing and logging to wandb
    from the chief worker. Invoked via `fire.Fire` at module bottom, so every
    keyword argument doubles as a CLI flag.
    """
    hvd.init()
    # NOTE(review): loguru's logger.info has no print-style varargs; with no
    # {} placeholders in the message these extra args are dropped — confirm
    # whether f-strings were intended here.
    logger.info("L:", n_encoder_layers, type(n_encoder_layers))
    logger.info("H:", d_model, type(d_model))
    logger.info("A:", n_head, type(n_head))
    run_name = str(run_name)  # support numerical run ids
    slurm_job_id = os.environ.get("SLURM_JOB_ID")
    slurm_job_hostname = os.environ.get("SLURM_JOB_NODELIST")
    # Snapshot every parameter (plus the slurm vars above) into a dict; this
    # is why nothing else may be assigned before this line.
    config = locals()
    logger.info(f"Config = \n{config}")
    logger.info("Training configuration: {}".format(config))
    logger.info(f"CUDA_VISIBLE_DEVICES = '{os.environ.get('CUDA_VISIBLE_DEVICES')}'")
    logger.info(f"CUDA_DEVICE_ORDER = '{os.environ.get('CUDA_DEVICE_ORDER')}'")
    # Validate the (program_mode, loss_mode) combination: contrastive data is
    # required for infonce/hybrid, and MLM cannot consume paired programs.
    assert program_mode in ["contrastive", "identity", "augmentation"]
    assert loss_mode == "infonce" or loss_mode == "mlm" or loss_mode == "hybrid"
    assert not (program_mode == "contrastive" and loss_mode == "mlm")
    assert not (program_mode != "contrastive" and (loss_mode == "hybrid" or loss_mode == "infonce"))
    assert not use_cuda or torch.cuda.is_available()
    # Seed all RNGs for reproducibility.
    torch.manual_seed(seed)
    np.random.seed(seed)
    random.seed(seed)
    run_dir = RUN_DIR / "{}_{}".format(run_name, int(time.time()))
    run_dir.mkdir(exist_ok=True, parents=True)
    config["run_dir"] = str(run_dir.resolve())
    logger.add(str((run_dir / "train.log").resolve()))
    logger.info(f"Saving logs, model checkpoints to {run_dir}")
    # Create training dataset and dataloader
    assert train_filepath.endswith(".pickle") or train_filepath.endswith(".gz")
    # Setup distributed
    gpu = hvd.local_rank()
    ngpus_per_node = 1
    # NOTE(review): chief is chosen by local rank, so every node's rank-0
    # process is "chief" — confirm hvd.rank() == 0 wasn't intended.
    chief_node = gpu == 0
    assert gpu is not None
    if chief_node:
        if config["loss_mode"] == "mlm":
            project = "bert-pretrain"
        elif config["loss_mode"] == "infonce":
            project = "moco-pretrain"
        elif config["loss_mode"] == "hybrid":
            project = "hybrid"
        wandb.init(name=config["run_name"], config=config, job_type="training", project=project, entity="ml4code")
    logger.info("Use GPU: {} for training".format(gpu))
    torch.cuda.set_device(gpu)
    # Horovod: limit # of CPU threads to be used per worker.
    torch.set_num_threads(1)
    # NOTE(review): the `num_workers` parameter is not forwarded here; the
    # loader always uses 1 worker — confirm this is intended.
    kwargs = {"num_workers": 1, "pin_memory": True}
    # When supported, use 'forkserver' to spawn dataloader workers instead of 'fork' to prevent
    # issues with Infiniband implementations that are not fork-safe
    if (
        kwargs.get("num_workers", 0) > 0
        and hasattr(mp, "_supports_context")
        and mp._supports_context
        and "forkserver" in mp.get_all_start_methods()
    ):
        kwargs["multiprocessing_context"] = "forkserver"
    # Tokenizer: SentencePiece model with [PAD]/[MASK] control pieces.
    sp = spm.SentencePieceProcessor()
    sp.Load(config["spm_filepath"])
    pad_id = sp.PieceToId("[PAD]")
    logger.info("pad_id {}", pad_id)
    assert pad_id == 0  # hard coded in pad_collate
    mask_id = sp.PieceToId("[MASK]")
    # Create model
    if config["loss_mode"] == "infonce":
        # TODO(ajay): Support n_head argument, check how d_model is being used (why not in encoder config dict?)
        model = CodeMoCo(
            sp.GetPieceSize(),
            pad_id=pad_id,
            d_model=config["d_model"],
            encoder_config=dict(
                encoder_type=config["encoder_type"],
                lstm_project_mode=config["lstm_project_mode"],
                n_encoder_layers=config["n_encoder_layers"],
            ),
        )
        logger.info(f"Created CodeMoCo model with {count_parameters(model)} params")
    elif config["loss_mode"] == "mlm":
        model = CodeMLM(
            sp.GetPieceSize(),
            pad_id=pad_id,
            encoder_type=config["encoder_type"],
            n_encoder_layers=config["n_encoder_layers"],
            d_model=config["d_model"],
            n_head=config["n_head"],
            d_ff=4 * config["d_model"],
        )
        logger.info(f"Created CodeMLM model with {count_parameters(model)} params")
    elif config["loss_mode"] == "hybrid":
        model = CodeContrastiveMLM(
            sp.GetPieceSize(),
            pad_id=pad_id,
            n_encoder_layers=config["n_encoder_layers"],
            d_model=config["d_model"],
            n_head=config["n_head"],
            d_ff=4 * config["d_model"],
            use_horovod=True,
        )
        logger.info(f"Created CodeContrastiveMLM model with {count_parameters(model)} params")
    else:
        raise ValueError(f"Bad loss mode {config['loss_mode']}")
    assert config["use_cuda"]
    model.cuda()
    # When using a single GPU per process and per
    # DistributedDataParallel, we need to divide the batch size
    # ourselves based on the total number of GPUs we have
    # config["batch_size"] = int(config["batch_size"] / ngpus_per_node)
    # config["num_workers"] = int((config["num_workers"] + ngpus_per_node - 1) / ngpus_per_node)
    # model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[gpu])
    # define optimizer
    # By default, Adasum doesn't need scaling up learning rate.
    lr_scaler = hvd.size() if not config["use_adasum"] else 1
    # If using GPU Adasum allreduce, scale learning rate by local_size.
    if config["use_adasum"] and hvd.nccl_built():
        lr_scaler = hvd.local_size()
    # Horovod: scale learning rate by lr_scaler.
    optimizer = torch.optim.Adam(
        model.parameters(), lr=config["lr"] * lr_scaler, betas=config["adam_betas"], eps=1e-6, weight_decay=config["weight_decay"]
    )
    sched = get_linear_schedule_with_warmup(optimizer, config["warmup_steps"], config["num_steps"])
    # Horovod: broadcast parameters & optimizer state.
    hvd.broadcast_parameters(model.state_dict(), root_rank=0)
    hvd.broadcast_optimizer_state(optimizer, root_rank=0)
    # Horovod: (optional) compression algorithm.
    compression = hvd.Compression.fp16 if config["fp16_allreduce"] else hvd.Compression.none
    # Horovod: wrap optimizer with DistributedOptimizer.
    optimizer = hvd.DistributedOptimizer(
        optimizer,
        named_parameters=model.named_parameters(),
        compression=compression,
        op=hvd.Adasum if config["use_adasum"] else hvd.Average,
        gradient_predivide_factor=config["gradient_predivide_factor"],
    )
    # Load checkpoint
    if config["resume_path"]:
        logger.info(f"Loading parameters from {config['resume_path']}")
        # configure map_location properly
        map_location = {"cuda:%d" % 0: "cuda:%d" % hvd.rank()}
        checkpoint = torch.load(config["resume_path"], map_location=map_location)
        model.load_state_dict(checkpoint["model_state_dict"])
        optimizer.load_state_dict(checkpoint["optimizer_state_dict"])
        start_epoch = checkpoint["epoch"] + 1
        start_global_step = checkpoint["global_step"]
    else:
        start_epoch = 1
        start_global_step = 0
    # Setup data
    train_dataset = PrecomputedDataset(
        config["train_filepath"],
        min_alternatives=config["min_alternatives"],
        program_mode=config["program_mode"],
        limit_size=config["limit_dataset_size"],
        sp=sp,
        subword_regularization_alpha=config["subword_regularization_alpha"],
        max_length=config["max_length"],
    )
    # Each Horovod worker sees a disjoint shard of the dataset.
    train_sampler = torch.utils.data.distributed.DistributedSampler(train_dataset, num_replicas=hvd.size(), rank=hvd.rank())
    train_loader = torch.utils.data.DataLoader(
        train_dataset,
        batch_size=config["batch_size"],
        shuffle=False,
        collate_fn=pad_collate_contrastive if config["program_mode"] == "contrastive" else pad_collate,
        drop_last=True,
        sampler=train_sampler,
        **kwargs,
    )
    # Train
    # Fast-forward the LR schedule to the resumed step (optimizer state was
    # restored from the checkpoint above; only the scheduler needs replay).
    global_step = 0
    while global_step < start_global_step:
        sched.step()
        global_step += 1
    for epoch in tqdm.trange(start_epoch, config["num_epochs"] + 1, desc="training", unit="epoch", leave=False):
        logger.info(f"Starting epoch {epoch}\n")
        train_sampler.set_epoch(epoch)
        model.train()
        pbar = tqdm.tqdm(train_loader, desc=f"epoch {epoch}")
        for batch in pbar:
            optimizer.zero_grad()
            if config["loss_mode"] == "infonce":
                train_metrics = training_step(model, batch, use_cuda=config["use_cuda"])
            elif config["loss_mode"] == "mlm":
                # replace tokens randomly with tokens from _ (8)
                train_metrics = training_step_mlm(
                    sp, model, batch, pad_id=pad_id, mask_id=mask_id, vocab_start_idx=8, vocab_end_idx=7999, use_cuda=config["use_cuda"]
                )
            elif config["loss_mode"] == "hybrid":
                train_metrics = training_step_hybrid(
                    sp, model, batch, mask_id=mask_id, pad_id=pad_id, vocab_start_idx=0, vocab_end_idx=7999, use_cuda=config["use_cuda"]
                )
            else:
                raise ValueError("Bad loss type")
            loss = train_metrics["loss"]
            loss.backward()
            optimizer.step()
            sched.step()
            global_step += 1
            pbar.set_description(f"epoch {epoch} gpu {gpu} step {global_step} loss {loss.item():.4f}")
            if chief_node:
                wandb.log(dict(lr=sched.get_last_lr()[0]))
                wandb.log(dict(epoch=epoch, **train_metrics["log"]), step=global_step)
                # Save checkpoint
                if config["save_every"] and global_step % config["save_every"] == 0:
                    checkpoint = {
                        "model_state_dict": model.state_dict(),
                        "optimizer_state_dict": optimizer.state_dict(),
                        "epoch": epoch,
                        "global_step": global_step,
                        "config": config,
                    }
                    model_file = os.path.join(config["run_dir"], f"ckpt_pretrain_ep{epoch:04d}_step{global_step:07d}.pth")
                    logger.info(f"Saving checkpoint to {model_file}...")
                    torch.save(checkpoint, model_file)
                    wandb.save(str(model_file))
                    logger.info("Done.")
if __name__ == "__main__":
    # Expose `pretrain` as a CLI: every keyword argument becomes a flag.
    fire.Fire(pretrain)
| StarcoderdataPython |
59587 | import pytest
from os.path import join
from EPPs.common import StepEPP
from tests.test_common import TestCommon, TestEPP, NamedMock
from unittest.mock import Mock, patch, PropertyMock
from scripts.convert_and_dispatch_genotypes import GenotypeConversion, UploadVcfToSamples
class TestGenotypeConversion(TestCommon):
    """Tests for GenotypeConversion parsing, ordering and VCF generation."""

    small_reference_fai = join(TestCommon.assets, 'genotype_32_SNPs_genome_600bp.fa.fai')
    # Minimal genotype records keyed by SNP id; each entry carries the
    # per-sample call and the VCF 'SNP' row (chrom, pos, id, ref, alt, ...).
    test_records = {
        'id1': {'test_sample': '0/1', 'SNP': ['chr2', '120', 'id1', 'T', 'C', '.', '.', '.', 'GT']},
        'id2': {'test_sample': '1/1', 'SNP': ['chr1', '601', 'id2', 'C', 'A', '.', '.', '.', 'GT']},
        'id3': {'test_sample': '1/1', 'SNP': ['chr2', '72', 'id3', 'C', 'T', '.', '.', '.', 'GT']},
        'id4': {'test_sample': '0/1', 'SNP': ['chr1', '200', 'id4', 'A', 'G', '.', '.', '.', 'GT']},
    }

    def setUp(self):
        self.geno_conversion = GenotypeConversion(
            [open(self.genotype_quantstudio)], self.small_reference_fai, flank_length=600
        )

    def test_generate_vcf(self):
        # header_lines = ['##header line1', '##header line2']
        # snp_ids = ['id4', 'id2', 'id3', 'id1']
        # TODO: make assertions on what header lines, snp IDs, etc. have been written
        path = join(self.assets, 'test_generate')
        vcf_file = path + '.vcf'
        assert self.geno_conversion.generate_vcf('V0001P001C01', new_name=path) == vcf_file
        with open(vcf_file) as f:
            # 26 data (non-header) rows expected for this sample.
            assert 26 == len([l for l in f.readlines() if not l.startswith('#')])

    def test_get_genotype_from_call(self):
        genotype = self.geno_conversion.get_genotype_from_call('A', 'T', 'Both')
        assert genotype == '0/1'
        genotype = self.geno_conversion.get_genotype_from_call('A', 'T', 'Undefined')
        assert genotype == './.'
        with pytest.raises(ValueError) as e:
            self.geno_conversion.get_genotype_from_call('G', 'T', 'A')
        # BUG FIX: compare against the exception message via e.value. Since
        # pytest 5.0, str(ExceptionInfo) returns "file:line: ValueError: ..."
        # and can never equal the bare message, so the old `str(e) == ...`
        # assertion always failed.
        assert str(e.value) == 'Call G does not match any of the alleles (ref:T, alt:A)'

    def test_vcf_header_from_ref_length(self):
        expected_vcf_headers = ['##contig=<ID=test1,length=48>', '##contig=<ID=test2,length=656>',
                                '##contig=<ID=test3,length=35>', '##contig=<ID=test4,length=10>']
        reference_length = [('test1', '48'), ('test2', '656'), ('test3', '35'), ('test4', '10')]
        observed_vcf_headers = self.geno_conversion.vcf_header_from_ref_length(reference_length)
        assert expected_vcf_headers == observed_vcf_headers

    def test_order_from_fai(self):
        reference_length = [('chr1', '2000'), ('chr2', '2000')]
        # Records must be ordered chromosome-first, then by position.
        expected_records = ['id4', 'id2', 'id3', 'id1']
        assert self.geno_conversion.order_from_fai(self.test_records, reference_length) == expected_records

    def test_parse_genome_fai(self):
        # Fixed typo in local name: refence_length -> reference_length.
        reference_length = self.geno_conversion._parse_genome_fai()
        # Every amplicon in the .fai has the same 1201 bp flanked length.
        expected_ref_length = [
            (i, '1201') for i in (
                'C___2728408_10', 'C___1563023_10', 'C__15935210_10', 'C__33211212_10', 'C___3227711_10',
                'C__30044763_10', 'C__11821218_10', 'C___1670459_10', 'C__29619553_10', 'C___1007630_10',
                'C__26546714_10', 'C___7421900_10', 'C__27402849_10', 'C___2953330_10', 'C__16205730_10',
                'C___8850710_10', 'C___1801627_20', 'C___7431888_10', 'C___1250735_20', 'C___1902433_10',
                'C__31386842_10', 'C__26524789_10', 'C___8924366_10', 'C_____43852_10', 'C__11522992_10',
                'C__10076371_10', 'C___7457509_10', 'C___1122315_10', 'C__11710129_10', 'C___1027548_20',
                'C___8938211_20', 'C___1083232_10')
        ]
        assert reference_length == expected_ref_length

    def test_parse_quantstudio_flex_genotype(self):
        assert self.geno_conversion.sample_names == {'V0001P001C01', 'V0001P001A01'}
        assert len(self.geno_conversion.all_records) == 26
        assert self.geno_conversion.all_records['rs1567612'] == {
            'SNP': ['C___7457509_10', '601', 'rs1567612', 'G', 'A', '.', '.', '.', 'GT'],
            'V0001P001A01': './.', 'V0001P001C01': '0/1'
        }
        assert self.geno_conversion.all_records['rs6598531'] == {
            'SNP': ['C__11522992_10', '601', 'rs6598531', 'T', 'G', '.', '.', '.', 'GT'],
            'V0001P001A01': './.', 'V0001P001C01': './.'
        }

    def test_find_field(self):
        # _find_field should match a valid alias against observed headers.
        observed_fieldnames = ('__this__', 'that', 'OTHER')
        valid_this_fieldnames = ('this', 'THIS', '__this__')
        valid_that_fieldnames = ('that', 'THAT', '__that__')
        valid_other_fieldnames = ('other', 'OTHER', '__other__')
        find = self.geno_conversion._find_field
        assert find(valid_this_fieldnames, observed_fieldnames) == '__this__'
        assert find(valid_that_fieldnames, observed_fieldnames) == 'that'
        assert find(valid_other_fieldnames, observed_fieldnames) == 'OTHER'
class TestUploadVcfToSamples(TestEPP):
    """Tests for UploadVcfToSamples: matching genotype results to LIMS
    samples, uploading the VCF, and updating sample/artifact UDFs."""

    def setUp(self):
        self.epp = UploadVcfToSamples(self.default_argv + ['--input_genotypes', self.genotype_quantstudio])
        # Two fake LIMS samples matching the sample names in the fixture file.
        self.lims_sample1 = NamedMock(real_name='V0001P001A01', udf={})
        self.lims_sample2 = NamedMock(real_name='V0001P001C01', udf={})
        fake_all_inputs = Mock(
            return_value=[
                Mock(samples=[self.lims_sample1]),
                Mock(samples=[self.lims_sample2])
            ]
        )
        # all output artifacts
        self.outputs = {}

        def fake_find_output_art(inart):
            # Lazily create one output artifact per input sample so UDF
            # writes can be asserted per sample afterwards.
            if inart.samples[0] not in self.outputs:
                self.outputs[inart.samples[0]] = Mock(samples=inart.samples, udf={})
            return [self.outputs[inart.samples[0]]]

        self.patched_process = patch.object(StepEPP, 'process', new_callable=PropertyMock(
            return_value=Mock(all_inputs=fake_all_inputs)
        ))
        self.patched_find_output_art = patch.object(UploadVcfToSamples, '_find_output_art',
                                                    side_effect=fake_find_output_art)

    def test_upload_first_time(self):
        # First import: both samples have empty UDFs, so every call count
        # becomes the "Best Run" and the new file id is recorded.
        patched_log = patch.object(UploadVcfToSamples, 'info')
        patched_generate_vcf = patch.object(GenotypeConversion, 'generate_vcf', return_value='uploaded_file')
        patched_remove = patch('scripts.convert_and_dispatch_genotypes.remove')
        exp_log_msgs = (
            ('Matching %s samples from file against %s artifacts', 2, 2),
            ('Matching %s', 'V0001P001A01'),
            ('Matching %s', 'V0001P001C01'),
            ('Matched and uploaded %s artifacts against %s genotype results', 2, 2),
            ('%s artifacts did not match', 0),
            ('%s genotyping results were not used', 0)
        )
        with patched_log as p, patched_generate_vcf, patched_remove, self.patched_lims as mlims, self.patched_process,\
                self.patched_find_output_art:
            mlims.upload_new_file.return_value = Mock(id='file_id')
            self.epp._run()
            for m in exp_log_msgs:
                p.assert_any_call(*m)
            mlims.upload_new_file.assert_any_call(self.lims_sample1, 'uploaded_file')
            mlims.upload_new_file.assert_called_with(self.lims_sample2, 'uploaded_file')
            self.lims_sample1.put.assert_called_once_with()
            self.lims_sample2.put.assert_called_once_with()
        assert self.lims_sample1.udf == {
            'QuantStudio Data Import Completed #': 1,
            'Number of Calls (Best Run)': 6,
            'Genotyping results file id': 'file_id'
        }
        assert self.outputs[self.lims_sample1].udf == {'Number of Calls (This Run)': 6}
        assert self.lims_sample2.udf == {
            'QuantStudio Data Import Completed #': 1,
            'Number of Calls (Best Run)': 22,
            'Genotyping results file id': 'file_id'
        }
        assert self.outputs[self.lims_sample2].udf == {'Number of Calls (This Run)': 22}

    def test_upload_second_time(self):
        # Re-import: sample1's previous run was better (12 > 6 calls) so its
        # Best Run and file id are kept; sample2 improves (22 > 0) so both
        # are replaced. The import counter increments for both.
        patched_log = patch.object(UploadVcfToSamples, 'info')
        patched_generate_vcf = patch.object(GenotypeConversion, 'generate_vcf', return_value='uploaded_file')
        patched_remove = patch('scripts.convert_and_dispatch_genotypes.remove')
        with patched_log, patched_generate_vcf, patched_remove, self.patched_lims as mlims, self.patched_process, \
                self.patched_find_output_art:
            self.lims_sample1.udf = {
                'QuantStudio Data Import Completed #': 1,
                'Number of Calls (Best Run)': 12,
                'Genotyping results file id': 'old_file_id'
            }
            self.lims_sample2.udf = {
                'QuantStudio Data Import Completed #': 1,
                'Number of Calls (Best Run)': 0,
                'Genotyping results file id': 'old_file_id'
            }
            mlims.upload_new_file.return_value = Mock(id='file_id')
            self.epp._run()
        assert self.lims_sample1.udf == {
            'QuantStudio Data Import Completed #': 2,
            'Number of Calls (Best Run)': 12,
            'Genotyping results file id': 'old_file_id'
        }
        assert self.outputs[self.lims_sample1].udf == {'Number of Calls (This Run)': 6}
        self.lims_sample1.put.assert_called_once()
        assert self.lims_sample2.udf == {
            'QuantStudio Data Import Completed #': 2,
            'Number of Calls (Best Run)': 22,
            'Genotyping results file id': 'file_id'
        }
        assert self.outputs[self.lims_sample2].udf == {'Number of Calls (This Run)': 22}
        self.lims_sample2.put.assert_called_once()
| StarcoderdataPython |
1696014 | <reponame>likeanaxon/django-polymorphic
from django.contrib import admin
from pexp.models import *
from polymorphic.admin import (
PolymorphicChildModelAdmin,
PolymorphicChildModelFilter,
PolymorphicParentModelAdmin,
)
class ProjectAdmin(PolymorphicParentModelAdmin):
    """Parent admin for the polymorphic Project tree (lists all subtypes)."""
    base_model = Project  # Can be set explicitly.
    list_filter = (PolymorphicChildModelFilter,)
    child_models = (Project, ArtProject, ResearchProject)
class ProjectChildAdmin(PolymorphicChildModelAdmin):
    """Shared child admin for all concrete Project subclasses."""
    base_model = Project  # Can be set explicitly.
    # On purpose, only have the shared fields here.
    # The fields of the derived model should still be displayed.
    base_fieldsets = (("Base fields", {"fields": ("topic",)}),)
# Parent admin on the base model, shared child admin on each subclass.
admin.site.register(Project, ProjectAdmin)
admin.site.register(ArtProject, ProjectChildAdmin)
admin.site.register(ResearchProject, ProjectChildAdmin)
class UUIDModelAAdmin(PolymorphicParentModelAdmin):
    """Parent admin for the UUID-keyed polymorphic model tree."""
    list_filter = (PolymorphicChildModelFilter,)
    child_models = (UUIDModelA, UUIDModelB)
class UUIDModelAChildAdmin(PolymorphicChildModelAdmin):
    """Default child admin for UUID model subclasses."""
    pass
# Parent admin on UUIDModelA; shared child admin for its subclasses.
admin.site.register(UUIDModelA, UUIDModelAAdmin)
admin.site.register(UUIDModelB, UUIDModelAChildAdmin)
admin.site.register(UUIDModelC, UUIDModelAChildAdmin)
class ProxyAdmin(PolymorphicParentModelAdmin):
    """Parent admin for the proxy-model polymorphic tree."""
    list_filter = (PolymorphicChildModelFilter,)
    child_models = (ProxyA, ProxyB)
class ProxyChildAdmin(PolymorphicChildModelAdmin):
    """Default child admin for proxy subclasses."""
    pass
# Parent admin on the proxy base; shared child admin on the proxies.
admin.site.register(ProxyBase, ProxyAdmin)
admin.site.register(ProxyA, ProxyChildAdmin)
admin.site.register(ProxyB, ProxyChildAdmin)
| StarcoderdataPython |
3303588 | from tensorflow.core.framework.attr_value_pb2 import AttrValue
import pytest
@pytest.fixture(scope='session')
def int_list():
    # Session-scoped TensorFlow AttrValue.ListValue protobuf holding ints.
    return AttrValue.ListValue(i=[1, 2, 3])
@pytest.fixture(scope='session')
def bool_list():
    # Session-scoped TensorFlow AttrValue.ListValue protobuf holding bools.
    return AttrValue.ListValue(b=[True, False])
| StarcoderdataPython |
3226673 | <gh_stars>1-10
"""
Plugin for Czech TV (Ceska televize).
Following channels are working:
* CT1 - http://www.ceskatelevize.cz/ct1/zive/
* CT2 - http://www.ceskatelevize.cz/ct2/zive/
* CT24 - http://www.ceskatelevize.cz/ct24/
* CT sport - http://www.ceskatelevize.cz/sport/zive-vysilani/
* CT Decko - http://decko.ceskatelevize.cz/zive/
* CT Art - http://www.ceskatelevize.cz/art/zive/
Additionally, videos from iVysilani archive should work as well.
"""
import re
from livecli.plugin import Plugin
from livecli.plugin.api import http, validate
from livecli.stream import HLSStream
from livecli.exceptions import PluginError
# Plugin metadata consumed by the livecli documentation generator.
__livecli_docs__ = {
    "domains": [
        "ceskatelevize.cz",
    ],
    "geo_blocked": [
        "CZ",
    ],
    "notes": "",
    "live": True,
    "vod": True,
    "last_update": "2017-02-02",
}
# Matches any http(s) URL on ceskatelevize.cz or a subdomain.
_url_re = re.compile(
    r'http(s)?://([^.]*.)?ceskatelevize.cz'
)
# Matches the embedded iFrame player path inside a page.
_player_re = re.compile(
    r'ivysilani/embed/iFramePlayer[^"]+'
)
# Matches the player hash when it is not already part of the player URL.
_hash_re = re.compile(
    r'hash:"([0-9a-z]+)"'
)
# Captures playlist type and numeric id from inline page JSON.
_playlist_info_re = re.compile(
    r'{"type":"([a-z]+)","id":"([0-9]+)"'
)
# The playlist-url endpoint either returns a URL or the literal
# "error_region" marker when the stream is geo-blocked.
_playlist_url_schema = validate.Schema({
    "url": validate.any(
        validate.url(),
        "error_region"
    )
})
# Shape of the final playlist response: one entry with a main stream URL.
_playlist_schema = validate.Schema({
    "playlist": [{
        "streamUrls": {
            "main": validate.url(),
        }
    }]
})
def _find_playlist_info(response):
    """
    Finds playlist info (type, id) in HTTP response.

    :param response: Response object.
    :returns: Dictionary with type and id, empty when not found.
    """
    match = _playlist_info_re.search(response.text)
    if not match:
        return {}
    return {'type': match.group(1), 'id': match.group(2)}
def _find_player_url(response):
    """
    Finds embedded player url in HTTP response.

    :param response: Response object.
    :returns: Player url (str); the bare site root when nothing was found.
    """
    base = 'http://ceskatelevize.cz/'
    player_match = _player_re.search(response.text)
    if not player_match:
        return base
    embed_url = player_match.group(0).replace('&amp;', '&')
    if 'hash' in embed_url:
        # The player URL already carries its hash.
        return base + embed_url
    # No hash in the URL; try to locate it elsewhere in the page.
    hash_match = _hash_re.search(response.text)
    if hash_match:
        return base + embed_url + '&hash=' + hash_match.group(1)
    return base
class Ceskatelevize(Plugin):
    """Livecli plugin resolving Ceska televize pages to HLS streams."""

    @classmethod
    def can_handle_url(cls, url):
        # Any ceskatelevize.cz URL (live channels and iVysilani archive).
        return _url_re.match(url)

    def _get_streams(self):
        # fetch requested url and find playlist info
        response = http.get(self.url)
        info = _find_playlist_info(response)
        if not info:
            # playlist info not found, let's try to find player url
            player_url = _find_player_url(response)
            if not player_url:
                raise PluginError('Cannot find playlist info or player url!')
            # get player url and try to find playlist info in it
            response = http.get(player_url)
            info = _find_playlist_info(response)
            if not info:
                raise PluginError('Cannot find playlist info in the player url!')
        # Request a client playlist URL for the discovered (type, id).
        data = {
            'playlist[0][type]': info['type'],
            'playlist[0][id]': info['id'],
            'requestUrl': '/ivysilani/embed/iFramePlayerCT24.php',
            'requestSource': 'iVysilani',
            'type': 'html'
        }
        headers = {
            'x-addr': '127.0.0.1',
        }
        # fetch playlist url
        response = http.post(
            'http://www.ceskatelevize.cz/ivysilani/ajax/get-client-playlist',
            data=data,
            headers=headers
        )
        json_data = http.json(response, schema=_playlist_url_schema)
        if json_data['url'] == "error_region":
            # Geo-blocked outside CZ; yield no streams.
            self.logger.error("This stream is not available in your territory")
            return
        # fetch playlist
        response = http.post(json_data['url'])
        json_data = http.json(response, schema=_playlist_schema)
        playlist = json_data['playlist'][0]['streamUrls']['main']
        return HLSStream.parse_variant_playlist(self.session, playlist)


# Entry point discovered by the livecli plugin loader.
__plugin__ = Ceskatelevize
| StarcoderdataPython |
156090 | <reponame>jamesbrobb/dj-stripe
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()

# Test URLconf: mounts the Django admin, the djstripe app (namespaced), two
# test apps (plain and namespaced), and routes representing protected content.
# NOTE(review): `patterns()` was removed in Django 1.10; this file targets an
# older Django.
urlpatterns = patterns('',
    url(r'^admin/', include(admin.site.urls)),
    url(r'^djstripe/', include('djstripe.urls',
                               namespace="djstripe", app_name="djstripe")),
    url(r'^testapp/', include('tests.apps.testapp.urls')),
    url(
        r'^testapp_namespaced/',
        include('tests.apps.testapp_namespaced.urls',
                namespace="testapp_namespaced",
                app_name="testapp_namespaced")),
    # Represents protected content
    url(r'^testapp_content/', include('tests.apps.testapp_content.urls')),
)
| StarcoderdataPython |
3272429 |
import unittest
from mock import patch
from foundations_core_rest_api_components.v1.controllers.projects_controller import ProjectsController
class TestProjectsController(unittest.TestCase):
    """Tests that ProjectsController.index() surfaces all projects."""

    @patch('foundations_core_rest_api_components.v1.models.project.Project.all')
    def test_index_returns_all_projects(self, mock):
        mock.return_value = self._make_lazy_result('snowbork drones')
        controller = ProjectsController()
        # BUG FIX: the expected name must equal the name fed to the mocked
        # Project.all above; the '<NAME>' placeholder could never match.
        expected_result = [{'name': 'snowbork drones', 'created_at': None, 'owner': None}]
        self.assertEqual(expected_result, controller.index().as_json())

    @patch('foundations_core_rest_api_components.v1.models.project.Project.all')
    def test_index_returns_all_projects_different_projects(self, mock):
        mock.return_value = self._make_lazy_result('space2vec')
        controller = ProjectsController()
        expected_result = [{'name': 'space2vec', 'created_at': None, 'owner': None}]
        self.assertEqual(expected_result, controller.index().as_json())

    def _make_lazy_result(self, name):
        """Build a LazyResult resolving to a single Project named `name`."""
        from foundations_core_rest_api_components.lazy_result import LazyResult
        from foundations_core_rest_api_components.v1.models.project import Project

        def _callback():
            return [Project(name=name)]

        return LazyResult(_callback)
| StarcoderdataPython |
282 | <reponame>djaodjin/djaodjin-survey
# Copyright (c) 2020, DjaoDjin inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging, re
from collections import OrderedDict
from django.db.models import F
from django.http import Http404
from django.shortcuts import get_object_or_404
from extra_views.contrib.mixins import SearchableListMixin
from rest_framework import generics
from rest_framework.pagination import PageNumberPagination
from rest_framework import response as http
from ..compat import reverse
from ..mixins import MatrixMixin
from ..models import Answer, Matrix, EditableFilter
from ..utils import (get_account_model, get_account_serializer,
get_question_serializer)
from .serializers import EditableFilterSerializer, MatrixSerializer
LOGGER = logging.getLogger(__name__)
class MatrixCreateAPIView(generics.ListCreateAPIView):
    """
    Filtered list of ``Question``.

    **Examples**:

    .. code-block:: http

        GET /api/matrix/

    Response:

        {
           "slug": "all",
           "title": "All accounts against all questions",
           "metric": {
               "slug": "all-questions",
               "title": "All questions",
               "predicates": []
           },
           "cohorts": [{
               "slug": "all-accounts",
               "title": "All accounts",
               "predicates": []
           }]
        }

    .. code-block:: http

        POST /api/matrix/

        {
           "slug": "all",
           "title": "All accounts against all questions",
           "metric": {
               "slug": "all-questions",
               "title": "All questions",
               "predicates": []
           },
           "cohorts": [{
               "slug": "all-accounts",
               "title": "All accounts",
               "predicates": []
           }]
        }

    Response:

        201 CREATED
        {
           "slug": "all",
           "title": "All accounts against all questions",
           "metric": {
               "slug": "all-questions",
               "title": "All questions",
               "predicates": []
           },
           "cohorts": [{
               "slug": "all-accounts",
               "title": "All accounts",
               "predicates": []
           }]
        }
    """
    serializer_class = MatrixSerializer

    def get_queryset(self):
        # No per-request filtering: list/create operates over all matrices.
        return Matrix.objects.all()
class MatrixDetailAPIView(MatrixMixin, generics.RetrieveUpdateDestroyAPIView):
"""
A table of scores for cohorts aganist a metric.
**Examples**:
.. code-block:: http
GET /api/matrix/languages
Response:
[{
"slug": "languages",
"title": "All cohorts for all questions"
"scores":{
"portfolio-a": "0.1",
"portfolio-b": "0.5",
}
}]
"""
serializer_class = MatrixSerializer
lookup_field = 'slug'
lookup_url_kwarg = 'path'
question_model = get_question_serializer().Meta.model
def aggregate_scores(self, metric, cohorts, cut=None, accounts=None):
#pylint:disable=unused-argument,too-many-locals
if accounts is None:
accounts = get_account_model().objects.all()
scores = {}
if metric:
assert 'metric' in metric.tags, \
"filter '%s' is not tagged as a metric" % str(metric)
includes, excludes = metric.as_kwargs()
questions = self.question_model.objects.filter(
**includes).exclude(**excludes)
nb_questions = len(questions)
if nb_questions > 0:
for cohort in cohorts:
if isinstance(cohort, EditableFilter):
includes, excludes = cohort.as_kwargs()
qs_accounts = accounts.filter(
**includes).exclude(**excludes)
else:
# If `matrix.cohorts is None`, the `cohorts` argument
# will be a list of single account objects.
qs_accounts = [cohort]
nb_accounts = len(qs_accounts)
if nb_accounts > 0:
nb_correct_answers = Answer.objects.filter(
question__in=questions,
sample__account__in=qs_accounts).filter(
measured=F('question__correct_answer')).count()
score = nb_correct_answers * 100 / (
nb_questions * nb_accounts)
LOGGER.debug("score for '%s' = (%d * 100) "\
"/ (%d * %d) = %f", str(cohort), nb_correct_answers,
nb_questions, nb_accounts, score)
assert score <= 100
scores.update({str(cohort): score})
return {"scores": scores}
@property
def matrix(self):
if not hasattr(self, '_matrix'):
self._matrix = Matrix.objects.filter(
slug=self.kwargs.get(self.matrix_url_kwarg)).first()
return self._matrix
def get_accounts(self):
#pylint:disable=unused-argument,no-self-use
return get_account_model().objects.all()
def get_likely_metric(self, cohort_slug):
"""
Returns a URL to a ``Matrix`` derived from *cohort*.
Many times people will use the same name to either mean a cohort
or a metric and expect the system will magically switch between
both meaning. This is an attempt at magic.
"""
likely_metric = None
look = re.match(r"(\S+)(-\d+)", cohort_slug)
if look:
try:
likely_metric = self.request.build_absolute_uri(
reverse('matrix_chart', args=(
EditableFilter.objects.get(slug=look.group(1)).slug,)))
except EditableFilter.DoesNotExist:
pass
return likely_metric
    def get(self, request, *args, **kwargs):
        #pylint:disable=unused-argument,too-many-locals
        """
        Return aggregated scores for a matrix, one result entry for the
        regular cohorts plus (optionally) one for 'aggregate'-tagged cohorts.

        The matrix is resolved from the URL kwarg; when no ``Matrix`` with
        that slug exists, the kwarg is interpreted as a ``{matrix}/{metric}``
        path instead.
        """
        matrix = self.matrix
        if matrix:
            metric = self.matrix.metric
        else:
            # Fallback: the URL kwarg encodes "{matrix}/{metric}".
            parts = self.kwargs.get(self.matrix_url_kwarg).split('/')
            metric = get_object_or_404(EditableFilter, slug=parts[-1])
            matrix = Matrix.objects.filter(slug=parts[0]).first()
            if not matrix:
                raise Http404()
        cohort_serializer = EditableFilterSerializer
        # Cohorts tagged 'aggregate' are scored separately below.
        cohorts = matrix.cohorts.exclude(tags__contains='aggregate')
        public_cohorts = matrix.cohorts.filter(tags__contains='aggregate')
        cut = matrix.cut
        if not cohorts:
            # We don't have any cohorts, let's show individual accounts instead.
            if cut:
                includes, excludes = cut.as_kwargs()
                accounts = self.get_accounts().filter(
                    **includes).exclude(**excludes)
            else:
                accounts = self.get_accounts()
            cohort_serializer = get_account_serializer()
            # Implementation Note: switch cohorts from an queryset
            # of `EditableFilter` to a queryset of `Account` ...
            cohorts = accounts
        result = []
        scores = {}
        # NOTE(review): `cut` may be None here; verify that
        # to_representation(None) is handled by the serializer.
        val = {
            'slug': metric.slug,
            'title': metric.title,
            'metric': EditableFilterSerializer().to_representation(metric),
            'cut': EditableFilterSerializer().to_representation(cut),
            'cohorts': cohort_serializer(many=True).to_representation(cohorts)}
        # In some case, a metric and cohort have a connection
        # and could have the same name.
        for cohort in val['cohorts']:
            likely_metric = self.get_likely_metric(cohort['slug'])
            if likely_metric:
                cohort['likely_metric'] = likely_metric
        scores.update(val)
        scores.update({"values": self.aggregate_scores(
            metric, cohorts, cut, accounts=self.get_accounts())})
        result += [scores]
        if public_cohorts:
            public_scores = {}
            public_scores.update(val)
            public_scores.update(
                {"cohorts": EditableFilterSerializer(
                    public_cohorts, many=True).data,
                 "values": self.aggregate_scores(metric, public_cohorts)})
            result += [public_scores]
        return http.Response(result)
class EditableFilterQuerysetMixin(object):
    """Mixin providing the default queryset of all editable filters."""

    @staticmethod
    def get_queryset():
        """Return every ``EditableFilter`` in the database."""
        queryset = EditableFilter.objects.all()
        return queryset
class EditableFilterListAPIView(SearchableListMixin,
        EditableFilterQuerysetMixin, generics.ListCreateAPIView):
    """
    Lists filters

    **Tags**: survey

    **Examples**

    .. code-block:: http

         GET /api/xia/matrix/filters/ HTTP/1.1

    responds

    .. code-block:: json

        {
            "count": 2,
            "previous": null,
            "next": null,
            "results": [
                {
                    "slug": "all",
                    "title": "All",
                    "tags": "",
                    "predicates": [{
                        "rank": 1,
                        "operator": "",
                        "operand": "",
                        "field": "",
                        "selector": ""
                    }],
                    "likely_metric": ""
                },
                {
                    "slug": "none",
                    "title": "None",
                    "tags": "",
                    "predicates": [{
                        "rank": 1,
                        "operator": "",
                        "operand": "",
                        "field": "",
                        "selector": ""
                    }],
                    "likely_metric": ""
                }
            ]
        }
    """
    # Free-text search (``?q=``) matches against tags only.
    search_fields = ['tags']
    serializer_class = EditableFilterSerializer

    def post(self, request, *args, **kwargs):
        """
        Creates a filter

        **Tags**: survey

        **Examples**

        .. code-block:: http

             POST /api/xia/matrix/filters/ HTTP/1.1

        responds

        .. code-block:: json

            {
                "count": 2,
                "previous": null,
                "next": null,
                "results": [
                    {
                        "slug": "all",
                        "title": "All",
                        "tags": "",
                        "predicates": [{
                            "rank": 1,
                            "operator": "",
                            "operand": "",
                            "field": "",
                            "selector": ""
                        }],
                        "likely_metric": ""
                    },
                    {
                        "slug": "none",
                        "title": "None",
                        "tags": "",
                        "predicates": [{
                            "rank": 1,
                            "operator": "",
                            "operand": "",
                            "field": "",
                            "selector": ""
                        }],
                        "likely_metric": ""
                    }
                ]
            }
        """
        #pylint:disable=useless-super-delegation
        return super(EditableFilterListAPIView, self).post(
            request, *args, **kwargs)
class EditableFilterDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
    """
    Retrieves a filter

    **Tags**: survey

    **Examples**

    .. code-block:: http

         GET /api/xia/matrix/filters/all/ HTTP/1.1

    responds

    .. code-block:: json

        {
            "slug": "all",
            "title": "All",
            "tags": "",
            "predicates": [{
                "rank": 1,
                "operator": "",
                "operand": "",
                "field": "",
                "selector": ""
            }],
            "likely_metric": ""
        }
    """
    serializer_class = EditableFilterSerializer
    # Filters are addressed by slug through the ``editable_filter`` URL kwarg.
    lookup_field = 'slug'
    lookup_url_kwarg = 'editable_filter'

    def get_queryset(self):
        """Return every ``EditableFilter``; DRF narrows it by slug."""
        return EditableFilter.objects.all()

    def put(self, request, *args, **kwargs):
        """
        Updates a filter

        **Tags**: survey

        **Examples**

        .. code-block:: http

             PUT /api/xia/matrix/filters/all/ HTTP/1.1

        .. code-block:: json

            {
                "slug": "all",
                "title": "All",
                "tags": "",
                "predicates": [{
                    "rank": 1,
                    "operator": "",
                    "operand": "",
                    "field": "",
                    "selector": ""
                }],
                "likely_metric": ""
            }

        responds

        .. code-block:: json

            {
                "slug": "all",
                "title": "All",
                "tags": "",
                "predicates": [{
                    "rank": 1,
                    "operator": "",
                    "operand": "",
                    "field": "",
                    "selector": ""
                }],
                "likely_metric": ""
            }
        """
        #pylint:disable=useless-super-delegation
        return super(EditableFilterDetailAPIView, self).put(
            request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        """
        Deletes a filter

        **Tags**: survey

        **Examples**

        .. code-block:: http

             DELETE /api/xia/matrix/filters/all/ HTTP/1.1
        """
        #pylint:disable=useless-super-delegation
        return super(EditableFilterDetailAPIView, self).delete(
            request, *args, **kwargs)
class EditableFilterPagination(PageNumberPagination):
    """
    Page-number pagination that also echoes back the ``EditableFilter``
    the results were filtered through.

    Requires the paginated view to expose an ``editable_filter`` attribute
    (set in ``EditableFilterObjectsAPIView.get``).
    """

    def paginate_queryset(self, queryset, request, view=None):
        # Remember the filter resolved by the view so the response
        # builder below can serialize it.
        self.editable_filter = view.editable_filter
        return super(EditableFilterPagination, self).paginate_queryset(
            queryset, request, view=view)

    def get_paginated_response(self, data):
        return http.Response(OrderedDict([
            # Serialized filter the page was computed against.
            ('editable_filter', EditableFilterSerializer().to_representation(
                self.editable_filter)),
            ('count', self.page.paginator.count),
            ('next', self.get_next_link()),
            ('previous', self.get_previous_link()),
            ('results', data)
        ]))
class EditableFilterObjectsAPIView(generics.ListAPIView):
    """
    List filter objects

    **Tags**: survey

    **Examples**

    .. code-block:: http

         GET /api/xia/matrix/filters/ HTTP/1.1

    responds

    .. code-block:: json

        {
            "created_at": "2020-01-01T00:00:00Z",
            "measured": 12
        }

    .. NOTE(review): the example above does not match a paginated list of
       filter objects -- verify against the actual serializer output.
    """
    pagination_class = EditableFilterPagination
    serializer_class = None # override in subclasses
    lookup_field = 'slug'
    lookup_url_kwarg = 'editable_filter'

    def get_queryset(self):
        # The listed model comes from the subclass' serializer
        # (accounts, questions, ...).
        return self.get_serializer_class().Meta.model.objects.all()

    def get(self, request, *args, **kwargs): #pylint: disable=unused-argument
        # Resolve the filter named in the URL; the paginator serializes it
        # back into the response envelope.
        self.editable_filter = generics.get_object_or_404(
            EditableFilter.objects.all(),
            slug=self.kwargs[self.lookup_url_kwarg])
        return super(EditableFilterObjectsAPIView, self).get(
            request, *args, **kwargs)
class AccountListAPIView(EditableFilterObjectsAPIView):
    """
    Lists accounts, paginated under the ``EditableFilter`` named in the URL.

    **Examples**:

    .. code-block:: http

        GET /api/questions/languages

    Response:

        {
            "slug": "languages",
            "title": "All questions related to languages",
            "predicates": [{
                "operator": "contains",
                "operand": "language",
                "field": "text",
                "selector": "keepmatching"
            }]
        }

    .. NOTE(review): the example above shows an ``EditableFilter`` payload
       copied from the questions endpoint -- verify against the account
       serializer output.
    """
    # Rows are serialized with the project's account serializer.
    serializer_class = get_account_serializer()
class QuestionListAPIView(EditableFilterObjectsAPIView):
    """
    Lists ``Question`` objects, paginated under the ``EditableFilter``
    named in the URL.

    **Examples**:

    .. code-block:: http

        GET /api/questions/languages

    Response:

        {
            "slug": "languages",
            "title": "All questions related to languages",
            "predicates": [{
                "operator": "contains",
                "operand": "language",
                "field": "text",
                "selector": "keepmatching"
            }]
        }
    """
    # Rows are serialized with the project's question serializer.
    serializer_class = get_question_serializer()
| StarcoderdataPython |
3216457 | <reponame>BerenLuthien/ReAgent<gh_stars>1000+
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
from typing import List
from reagent.core.dataclasses import dataclass, field
from reagent.core.parameters import NormalizationData, param_hash
from reagent.models.base import ModelBase
from reagent.models.dqn import FullyConnectedDQN
from reagent.net_builder.quantile_dqn_net_builder import QRDQNNetBuilder
@dataclass
class Quantile(QRDQNNetBuilder):
    """
    Net builder for QR-DQN: produces a fully-connected Q-network whose
    output carries ``num_atoms`` values per action (see ``build_q_network``).
    """

    # Hash the builder on its hyper-parameters.
    __hash__ = param_hash

    # Hidden-layer sizes and their matching activation names;
    # the two lists must have the same length (checked below).
    sizes: List[int] = field(default_factory=lambda: [256, 128])
    activations: List[str] = field(default_factory=lambda: ["relu", "relu"])
    dropout_ratio: float = 0.0

    def __post_init_post_parse__(self):
        super().__init__()
        # One activation per hidden layer.
        assert len(self.sizes) == len(self.activations), (
            f"Must have the same numbers of sizes and activations; got: "
            f"{self.sizes}, {self.activations}"
        )

    def build_q_network(
        self,
        state_normalization_data: NormalizationData,
        output_dim: int,
        num_atoms: int,
    ) -> ModelBase:
        """Build a ``FullyConnectedDQN`` with ``num_atoms`` outputs per each
        of the ``output_dim`` actions, sized from the state normalization."""
        state_dim = self._get_input_dim(state_normalization_data)
        return FullyConnectedDQN(
            state_dim=state_dim,
            action_dim=output_dim,
            sizes=self.sizes,
            num_atoms=num_atoms,
            activations=self.activations,
            dropout_ratio=self.dropout_ratio,
        )
| StarcoderdataPython |
3231942 | from enum import Enum
from .follow_trajectory_controller import FollowTrajectoryController
from .manoeuvre_controller import DevelopLaneChangeController
class ActController:
    """Top-level actuation controller that delegates to a mode-specific
    sub-controller (trajectory following or lane change)."""

    class Mode(Enum):
        """Operating modes of the controller."""
        IDLE = 0
        FOLLOW_TRAJECTORY = 1
        CHANGE_LANE = 2

    def __init__(self):
        self.car = None
        self.mode = self.Mode.FOLLOW_TRAJECTORY
        self.controller = None

    def bind(self, car):
        """Attach to *car* and instantiate the controller for the current mode."""
        self.car = car
        self._ensure_mode_controller(self.mode)

    def unbind(self, car):
        """Detach from the car."""
        self.car = None

    def set_mode(self, mode):
        """Switch to *mode*, rebuilding the sub-controller if needed."""
        self._ensure_mode_controller(mode)

    def _ensure_mode_controller(self, mode):
        """Tear down the old sub-controller and build one matching *mode*."""
        if mode == self.mode and self.controller is not None:
            return
        if self.controller is not None:
            self.controller.unbind(self.car)
            self.controller = None
        self.mode = mode
        if mode == self.Mode.FOLLOW_TRAJECTORY:
            self.controller = FollowTrajectoryController()
        elif mode == self.Mode.CHANGE_LANE:
            self.controller = DevelopLaneChangeController(self.car.trajectory)
        if self.controller is not None:
            self.controller.bind(self.car)

    def step(self, t, dt):
        """Advance the active sub-controller by one simulation step."""
        if self.controller is not None:
            self.controller.step(t, dt)
| StarcoderdataPython |
3342995 | #!/usr/bin/env python
# coding: utf-8
from xumm.resource import XummResource
class XrplTxResource(XummResource):
    """Resource describing the ``xrpl-tx`` transaction-lookup endpoint."""

    @classmethod
    def get_url(cls, tx_hash: str) -> str:
        """
        Gets the GET url of this XrplTxResource

        :param tx_hash: A string contain transaction hash.
        :type: str
        :return: The GET url of this XrplTxResource.
        :rtype: str
        """
        base_url = super(XrplTxResource, cls).platform_url()
        return '{}xrpl-tx/{}'.format(base_url, tx_hash)
| StarcoderdataPython |
1785122 | """
Moodstocks API Client
---------------------
- Copyright (C) 2014 by Moodstocks SAS.
- Licensed under MIT/X11
- See https://moodstocks.com/ for more information.
"""
# Default Moodstocks API endpoint (v2), used when no custom endpoint is given.
DEFAULT_EP = "http://api.moodstocks.com/v2"
from requests.auth import HTTPDigestAuth
import requests
import json
import os
import base64
# Version of this client library.
version = '0.1'
# Convenience alias so callers can test statuses via ``codes.ok`` etc.
codes = requests.codes
def b64_encode(s):
    """
    Encode input with the base64url-safe, unpadded scheme.

    Accepts ``bytes`` or ``str`` (encoded as UTF-8) and returns a ``str``.

    Fix: ``urlsafe_b64encode`` returns ``bytes`` on Python 3, so the old
    ``.strip("=")`` raised ``TypeError``; padding is also only ever
    trailing, hence ``rstrip``.
    """
    if isinstance(s, str):
        s = s.encode("utf-8")
    return base64.urlsafe_b64encode(s).rstrip(b"=").decode("ascii")
def b64_decode(s):
    """
    Decode input encoded with the base64url-safe, unpadded scheme.

    Accepts ``str`` or ``bytes`` (generalized: the old ``s += "="`` failed
    for ``bytes`` input on Python 3) and returns the decoded ``bytes``.
    """
    if isinstance(s, bytes):
        s = s.decode("ascii")
    # Restore the padding stripped by b64_encode (never more than two '=').
    mod = len(s) % 4
    if mod >= 2:
        s += (4 - mod) * "="
    return base64.urlsafe_b64decode(s)
class APIError(Exception):
    """Raised when the API answers with an unexpected (non-200) response.

    Carries the HTTP status ``code`` and the raw response ``body``.
    """

    def __init__(self, code, body):
        self.code = code
        self.body = body

    def __str__(self):
        return f"{self.code} - {self.body}"
class APIClient:
    """
    Represents a Moodstocks HTTP API Client.

    Every method performs one HTTP round-trip and raises :class:`APIError`
    when the server answers with a non-200 status code.
    """

    def __init__(self, api_key, api_secret, ep=None):
        """
        Constructor keyword arguments:

        :param api_key: a valid Moodstocks API key
        :param api_secret: a valid Moodstocks API secret
        :param ep: optional API endpoint; defaults to ``DEFAULT_EP``

        .. note::
          You must first create a developer account on
          `Moodstocks <https://moodstocks.com/>`_ to obtain a valid API key /
          secret pair.
        """
        # The Moodstocks API authenticates every request with HTTP Digest.
        self.auth = HTTPDigestAuth(api_key, api_secret)
        self.ep = ep or DEFAULT_EP

    def _request(self, method, resource, files=None, params=None, **kwargs):
        """
        Internal method for HTTP requests.

        :param method: HTTP verb, e.g. ``'GET'``.
        :param resource: resource path appended to the endpoint.
        :param files: optional multipart file payload.
        :param params: optional query-string parameters.
        :raises APIError: when the response status is not 200.
        :return: the decoded JSON body.
        """
        url = self.ep + resource
        # NOTE(review): extra **kwargs are accepted but never forwarded.
        r = requests.request(
            method,
            url,
            params=params,
            files=files,
            auth=self.auth
        )
        if r.status_code != codes.ok:
            raise APIError(r.status_code, r.text)
        return r.json()

    def add_image(self, image_id, filename=None, image_url=None):
        """
        Index a reference image on your API key to make it searchable.

        Exactly one of *filename* or *image_url* should be provided.

        :param image_id: reference image unique identifier.
        :param filename: full path to the image file
        :param image_url: remote image URL
        :return: a dict, e.g `{'id': 'my_id', 'is_update': False}`

        .. note::
          This operation makes your image available **only** through
          server-side search - see :func:`search_image`. To make it available
          on the client-side local image database - thanks to the
          `Moodstocks SDK <https://moodstocks.com/docs/>`_ - you must use
          :func:`make_image_offline`.
        """
        files = None
        if filename:
            # The whole file is read into memory and sent as multipart data.
            with open(filename, 'rb') as f:
                files = {'image_file': ('ref.jpg', f.read())}
        params = None
        if image_url:
            params = {'image_url': image_url}
        return self._request(
            'PUT',
            '/ref/' + image_id,
            files=files,
            params=params
        )

    def remove_image(self, image_id):
        """
        Remove a reference image from your API key.

        :param image_id: reference image unique identifier
        :return: a dict, e.g `{'existed': False, 'id': 'my_id'}`
        """
        return self._request('DELETE', '/ref/' + image_id)

    def make_image_offline(self, image_id):
        """
        Flag a reference image as *offline*.

        Use this to make a reference image synchronizable and searchable
        on-device through the local image database thanks to the
        `Moodstocks SDK <https://moodstocks.com/docs/>`_.

        :param image_id: reference image unique identifier
        :return: a dict, e.g `{'was_offline': False, 'id': 'my_id'}`
        """
        return self._request('POST', '/ref/%s/offline' % image_id)

    def remove_image_offline(self, image_id):
        """
        Unflag an offline reference image.

        This does not completely remove the reference image, i.e it will remain
        searchable only through a server-side search.

        :param image_id: reference image unique identifier
        :return: a dict, e.g `{'was_offline': True, 'id': 'my_id'}`
        """
        return self._request('DELETE', '/ref/%s/offline' % image_id)

    def image_info(self, image_id):
        """
        Show the status of a given reference image.

        This method raises a :class:`APIError` if the corresponding reference
        image does not exist.

        :param image_id: reference image unique identifier
        :return: a dict, e.g `{'is_offline': True, 'id': 'my_id'}`
        """
        return self._request('GET', '/ref/%s' % image_id)

    def list_images(self, offline=False):
        """
        Get the global number of reference images available, as well as the
        list of their IDs.

        :param offline: whether to consider offline images only or not (default)
        :return: a dict, e.g `{'count': 3, 'ids': ['my_id', 'foo', 'bar']}`
        """
        if offline:
            return self._request('GET', '/stats/offline/refs')
        else:
            return self._request('GET', '/stats/refs')

    def search_image(self, filename=None, image_url=None):
        """
        Looking up an image using a server-side search (a.k.a online image
        recognition).

        Exactly one of *filename* or *image_url* should be provided.

        :param filename: local image file's full path
        :param image_url: image's url
        :return: a dict, e.g `{'found': True, 'id': 'my_id'}` or `{'found': False}`
        """
        files = None
        if filename:
            with open(filename, 'rb') as f:
                files = {'image_file': ('qry.jpg', f.read())}
        params = None
        if image_url:
            params = {'image_url': image_url}
        return self._request(
            'POST',
            '/search',
            files=files,
            params=params
        )

    def echo(self, params=None):
        """
        Perform an echo request with optional parameters.

        :param params: optional query string parameters
        :return: a dict, e.g `{'http_verb': 'GET', 'results': {}}`
        """
        return self._request('GET', '/echo', params=params)
| StarcoderdataPython |
1659516 | # -*- coding: utf-8 -*-
"""
Description: Reads the "metadata.json" file and downloads the subtitle for each title, given a language of preference.
"""
import hashlib
import json
import os
import requests
class SubtitleFinder:
    """Downloads subtitles for the movie titles listed in ``metadata.json``.

    Each movie file is identified against the SubDB API by the md5 hash of
    its first and last ``size`` KiB (64 by default). Subtitles are written
    next to the movie as ``<language>_subtitles.srt``.

    Fix over the original: a duplicated nested ``if self._verbose:`` check
    in ``_get_hash`` was collapsed into one.
    """

    def __init__(
        self,
        directory=None,
        metadata_filename="metadata.json",
        language="en",
        verbose=False,
    ):
        self._directory = directory
        self._metadata_filename = metadata_filename
        self._language = language
        self._verbose = verbose
        # Monotonic counter used only to label verbose log lines.
        self._action_counter = 0

        if self._verbose:
            print("[CURRENT ACTION: LOCATING MOVIE SUBTITLES]\n")

    def _is_movie_file(self, filename):
        """
        :param filename: The filename to assess.
        :return bool: Whether the given filename is a movie file or not.

        This method returns True if the given filename is a movie file and False if not.
        """
        # NOTE(review): "(unknown)" in the log strings below looks like a
        # redacted placeholder for the filename -- confirm against history.
        if self._verbose:
            print(f'[{self._action_counter}] [PROCESSING FILE] "(unknown)"\n')
        self._action_counter += 1

        movie_file_extensions = [".avi", ".mp4", ".mkv", ".mov"]
        filename, extension = os.path.splitext(filename)
        if extension in movie_file_extensions:
            if self._verbose:
                print(f'[INFO] "(unknown)" [IS A] [MOVIE FILE]\n')
            return True
        else:
            if self._verbose:
                print(f'[INFO] "(unknown)" [IS NOT A] [MOVIE FILE]\n')
            return False

    def _get_movie_file_paths(self, directory):
        """
        :param str directory: A directory containing movie files.
        :return list: A list of movie file paths.

        This method takes a directory that contains files and returns all files that are movie files.
        Returns an empty list when the directory does not exist.
        """
        movie_file_paths = []
        if os.path.exists(directory):
            for filename in os.listdir(directory):
                if self._is_movie_file(filename=filename):
                    movie_file_path = os.path.join(directory, filename)
                    movie_file_paths.append(movie_file_path)
        return movie_file_paths

    def _get_hash(self, filepath, size=64):
        """
        :param str filepath: The path to the file to be hashed.
        :param int size: The size (in KB) of the chunks to hash.
        :return str: The `md5` hash of the end chunks from the file at the given filepath.

        This hash function receives the name of the file and returns the `md5` hash of the beginning
        and end `size` KB sized chunks, per the SubDB protocol.
        """
        if self._verbose:
            print(
                f'[{self._action_counter}] [PROCESSING FILE] [HASHING] [FILEPATH] "{filepath}"\n'
            )
        self._action_counter += 1

        readsize = size * 1024
        # NOTE(review): the backwards seek raises OSError for files smaller
        # than `readsize` -- confirm whether such files can occur here.
        with open(filepath, "rb") as f:
            data = f.read(readsize)
            f.seek(-readsize, os.SEEK_END)
            data += f.read(readsize)
        file_hash = hashlib.md5(data).hexdigest()
        if self._verbose:
            print(f'[INFO] "{filepath}": [HASH] "{file_hash}"\n')
        return file_hash

    def _download(self, url="http://api.thesubdb.com/", payload=None, headers=None):
        """
        :param str url: The SubDb API URL.
        :param dict payload: optional query-string parameters.
        :param dict headers: A dictionary containing custom headers. Default only contains the `User-Agent`.
        :return requests.Response: A `requests.Response` object containing the file being requested.

        This method performs a GET request on the given URL, using the given payload and headers (if desired).
        """
        if headers is None:
            # SubDB requires a custom User-Agent identifying the client.
            headers = {
                "User-Agent": "SubDB/1.0 (Windows; U; Windows NT 5.1; it; rv:1.8.1.11) Gecko/20071127 Firefox/2.0.0.11; https://github.com/blairg23/movie-file-fixer"
            }
        if self._verbose:
            print(f'[{self._action_counter}] [DOWNLOADING] [FILE] from [URL] "{url}"\n')
        self._action_counter += 1

        return requests.get(url=url, params=payload, headers=headers)

    def _search_subtitles(self, hashcode=None):
        """
        :param str hashcode: The `md5` hash of the file to use to search for available subtitles.
        :return requests.Response: response whose text is a comma-separated list of
            available two-character language codes.

        This method searches the SubDB API for the given subtitles by `hashcode` and returns all available languages
        the subtitle exists in.
        """
        if self._verbose:
            print(
                f'[{self._action_counter}] [SEARCHING] [SUBTITLE] for [HASHCODE] "{hashcode}"\n'
            )
        self._action_counter += 1

        payload = {"action": "search", "hash": hashcode}
        response = self._download(payload=payload)
        return response

    def _download_subtitles(self, language="en", hashcode=None):
        """
        :param str language: The language of the subtitle to download (as a two character language code, i.e., 'en' for English).
        :param str hashcode: The `md5` hash of the file to download the subtitle for.
        :return requests.Response: response whose text is the `.srt` subtitle content.

        This method downloads subtitles from the SubDB API, given the specified `hashcode` and `language`.
        """
        if self._verbose:
            print(
                f'[{self._action_counter}] [DOWNLOADING] [SUBTITLE] for [HASHCODE] "{hashcode}"\n'
            )
        self._action_counter += 1

        payload = {"action": "download", "hash": hashcode, "language": language}
        response = self._download(payload=payload)
        return response

    def get_subtitles(self, directory=None, metadata_filename=None, language="en"):
        """
        :param str directory: The movie file directory to download subtitles for.
        :param str metadata_filename: The metadata filename.
        :param str language: The two character language code representing the language to download the subtitle in.
        :return None:
        """
        if directory is None:
            directory = self._directory
        if metadata_filename is None:
            metadata_filename = self._metadata_filename

        full_filepath = os.path.join(directory, metadata_filename)
        if os.path.exists(full_filepath):
            if self._verbose:
                print(f'[{self._action_counter}] [PROCESSING FILE] "{full_filepath}"\n')
            self._action_counter += 1

            # Open file for reading:
            with open(full_filepath, mode="rb") as infile:
                # Load existing data into titles index list:
                titles = json.load(infile)
                for title in titles.get("titles", []):
                    title_filename = title.get("title")
                    title_folder_path = os.path.join(directory, title_filename)
                    subtitle_filename = f"{language}_subtitles.srt"
                    subtitle_path = os.path.join(title_folder_path, subtitle_filename)
                    movie_file_paths = self._get_movie_file_paths(
                        directory=title_folder_path
                    )
                    for movie_file_path in movie_file_paths:
                        if self._verbose:
                            print(f'[PROCESSING TITLE] "{title_filename}"\n')
                        if not os.path.exists(subtitle_path):
                            subtitles_available = None
                            hashcode = self._get_hash(filepath=movie_file_path)
                            response = self._search_subtitles(hashcode=hashcode)
                            if response.status_code == 200:
                                subtitles_available = response.text
                            # NOTE(review): substring containment on the
                            # comma-separated code list -- confirm no code
                            # pair can embed another code.
                            if (
                                subtitles_available not in ["", None, " "]
                                and language in subtitles_available
                            ):
                                if self._verbose:
                                    print(
                                        f'[ADDING SUBTITLE FILE] "{language}_subtitles.srt" at [FILEPATH] "{subtitle_path}"\n'
                                    )
                                response = self._download_subtitles(
                                    language=language, hashcode=hashcode
                                )

                                if response.status_code == 200:
                                    subtitles = response.text
                                    if self._verbose:
                                        print("[INFO] [DOWNLOAD COMPLETE]\n")
                                        print(
                                            f'[WRITING SUBTITLE FILE] "{language}_subtitles.srt" at [FILEPATH] "{subtitle_path}"\n'
                                        )

                                    with open(
                                        subtitle_path, "w+", encoding="UTF-8"
                                    ) as outfile:
                                        outfile.writelines(subtitles)

                                    if self._verbose:
                                        print("[WRITE COMPLETE]")
                                else:
                                    print(
                                        f'[ERROR] [RESPONSE STATUS CODE] "{response.status_code}".\n'
                                        f'[SUBTITLE] for [MOVIE FILE] "{movie_file_path}" [MAY NOT EXIST]\n'
                                    )
                            else:
                                if self._verbose:
                                    print(
                                        f'[ERROR] No Subtitles Available for [LANGUAGE] "{language}".\n'
                                    )
                        else:
                            print("[INFO] Subtitle already exists. Skipping...\n")
        print("[COMPLETE]")
| StarcoderdataPython |
"""Print the most relevant vocabulary features per class, read from
per-class 'dictionary<i>' pickle dumps (Cora-style ML topic labels)."""
import pickle
import operator  # NOTE(review): unused; kept to avoid changing the import surface
import numpy as np
import csv
import os.path
from collections import Counter

# Load the one-hot test labels (array of shape [samples, num_classes]).
with open('y_test', 'rb') as f:
    y_test = pickle.load(f)

# Map 0-based vocabulary index -> word. Rows of data/vocab.csv look like
# "<1-based index>,<word>"; rows with an empty first column are skipped.
dicvocab = {}
with open("data/vocab.csv") as vocab_file:
    for word in csv.reader(vocab_file):
        if word[0] != '':
            dicvocab[int(word[0]) - 1] = word[1]

label_size = y_test.shape[1]
topics = [
    "/Artificial_Intelligence/Machine_Learning/Case-Based/",
    "/Artificial_Intelligence/Machine_Learning/Genetic_Algorithms/",
    "/Artificial_Intelligence/Machine_Learning/Neural_Networks/",
    "/Artificial_Intelligence/Machine_Learning/Probabilistic_Methods/",
    "/Artificial_Intelligence/Machine_Learning/Reinforcement_Learning/",
    "/Artificial_Intelligence/Machine_Learning/Rule_Learning/",
    "/Artificial_Intelligence/Machine_Learning/Theory/",
]
# NOTE(review): computed but never used below.
maxlabel = np.argmax(y_test, axis=1)

# One dump per class, plus "dictionary<label_size>" for all classes enabled.
for ind in range(label_size + 1):
    st = 'dictionary' + str(ind)
    if not os.path.isfile(st):
        continue
    if ind < label_size:
        print("Class ", ind, "enabled")
    else:
        print("All Classes enabled")
    with open(st, 'rb') as f:
        dic = pickle.load(f)
    for i in range(label_size):
        # Frequency of each feature index selected for class i.
        counts = Counter(dic[i])
        # Message fixed to match the [:15] slice below (was "Top 5").
        print("Top 15 Highest Relevance Features for Class ", topics[i], "->", end='')
        # Rank by (frequency, feature index), most frequent first.
        ranked = sorted(counts.items(), key=lambda kv: (kv[1], kv[0]), reverse=True)
        for z in ranked[:15]:
            print((dicvocab[z[0]], z[1]), end=',')
        print()
    print()
    print()
138499 | <reponame>rohit04saluja/genielibs
# Python
import time
import logging
# Unicon
from unicon import Connection
from unicon.eal.dialogs import Dialog, Statement
from unicon.core.errors import (
SubCommandFailure,
StateMachineError,
TimeoutError,
ConnectionError,
)
# Logger
log = logging.getLogger(__name__)
def write_erase_reload_device_without_reconfig(
    device,
    via_console,
    reload_timeout,
    username=None,
    password=None,
    reload_creds=None,
    reload_hostname='Router',
):
    """Execute 'write erase' on device and reload without reconfiguring.

        Args:
            device(`obj`): Device object
            via_console(`str`): Via to use to reach the device console.
            reload_timeout(`int`): Maximum time to wait for reload to complete
            username(`str`): Username for reconnecting after the reload.
                Defaults to the credentials resolved from `reload_creds`.
            password(`str`): Password for reconnecting after the reload.
                Defaults to the credentials resolved from `reload_creds`.
                (Fix: the previous `<PASSWORD>` placeholder was not valid
                Python; restored to `None`, matching `username`.)
            reload_creds(`str or list`): Creds to apply if reloading device asks
            reload_hostname(`str`): Hostname prompt expected from the freshly
                erased device (factory default is 'Router').
    """

    # Set 'write erase' dialog: auto-confirm the proceed-anyway prompt.
    wr_dialog = Dialog(
        [
            Statement(
                pattern=r'.*Do you wish to proceed anyway\? \(y/n\)\s*\[n\]',
                action="sendline(y)",
                loop_continue=True,
                continue_timer=False)
        ]
    )

    # Execute 'write erase' command
    log.info("\n\nExecuting 'write erase' on device '{}'".format(device.name))
    try:
        device.execute("write erase", reply=wr_dialog)
    except Exception as e:
        raise Exception(
            "Error while executing 'write erase' on device '{}' : {}".format(
                device.name, e
            )
        ) from e
    else:
        log.info(
            "Successfully executed 'write erase' command on device '{}'".format(
                device.name
            )
        )

    # Collect device base information before reload (needed to rebuild the
    # console connection once the device comes back with factory defaults).
    os = device.os
    hostname = device.name
    username, password = device.api.get_username_password(
        device = device,
        username = username,
        password = password,
        creds = reload_creds)
    ip = str(device.connections[via_console]["ip"])
    port = str(device.connections[via_console]["port"])

    # Execute 'reload' command
    log.info("\n\nExecuting 'reload' on device '{}'".format(device.name))
    try:
        device.reload(
            prompt_recovery=True, reload_creds=reload_creds,
            timeout = reload_timeout)
        device.disconnect()
    except SubCommandFailure:
        # A SubCommandFailure here is treated as the expected console drop
        # caused by the reload, not as an error: disconnect and destroy
        # the connection handle.
        log.info(
            "Sucessfully executed 'reload' command on device {}".format(
                device.name
            )
        )
        log.info(
            "Disconnecting and destroying handle to device {}".format(
                device.name
            )
        )
        device.destroy()
    except Exception as e:
        raise Exception(
            "Error while reloading device '{}'".format(device.name)
        ) from e

    # Wait until reload has completed and device can be reachable
    log.info(
        "\n\nWaiting '{}' seconds for device to reboot after reload...".format(
            reload_timeout
        )
    )
    time.sleep(reload_timeout)

    # Reconnect to device over the raw console (telnet) using the factory
    # default hostname prompt.
    log.info(
        "\n\nReconnecting to device '{}' after reload...".format(hostname)
    )
    new_device = Connection(
        credentials=dict(default=dict(username=username, password=password)),
        os=os,
        hostname=reload_hostname,
        start=["telnet {ip} {port}".format(ip=ip, port=port)],
        prompt_recovery=True,
    )
    try:
        new_device.connect()
    except (ConnectionError, TimeoutError) as e:
        # Connection or Timeout Error but 'no' has been sent
        # simply destroy handle at this point
        # NOTE(review): this branch logs success even though connect()
        # failed -- confirm this is the intended best-effort behavior.
        new_device.disconnect()
        log.info(
            "Reconnected to device '{}' after 'write erase' and reload'".format(
                hostname
            )
        )
    except Exception as e:
        raise Exception(
            "Error reconnecting to device '{}' after 'write erase'"
            " and reload".format(hostname)
        ) from e
    else:
        new_device.disconnect()
        log.info(
            "Successully reconnected to device '{}' after 'write erase' "
            "and reload'".format(hostname)
        )
| StarcoderdataPython |
1618193 | # RUN: %PYTHON %s | iree-dialects-opt -split-input-file | FileCheck --enable-var-scope --dump-input-filter=all %s
from typing import List
from iree.compiler.dialects.iree_pydm.importer import *
from iree.compiler.dialects.iree_pydm.importer.test_util import *
from iree.compiler.dialects import iree_pydm as d
from iree.compiler import ir
################################################################################
# Pyfunc intrinsics
################################################################################
# Intrinsic compiled into the module under the symbol `__return_one`
# the first time a test function calls it.
@def_pyfunc_intrinsic(symbol="__return_one")
def intrinsic_return_one() -> int:
    return 1
# Two-argument intrinsic exercising Python `or` short-circuit semantics.
@def_pyfunc_intrinsic(symbol="__return_first_true")
def intrinsic_return_first_true(a: int, b: int) -> int:
    return a or b
# CHECK-LABEL: @test_intrinsic_function_no_args
# CHECK: dynamic_call @__return_one() : () -> (!iree_pydm.exception_result, !iree_pydm.object)
# CHECK: func @__return_one()
# Importing this function should emit a dynamic_call to the intrinsic
# plus the intrinsic's own func (verified by the CHECK lines above).
@test_import_global
def test_intrinsic_function_no_args():
    value = intrinsic_return_one()
    return value
# CHECK-LABEL: @test_intrinsic_function_double_call
# No need to check anything: verifier will fail if double emitted.
# Calls the intrinsic twice on purpose: the intrinsic func must only be
# emitted once (the verifier fails on a duplicate symbol).
@test_import_global
def test_intrinsic_function_double_call():
    value = intrinsic_return_one()
    value2 = intrinsic_return_one()
    return value
# CHECK-LABEL: @test_intrinsic_function_args
# CHECK: %[[ZERO:.*]] = constant 0 : i64 -> !iree_pydm.integer
# CHECK: %[[ONE:.*]] = constant 1 : i64 -> !iree_pydm.integer
# CHECK: dynamic_call @__return_first_true(%[[ZERO]], %[[ONE]]) : (!iree_pydm.integer, !iree_pydm.integer) -> (!iree_pydm.exception_result, !iree_pydm.object)
# CHECK: func @__return_first_true
# Constant arguments become iree_pydm constants forwarded to dynamic_call
# (see the CHECK lines above).
@test_import_global
def test_intrinsic_function_args():
    value = intrinsic_return_first_true(0, 1)
    return value
################################################################################
# IR macro intrinsics
################################################################################
# IR macro: expands inline at the call site instead of emitting a func.
@def_ir_macro_intrinsic
def macro_return_none(stage: ImportStage) -> ir.Value:
    return d.NoneOp(d.NoneType.get()).result
# Boxing isn't load bearing here: It is just something we can do/test.
# IR macro taking one SSA value and boxing it via the import context.
@def_ir_macro_intrinsic
def macro_box_arg(stage: ImportStage, arg: ir.Value) -> ir.Value:
    return stage.ic.box(arg)
# CHECK-LABEL: @test_intrinsic_macro_no_args
# CHECK: %[[ONE:.*]] = constant 1
# CHECK: box %[[ONE]] : !iree_pydm.integer -> <!iree_pydm.integer>
# The macro expands to a box op on the constant (see CHECK lines above).
@test_import_global
def test_intrinsic_macro_no_args() -> int:
    return macro_box_arg(1)
################################################################################
# Test multi func intrinsic.
# There is nothing special about a logical not. It is just something we can
# test.
################################################################################
# Bool-typed overload of logical not, used as the "specific" match below.
@def_pyfunc_intrinsic(symbol="__logical_not_bool")
def logical_not_bool(x: bool) -> bool:
    return not x
# Untyped overload of logical not, used as the "generic" match below.
@def_pyfunc_intrinsic(symbol="__logical_not_generic")
def logical_not_generic(x):
    return not x
# Bundles the generic and bool-specific implementations into one callable
# that lowers to a pattern_match_call (see the CHECK lines in the test below).
logical_not = def_pattern_call_intrinsic(match_generic=[logical_not_generic],
                                         match_specific=[logical_not_bool])
# CHECK-LABEL: @test_pattern_call
# CHECK: %[[TRUE:.*]] = constant true
# CHECK: pattern_match_call(%[[TRUE]]) : (!iree_pydm.bool) -> (!iree_pydm.exception_result, !iree_pydm.object)
# CHECK-SAME: matching generic [@__logical_not_generic] specific [@__logical_not_bool]
# CHECK-DAG: func @__logical_not_generic
# CHECK-DAG: func @__logical_not_bool
# Emits pattern_match_call listing both overload funcs (CHECK lines above).
@test_import_global
def test_pattern_call():
    return logical_not(True)
| StarcoderdataPython |
1643424 | # User-defined data types can be defined through classes.
class Student:
    """A student record holding a name, major, and grade-point average."""

    def __init__(self, name, major, gpa):
        """Store the student's name, major and GPA on the instance."""
        self.name = name
        self.major = major
        self.gpa = gpa

    @property
    def on_honor_roll(self):
        """Whether the student's GPA meets the 3.5 honor-roll cutoff."""
        return self.gpa >= 3.5
| StarcoderdataPython |
1693683 | <reponame>rsdoherty/azure-sdk-for-python
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class NetworkAdapter(Model):
    """Represents the networkAdapter on a device.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :ivar adapter_id: Instance ID of network adapter.
    :vartype adapter_id: str
    :ivar adapter_position: Hardware position of network adapter.
    :vartype adapter_position:
     ~azure.mgmt.edgegateway.models.NetworkAdapterPosition
    :ivar index: Logical index of the adapter.
    :vartype index: int
    :ivar node_id: Node ID of the network adapter.
    :vartype node_id: str
    :ivar network_adapter_name: Network adapter name.
    :vartype network_adapter_name: str
    :ivar label: Hardware label for the adapter.
    :vartype label: str
    :ivar mac_address: MAC address.
    :vartype mac_address: str
    :ivar link_speed: Link speed.
    :vartype link_speed: long
    :ivar status: Value indicating whether this adapter is valid. Possible
     values include: 'Inactive', 'Active'
    :vartype status: str or
     ~azure.mgmt.edgegateway.models.NetworkAdapterStatus
    :param rdma_status: Value indicating whether this adapter is RDMA capable.
     Possible values include: 'Incapable', 'Capable'
    :type rdma_status: str or
     ~azure.mgmt.edgegateway.models.NetworkAdapterRDMAStatus
    :param dhcp_status: Value indicating whether this adapter has DHCP
     enabled. Possible values include: 'Disabled', 'Enabled'
    :type dhcp_status: str or
     ~azure.mgmt.edgegateway.models.NetworkAdapterDHCPStatus
    :ivar ipv4_configuration: The IPv4 configuration of the network adapter.
    :vartype ipv4_configuration: ~azure.mgmt.edgegateway.models.Ipv4Config
    :ivar ipv6_configuration: The IPv6 configuration of the network adapter.
    :vartype ipv6_configuration: ~azure.mgmt.edgegateway.models.Ipv6Config
    :ivar ipv6_link_local_address: The IPv6 local address.
    :vartype ipv6_link_local_address: str
    :ivar dns_servers: The list of DNS Servers of the device.
    :vartype dns_servers: list[str]
    """
    # Auto-generated (AutoRest): do not hand-edit field order or keys.
    # Fields marked readonly here are populated by the server only and are
    # rejected by msrest if a caller tries to set them.
    _validation = {
        'adapter_id': {'readonly': True},
        'adapter_position': {'readonly': True},
        'index': {'readonly': True},
        'node_id': {'readonly': True},
        'network_adapter_name': {'readonly': True},
        'label': {'readonly': True},
        'mac_address': {'readonly': True},
        'link_speed': {'readonly': True},
        'status': {'readonly': True},
        'ipv4_configuration': {'readonly': True},
        'ipv6_configuration': {'readonly': True},
        'ipv6_link_local_address': {'readonly': True},
        'dns_servers': {'readonly': True},
    }
    # Maps each Python attribute to its wire-format (JSON) key and msrest type
    # string used for (de)serialization.
    _attribute_map = {
        'adapter_id': {'key': 'adapterId', 'type': 'str'},
        'adapter_position': {'key': 'adapterPosition', 'type': 'NetworkAdapterPosition'},
        'index': {'key': 'index', 'type': 'int'},
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'network_adapter_name': {'key': 'networkAdapterName', 'type': 'str'},
        'label': {'key': 'label', 'type': 'str'},
        'mac_address': {'key': 'macAddress', 'type': 'str'},
        'link_speed': {'key': 'linkSpeed', 'type': 'long'},
        'status': {'key': 'status', 'type': 'str'},
        'rdma_status': {'key': 'rdmaStatus', 'type': 'str'},
        'dhcp_status': {'key': 'dhcpStatus', 'type': 'str'},
        'ipv4_configuration': {'key': 'ipv4Configuration', 'type': 'Ipv4Config'},
        'ipv6_configuration': {'key': 'ipv6Configuration', 'type': 'Ipv6Config'},
        'ipv6_link_local_address': {'key': 'ipv6LinkLocalAddress', 'type': 'str'},
        'dns_servers': {'key': 'dnsServers', 'type': '[str]'},
    }
    def __init__(self, *, rdma_status=None, dhcp_status=None, **kwargs) -> None:
        super(NetworkAdapter, self).__init__(**kwargs)
        # Only the two writable fields accept caller values; every
        # server-populated (readonly) attribute starts as None and is filled
        # in during deserialization of a response.
        self.adapter_id = None
        self.adapter_position = None
        self.index = None
        self.node_id = None
        self.network_adapter_name = None
        self.label = None
        self.mac_address = None
        self.link_speed = None
        self.status = None
        self.rdma_status = rdma_status
        self.dhcp_status = dhcp_status
        self.ipv4_configuration = None
        self.ipv6_configuration = None
        self.ipv6_link_local_address = None
        self.dns_servers = None
| StarcoderdataPython |
def calc_posession(df):
    """Add estimated possession counts to a game DataFrame, in place.

    Uses the standard basketball possession estimate
    FGA + 0.475 * FTA + TO - OR for both the winning ('W*') and losing
    ('L*') team columns, storing the results in 'Wposs' and 'Lposs'.

    Args:
        df: DataFrame with numeric columns WFGA, WFTA, WTO, WOR and
            LFGA, LFTA, LTO, LOR. Mutated in place; returns None.

    Note: the function name's spelling ('posession') is kept for
    backward compatibility with existing callers.
    """
    # Vectorized column arithmetic replaces the original per-row
    # ``apply``/lambda, which ran a Python loop over every row.
    df['Wposs'] = df['WFGA'] + 0.475 * df['WFTA'] + df['WTO'] - df['WOR']
    df['Lposs'] = df['LFGA'] + 0.475 * df['LFTA'] + df['LTO'] - df['LOR']
| StarcoderdataPython |
3272356 | import unittest
import pyast as ast
class BaseASTTestCase(unittest.TestCase):
    """Tests for pyast Node string templating (_template) and repr hooks.

    Each test defines throwaway Node subclasses inline; the assertions
    compare str(node) against the rendered template, so the template
    strings below are exact-match sensitive.
    """
    # Static string _template with %(field)s placeholders.
    def test_basic_template(self):
        class Entity(ast.Node):
            _debug = True
            id = ast.field(str)
            value = ast.field(str)
            _template = '<%(id)s %(value)s>'
        e = Entity('foo', 'val')
        self.assertEqual(str(e), '<foo val>')
    # A nested Node renders via its own template inside the parent's.
    def test_nested_template(self):
        class Value(ast.Node):
            content = ast.field(str)
            _template = '"%(content)s"'
        class Entity(ast.Node):
            _debug = True
            id = ast.field(str)
            value = ast.field(Value)
            _template = '<%(id)s %(value)s>'
        e = Entity('foo', Value('val'))
        self.assertEqual(str(e), '<foo "val">')
    # _template may also be a property (computed per instance).
    def test_nested_template_callable(self):
        class Value(ast.Node):
            content = ast.field(str)
            @property
            def _template(self):
                return '"%(content)s"'
        class Entity(ast.Node):
            _debug = True
            id = ast.field(str)
            value = ast.field(Value)
            _template = '<%(id)s %(value)s>'
        e = Entity('foo', Value('val'))
        self.assertEqual(str(e), '<foo "val">')
    # Custom __repr__ can escape field content before rendering.
    def test_nested_repr(self):
        class Value(ast.Node):
            content = ast.field(str)
            _template = '"%(content)s"'
            def __repr__(self):
                content = self.content.replace('"', '\\"')
                return super(Value, self).__repr__(fields={'content': content})
        class Entity(ast.Node):
            _debug = True
            id = ast.field(str)
            value = ast.field(Value)
            _template = '<%(id)s %(value)s>'
        e = Entity('foo', Value('val"val2'))
        self.assertEqual(str(e), '<foo "val\\"val2">')
    # null=True fields may be absent; the template property branches on it.
    def test_null_value(self):
        class Value(ast.Node):
            content = ast.field(str)
            _template = '"%(content)s"'
            def __repr__(self):
                content = self.content.replace('"', '\\"')
                return super(Value, self).__repr__(fields={'content': content})
        class Entity(ast.Node):
            _debug = True
            id = ast.field(str)
            value = ast.field(Value, null=True)
            @property
            def _template(self):
                if self.value:
                    return '<%(id)s %(value)s>'
                return '<%(id)s>'
        e = Entity('foo')
        self.assertEqual(str(e), '<foo>')
        e.value = Value("hey")
        self.assertEqual(str(e), '<foo "hey">')
    # ast.seq sequences: empty sequence renders the compact form.
    def test_empty_list(self):
        class KVP(ast.Node):
            key = ast.field(str)
            value = ast.field(str)
        class Hash(ast.Node):
            _debug = True
            content = ast.seq(KVP, null=True)
            @property
            def _template(self):
                if len(self.content):
                    return "{\n%(content)s\n}"
                return "{}"
        e = Hash()
        self.assertEqual(str(e), '{}')
    # Non-empty sequence renders each element.
    def test_list(self):
        class KVP(ast.Node):
            key = ast.field(str)
            value = ast.field(str)
        class Hash(ast.Node):
            _debug = True
            content = ast.seq(KVP, null=True)
            @property
            def _template(self):
                if len(self.content):
                    return "{\n%(content)s\n}"
                return "{}"
        e = Hash([KVP('key1', 'val1')])
        self.assertEqual(str(e), '{"key1": "val1"}')
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
1684662 | import jinja2
import os
class SilentUndefined(jinja2.Undefined):
    # Jinja2 normally raises UndefinedError when a template dereferences a
    # missing variable; overriding the failure hook to return None makes
    # undefined lookups render as blanks instead of erroring.
    def _fail_with_undefined_error(self, *args, **kwargs):
        # NOTE(review): this silences *all* undefined errors, including
        # typos in template variable names — deliberate, but worth knowing.
        return None
class Jinja2(object):
    """Drongo middleware that renders Jinja2 templates after a handler runs.

    Handlers opt in with the ``@Jinja2.template(name)`` decorator, which
    stashes the template name in the request context; the ``after`` hook
    then renders that template with the context and sets the response body.
    """

    def __init__(self, app, **config):
        """Create the environment and register this object as middleware.

        Args:
            app: The drongo application (must expose ``add_middleware``).
            **config: ``root_dir`` — filesystem root for template lookup.
        """
        self.app = app
        self.root_dir = config.get('root_dir')
        # SilentUndefined keeps missing template variables from raising.
        self.env = jinja2.Environment(
            loader=jinja2.FileSystemLoader(self.root_dir),
            undefined=SilentUndefined
        )
        self.app.add_middleware(self)

    def get_template(self, name):
        """Load and return the template called *name* from the environment."""
        return self.env.get_template(name)

    def after(self, ctx):
        """Middleware hook: render the template the handler selected, if any."""
        if '__drongo_template' in ctx:
            ctx.response.set_content(
                self.get_template(ctx['__drongo_template']).render(ctx))

    @classmethod
    def template(cls, name):
        """Decorator marking a handler's response to be rendered with *name*.

        Assumes the handler receives the request context as its last
        positional argument.
        """
        import functools

        def _inner1(method):
            # functools.wraps preserves the handler's name/docstring for
            # debugging and introspection (the original wrapper dropped them).
            @functools.wraps(method)
            def _inner2(*args, **kwargs):
                ctx = args[-1]
                ctx['__drongo_template'] = name
                return method(*args, **kwargs)
            return _inner2
        return _inner1
| StarcoderdataPython |
3355027 | <filename>clam/config.py
import yaml
# NOTE: This is not a config file
# This is only a helper class for the actual
# config file
class DebugMode:
    """Validated three-state debug level: 0=off, 1=partial, 2=full."""

    # Index -> human-readable name; hoisted to a class constant so __str__
    # does not rebuild a dict on every call.
    _MODE_NAMES = ("off", "partial", "full")

    def __init__(self, mode):
        # Deliberately strict `type(...) is int`: it also rejects bool
        # (isinstance(True, int) would be True).
        if type(mode) is not int:
            raise TypeError("Debug mode must be an int.")
        if not 0 <= mode <= 2:
            raise ValueError("Debug mode must be between 0 and 2.")
        self.mode = mode

    def __bool__(self):
        """Truthy when any level of debugging is enabled."""
        return bool(self.mode)

    def __int__(self):
        return self.mode

    def __str__(self):
        return self._MODE_NAMES[self.mode]

    @property
    def off(self):
        """True when debugging is disabled."""
        return self.mode == 0

    @property
    def partial(self):
        """True in partial (test-account) debug mode."""
        return self.mode == 1

    @property
    def full(self):
        """True in full debug mode."""
        return self.mode == 2
class Config:
    """Thin attribute-style wrapper around the parsed config.yml file."""

    def __init__(self, file_path):
        self._file_path = file_path
        with open(file_path, "r") as fh:
            self._data = yaml.safe_load(fh)

        data = self._data

        # Required settings — a missing key raises KeyError immediately.
        self.bot_token = data["bot-token"]
        self.console = data["console"]
        self.google_api_key = data["google-api-key"]
        self.database_uri = data["database-uri"]
        self.cleverbot_api_key = data["cleverbot-api-key"]
        self.wolfram_api_key = data["wolfram-api-key"]

        # Optional settings, with defaults.
        # Debug level: 0 = off, 1 = test account, 2 = same account.
        self.debug = DebugMode(data.get("debug", 0))
        # Optional webhook URL for status messages.
        self.status_hook = data.get("status-hook")
| StarcoderdataPython |
3209607 | <reponame>UWSEDS/homework-2-python-functions-and-modules-czarakas<gh_stars>0
### HW2
### <NAME>
import ReadInData
# Module-level fixtures shared by every test below.
# NOTE(review): create_dataframe() fetches the CSV from data.seattle.gov at
# import time, so running this module requires network access.
thisurl = 'https://data.seattle.gov/api/views/65db-xm6k/rows.csv?accessType=DOWNLOAD'
columnNames_true = ['Date','Fremont Bridge East Sidewalk','Fremont Bridge West Sidewalk']
df = ReadInData.create_dataframe(url=thisurl)
def test_one_shot():
    """Happy path: the downloaded frame matches the expected columns."""
    assert ReadInData.test_create_dataframe(df, columnNames_true)
## Same column names in different order
def test_columnNames_diffOrder():
    columnNames = ['Fremont Bridge East Sidewalk', 'Date', 'Fremont Bridge West Sidewalk']
    assert ReadInData.test_create_dataframe(df, columnNames)
## Same column names but missing one
def test_columnNames_missingOne():
    columnNames = ['Date', 'Fremont Bridge East Sidewalk']
    assert not ReadInData.test_create_dataframe(df, columnNames)
## Same column names but added one
def test_columnNames_addedOne():
    columnNames = ['Date', 'Fremont Bridge East Sidewalk', 'Fremont Bridge West Sidewalk', 'Extra Entry']
    assert not ReadInData.test_create_dataframe(df, columnNames)
## Missing one column name, one extra column name
def test_columnNames_missingOne_addedOne():
    columnNames = ['Date', 'Fremont Bridge East Sidewalk', 'Extra Entry']
    assert not ReadInData.test_create_dataframe(df, columnNames)
## Only 5 rows
def test_enoughRows():
    df_alt = df.iloc[0:5]
    assert not ReadInData.test_create_dataframe(df_alt, columnNames_true)
## First column has one row with an inconsistent type
def test_consistentType_firstColumn():
    # Bug fix: the original did `df_alt = df` (an alias, not a copy), so the
    # injected bad value corrupted the shared fixture for every later test.
    df_alt = df.copy()
    df_alt.loc[5, 'Date'] = 7
    assert not ReadInData.test_create_dataframe(df_alt, columnNames_true)
## Last column has one row with an inconsistent type
def test_consistentType_lastColumn():
    df_alt = df.copy()
    df_alt.loc[5, 'Fremont Bridge West Sidewalk'] = 'No Data'
    assert not ReadInData.test_create_dataframe(df_alt, columnNames_true)
3235266 | """
Implements a network visualization in PyTorch.
WARNING: you SHOULD NOT use ".to()" or ".cuda()" in each implementation block.
"""
# import os
import torch
# import torchvision
# import torchvision.transforms as T
# import random
# import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
from a4_helper import *
def hello():
    """Import sanity check: confirms the module loads in the Colab setup."""
    print('Hello from network_visualization.py!')
def compute_saliency_maps(X, y, model):
    """Compute class saliency maps for images X under labels y.

    Backpropagates the sum of the correct-class scores to the input and
    reduces the per-pixel gradient over the channel dimension with a max.

    Input:
    - X: Input images; Tensor of shape (N, 3, H, W)
    - y: Labels for X; LongTensor of shape (N,)
    - model: A pretrained CNN used to compute the saliency map.

    Returns:
    - saliency: Tensor of shape (N, H, W) with one map per input image.
    """
    # Gradients must flow back to the pixels themselves.
    X.requires_grad_()
    model.eval()
    scores = model(X)
    # Pick each sample's score for its ground-truth class, then sum so a
    # single backward pass fills X.grad for the whole batch.
    correct_scores = scores.gather(1, y.view(-1, 1)).squeeze()
    correct_scores.sum().backward()
    # Collapse the channel axis: per-pixel saliency is the max gradient
    # across channels.
    return X.grad.detach().amax(dim=1)
def make_adversarial_attack(X, target_y, model, max_iter=100, verbose=True):
    """
    Generate an adversarial attack that is close to X, but that the model
    classifies as target_y, via normalized gradient ascent on the target
    class score.

    Bug fix: the original printed progress unconditionally, ignoring the
    documented ``verbose`` flag; printing is now gated on ``verbose``.

    Inputs:
    - X: Input image; Tensor of shape (1, 3, 224, 224)
    - target_y: An integer in the range [0, 1000)
    - model: A pretrained CNN
    - max_iter: Upper bound on number of iterations to perform
    - verbose: If True, print per-iteration progress

    Returns:
    - X_adv: An image close to X that the model classifies as target_y
      (or the last iterate if max_iter is exhausted first).
    """
    # Initialize the attack at the input image and track gradients on it.
    X_adv = X.clone()
    X_adv = X_adv.requires_grad_()
    learning_rate = 1
    model.eval()
    for i in range(max_iter):
        output = model(X_adv)
        max_score_index = torch.max(output, dim=1).indices
        max_score = output[0][max_score_index]
        target_score = output[0][target_y]
        if verbose:
            print('Iteration %d: target score %.3f, max score %.3f' % (i, target_score, max_score))
        # Stop as soon as the model is fooled.
        if max_score_index == target_y:
            if verbose:
                print("Interation has finished in advance")
            break
        target_score.backward()
        grad = X_adv.grad
        # Normalized gradient-ascent step: dX = lr * g / ||g||_2.
        dX = learning_rate * grad / torch.norm(grad, 2)
        # Clear the accumulated gradient before the next backward pass.
        X_adv.grad = None
        X_adv.data = X_adv.data + dX.data
    return X_adv
def class_visualization_step(img, target_y, model, **kwargs):
    """
    Take one gradient-ascent step on *img* to increase the score of
    class target_y under *model*, with L2 regularization on the image.

    Inputs:
    - img: generated image tensor with requires_grad (updated in place)
    - target_y: Integer class index to maximize
    - model: A pretrained CNN

    Keyword arguments:
    - l2_reg: Strength of L2 regularization on the image (default 1e-3)
    - learning_rate: Step size for the normalized gradient step (default 25)

    Returns the (mutated) img tensor.
    """
    l2_reg = kwargs.pop('l2_reg', 1e-3)
    learning_rate = kwargs.pop('learning_rate', 25)
    model.eval()
    scores = model(img)
    # Regularized objective: target-class score minus l2_reg * ||img||_2.
    objective = scores[0][target_y] - l2_reg * img.norm(2)
    objective.backward()
    step_direction = img.grad.data
    # Normalized ascent step, applied in place on the image data.
    img.data += learning_rate * step_direction / step_direction.norm(2)
    # Reset the gradient so the next step starts clean.
    img.grad.zero_()
    return img
| StarcoderdataPython |
99191 | <reponame>sevyharris/autoscience_workflow
# Functions for running a thermo job using this workflow
import pandas as pd
import os
import sys
import glob
import datetime
import time
import subprocess
import job_manager
# Root of the DFT results tree; falls back to the hard-coded cluster path
# when the DFT_DIR environment variable is not set.
try:
    DFT_DIR = os.environ['DFT_DIR']
except KeyError:
    DFT_DIR = '/work/westgroup/harris.se/autoscience/autoscience_workflow/results/dft'
def get_num_species():
    """Return the last index recorded in species_list.csv.

    NOTE(review): this is the final value of the ``i`` column, not a row
    count — if ``i`` is zero-based it equals count - 1; confirm with callers.
    """
    species_csv = os.path.join(DFT_DIR, '..', '..', 'resources', 'species_list.csv')
    return pd.read_csv(species_csv).i.values[-1]
def index2smiles(species_index):
    """Look up the SMILES string for *species_index* in species_list.csv."""
    species_csv = os.path.join(DFT_DIR, '..', '..', 'resources', 'species_list.csv')
    table = pd.read_csv(species_csv)
    return table.SMILES.values[species_index]
def arkane_complete(species_index):
    """Return True when Arkane has finished for a species.

    Completion is signalled by the presence of
    DFT_DIR/thermo/species_XXXX/arkane/RMG_libraries/thermo.py.
    """
    thermo_lib = os.path.join(DFT_DIR, 'thermo', f'species_{species_index:04}',
                              'arkane', 'RMG_libraries', 'thermo.py')
    return os.path.exists(thermo_lib)
def termination_status(log_file):
    """Classify how a Gaussian log file terminated by scanning its tail.

    Walks backwards over (up to) the last five lines without reading the
    whole file into memory, looking for Gaussian's termination banner.

    Returns:
        0 for Normal termination
        1 for Error termination
        -1 for no termination

    (The unused ``normal_termination``/``error_termination`` flags from the
    original implementation have been removed.)
    """
    with open(log_file, 'rb') as f:
        f.seek(0, os.SEEK_END)
        for i in range(0, 5):
            # Step back to the start of the previous line; if we run off the
            # front of the file the seek raises OSError and we clamp to 0.
            try:
                f.seek(-2, os.SEEK_CUR)
                while f.read(1) != b'\n':
                    f.seek(-2, os.SEEK_CUR)
            except OSError:
                f.seek(0)
            saved_position = f.tell()
            last_line = f.readline().decode()
            f.seek(saved_position, os.SEEK_SET)
            if 'Normal termination' in last_line:
                return 0
            elif 'Error termination' in last_line:
                return 1
    return -1
def get_n_runs(slurm_array_file):
    """Parse a SLURM array script and return how many array tasks it defines.

    Reads the ``#SBATCH --array=LO-HI%THROTTLE`` line and returns HI + 1;
    returns 0 when no array directive is present. (Does not handle
    comma-separated index lists, as used in restart.sh.)
    """
    with open(slurm_array_file, 'r') as script:
        for line in script:
            if 'SBATCH --array=' in line:
                upper = line.rsplit('-', 1)[1]
                return 1 + int(upper.split('%', 1)[0])
    return 0
def incomplete_conformers(species_index):
    """Returns a list of indices of incomplete conformers that need to be rerun.

    A conformer counts as complete if its log terminated at all — 'Error
    termination' as well as 'Normal termination' (status 1 or 0); only
    status -1 (no termination) is considered incomplete.
    Does not work on restart.sh, which has ','.

    NOTE(review): when run.sh does not exist yet this returns True (a bool,
    not a list) as a truthy "nothing has run" sentinel; callers that iterate
    the result rely on run.sh existing — confirm before changing.
    """
    conformer_dir = os.path.join(DFT_DIR, 'thermo', f'species_{species_index:04}', 'conformers')
    # Get #conformers from the array job script
    slurm_array_file = os.path.join(conformer_dir, 'run.sh')
    if not os.path.exists(slurm_array_file):
        return True  # no conformers run yet
    n_conformers = get_n_runs(slurm_array_file)
    incomplete_cfs = []
    for cf_index in range(0, n_conformers):
        conformer_file = os.path.join(conformer_dir, f'conformer_{cf_index:04}.log')
        if not os.path.exists(conformer_file):
            # Log never produced: the job has not run (or was lost).
            incomplete_cfs.append(cf_index)
            continue
        status = termination_status(conformer_file)
        if status == -1:
            # Still running or killed mid-flight; needs a rerun.
            incomplete_cfs.append(cf_index)
    return incomplete_cfs
def incomplete_rotors(species_index):
    """Returns a list of indices of incomplete rotors that need to be rerun.

    Mirrors incomplete_conformers(): 'Error termination' counts as complete;
    only logs with no termination (status -1) or no log file at all are
    reported. Does not work on restart.sh, which has ','.

    NOTE(review): returns True (a bool sentinel, not a list) when run.sh is
    missing — same inconsistent return type as incomplete_conformers.
    """
    rotor_dir = os.path.join(DFT_DIR, 'thermo', f'species_{species_index:04}', 'rotors')
    # Get #rotors from the array job script
    slurm_array_file = os.path.join(rotor_dir, 'run.sh')
    if not os.path.exists(slurm_array_file):
        return True  # no rotors run yet
    n_rotors = get_n_runs(slurm_array_file)
    incomplete_rs = []
    for r_index in range(0, n_rotors):
        rotor_file = os.path.join(rotor_dir, f'rotor_{r_index:04}.log')
        if not os.path.exists(rotor_file):
            incomplete_rs.append(r_index)
            continue
        status = termination_status(rotor_file)
        if status == -1:
            incomplete_rs.append(r_index)
    return incomplete_rs
def conformers_complete(species_index):
    """True when every Gaussian conformer job for the species has terminated.

    Thin truthiness wrapper around incomplete_conformers(): that helper
    returns a (possibly empty) list of unfinished conformer indices, or
    True when run.sh does not exist yet — both falsy only when everything
    is done.
    """
    return not incomplete_conformers(species_index)
def rotors_complete(species_index):
    """True when every Gaussian rotor job for the species has terminated.

    Thin truthiness wrapper around incomplete_rotors(), analogous to
    conformers_complete().
    """
    return not incomplete_rotors(species_index)
def restart_conformers(species_index):
    """Rerun the conformer jobs that did not converge in time.

    Writes a restart.sh SLURM array script whose --array lists only the
    unfinished conformer indices, submits it with sbatch, and blocks until
    the job finishes (polling every 10 minutes).

    NOTE(review): assumes run.sh exists so incomplete_conformers() returns a
    list — if it returned the True sentinel, the list comprehension below
    would raise TypeError.
    """
    # create a new slurm job file to run on west partition, 10 at a time, 2 week max
    missing_conformers = incomplete_conformers(species_index)
    missing_conformers_str = [str(i) for i in missing_conformers]
    indices_str = ','.join(missing_conformers_str)
    species_dir = os.path.join(DFT_DIR, 'thermo', f'species_{species_index:04}')
    conformer_dir = os.path.join(species_dir, 'conformers')
    # TODO put restart in the gaussian job file
    slurm_run_file = os.path.join(conformer_dir, 'restart.sh')
    slurm_settings = {
        '--job-name': f'g16_cf_{species_index}',
        '--error': 'error.log',
        '--nodes': 1,
        '--partition': 'west',
        '--exclude': 'c5003',
        '--mem': '20Gb',
        '--time': '14-00:00:00',
        '--cpus-per-task': 16,
        '--array': f'{indices_str}%10',
    }
    slurm_file_writer = job_manager.SlurmJobFile(full_path=slurm_run_file)
    slurm_file_writer.settings = slurm_settings
    slurm_file_writer.content = [
        'export GAUSS_SCRDIR=/scratch/harris.se/guassian_scratch\n',
        'mkdir -p $GAUSS_SCRDIR\n',
        'module load gaussian/g16\n',
        'source /shared/centos7/gaussian/g16/bsd/g16.profile\n\n',
        'RUN_i=$(printf "%04.0f" $(($SLURM_ARRAY_TASK_ID)))\n',
        'fname="conformer_${RUN_i}.com"\n\n',
        'g16 $fname\n',
    ]
    slurm_file_writer.write_file()
    # copy the file and add a restart? this is so messy, but I'm gonna do it
    # (currently a no-op placeholder loop)
    for cf_idx in missing_conformers:
        pass
        # TODO see if conditions are right to restart in Gaussian:
        # chk file exists
        # previous run made it at least one step in the optimization
    # restart the conformers
    # submit the job from inside the conformer directory, then return home
    start_dir = os.getcwd()
    os.chdir(conformer_dir)
    gaussian_conformers_job = job_manager.SlurmJob()
    slurm_cmd = f"sbatch {slurm_run_file}"
    gaussian_conformers_job.submit(slurm_cmd)
    os.chdir(start_dir)
    gaussian_conformers_job.wait_all(check_interval=600)
def restart_rotors(species_index):
    """Rerun the rotor jobs that did not converge in time.

    Mirrors restart_conformers(): writes a restart.sh SLURM array script
    whose --array lists only the unfinished rotor indices, submits it with
    sbatch, and blocks until the job finishes (polling every 10 minutes).
    """
    # create a new slurm job file to run on west partition, 10 at a time, 2 week max
    missing_rotors = incomplete_rotors(species_index)
    missing_rotors_str = [str(i) for i in missing_rotors]
    indices_str = ','.join(missing_rotors_str)
    species_dir = os.path.join(DFT_DIR, 'thermo', f'species_{species_index:04}')
    rotor_dir = os.path.join(species_dir, 'rotors')
    # TODO put restart in the gaussian job file
    slurm_run_file = os.path.join(rotor_dir, 'restart.sh')
    slurm_settings = {
        '--job-name': f'g16_rotor_{species_index}',
        '--error': 'error.log',
        '--nodes': 1,
        '--partition': 'west',
        '--exclude': 'c5003',
        '--mem': '20Gb',
        '--time': '14-00:00:00',
        '--cpus-per-task': 16,
        '--array': f'{indices_str}%10',
    }
    slurm_file_writer = job_manager.SlurmJobFile(full_path=slurm_run_file)
    slurm_file_writer.settings = slurm_settings
    slurm_file_writer.content = [
        'export GAUSS_SCRDIR=/scratch/harris.se/guassian_scratch\n',
        'mkdir -p $GAUSS_SCRDIR\n',
        'module load gaussian/g16\n',
        'source /shared/centos7/gaussian/g16/bsd/g16.profile\n\n',
        'RUN_i=$(printf "%04.0f" $(($SLURM_ARRAY_TASK_ID)))\n',
        'fname="rotor_${RUN_i}.com"\n\n',
        'g16 $fname\n',
    ]
    slurm_file_writer.write_file()
    # submit the job from inside the rotor directory, then return home
    start_dir = os.getcwd()
    os.chdir(rotor_dir)
    gaussian_rotors_job = job_manager.SlurmJob()
    slurm_cmd = f"sbatch {slurm_run_file}"
    gaussian_rotors_job.submit(slurm_cmd)
    os.chdir(start_dir)
    gaussian_rotors_job.wait_all(check_interval=600)
def run_conformers_job(species_index):
    """Function to call the snakemake rule that runs conformers.

    Launches the hotbit conformer screening via snakemake, waits for the
    downstream Gaussian SLURM array job it submits, and restarts any
    conformers that failed to converge. This function waits until all SLURM
    jobs are done, so it could take days. Returns True on success, False
    when the restart pass still leaves incomplete conformers.

    NOTE(review): changes the process working directory to workflow_dir and
    never changes it back; exit(3) aborts the whole process if the hotbit
    slurm file is not found.
    """
    species_dir = os.path.join(DFT_DIR, 'thermo', f'species_{species_index:04}')
    conformer_dir = os.path.join(species_dir, 'conformers')
    os.makedirs(conformer_dir, exist_ok=True)
    logfile = os.path.join(conformer_dir, 'conformers.log')
    start = time.time()
    timestamp = datetime.datetime.now()
    with open(logfile, 'a') as f:
        f.write(f'Starting conformers job: {timestamp}' + '\n')
    # check if the run was already completed
    if conformers_complete(species_index):
        print('Conformers already ran')
        with open(logfile, 'a') as f:
            f.write('Conformers already ran\n')
        return True
    workflow_dir = os.path.join(DFT_DIR, '..', '..', 'workflow')
    # start a job that calls snakemake to run conformers
    os.chdir(workflow_dir)
    conformer_cmd = f'snakemake -c1 species_thermo --config species_index={species_index}'
    print(f'Running {conformer_cmd}')
    cmd_pieces = conformer_cmd.split()
    proc = subprocess.Popen(cmd_pieces)
    print(proc)
    # RUN HOTBIT
    # give snakemake time to start the hotbit screening job
    time.sleep(300)
    g16_job_number = ''
    # look for the hotbit slurm file
    hotbit_slurm = glob.glob(os.path.join(species_dir, 'slurm-*'))
    if len(hotbit_slurm) == 0:
        print('Hotbit slurm file not found. Hotbit did not start.')
        exit(3)
    # Poll the hotbit slurm output until it reports the Gaussian job it
    # submitted; that job ID is what we wait on next.
    hotbit_complete = False
    while not hotbit_complete:
        with open(hotbit_slurm[0], 'r') as f:
            lines = f.readlines()
            for line in lines:
                if 'Submitted batch job' in line:
                    hotbit_complete = True
                    g16_job_number = line.split()[-1]
                    break
        time.sleep(300)  # This wait is to make sure the job is on the SLURM queue
    print('Hotbit conformer screening complete')
    with open(logfile, 'a') as f:
        f.write('Hotbit conformer screening complete\n')
    # wait 10 minutes for the conformer jobs to finish
    gaussian_job = job_manager.SlurmJob()
    gaussian_job.job_id = g16_job_number
    print(f'Waiting on job {gaussian_job}')
    with open(logfile, 'a') as f:
        f.write(f'Waiting on job {g16_job_number}' + '\n')
    gaussian_job.wait_all(check_interval=600)
    # rerun any conformer jobs that failed to converge in time:
    if not conformers_complete(species_index):
        with open(logfile, 'a') as f:
            f.write('Setting up conformer restart job\n')
        restart_conformers(species_index)  # this waits for jobs to finish
        if not conformers_complete(species_index):
            with open(logfile, 'a') as f:
                f.write('Conformer restart failed\n')
            return False
    end = time.time()
    duration = end - start
    print(f'Gaussian conformer jobs completed in {duration} seconds' + '\n')
    with open(logfile, 'a') as f:
        f.write(f'Gaussian conformer jobs completed in {duration} seconds' + '\n')
    return True
def read_gaussian_energy(logfile):
    """Return the ZPE-corrected energy from a Gaussian log file.

    Scans for the first 'Sum of electronic and zero-point Energies=' line
    and parses its trailing number; returns 0 when no such line exists.
    """
    marker = 'Sum of electronic and zero-point Energies= '
    with open(logfile, 'r') as fh:
        for raw_line in fh:
            if marker in raw_line:
                return float(raw_line.split()[-1])
    return 0
def get_lowest_conformer(species_index):
    """Return the log-file path of the lowest-energy completed conformer.

    Only conformers whose Gaussian log shows Normal termination are
    considered; their ZPE-corrected energies are compared and the path of
    the minimum is returned. Returns None when run.sh does not exist or no
    conformer terminated normally.

    (A stray per-conformer debug print from the original has been removed,
    and the 999999 sentinel replaced with infinity.)
    """
    conformer_dir = os.path.join(DFT_DIR, 'thermo', f'species_{species_index:04}', 'conformers')
    slurm_array_file = os.path.join(conformer_dir, 'run.sh')
    if not os.path.exists(slurm_array_file):
        return None  # no conformers run yet
    n_conformers = get_n_runs(slurm_array_file)
    lowest_energy = float('inf')
    best_conformer_file = None
    for cf_index in range(0, n_conformers):
        conformer_file = os.path.join(conformer_dir, f'conformer_{cf_index:04}.log')
        # Skip jobs that errored, never finished, or never produced a log.
        if termination_status(conformer_file) != 0:
            continue
        energy = read_gaussian_energy(conformer_file)
        if energy < lowest_energy:
            lowest_energy = energy
            best_conformer_file = conformer_file
    return best_conformer_file
def run_rotors_job(species_index):
    """Call the snakemake rule that runs hindered-rotor scans for a species.

    Skips out early when the rotors already ran or a NO_ROTORS.txt marker
    says there is nothing to scan. Otherwise launches snakemake, waits for
    the rotor Gaussian SLURM job it submits, and restarts any scans that
    did not converge. Blocks until all SLURM jobs are done. Returns True on
    success, False when the restart pass still leaves incomplete rotors.

    NOTE(review): exit(3) aborts the whole process when the rotor slurm file
    never appears; the job ID is sliced out of the filename ('slurm-' prefix
    is 6 chars, IDs assumed 8 digits) — confirm against the cluster's ID width.
    """
    # start a job that calls snakemake to run rotors
    species_dir = os.path.join(DFT_DIR, 'thermo', f'species_{species_index:04}')
    rotor_dir = os.path.join(species_dir, 'rotors')
    os.makedirs(rotor_dir, exist_ok=True)
    logfile = os.path.join(rotor_dir, 'rotors.log')
    start = time.time()
    timestamp = datetime.datetime.now()
    with open(logfile, 'a') as f:
        f.write(f'Starting rotors job: {timestamp}' + '\n')
    # check if a rotor job was already completed
    if rotors_complete(species_index):
        print('Rotors already ran')
        with open(logfile, 'a') as f:
            f.write('Rotors already ran\n')
        return True
    elif os.path.exists(os.path.join(rotor_dir, 'NO_ROTORS.txt')):
        print('No rotors to run')
        with open(logfile, 'a') as f:
            f.write('No rotors to run\n')
        return True
    rotor_cmd = f'snakemake -c1 run_rotors --config species_index={species_index}'
    print(f'Running {rotor_cmd}')
    cmd_pieces = rotor_cmd.split()
    proc = subprocess.Popen(cmd_pieces, stdin=None, stdout=None, stderr=None, close_fds=True)
    print(proc)
    # wait 5 minutes for the rotor gaussian job to begin
    time.sleep(300)
    g16_job_number = ''
    rotor_slurm_files = glob.glob(os.path.join(rotor_dir, 'slurm-*'))
    if len(rotor_slurm_files) == 0:
        print('Rotor slurm file not found')
        exit(3)
    # Extract the SLURM job ID from the slurm-XXXXXXXX.out filename.
    rotor_slurm_file = os.path.basename(rotor_slurm_files[0])
    rotor_slurm_id = rotor_slurm_file[6:14]
    rotor_job = job_manager.SlurmJob()
    rotor_job.job_id = rotor_slurm_id
    print(f'Waiting on job {rotor_slurm_id}')
    with open(logfile, 'a') as f:
        f.write(f'Waiting on job {rotor_slurm_id}' + '\n')
    rotor_job.wait_all(check_interval=600)
    # rerun any rotor jobs that failed to converge in time:
    if not rotors_complete(species_index):
        with open(logfile, 'a') as f:
            f.write('Setting up rotor restart job\n')
        restart_rotors(species_index)  # this waits for jobs to finish
        if not rotors_complete(species_index):
            with open(logfile, 'a') as f:
                f.write('Rotor restart failed\n')
            return False
    end = time.time()
    duration = end - start
    print(f'Gaussian rotor jobs completed in {duration} seconds' + '\n')
    with open(logfile, 'a') as f:
        f.write(f'Gaussian rotor jobs completed in {duration} seconds' + '\n')
    return True
def run_arkane_job(species_index):
    """Call the snakemake rule that runs Arkane thermo for a species.

    Launches ``run_arkane_thermo`` via snakemake and blocks (polling every
    5 minutes) until the RMG thermo library file appears. Returns True.

    Bug fix: the original referenced ``logfile``, ``start`` and
    ``species_smiles`` without ever defining them (copy/paste from the
    conformer runner), so any call that actually launched Arkane raised
    NameError; they are now defined before use.
    """
    species_dir = os.path.join(DFT_DIR, 'thermo', f'species_{species_index:04}')
    arkane_result = os.path.join(species_dir, 'arkane', 'RMG_libraries', 'thermo.py')
    if arkane_complete(species_index):
        print('Arkane job already ran')
        return True
    start = time.time()
    species_smiles = index2smiles(species_index)
    os.makedirs(species_dir, exist_ok=True)
    logfile = os.path.join(species_dir, 'arkane.log')
    # start a job that calls snakemake to run arkane
    arkane_cmd = f'snakemake -c1 run_arkane_thermo --config species_index={species_index}'
    print(f'Running {arkane_cmd}')
    cmd_pieces = arkane_cmd.split()
    proc = subprocess.Popen(cmd_pieces, stdin=None, stdout=None, stderr=None, close_fds=True)
    print(proc)
    print('Waiting for arkane job')
    with open(logfile, 'a') as f:
        f.write('Waiting for arkane job\n')
    while not os.path.exists(arkane_result):
        time.sleep(300)
        # TODO: give up if Arkane started but has not finished within a timeout
    print('Arkane complete')
    with open(logfile, 'a') as f:
        f.write('Arkane complete\n')
    end = time.time()
    duration = end - start
    print(f'COMPLETED {species_smiles} IN {duration} SECONDS')
    with open(logfile, 'a') as f:
        f.write(f'COMPLETED {species_smiles} IN {duration} SECONDS' + '\n')
    return True
1772484 | from django.db import models
from django.urls import reverse
from django.utils.text import slugify
from django.forms import ModelForm
from django.contrib.auth import get_user_model
User = get_user_model()
# Create your models here.
class Category(models.Model):
    # Flat product category; `name` doubles as the unique display label.
    name = models.CharField(max_length = 155, unique = True)
    def __str__(self):
        return self.name
class Product(models.Model):
    """A product listed for sale, owned by a user and optionally categorized."""
    # Owner of the listing; deleting the user cascades to their products.
    user = models.ForeignKey(User, related_name = "user_products", on_delete = models.CASCADE, null = True)
    name = models.CharField(max_length = 255)
    # Optional category; deleting the category cascades to its products.
    categoryID = models.ForeignKey(Category, null = True, blank = True, on_delete = models.CASCADE)
    price = models.DecimalField(max_digits = 8, decimal_places = 2)
    # URL slug; generated automatically in save(), never edited by hand.
    slug = models.SlugField(editable = False,blank = False)
    description = models.TextField()
    image = models.ImageField(upload_to='images/')
    def __str__(self):
        return self.name
    def _get_unique_slug(self):
        """Slugify the name, appending -1, -2, ... until the slug is unique."""
        slug = slugify(self.name)
        unique_slug = slug
        num = 1
        while Product.objects.filter(slug=unique_slug).exists():
            unique_slug = '{}-{}'.format(slug, num)
            num += 1
        return unique_slug
    def save(self, *args, **kwargs):
        # Only generate a slug on first save; later renames keep the old slug.
        if not self.slug:
            self.slug = self._get_unique_slug()
        super().save(*args, **kwargs)
    def get_absolute_url(self):
        """Canonical detail-page URL for this product (slug-based)."""
        return reverse("products:detail", kwargs={"slug": self.slug})
    class Meta:
        # Querysets are ordered alphabetically by product name.
        ordering = ['name']
| StarcoderdataPython |
3231878 | from Student import Student
## Student records can now be created here.
## The object below represents one student:
## constructor args are (name, major, gpa, is_on_probation).
student1 = Student("Michel", "Computer", 4.5, False)
# Print the probation flag (False for this student).
print(student1.is_on_probation)
1762610 | import re
from w3af.plugins.attack.payloads.base_payload import Payload
from w3af.core.ui.console.tables import table
class ssh_version(Payload):
    """
    This payload shows the current SSH Server Version.

    It works by reading the remote sshd binary through the compromised
    shell and extracting the "OpenSSH<version>\\x00" string embedded in it.
    """
    def api_read(self):
        """Return {'ssh_version': <string>}; empty string when unknown."""
        result = {}
        result['ssh_version'] = ''
        def parse_binary(bin_ssh):
            # The binary embeds "OpenSSH<version>\x00"; capture everything
            # between the marker and the NUL terminator.
            version = re.search('(?<=OpenSSH)(.*?)\x00', bin_ssh)
            if version:
                return version.group(1)
            else:
                return ''
        # TODO: Add more binaries
        # Please note that this only works IF the remote end allows us to use
        # php wrappers and read the binary file with base64
        version = self.shell.read('/usr/sbin/sshd')
        if version:
            result['ssh_version'] = 'OpenSSH' + parse_binary(version)
        return result
    def run_read(self):
        """Render api_read() as a console table (80 cols); returns the rows,
        or an explanatory string when detection failed."""
        api_result = self.api_read()
        if not api_result['ssh_version']:
            return 'SSH version could not be identified.'
        else:
            rows = []
            rows.append(['SSH version'])
            rows.append([])
            rows.append([api_result['ssh_version'], ])
            result_table = table(rows)
            result_table.draw(80)
            return rows
| StarcoderdataPython |
13391 | <gh_stars>0
from .backend import Backend
from .thread import HttpPool
| StarcoderdataPython |
59087 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/3/1 13:23
# @Author : Dengsc
# @Site :
# @File : quickstart.py
# @Software: PyCharm
from scrapy import cmdline
# Convenience launcher: equivalent to running `scrapy crawl lagou` in a shell.
cmdline.execute('scrapy crawl lagou'.split())
| StarcoderdataPython |
3234933 | <gh_stars>1-10
"""Default HTTP client selection proxy"""
import os
from .http_common import (
StreamDecodeIteratorSync,
addr_t, auth_t, cookies_t, headers_t, params_t, reqdata_sync_t, timeout_t,
workarounds_t,
)
__all__ = (
"addr_t", "auth_t", "cookies_t", "headers_t", "params_t", "reqdata_sync_t",
"timeout_t", "workarounds_t",
"ClientSync",
"StreamDecodeIteratorSync",
)
PREFER_HTTPX = (os.environ.get("PY_IPFS_HTTP_CLIENT_PREFER_HTTPX", "no").lower()
not in ("0", "f", "false", "n", "no"))
if PREFER_HTTPX: # pragma: http-backend=httpx
try: #PY36+
from . import http_httpx as _backend
except (ImportError, SyntaxError): #PY35
from . import http_requests as _backend
else: # pragma: http-backend=requests
try:
from . import http_requests as _backend
except ImportError: # pragma: no cover
from . import http_httpx as _backend
ClientSync = _backend.ClientSync | StarcoderdataPython |
3250397 | #!/usr/bin/env python
# https://oj.leetcode.com/problems/palindrome-partitioning-ii/
class Solution:
    # @param s, a string
    # @return an integer
    def minCut(self, s):
        """Return the minimum number of cuts that partition *s* into
        palindromic substrings (LeetCode: Palindrome Partitioning II).

        Classic O(n^2) DP: subPalindrome[i][j] records whether s[i..j] is
        a palindrome, and cuts[i] is the minimum cuts for the suffix s[i:].
        """
        slen = len(s)
        if slen == 0:
            # Bug fix: the original indexed cuts[0] on an empty list and
            # raised IndexError for the empty string; no cuts are needed.
            return 0
        subPalindrome = [[False] * slen for _ in range(slen)]
        # Worst case for the suffix s[i:]: cut between every pair of chars.
        cuts = [slen - i - 1 for i in range(slen)]
        for i in range(slen - 1, -1, -1):
            for j in range(i, slen):
                # s[i..j] is a palindrome when its ends match and the
                # interior is empty, a single char, or itself a palindrome.
                if s[i] == s[j] and (j - i in (0, 1) or subPalindrome[i + 1][j - 1]):
                    subPalindrome[i][j] = True
                    if j == slen - 1:
                        # Suffix s[i:] is one palindrome: no cut needed.
                        cuts[i] = 0
                    else:
                        cuts[i] = min(cuts[i], 1 + cuts[j + 1])
        return cuts[0]
if __name__ == '__main__':
    # Command-line entry point: `python thisfile.py <string>`.
    import sys
    # Fixed: the original Python 2 `print` statement is a SyntaxError on
    # Python 3; the call form below works on both interpreters.
    print(Solution().minCut(sys.argv[1]))
| StarcoderdataPython |
VALID_TASK_TYPES = {"transcription", "find", "fix", "verify"}


class TaskProfile:
    """Describes a unit of work (task) within a project.

    Fixes over the original draft:
    * the constructor was a SyntaxError (non-default parameters after
      defaulted ones) and used the *type* ``str`` as a default value --
      type annotations are used instead;
    * ``priority`` was accepted but immediately overwritten with 0;
    * ``selft.task_url`` was a typo for ``self.task_url``;
    * ``task_type`` is now validated against ``VALID_TASK_TYPES`` (which
      was previously unused);
    * the calls to the undefined ``assess_difficulty``/``get_url`` methods
      now resolve to overridable stub implementations.
    """

    def __init__(self, project: str, task_name: str, task_type: str,
                 priority: int = 0, segment_size: int = 0):
        if task_type not in VALID_TASK_TYPES:
            raise ValueError(
                "task_type must be one of {}, got {!r}".format(
                    sorted(VALID_TASK_TYPES), task_type))
        self.project = project
        self.task_name = task_name
        self.task_type = task_type
        self.segment_size = segment_size
        self.priority = priority
        self.difficulty = 0      # refined by assess_difficulty()
        self.data_input = None
        self.data_output = None
        self.ui_tools = []
        self.task_url = ""       # refined by get_url()
        self.description = ""
        self.assess_difficulty()
        self.get_url()

    def assess_difficulty(self) -> None:
        """Estimate task difficulty; subclasses may override. Default: 0."""
        self.difficulty = 0

    def get_url(self) -> None:
        """Resolve the task URL; subclasses may override. Default: empty."""
        self.task_url = ""
4802688 | """
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import scraper
import urllib
import urlparse
import re
import xbmcaddon
from salts_lib.constants import VIDEO_TYPES
from salts_lib.constants import QUALITIES
BASE_URL = 'https://movieshd.eu'
class MoviesHD_Scraper(scraper.Scraper):
    """Scraper for movieshd.eu (movies only).

    NOTE: Python 2 code (module-level urlparse/urllib imports).
    """
    base_url = BASE_URL
    def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
        self.timeout = timeout
        # Allow the user to override the base URL from the addon settings.
        self.base_url = xbmcaddon.Addon().getSetting('%s-base_url' % (self.get_name()))
    @classmethod
    def provides(cls):
        # This scraper handles movies only, not TV episodes.
        return frozenset([VIDEO_TYPES.MOVIE])
    @classmethod
    def get_name(cls):
        return 'MoviesHD'
    def resolve_link(self, link):
        # videomega links need an extra hop: fetch the page and build the
        # iframe URL from the embedded "ref" parameter.
        # NOTE(review): when a videomega page has no "ref" match this
        # falls through and implicitly returns None -- confirm callers
        # handle a None result.
        if 'videomega' in link:
            html = self._http_get(link, cache_limit=.5)
            match = re.search('ref="([^"]+)', html)
            if match:
                return 'http://videomega.tv/iframe.php?ref=%s' % (match.group(1))
        else:
            return link
    def format_source_label(self, item):
        # e.g. "[HD720] videomega.tv"
        return '[%s] %s' % (item['quality'], item['host'])
    def get_sources(self, video):
        """Scrape hoster links from the movie page for *video*."""
        source_url = self.get_url(video)
        hosters = []
        if source_url:
            url = urlparse.urljoin(self.base_url, source_url)
            html = self._http_get(url, cache_limit=.5)
            # Prefer a videomega "hashkey" URL; otherwise fall back to the
            # first iframe source on the page.
            match = re.search("(?:'|\")([^'\"]+hashkey=[^'\"]+)", html)
            stream_url = ''
            if match:
                stream_url = match.group(1)
                if stream_url.startswith('//'): stream_url = 'http:' + stream_url
                host = 'videomega.tv'
            else:
                match = re.search('iframe[^>]*src="([^"]+)', html)
                if match:
                    stream_url = match.group(1)
                    host = urlparse.urlparse(stream_url).hostname
            if stream_url:
                hoster = {'multi-part': False, 'url': stream_url, 'host': host, 'class': self, 'quality': QUALITIES.HD720, 'views': None, 'rating': None, 'up': None, 'down': None, 'direct': False}
                hosters.append(hoster)
        return hosters
    def get_url(self, video):
        # Delegate to the base class's default URL resolution.
        return super(MoviesHD_Scraper, self)._default_get_url(video)
    def search(self, video_type, title, year):
        """Search the site; returns [{'url', 'title', 'year'}, ...]."""
        search_url = urlparse.urljoin(self.base_url, '/?s=')
        search_url += urllib.quote_plus(title)
        html = self._http_get(search_url, cache_limit=.25)
        results = []
        if not re.search('nothing matched your search criteria', html, re.I):
            pattern = 'href="([^"]+)"\s+title="([^"]+)\s+\((\d{4})\)'
            for match in re.finditer(pattern, html):
                url, title, match_year = match.groups('')
                # Keep the hit unless both years are known and disagree.
                if not year or not match_year or year == match_year:
                    result = {'url': url.replace(self.base_url, ''), 'title': title, 'year': match_year}
                    results.append(result)
        return results
    def _http_get(self, url, cache_limit=8):
        # Cached HTTP GET provided by the scraper base class.
        return super(MoviesHD_Scraper, self)._cached_http_get(url, self.base_url, self.timeout, cache_limit=cache_limit)
| StarcoderdataPython |
1704399 | <reponame>amplify-nation/django-ajax<filename>tests/example/tests.py
from django.test import TestCase
from django.contrib.auth.models import User
import json
from .models import Widget
from .endpoints import WidgetEndpoint
class BaseTest(TestCase):
    """Shared harness for the django-ajax example tests: loads fixtures,
    logs in, and provides an AJAX POST helper."""
    fixtures = ['users.json', 'categories.json', 'widgets.json']
    def setUp(self):
        self.login('jstump')
    def login(self, username, password='<PASSWORD>'):
        # Look up the fixture user and authenticate the test client as them.
        user = User.objects.get(username=username)
        login_successful = self.client.login(username=user.username,
                                             password=password)
        self.assertTrue(login_successful)
    def post(self, uri, data={}, debug=False, status_code=200):
        """Send an AJAX request.
        This handles sending the AJAX request via the built-in Django test
        client and then decodes the response.
        ``status_code`` lets you define what you expect the status code
        to be which will be tested before returning the response object
        and the decoded JSON content.
        ``debug`` if set to True will spit out the response and content.

        NOTE(review): ``data={}`` is a mutable default; harmless here since
        it is never mutated, but worth keeping in mind.
        """
        response = self.client.post(uri, data)
        if debug:
            print(response.__class__.__name__)
            print (response)
        self.assertEquals(status_code, response.status_code)
        return response, json.loads(response.content)
class EncodeTests(BaseTest):
    """Tests for ajax.encoders.encoder on single objects and querysets."""
    def test_encode(self):
        # Encoding one model instance copies its plain fields verbatim.
        from ajax.encoders import encoder
        widget = Widget.objects.get(pk=1)
        self.assertEquals(widget.title,'Iorem lipsum color bit amit')
        encoded = encoder.encode(widget)
        for k in ('title','active','description'):
            self.assertEquals(encoded[k],getattr(widget,k))
        # Encoding a queryset yields one dict per row, keyed back by 'pk'.
        widgets = Widget.objects.all()
        all_encoded = encoder.encode(widgets)
        for encoded in all_encoded:
            widget = Widget.objects.get(pk=encoded['pk'])
            for k in ('title','active','description'):
                self.assertEquals(encoded[k],getattr(widget,k))
class EndpointTests(BaseTest):
    """HTTP-level tests for the example AJAX endpoints."""
    def test_echo(self):
        """Test the ad-hoc echo endpoint."""
        resp, content = self.post('/ajax/example/echo.json',
                                  {'name': '<NAME>', 'age': 31})
        self.assertEquals('<NAME>', content['data']['name'])
        # POST data arrives as strings, hence '31' not 31.
        self.assertEquals('31', content['data']['age'])
    def test_empty_foreign_key(self):
        """Test that nullable ForeignKey fields can be set to null"""
        resp, content = self.post('/ajax/example/widget/3/update.json',
                                  {'category': ''})
        self.assertEquals(None, content['data']['category'])
        self.assertEquals(None, Widget.objects.get(pk=3).category)
    def test_false_foreign_key(self):
        """Test that nullable ForeignKey fields can be set to null by setting it to false"""
        resp, content = self.post('/ajax/example/widget/6/update.json',
                                  {'category': False})
        self.assertEquals(None, content['data']['category'])
        self.assertEquals(None, Widget.objects.get(pk=6).category)
    def test_logged_out_user_fails(self):
        """Make sure @login_required rejects requests to echo."""
        self.client.logout()
        resp, content = self.post('/ajax/example/echo.json', {},
                                  status_code=403)
class MockRequest(object):
    """Minimal stand-in for a Django request object: every keyword
    argument becomes an entry in the POST payload."""

    def __init__(self, **post_params):
        # Endpoints under test only ever read request.POST.
        self.POST = post_params
class ModelEndpointTests(BaseTest):
    """Unit tests for WidgetEndpoint.list pagination behaviour.

    NOTE(review): this setUp overrides BaseTest.setUp and does not call
    self.login() -- the endpoint is exercised directly, not over HTTP.
    """
    def setUp(self):
        self.list_endpoint = WidgetEndpoint('example', Widget, 'list')
    def test_list_returns_all_items(self):
        # Default request: every widget in the fixtures is returned.
        results = self.list_endpoint.list(MockRequest())
        self.assertEqual(len(results), Widget.objects.count())
    def test_list_obeys_endpoint_pagination_amount(self):
        self.list_endpoint.max_per_page = 1
        results = self.list_endpoint.list(MockRequest())
        self.assertEqual(len(results), 1)
    def test_out_of_range_returns_empty_list(self):
        # Requesting a page past the end yields an empty list, not an error.
        results = self.list_endpoint.list(MockRequest(current_page=99))
        self.assertEqual(len(results), 0)
    def test_request_doesnt_override_max_per_page(self):
        # Client-supplied items_per_page must not exceed the endpoint cap.
        self.list_endpoint.max_per_page = 1
        results = self.list_endpoint.list(MockRequest(items_per_page=2))
        self.assertEqual(len(results), 1)
| StarcoderdataPython |
84471 | <filename>epikjjh/baekjoon/15927.py
import sys
# Fast stdin line reader (strips the trailing newline).
input = lambda: sys.stdin.readline().rstrip()
stream = input()
reverse = stream[::-1]
# BOJ 15927: length of the longest substring that is NOT a palindrome.
#  - if the whole string is not a palindrome, it is the answer (length n);
#  - if it is a palindrome, stream[1:] != reverse[:-1] is equivalent to
#    "not all characters equal" (reverse == stream here), and the answer
#    is n - 1;
#  - if every character is equal, every substring is a palindrome: -1.
ans = len(stream) if stream != reverse else (len(stream)-1 if stream[1:]!=reverse[:-1] else -1)
print(ans)
117507 | <filename>P3/app/model.py
from pickleshare import *
db=PickleShareDB('miBD')
def checkUser(user):
    """Return True if a record for *user* exists in the PickleShare store."""
    return user in db
def getUser(user):
    """Return the stored data for *user*, or None when the user is unknown.

    Bug fix: the original returned the undefined name ``none`` (a
    NameError at runtime for unknown users) instead of ``None``.
    """
    if checkUser(user):
        return db[user]
    return None
def addUser(user,data):
    """Store *data* for *user*; silently a no-op if the user already exists."""
    if not checkUser(user):
        db[user]=data
def delUser(user):
    """Remove *user* from the store.

    NOTE(review): presumably raises for a missing key -- confirm
    PickleShareDB's __delitem__ semantics before relying on it.
    """
    del db[user]
| StarcoderdataPython |
109560 | <reponame>east301/wsgiuseragentmobile-python3
# -*- coding: utf-8 -*-
from pkg_resources import resource_string
from IPy import IP
from uamobile.cidrdata import crawler, docomo, ezweb, softbank, willcom
__all__ = ['IP', 'get_ip_addrs', 'get_ip']
def get_ip_addrs(carrier):
    """Return the list of CIDR strings assigned to *carrier*.

    The carrier name is matched case-insensitively; an unknown name
    raises ValueError. The 'nonmobile' pseudo-carrier matches everything.
    """
    key = carrier.lower()
    tables = {
        'docomo': docomo.DATA,
        'ezweb': ezweb.DATA,
        'softbank': softbank.DATA,
        'willcom': willcom.DATA,
        'crawler': crawler.DATA,
        'nonmobile': [ '0.0.0.0/0' ],
    }
    if key not in tables:
        raise ValueError('invalid carrier name "%s"' % key)
    return tables[key]
def get_ip(carrier, _memo={}):
    """Return the carrier's CIDR ranges parsed into IPy.IP objects.

    NOTE: the mutable default argument is an intentional per-process memo
    cache -- the IP objects are built once per carrier and reused on
    every subsequent call.
    """
    try:
        return _memo[carrier]
    except KeyError:
        _memo[carrier] = [IP(x) for x in get_ip_addrs(carrier)]
        return _memo[carrier]
| StarcoderdataPython |
105983 | <filename>code/set-app-package.py
# Jython/MonkeyRunner (Python 2) UI-automation script: opens the
# accessibility service activity named in accessPackageInfo.txt, types the
# target app package read from data/packageInfo.txt, and saves a numbered
# screenshot into <logsdir> after each step. Usage: monkeyrunner <this> <logsdir>
from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice, MonkeyImage
#import com.android.provider.Settings
import time, sys
# Reference screenshot path and crop rectangle for (currently disabled)
# screen-comparison checks.
refFile = './logs/passedScreens/ServiceScreen/serviceScreen'
ref_x=0
ref_y=20
ref_w=240
ref_h=380
ACCEPTANCE = 1.0
device = MonkeyRunner.waitForConnection()
count=1
logsdir=sys.argv[1]
filename="ServiceScreen/serviceScreen"
#device.press('KEYCODE_ENTER', MonkeyDevice.DOWN_AND_UP)
#runComponent='com.android.settings/.Settings'
#runComponent='com.android.settings/com.android.settings.ACCESSIBILITY_SETTINGS'
#open accessibility service
# accessPackageInfo.txt format: two "label:value" lines (package, activity).
f = open('./code/accessPackageInfo.txt', 'r')
package = f.readline().split(":")[1]
#remove new line
package = package[:-1]
print package
activity = f.readline().split(":")[1]
activity = activity[:-1]
print activity
f.close()
runComponent = package + '/' + activity
print "1"
device.wake()
homeScreen = device.takeSnapshot()
# NOTE(review): duplicate assignment -- count is already 1 from above.
count=1
# Runs the component
device.startActivity(component=runComponent)
#time.sleep(10)
print "runComponent: "+runComponent
time.sleep(10)
#compare to correct
#reference = MonkeyRunner.loadImageFromFile(refFile+str(count)+".png")
#reference = reference.getSubImage(ref_x, ref_y,ref_w,ref_h)
screenShot = device.takeSnapshot()
#subScreen = screenshot.getSubImage(ref_x, ref_y,ref_w,ref_h)
# NOTE(review): 'failcount' vs 'failCount' in the disabled block below --
# the case mismatch would break that code if it were re-enabled.
failcount = 0;
#print "comparing to " + refFile+str(count)+".png"
#try three times to get correct screenshot before giving up
#while subScreen.sameAs(reference, ACCEPTANCE) and failCount<2:
# print "compare failed"
# print "writing to ./"+logsdir+"/"+filename+str(count)+"_fail"+str(failCount)+".png"
# screenShot.writeToFile('./'+logsdir+"/"+filename+str(count)+"_fail"+str(failCount)+".png",'png')
# failCount=failCount+1
# #give extra time in case it's just slow to load
# time.sleep(10)
# screenShot=device.takeSnapshot()
# subScreen=subScreen = screenshot.getSubImage(ref_x, ref_y,ref_w,ref_h)
#if never succeeded, quit, else continue
#if failCount == 2:
# print "FAIL!"
print "writing to : ./"+logsdir+"/"+filename+str(count)+".png"
screenShot.writeToFile('./'+logsdir+'/'+filename+str(count)+".png",'png')
count=count+1
#navigate to filling in app package name
device.press('KEYCODE_DPAD_DOWN',MonkeyDevice.DOWN_AND_UP)
time.sleep(5)
screenShot = device.takeSnapshot()
#check if screenshot matches correct screenshot
print "writing to : ./"+logsdir+"/"+filename+str(count)+".png"
screenShot.writeToFile('./'+logsdir+'/'+filename+str(count)+".png",'png')
count=count+1
#find and fill in package info
f = open('./data/packageInfo.txt', 'r')
appPackage = f.readline().split(":")[1]
#delete new line
appPackage = appPackage[:-1]
f.close()
device.type(appPackage)
#delete new line
#device.press('KEYCODE_DEL',MonkeyDevice.DOWN_AND_UP)
time.sleep(5)
screenShot = device.takeSnapshot()
print "writing to : ./"+logsdir+"/"+filename+str(count)+".png"
screenShot.writeToFile('./'+logsdir+'/'+filename+str(count)+".png",'png')
count=count+1
#set package
device.press('KEYCODE_DPAD_DOWN',MonkeyDevice.DOWN_AND_UP)
time.sleep(6)
device.press('KEYCODE_ENTER',MonkeyDevice.DOWN_AND_UP)
time.sleep(5)
screenShot = device.takeSnapshot()
print "writing to : ./"+logsdir+"/"+filename+str(count)+".png"
screenShot.writeToFile('./'+logsdir+'/'+filename+str(count)+".png",'png')
count=count+1
3222557 | from optimizer import optimizer_SGD, AdaGrad, NormGrad, SGD
import numpy as np
from functions import sigmoid, sigmoid_back, clip_grads
class Loss:
    """Sum-of-squared-errors loss: L = 1/2 * sum((out - t)**2)."""

    def __init__(self):
        # Cached forward value and its gradient w.r.t. the output.
        self.Loss = None
        self.dout = None

    def forward(self, out, t):
        """Compute the loss for prediction *out* against target *t*."""
        diff = out - t
        self.dout = diff
        self.Loss = 1/2 * np.sum(diff ** 2)
        return self.Loss

    def backward(self):
        """Return dL/dout from the most recent forward() call."""
        return self.dout
class RNNneuron:
    """Single recurrent (Elman-style) layer with a sigmoid activation.

    Maintains its own optimizer and applies the parameter update inside
    backward(). (Original comments translated from Japanese.)
    """
    def __init__(self, W, Wh, b):
        # Store the weights and bias received as arguments in self.params.
        self.params = [W, Wh, b]
        # Container gathering gradients to hand to the optimizer before the
        # update (entries must line up with self.params).
        self.grads = [np.zeros_like(W), np.zeros_like(Wh), np.zeros_like(b)]
        # Containers for exposing intermediate values outside the class.
        self.F_container = np.empty(0)
        self.B_container = np.empty(0)
        # Holder for the RNN hidden-state gradient.
        self.dh_prev = None
        # Learning rate.
        self.lr = 0.01
        # Optimizer (default: SGD).
        self.optimizer = SGD(self.lr)
        # Flag set when gradient clipping fires.
        self.clipper = 0
        # Gradient-clipping threshold (default 0.02).
        self.NormGrad = 0.02
    def forward(self, x, h_prev):
        """Forward pass; returns (output, forward container)."""
        # Retrieve the weights and bias stored at construction time.
        W, Wh, b = self.params
        # y is the pre-activation value inside the neuron.
        #f = open("E:\研究一時ファイル\BP\TEST_1120\Fh.txt", mode="a")
        if h_prev is None:
            y = np.dot(x, W) + b
        else:
            y = np.dot(h_prev, Wh) + np.dot(x, W) + b
        #w = "\nWh:" + str(Wh) + "\nh_prev:" + str(h_prev) + "\n:" + str(y)
        # f.write(w)
        # z is the output.
        z = sigmoid(y)
        self.h_prev = z
        self.F_container = [W, Wh, b, x, y, z]
        return z, self.F_container
    def backward(self, dz, h_prev):
        """Backward pass; updates parameters and returns (dx, container)."""
        #f = open("E:\研究一時ファイル\BP\TEST_1120\Wh.txt", mode="a")
        W, Wh, b, x, y, z = self.F_container
        dh_prev = self.dh_prev
        # Accumulate the gradient arriving from the later time step.
        if dh_prev is None:
            dz = dz
        else:
            dz = dh_prev + dz
        # Back-propagate through the output activation (sigmoid version).
        dy = sigmoid_back(z, dz)
        db = dy
        dW = np.dot(x.T, dy)
        dx = np.dot(dy, W.T)
        dWh = np.dot(h_prev.T, dy)
        dh_prev = np.dot(dy, Wh.T)
        #w = "\ndWh:" + str(dWh) + "\nh_prev:" + str(h_prev) + "\ndy:" + str(dy)
        # f.write(w)
        # Apply gradient clipping.
        # NOTE(review): 'self.drads' looks like a typo for 'self.grads',
        # and clipping runs BEFORE self.grads is filled with this step's
        # dW/dWh/db below -- so the clipped values never reach the
        # optimizer update. Confirm intent against clip_grads().
        self.drads, self.clipper = clip_grads(self.grads, self.NormGrad)
        self.dh_prev = dh_prev
        # Store the gradients used for the update in self.grads.
        self.grads[0][...] = dW
        self.grads[1][...] = dWh
        self.grads[2][...] = db
        # Update self.params via the optimizer.
        # self.params = optimizer_SGD(self.lr, self.params, self.grads)
        self.params = self.optimizer.update(self.params, self.grads)
        # Store every result in self.container.
        self.container = [dy, db, dW, dWh, dx]
        # f.close
        return dx, self.container
    def setlr(self, lr, model=0):
        """Set the learning rate and pick the optimizer (0=SGD, 1=AdaGrad, 2=NormGrad)."""
        self.lr = lr
        if model == 0:
            self.optimizer = SGD(self.lr)
        elif model == 1:
            self.optimizer = AdaGrad(self.lr)
        elif model == 2:
            self.optimizer = NormGrad(self.lr)
    def viewlr(self):
        # Current learning rate as reported by the optimizer.
        return self.optimizer.viewlr()
    def change_lr(self, New_lr):
        # Change the optimizer's learning rate in place.
        self.optimizer.change_lr(New_lr)
    def reset(self):
        # Clear the recurrent state between sequences.
        self.h_prev = None
        self.dh_prev = None
    def clipper_Chech(self):
        # Report whether clipping fired on the last backward pass.
        return self.clipper
    def change_NormGrad(self, NormGrad):
        # Change the gradient-clipping threshold.
        self.NormGrad = NormGrad
class BPneuron:
    """Fully-connected layer with a sigmoid activation (plain BP, no
    recurrence). Applies its own optimizer update inside backward().
    (Original comments translated from Japanese.)
    """
    def __init__(self, W, b):
        # Store the weights and bias received as arguments in self.params.
        self.params = [W, b]
        # Container gathering gradients to hand to the optimizer before the
        # update (entries must line up with self.params).
        self.grads = [np.zeros_like(W), np.zeros_like(b)]
        # Container for exposing intermediate values outside the class.
        self.container = np.empty(0)
        # Learning rate.
        self.lr = 0.01
        self.optimizer = AdaGrad(self.lr)
    def forward(self, x):
        """Forward pass; returns (output, container)."""
        # Retrieve the weights and bias stored at construction time.
        W, b = self.params
        # y is the pre-activation value inside the neuron.
        y = np.dot(x, W)+b
        # z is the output.
        z = sigmoid(y)
        self.container = [W, b, x, y, z]
        return z, self.container
    def backward(self, dz):
        """Backward pass; updates parameters and returns (dx, container)."""
        W, b, x, y, z = self.container
        # Back-propagate through the output activation (sigmoid version).
        dy = sigmoid_back(z, dz)
        db = dy
        dW = np.dot(x.T, dy)
        dx = np.dot(dy, W.T)
        # Store the gradients used for the update in self.grads.
        self.grads[0][...] = dW
        self.grads[1][...] = db
        # Update self.params via the optimizer.
        # self.params = optimizer_SGD(self.lr, self.params, self.grads)
        self.params = self.optimizer.update(self.params, self.grads)
        # Store every result in self.container.
        self.container = [dy, db, dW, dx]
        return dx, self.container
    def setlr(self, lr, model=0):
        """Set the learning rate and pick the optimizer (0=SGD, 1=AdaGrad, 2=NormGrad)."""
        self.lr = lr
        if model == 0:
            self.optimizer = SGD(self.lr)
        elif model == 1:
            self.optimizer = AdaGrad(self.lr)
        elif model == 2:
            self.optimizer = NormGrad(self.lr)
    def viewlr(self):
        # Current learning rate as reported by the optimizer.
        return self.optimizer.viewlr()
    def change_lr(self, New_lr):
        # Change the optimizer's learning rate in place.
        self.optimizer.change_lr(New_lr)
| StarcoderdataPython |
125326 | from flask import request
from flask_restx import Resource, fields, Namespace
import jwt
import datetime
import functools
from models import Users, Admins
import subprocess
import os
from os.path import join, dirname
from dotenv import load_dotenv
from conf import const
load_dotenv(verbose=True)
# Also load the .env that sits next to this module.
dotenv_path = join(dirname(__file__), '.env')
load_dotenv(dotenv_path)
# JWT signing key; must be configured via the environment / .env file.
SECRET_KEY = os.environ.get("SECRET_KEY")
api = Namespace('auth', description="Authentication")
# Swagger request schema for POST /auth.
auth_request = api.model('authentication_request', {
    'email': fields.String(default='<EMAIL>'),
    'password': fields.String(default='password')
})
# Swagger response schema: issued token plus user metadata.
auth_response = api.model('authentication_response', {
    'token': fields.String(default='JSON Web Token'),
    'user_id': fields.Integer,
    'role': fields.String,
    'status': fields.String
})
@api.route('')
class Index(Resource):
    """Login endpoint: verifies credentials and issues a 1-hour JWT."""

    @api.marshal_with(auth_response)
    @api.doc(body=auth_request)
    def post(self):
        """Authenticate by email/password and return token, user id, admin
        role and status; aborts with HTTP 400 on any failure."""
        email = request.json['email']
        password = request.json['password']
        query_user = Users.select(
            Users.user_id,
            Users.email,
            Users.password,
            Users.status
        ).where(Users.email == email)
        if len(query_user) == 0:
            # Typo fix: was "Passsword".
            result = "Password or Email Incorrect"
            return api.abort(400, result)
        elif len(query_user) > 1:  # not necessary, just in case
            result = "Duplicate Email Address"
            return api.abort(400, result)
        if query_user[0].status == 'RESIGNED':
            result = "Login of Resigned User Not Allowed"
            return api.abort(400, result)
        # SECURITY FIX: the original interpolated the raw password into a
        # shell command string executed with shell=True, allowing shell /
        # PHP code injection through the password field (and needed a
        # fragile '$' escaping hack for the stored hash). Passing the
        # password and hash as argv values to `php -r` avoids the shell
        # and all quoting issues.
        php_script = "echo password_verify($argv[1], $argv[2]) ? 'true' : 'false';"
        proc = subprocess.Popen(
            ['php', '-r', php_script, password, query_user[0].password],
            stdout=subprocess.PIPE)
        (out, _) = proc.communicate()
        if out.decode('utf-8') != "true":
            result = "Password or Email Incorrect"
            return api.abort(400, result)
        # check if user is admin
        query_admin = Admins.select(
            Admins.user_id,
            Admins.role
        ).where(Admins.user_id == query_user[0].user_id)
        if len(query_admin) == 0:
            admin_role = const.GENERAL
        else:
            admin_role = query_admin[0].role
        # Token is valid for one hour from issuance.
        exp = datetime.datetime.utcnow() + datetime.timedelta(hours=1)
        encoded = jwt.encode({'name': query_user[0].user_id, 'exp': exp}, SECRET_KEY, algorithm="HS256")
        result = {'user_id': query_user[0].user_id,
                  'token': encoded,
                  'role': admin_role,
                  'status': query_user[0].status}
        return result
def login_required(method):
    """Decorator for flask-restx resource methods requiring a Bearer JWT.

    Reads the Authorization header ("Bearer <token>"), validates the
    HS256-signed token, and passes the decoded user id to the wrapped
    method as an extra positional argument; aborts with HTTP 400 on any
    missing/invalid/expired token.
    """
    @functools.wraps(method)
    def wrapper(*args, **kwargs):
        header = request.headers.get('Authorization')
        if header is None:
            result = "Authorization Header Not Found"
            return api.abort(400, result)
        try:
            # Expect exactly two whitespace-separated parts: scheme, token.
            _, token = header.split()
        except ValueError:
            result = "Token Not Valid"
            return api.abort(400, result)
        try:
            decoded = jwt.decode(token, SECRET_KEY, algorithms='HS256')
            # The user id was stored under the 'name' claim at login time.
            user_id = decoded['name']
        except jwt.DecodeError:
            result = "Token Not Valid"
            return api.abort(400, result)
        except jwt.ExpiredSignatureError:
            result = "Token Expired"
            return api.abort(400, result)
        return method(*args, user_id, **kwargs)
    return wrapper
| StarcoderdataPython |
1773627 | #!/usr/bin/env python
# coding: utf-8
# In[77]:
# ETL script: scrapes a Buenos Aires city statistics page for an Excel
# link, reshapes the quarterly power-plant series, and uploads it to
# Alphacast dataset 7449. (Original comments translated from Spanish.)
import pandas as pd
import numpy as np
import requests
from datetime import datetime
from urllib.request import urlopen
from lxml import etree
import io
from alphacast import Alphacast
from dotenv import dotenv_values
API_KEY = dotenv_values(".env").get("API_KEY")
alphacast = Alphacast(API_KEY)
# In[78]:
url = 'https://www.estadisticaciudad.gob.ar/eyc/?p=113254'
# NOTE(review): verify=False disables TLS certificate checks.
r = requests.get(url, verify=False)
html = r.content
htmlparser = etree.HTMLParser()
tree = etree.fromstring(html, htmlparser)
# Pull the XLS download link out of the post body.
xls_address = tree.xpath("//*[@id='post-113254']/div/a/@href")[0]
xls_address
# In[79]:
# Request the data and build the dataframe from its content.
r = requests.get(xls_address, allow_redirects=True, verify=False)
df = pd.read_excel(r.content, skiprows=2, sheet_name=0, header = [0,1])
# In[80]:
# Join the two header levels into single column names.
df.columns = df.columns.map(' - '.join)
# In[81]:
df["Año - Unnamed: 0_level_1"] = df["Año - Unnamed: 0_level_1"].astype(str)
# Replace the quarter labels with "-%m-%d" suffixes.
df["Año - Unnamed: 0_level_1"] = df["Año - Unnamed: 0_level_1"].str.replace("1er. trimestre" , "-01-01")
df["Año - Unnamed: 0_level_1"] = df["Año - Unnamed: 0_level_1"].str.replace("2do. trimestre" , "-04-01")
df["Año - Unnamed: 0_level_1"] = df["Año - Unnamed: 0_level_1"].str.replace("3er. trimestre" , "-07-01")
df["Año - Unnamed: 0_level_1"] = df["Año - Unnamed: 0_level_1"].str.replace("4to. trimestre" , "-10-01")
# In[82]:
# Build separate year/month/day columns to combine later; the year is
# forward-filled because it only appears on the first quarter's row.
df["year"] = df["Año - Unnamed: 0_level_1"].str.split("-", expand = True)[0].replace('',np.nan).fillna(method="ffill")
df["month"] = df["Año - Unnamed: 0_level_1"].str.split("-", expand = True)[1]
df["day"] = df["Año - Unnamed: 0_level_1"].str.split("-", expand = True)[2]
# In[83]:
# Build the Date column and drop the helper columns plus the original.
df["Date"] = pd.to_datetime(df[["year", "month", "day"]], errors="coerce")
df = df[df["Date"].notnull()]
del df["Año - Unnamed: 0_level_1"]
del df["day"]
del df["month"]
del df["year"]
df = df.set_index("Date")
df.columns = ['Total centrales','Central Térmica de vapor','Central Ciclo Combinado']
df['country'] = 'CABA'
alphacast.datasets.dataset(7449).upload_data_from_df(df, 
    deleteMissingFromDB = True, onConflictUpdateDB = True, uploadIndex=True)
| StarcoderdataPython |
3213389 | <filename>senpy/neyer.py
# -*- coding: utf-8 -*-
import numpy as np
from scipy.optimize import minimize, brute, fmin
from .confidence import (parametric_bootstrap, nonparametric_bootstrap,
delta, contour_walk, increase_bounds,
HomogeneousResult)
from .plotting import plot_probability as pp, plot_confidence_region as pcr
import copy
from .utils import (custom_log, _round, check_bounds, check_diff,
check_success, check_fail)
class Neyer():
"""
The Neyer model. Given an assumed form for the latent distribution,
either 'normal', 'logistic', or 'log-logistic', the maximum likelihood
estimates of the distribution parameters are computed. Neyer also provides
a sequential design algorithm.
Parameters
----------
latent : string, optional
DESCRIPTION. The form of the latent distribution. Either 'normal',
'logistic', or 'log-logistic'. The default is 'normal'.
inverted : boolean, optional
DESCRIPTION. If the probability of a 'go' increases as the stimulus
level decreases, then the data is 'inverted'. The default is False.
method : string, optional
DESCRIPTION. Name of the optimization routine called when computing
the maximum likelihood estimates. The default is 'L-BFGS-B'.
num_restarts : int, optional
DESCRIPTION. The number of random initializations to use when
maximizing the likelihood function. Note, the available latent
distributions only use two parameters. Consequently, the resulting
likelihood function is typically convex. The default is 3.
t1_min : flaot, optional
DESCRIPTION. When using the sequential design algorithm and starting
with no (or minimal) data, an intial guess on the lower bound of
the first parameter, theta_1, is required. For the normal and
logistic distributions theta_1 is mu. For the log-logistic distribution
theta_1 is alpoha. If None is provided and the sequential algorithm
is called, the program will prompt the user for the value.
The default is None.
t1_max : float, optional
DESCRIPTION. The initial guess for the upper bound of theta_1.
See t1_min for more details. The default is None.
t2_guess : float, optional
DESCRIPTION. The initial guess for theta_2. Required when using the
sequential design algorithm. See t1_min for more details. For the
normal and logisit distributions, theta_2 is sigma. For the log-logistic
distribution, theta_2 is beta. The default is None.
precision : int, optional
DESCRIPTION. Number of decimal points to incude in the final
output. The default is 8.
resolution : float, optional
DESCRIPTION. The smallest change in stimulus level available. For
example, a drop-weight apparatus may only have adjustments at
quarter inch intervals. Thus, the algorithm should not suggest testing
at 12.105 inches, etc. The default is None.
lower_bound : float, optional
DESCRIPTION. The lowest stimulus level a user can phsically test.
The default is None.
upper_bound : float, optional
DESCRIPTION. The highest stimulus level a user can phsically test.
The default is None.
hist : boolean, optional
DESCRIPTION. If True the determinant of the information matrix is
computed over a range of stimulus levels at each stage of the
sequential design. Typically used for debugging only!
The default is False.
log_file : str, optional
DESCRIPTION. File path for a log file. The log consists of the
steps taken during the sequential design algorithm.
The default is None.
"""
available_opt_methods = ('L-BFGS-B', 'SLSQP', 'TNC')
    def __init__(self, latent='normal', inverted=False,
                 method='L-BFGS-B', num_restarts=3,
                 t1_min=None, t1_max=None, t2_guess=None,
                 precision=8, resolution=None,
                 lower_bound=None, upper_bound=None,
                 hist=False, log_file=None):
        # See the class docstring for parameter descriptions.
        self.inverted = inverted
        self.theta = None
        self.latent = latent
        self.method = method
        self.num_restarts = num_restarts
        # Invalid settings are corrected with a console warning rather
        # than raising, so a sequential test session is never aborted.
        if self.num_restarts < 1:
            print('Number of restarts must be greater than or eqaul to 1.')
            print('Defaulting to 3.')
            self.num_restarts = 3
        if self.method not in self.available_opt_methods:
            print("""method '{}' not understood.
                  Defaulting to L-BFGS-B.
                  Please choose from {}""".format(self.method,
                                                  self.available_opt_methods))
            self.method = 'L-BFGS-B'
        # Pick the latent-distribution function bundle; each module
        # exposes the same function_dictionary interface.
        if latent == 'normal':
            from .norm_funcs import function_dictionary
        elif latent == 'logistic':
            from .logistic_funcs import function_dictionary
        elif latent == 'log-logistic':
            from .log_logistic_funcs import function_dictionary
        else:
            raise ValueError("""Value for "latent" not understood.
                             Must be "normal", "logistic", or "log-logistic".""")
        # Bind the distribution-specific callables as instance attributes.
        self.pred = function_dictionary['pred']
        self.opt_config = function_dictionary['opt_config']
        self.cost_func = function_dictionary['cost']
        self.cost_deriv = function_dictionary['cost_deriv']
        self.est_names = function_dictionary['estimate_names']
        self.Hessian = function_dictionary['Hessian']
        self.cdf_deriv = function_dictionary['cdf_deriv']
        self.info = function_dictionary['info']
        self.precision = precision
        # State flags used by the sequential design algorithm.
        self.start = True
        self.binary = True
        self.overlap = True
        self.mle = True
        self.lower_bound = lower_bound
        self.upper_bound = upper_bound
        self.hist = hist
        # NOTE(review): log_file, resolution, and the det_* histories are
        # only defined when their options are enabled -- downstream code
        # must guard access to these attributes.
        if isinstance(log_file, str):
            self.log_file = log_file
            # Truncate/create the log file up front.
            file_obj = open(log_file, 'w')
            file_obj.close()
        if resolution != None:
            self.resolution = resolution
        if self.hist == True:
            self.det_vals = []
            self.det_res = []
            self.x_pts = []
        self.t1_min = t1_min
        self.t1_max = t1_max
        self.t2_guess = t2_guess
        # Start with empty data and NaN estimates.
        self.X = np.asarray([]).reshape((-1,1))
        self.Y = np.asarray([]).reshape((-1,1))
        self.theta = np.array([np.nan, np.nan])
        self.observed_info = np.empty((2,2))
        self.updated = -1
    def fit(self, X, Y):
        """
        Compute the maximum likelihood estimates of the distribution parameters.

        Parameters
        ----------
        X : 2D array
            The tested stimulus levels. Must be of shape (n_pts, 1)
        Y : array
            The observed response at each stimulus level. 1 for 'go' and 0
            for 'no-go'.

        Returns
        -------
        self
        """
        if X.ndim != 2:
            raise ValueError("X must be of shape [n_examples, 1]")
        if X.shape[0] != Y.shape[0]:
            raise ValueError("""input and output must have the same number of rows!
                             shapes {} and {} do not match.""".format(X.shape, Y.shape))
        Y = Y.reshape((-1,1))
        # NOTE(review): Y is defensively copied but X is stored by
        # reference -- confirm callers do not mutate X afterwards.
        self.Y = Y.copy()
        self.X = X
        # For inverted data ('go' probability decreases with stimulus),
        # flip the responses so the likelihood keeps its standard form.
        if self.inverted:
            Y = np.logical_not(Y).astype(int)
        # The MLE only exists with both response types present.
        if check_success(Y) or check_fail(Y):
            raise HomogeneousResult('Need to have positive AND negative responses present in the data in order to call fit.')
        thetas = []
        costs = []
        t1_low, t1_high, t2_low, t2_high, bounds = self.opt_config(self.X)
        # Multi-start local optimization; uses numpy's global RNG for the
        # random initial points.
        for i in range(self.num_restarts):
            theta_0 = [np.random.uniform(t1_low, t1_high),
                       np.random.uniform(t2_low, t2_high)]
            theta_0 = np.array(theta_0)
            res = minimize(self.cost_func, theta_0,
                           args = (self.X, Y),
                           method=self.method,
                           jac=self.cost_deriv,
                           bounds=bounds)
            thetas.append(res.x)
            costs.append(res.fun)
        thetas = np.asarray(thetas)
        costs = np.asarray(costs)
        # Keep the restart with the lowest negative log-likelihood.
        best_run = np.argmin(costs)
        self.theta = thetas[best_run]
        self.cost = costs[best_run]
        return self
def get_estimators(self):
    """
    Provides access to the stored estimate of theta. For example,
    [mu, sigma] or [alpha, beta].

    Returns
    -------
    array
        Current parameter estimates. Shape is (2,)
    """
    # Guard clauses: model must be trained and data must overlap.
    if self.theta is None:
        raise Exception('Model not yet trained!')
    if check_diff(self.X, self.Y, self.inverted) > 0:
        raise Exception('Not enough data to estimate theta.')
    return self.theta
def print_estimators(self, cost=False):
    """
    Prints the current parameter estimates to the console.

    Parameters
    ----------
    cost : boolean, optional
        If true, the value of the negative log-likelihood, or cost, at the
        current parameter estimates is also printed to the console.
        The default is False.

    Returns
    -------
    None.
    """
    # Guard clauses: model must be trained and data must overlap.
    if self.theta is None:
        raise Exception('Model not yet trained!')
    if check_diff(self.X, self.Y, self.inverted) > 0:
        raise Exception('Not enough data to estimate theta.')
    t1n, t2n = self.est_names()
    t1, t2 = self.theta
    print('{}: {}\n{}: {}'.format(t1n, t1, t2n, t2))
    if cost:
        print('cost: {}'.format(self.cost))
def predict_probability(self, pts=None, confidence=None,
                        CI_level = [.5, .8, .9, .95],
                        num_samples=1000, max_iter=5):
    """
    Returns the probability of a 'go' at pts. p(y=0|pt)

    Parameters
    ----------
    pts : array, optional
        The stimulus levels at which to compute probability predictions.
        The default is None. If None, range = max(X) - min(X) and
        pts = np.linspace(min(X)-0.05*range, max(X)+0.05*range, 100)
        (i.e. a 5% padding on each side of the observed stimulus range).
    confidence : str, optional
        The name of the method used to supply confidence intervals.
        Options are 'delta', 'perturbation' (same as delta), 'likelihood-ratio',
        'parametric-bootstrap', and 'nonparametric-bootstrap'.
        The default is None.
    CI_level : list, optional
        The confidence levels. Ignored if confidence is None.
        The default is [.5, .8, .9, .95].
    num_samples : int, optional
        The number of bootstrapped samples generated. Only used if
        confidence = 'parametric-bootstrap' or 'nonparametric-bootstrap'.
        The default is 1000.
    max_iter : int, optional
        The maximum number of attempts to map the likelihood ratio.
        Only used if confidence = 'likelihood-ratio'. The default is 5.

    Returns
    -------
    tuple
        Consists of the stimulus points, the predicted probability, and
        arrays of the lower bounds and upper bounds of the confidence levels
        if confidence was requested.
        (pts (n_pts, 1), predicted probability (n_pts, 1)) or
        (pts (n_pts, 1), predicted probability (n_pts, 1), lower CI bounds, upper CI bounds)
        where the shape of lower and upper CI bounds is (n_pts, n_levels)
    """
    if self.theta is None:
        raise Exception('Model not yet trained!')
    if check_diff(self.X, self.Y, self.inverted) > 0:
        raise Exception('Not enough data to make a prediction.')
    if pts is None:
        # Default grid: observed range padded by 5% on each side.
        xmin = np.min(self.X)
        xmax = np.max(self.X)
        xint = xmax-xmin
        xstart = xmin - xint*.05
        xend = xmax + xint*.05
        pts = np.linspace(xstart, xend, 100)
    pts = np.array(pts).reshape((-1,1))
    p = self.pred(pts, self.theta, self.inverted)
    if confidence is None:
        return pts, p
    elif confidence == 'parametric-bootstrap':
        # Bootstrap methods refit copies of the model, so pass a deep copy.
        current_model = copy.deepcopy(self)
        lb, ub = parametric_bootstrap(current_model,
                                      pts,
                                      num_samples,
                                      CI_level)
        return pts, p, lb, ub
    elif confidence == 'nonparametric-bootstrap':
        current_model = copy.deepcopy(self)
        lb, ub = nonparametric_bootstrap(current_model,
                                         pts,
                                         num_samples,
                                         CI_level)
        return pts, p, lb, ub
    elif confidence == 'likelihood-ratio':
        # Widen the optimizer bounds before walking the likelihood contour.
        new_bounds = increase_bounds(self.opt_config(self.X),
                                     'both', 'both')
        lb, ub = contour_walk(self, pts, new_bounds, [100],
                              CI_level, max_iter)
        return pts, p, lb, ub
    elif confidence == 'delta' or confidence == 'perturbation':
        lb, ub = delta(self,
                       pts,
                       num_samples,
                       CI_level, p)
        return pts, p, lb, ub
    else:
        ci_methods = [None, 'parametric-bootstrap',
                      'nonparametric-bootstrap', 'likelihood-ratio',
                      'delta', 'perturbation']
        raise ValueError("confidence '{}' not understood.\nPlease choose from {}".format(confidence, ci_methods))
def plot_probability(self, include_data=True, xlabel=None, ylabel=None,
                     alpha=1.0, save_dst=None, show=True, **kwargs):
    """
    A high-level method to call self.predict_probability and plot the result.

    Parameters
    ----------
    include_data : boolean, optional
        Whether or not to plot the data (stimuli and responses).
        The default is True.
    xlabel : str, optional
        If provided, the text for the plot xlabel. The default is None.
    ylabel : str, optional
        If provided, the text for the plot ylabel. The default is None.
    alpha : float, optional
        opacity of the observed data points. Must be between 0 and 1.
        Only used if include_data is True. Useful to visualize many overlapping
        data points. The default is 1.0.
    save_dst : str, optional
        The file path (including file type) where the plot should be saved.
        The default is None.
    show : boolean, optional
        If True, simply calls matplotlib.plt.show(). May be required for
        some IDEs. The default is True.
    **kwargs :
        All keyword arguments provided to predict_probability can also be
        provided here.

    Returns
    -------
    None.
    """
    # Delegates all work to the module-level plotting helper `pp`.
    pp(self, include_data, xlabel, ylabel,
       alpha, save_dst, show, **kwargs)
def plot_confidence_region(self, limits, n, CI_levels=10,
                           save_dst=None, show=True):
    """
    A high-level function to plot the confidence region of the parameters.

    Parameters
    ----------
    limits : list
        The plot limits provided as [lower xlim, upper xlim, lower ylim, upper ylim].
    n : int or list of length 2
        The number locations to sample in the x (theta_1) and y (theta_2) directions.
    CI_levels : int or list, optional
        If an integer, a filled contour plot will be produced with that
        many levels. If it is a list, the list values specify the confidence
        levels at which to draw contour lines. The default is 10.
    save_dst : str, optional
        The file path (including file type) where the plot should be saved.
        The default is None
    show : boolean, optional
        If True, simply calls matplotlib.plt.show(). May be required for
        some IDEs. The default is True.

    Returns
    -------
    None.
    """
    # Guard clauses mirror get_estimators: need a trained model and overlap.
    if self.theta is None:
        raise Exception('Model not yet trained!')
    if check_diff(self.X, self.Y, self.inverted) > 0:
        raise Exception('Not enough data to make a prediction.')
    # Delegates the actual plotting to the module-level helper `pcr`.
    pcr(self, limits, n, CI_levels, save_dst, show)
def __prompt_input(self):
    """
    If the sequential design algorithm is used and if there is 1) insufficient
    data or 2) t1_min, t1_max, and t2_guess were not specified, then prompt
    the user for those values. Used internally. Should not be called.

    Returns
    -------
    None.
    """
    t1n, t2n = self.est_names()
    # Ask for each starting parameter in a fixed order.
    prompts = [
        ('t1_min', 'Lower bound guess for {}: '.format(t1n)),
        ('t1_max', 'Upper bound guess for {}: '.format(t1n)),
        ('t2_guess', 'Initial guess for {}: '.format(t2n)),
    ]
    for attr, prompt in prompts:
        setattr(self, attr, float(input(prompt)))
def __max_info(self, theta):
    """
    Find the stimulus level that maximizes the determinant of the Fisher
    information matrix of the data augmented with that level (D-optimal
    next point), given parameter estimates ``theta``. Used internally.
    """
    def det(level):
        # Negative determinant of the 2x2 information matrix for the
        # existing stimuli plus the candidate level (negated so that a
        # minimizer can be used to maximize information).
        X_test = np.vstack((self.X, level))
        info = self.info(X_test, theta[0], theta[1])
        return -1*(info[0][0] * info[1][1] - info[0][1] * info[1][0])
    ranges = self.max_s - self.min_s
    if self.lower_bound == None and self.upper_bound == None:
        # Unbounded case: search half a range beyond the observed stimuli,
        # coarse grid (brute) followed by local polish (fmin).
        res = brute(det, ((self.min_s - .5*ranges, self.max_s + .5*ranges),),
                    Ns=100, finish=fmin)
    else:
        if self.lower_bound == None:
            lb = self.min_s - ranges
        else: lb = self.lower_bound
        if self.upper_bound == None:
            # NOTE(review): this evaluates to exactly self.max_s
            # (min_s + (max_s - min_s)); the symmetric lower branch extends
            # a full range below, which suggests `self.max_s + ranges` may
            # have been intended — confirm before changing.
            ub = self.min_s + ranges
        else: ub = self.upper_bound
        res = brute(det, ((lb, ub),),
                    Ns=100, finish=fmin)
    if self.hist:
        # Optionally record the determinant landscape for later inspection.
        if self.lower_bound == None:
            x_pts = np.linspace(self.min_s - 2.5*ranges,
                                self.max_s + 2.5*ranges,
                                500)
        else:
            x_pts = np.linspace(self.lower_bound - .1 * ranges,
                                self.upper_bound + .1 * ranges,
                                500)
        self.x_pts.append(x_pts)
        d_res = []
        for i in x_pts:
            # Re-negate so stored values are the actual determinant.
            d_res.append(-1*det(np.asarray(i)))
        self.det_vals.append(d_res)
        self.det_res.append(float(res))
    return float(res)
def __check_initial_theta(self):
    """
    Validate the user-supplied starting parameters for the sequential
    design algorithm. Used internally.

    Raises
    ------
    ValueError
        If t1_max is not strictly greater than t1_min, or t2_guess is not
        positive.
    """
    if self.t1_max <= self.t1_min:
        # Message fixed: the check also rejects equality, so "must be
        # greater than" is accurate where "cannot be less than" was not.
        raise ValueError('t1_max must be greater than t1_min!')
    elif self.t2_guess <= 0:
        raise ValueError('t2_guess must be positive!')
def next_pt(self):
    """
    The sequential design algorithm. When this method is called, the next
    suggested stimulus level for testing is returned (and logged).

    The algorithm moves through three phases:
    1. binary search until a 'go' below a 'no-go' brackets the transition,
    2. overlap search using a D-optimal next point with a shrinking t2 guess,
    3. maximum-likelihood phase once overlap is achieved.

    Returns
    -------
    float
        The next suggested stimulus level.
    """
    Y = self.Y.copy().astype(bool)
    if self.inverted:
        Y = np.logical_not(Y)
    if self.start:
        self.start = False
        if self.X.size == 0:
            custom_log(self, 'Starting Sequential Algorithm with No Data', True)
            if (self.t1_min is None) or (self.t1_max is None) or (self.t2_guess is None):
                self.__prompt_input()
            self.__check_initial_theta()
            # First point: midpoint of the user's guessed t1 interval.
            self.nx = _round(self, (self.t1_min + self.t1_max) / 2.)
            check_bounds(self, self.nx)
            custom_log(self, 'Next Point Requested: {}'.format(self.nx))
            self.updated = 0
            return self.nx
        else:
            diff = check_diff(self.X, self.Y, self.inverted)
            if diff > 0:
                # Data exists but does not overlap yet: fall back to the
                # guided search phases, prompting for guesses if needed.
                if (self.t1_min is None) or (self.t1_max is None) or (self.t2_guess is None):
                    print("""Even though data has been provided, overlap has not been achieved.
In this case it is necessary to provide parameters for t1_min, t1_max, and t2_guess.
""")
                    self.__prompt_input()
                self.__check_initial_theta()
                return self.next_pt()
            else:
                # Overlap already achieved: jump straight to the MLE phase.
                self.binary = False
                self.overlap = False
                return self.next_pt()
    else:
        # Only recompute a suggestion when new data has arrived; otherwise
        # return the last suggestion unchanged.
        if self.X.size > self.updated:
            self.updated = self.X.size
        else:
            return self.nx
    if self.binary:
        self.max_s = np.max(self.X)
        self.min_s = np.min(self.X)
        custom_log(self, 'In Binary Search Section', True)
        custom_log(self, 'Min Stimulus: {}'.format(self.min_s))
        custom_log(self, 'Max Stimulus: {}'.format(self.max_s))
        # all success case
        if Y.size == np.sum(Y):
            custom_log(self, 'In All Success Section', True)
            t1 = (self.t1_min + self.min_s) / 2.
            t2 = self.min_s - 2. * self.t2_guess
            t3 = 2. * self.min_s - self.max_s
            # BUG FIX: was min(t1, t2, t2) — t3 was computed but never
            # used; the all-failure branch below uses max(t1, t2, t3).
            self.nx = _round(self, min(t1, t2, t3))
            check_bounds(self, self.nx)
            custom_log(self, 'Next Point Requested: {}'.format(self.nx))
            return self.nx
        # all failure case
        if np.sum(Y) == 0:
            custom_log(self, 'In All Failure Section', True)
            t1 = (self.t1_max + self.max_s) / 2.
            t2 = self.max_s + 2. * self.t2_guess
            t3 = 2. * self.max_s - self.min_s
            self.nx = _round(self, max(t1, t2, t3))
            check_bounds(self, self.nx)
            custom_log(self, 'Next Point Requested: {}'.format(self.nx))
            return self.nx
        self.min_go = np.min(self.X[Y])
        self.max_no = np.max(self.X[np.logical_not(Y)])
        self.diff = round(self.min_go - self.max_no, self.precision)
        custom_log(self, 'Min Go: {}'.format(self.min_go))
        custom_log(self, 'Max No-Go: {}'.format(self.max_no))
        custom_log(self, 'Difference: {}'.format(self.diff))
        custom_log(self, 'Theta 2 guess: {}'.format(self.t2_guess))
        if self.diff > self.t2_guess:
            # Gap still wide: bisect between the closest no-go and go.
            self.nx = _round(self, (self.max_no + self.min_go) / 2.)
            check_bounds(self, self.nx)
            custom_log(self, 'Next Point Requested: {}'.format(self.nx))
            return self.nx
        else:
            self.binary = False
    if self.overlap:
        custom_log(self, 'In Overlap Search Section', True)
        self.min_go = np.min(self.X[Y])
        self.max_no = np.max(self.X[np.logical_not(Y)])
        self.diff = round(self.min_go - self.max_no, self.precision)
        custom_log(self, 'Min Go: {}'.format(self.min_go))
        custom_log(self, 'Max No-Go: {}'.format(self.max_no))
        custom_log(self, 'Difference: {}'.format(self.diff))
        custom_log(self, 'Theta 2 guess: {}'.format(self.t2_guess))
        if self.diff > self.t2_guess:
            custom_log(self, 'Reverting Back to Binary Search', True)
            self.binary = True
            self.updated = -1
            return self.next_pt()
        if self.diff < 0:
            custom_log(self, '--- Overlap Achieved! ---', True)
            self.overlap = False
        else:
            # 0 <= diff <= t2_guess: pick the D-optimal next point for a
            # provisional theta, then shrink the spread guess.
            self.theta[0] = (self.max_no + self.min_go) / 2.
            self.theta[1] = self.t2_guess
            custom_log(self, 'Maximize Determinant With...')
            t1n, t2n = self.est_names()
            custom_log(self, '{}: {}'.format(t1n, self.theta[0]))
            custom_log(self, '{}: {}'.format(t2n, self.theta[1]))
            self.nx = _round(self, self.__max_info(self.theta))
            self.t2_guess *= 0.8
            check_bounds(self, self.nx)
            custom_log(self, 'Next Point Requested: {}'.format(self.nx))
            return self.nx
    if self.mle:
        custom_log(self, 'In Maximum Likelihood Section', True)
        self.max_s = max(self.X)
        self.min_s = min(self.X)
        custom_log(self, 'Min Stimulus: {}'.format(self.min_s))
        custom_log(self, 'Max Stimulus: {}'.format(self.max_s))
        self.fit(self.X, self.Y)
        t1n, t2n = self.est_names()
        custom_log(self, 'Estimated {}: {}'.format(t1n, self.theta[0]))
        custom_log(self, 'Estimated {}: {}'.format(t2n, self.theta[1]))
        # Clamp estimates into the tested stimulus range so the D-optimal
        # search cannot run away on an unstable early fit.
        self.theta[0] = max(self.min_s, min(self.theta[0], self.max_s))
        self.theta[1] = min(self.theta[1], self.max_s - self.min_s)
        custom_log(self, 'Bounded Estimated {}: {}'.format(t1n, self.theta[0]))
        custom_log(self, 'Bounded Estimated {}: {}'.format(t2n, self.theta[1]))
        self.nx = _round(self, self.__max_info(self.theta))
        check_bounds(self, self.nx)
        custom_log(self, 'Next Point Requested: {}'.format(self.nx))
        return self.nx
def post_test_outcome(self, res, pt):
    """
    Append a stimulus level and result to the existing data.

    Parameters
    ----------
    res : int or boolean
        The observed result at the tested stimulus level. Either 0, 1 or
        False, True.
    pt : float
        The stimulus level at which the test was performed.

    Raises
    ------
    ValueError
        If res is not one of 0, 1, True, or False.

    Returns
    -------
    None.
    """
    if isinstance(res, bool) or (res == 0) or (res == 1):
        self.X = np.vstack((self.X, pt))
        custom_log(self, 'Tested Points: \n {}'.format(self.X.flatten()))
        self.Y = np.vstack((self.Y, int(res)))
        custom_log(self, 'Test Results: \n {}'.format(self.Y.flatten()))
    else:
        # BUG FIX: the original message used '\{' / '\}' — invalid escape
        # sequences that printed literal backslashes. Plain braces are fine
        # here because this string is never passed through str.format.
        raise ValueError('Result must be {0, 1} or {True, False}!')
def loop(self, iterations=1000000):
    """
    This method suggests new test levels and accepts user input to calculate
    maximum likelihood estimates. That is, this method constitutes a loop.

    Loop will continue indefinitely until 'end' is received as user input
    during either the test level or result input queries. Alternatively,
    if a set number of specimens is to be used then the number of loops can
    be specified with the 'iterations' keyword argument.

    Parameters
    ----------
    iterations : int, optional
        End the loop automatically after n iterations. The default is 1000000.

    Returns
    -------
    None.
    """
    print('-'*50)
    print("""If the level at which the test is performed is the same as the
suggested level, then the user can simply press enter (no need for input)
when queried about the test level.""")
    print('\n')
    # Typo fixed below: "abou" -> "about".
    print("""When the user does not wish to test any more levels,
input "end" (without quotes) when queried about the next test.""")
    print('-'*50)
    print('\n')
    for _ in range(iterations):
        nx = self.next_pt()
        print('Specimen number: {}'.format(self.X.size + 1))
        print('The next suggested test point is: {}'.format(nx))
        pt = input('Please input the level at which the test was performed: ')
        pt = "".join(pt.split()).lower()
        if pt == 'end':
            break
        elif pt == '':
            # Empty input means "tested at the suggested level".
            pt = nx
        else:
            try:
                pt = float(pt)
            except ValueError:
                # Narrowed from a bare except: only a non-numeric string
                # should re-prompt; anything else should surface.
                print("Input level '{}' not understood. Try again. Type 'end' to terminate loop.".format(pt))
                continue
        res = input('Please input the result: ')
        res = "".join(res.split()).lower()
        print('\n')
        if res == 'true' or res == '1':
            self.post_test_outcome(1, pt)
        elif res == 'false' or res == '0':
            self.post_test_outcome(0, pt)
        elif res == '':
            # Empty result discards this specimen without recording data.
            pass
        elif res == 'end':
            break
        else:
            print("Result value '{}' not understood. Input must be 0 or False for a negative response and 1 or True for a positive response. Boolean inputs are not case sensitive. Try again. Type 'end' during input query to terminate loop.".format(res))
| StarcoderdataPython |
1657729 | <gh_stars>0
# -*- coding: utf-8 -*-
# Copyright 2014, Digital Reasoning
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from rest_framework import serializers
from . import models
logger = logging.getLogger(__name__)
class BlueprintPropertiesSerializer(serializers.Serializer):
    """Serialize a blueprint's raw ``properties`` dict; empty dict when absent."""

    def to_native(self, obj):
        return obj.properties if obj is not None else {}
class BlueprintAccessRuleSerializer(serializers.ModelSerializer):
    """Serializer for firewall-style access rules attached to a blueprint."""

    class Meta:
        model = models.BlueprintAccessRule
        # Exposes the protocol, port range, and the rule body (e.g. CIDR).
        fields = (
            'protocol',
            'from_port',
            'to_port',
            'rule',
        )
class BlueprintVolumeSerializer(serializers.ModelSerializer):
    """Serializer for block-device volume definitions on a blueprint host."""

    class Meta:
        model = models.BlueprintVolume
        fields = (
            'device',
            'mount_point',
            'snapshot',
        )
class BlueprintHostFormulaComponentSerializer(
        serializers.HyperlinkedModelSerializer):
    """
    Serializer for a formula component attached to a blueprint host.

    Most fields are read-only values flattened from the related
    ``component`` object rather than the through-model itself.
    """

    title = serializers.Field(source='component.title')
    description = serializers.Field(source='component.description')
    formula = serializers.Field(source='component.formula')
    component_id = serializers.Field(source='component.id')
    sls_path = serializers.Field(source='component.sls_path')

    class Meta:
        model = models.BlueprintHostFormulaComponent
        fields = (
            'component_id',
            'title',
            'description',
            'formula',
            'sls_path',
            'order',
        )
class BlueprintHostDefinitionSerializer(
        serializers.HyperlinkedModelSerializer):
    """
    Serializer for a host definition inside a blueprint, including its
    nested formula components, access rules, and volumes.
    """

    formula_components = BlueprintHostFormulaComponentSerializer(many=True)
    # access_rules may be omitted on write.
    access_rules = BlueprintAccessRuleSerializer(many=True, required=False)
    volumes = BlueprintVolumeSerializer(many=True)

    class Meta:
        model = models.BlueprintHostDefinition
        fields = (
            'id',
            'title',
            'description',
            'cloud_profile',
            'count',
            'hostname_template',
            'size',
            'zone',
            'subnet_id',
            'formula_components',
            'access_rules',
            'volumes',
            'spot_price',
        )
class BlueprintSerializer(serializers.HyperlinkedModelSerializer):
    """
    Top-level serializer for a Blueprint, with a read-only owner, a link to
    the properties endpoint, and nested host definitions.
    """

    # Read-only; the owner is set server-side, not by the client.
    owner = serializers.Field()
    # Hyperlink to the blueprint-properties detail view.
    properties = serializers.HyperlinkedIdentityField(
        view_name='blueprint-properties')
    host_definitions = BlueprintHostDefinitionSerializer(many=True,
                                                         required=False)

    class Meta:
        model = models.Blueprint
        fields = (
            'id',
            'title',
            'description',
            'owner',
            'public',
            'url',
            'properties',
            'host_definitions',
        )
| StarcoderdataPython |
3310313 | <filename>stable_baselines3/common/maskable/callbacks.py
import os
import numpy as np
from stable_baselines3.common.callbacks import EvalCallback
from stable_baselines3.common.vec_env import sync_envs_normalization
from stable_baselines3.common.maskable.evaluation import evaluate_policy
class MaskableEvalCallback(EvalCallback):
    """
    Callback for evaluating an agent. Supports invalid action masking.

    :param eval_env: The environment used for initialization
    :param callback_on_new_best: Callback to trigger
        when there is a new best model according to the ``mean_reward``
    :param n_eval_episodes: The number of episodes to test the agent
    :param eval_freq: Evaluate the agent every eval_freq call of the callback.
    :param log_path: Path to a folder where the evaluations (``evaluations.npz``)
        will be saved. It will be updated at each evaluation.
    :param best_model_save_path: Path to a folder where the best model
        according to performance on the eval env will be saved.
    :param deterministic: Whether the evaluation should
        use a stochastic or deterministic actions.
    :param render: Whether to render or not the environment during evaluation
    :param verbose:
    :param warn: Passed to ``evaluate_policy`` (warns if ``eval_env`` has not been
        wrapped with a Monitor wrapper)
    :param use_masking: Whether or not to use invalid action masks during evaluation
    """

    def __init__(self, *args, use_masking: bool = True, **kwargs):
        super().__init__(*args, **kwargs)
        self.use_masking = use_masking

    def _on_step(self) -> bool:
        if self.eval_freq > 0 and self.n_calls % self.eval_freq == 0:
            # Sync training and eval env if there is VecNormalize
            sync_envs_normalization(self.training_env, self.eval_env)

            # Reset success rate buffer
            self._is_success_buffer = []

            # Note that evaluate_policy() has been patched to support masking
            episode_rewards, episode_lengths = evaluate_policy(
                self.model,
                self.eval_env,
                n_eval_episodes=self.n_eval_episodes,
                render=self.render,
                deterministic=self.deterministic,
                return_episode_rewards=True,
                warn=self.warn,
                callback=self._log_success_callback,
                use_masking=self.use_masking,
            )

            if self.log_path is not None:
                self.evaluations_timesteps.append(self.num_timesteps)
                self.evaluations_results.append(episode_rewards)
                self.evaluations_length.append(episode_lengths)

                kwargs = {}
                # Save success log if present
                if len(self._is_success_buffer) > 0:
                    self.evaluations_successes.append(self._is_success_buffer)
                    kwargs = dict(successes=self.evaluations_successes)

                np.savez(
                    self.log_path,
                    timesteps=self.evaluations_timesteps,
                    results=self.evaluations_results,
                    ep_lengths=self.evaluations_length,
                    **kwargs,
                )

            mean_reward, std_reward = np.mean(episode_rewards), np.std(episode_rewards)
            mean_ep_length, std_ep_length = np.mean(episode_lengths), np.std(episode_lengths)
            self.last_mean_reward = mean_reward

            if self.verbose > 0:
                print(f"Eval num_timesteps={self.num_timesteps}, " f"episode_reward={mean_reward:.2f} +/- {std_reward:.2f}")
                print(f"Episode length: {mean_ep_length:.2f} +/- {std_ep_length:.2f}")
            # Add to current Logger
            self.logger.record("eval/mean_reward", float(mean_reward))
            self.logger.record("eval/mean_ep_length", mean_ep_length)

            if len(self._is_success_buffer) > 0:
                success_rate = np.mean(self._is_success_buffer)
                if self.verbose > 0:
                    print(f"Success rate: {100 * success_rate:.2f}%")
                self.logger.record("eval/success_rate", success_rate)

            # Dump log so the evaluation results are printed with the correct timestep
            # FIX: key was "time/total timesteps" (with a space); SB3's
            # EvalCallback and the rest of the library use "time/total_timesteps".
            self.logger.record("time/total_timesteps", self.num_timesteps, exclude="tensorboard")
            self.logger.dump(self.num_timesteps)

            if mean_reward > self.best_mean_reward:
                if self.verbose > 0:
                    print("New best mean reward!")
                if self.best_model_save_path is not None:
                    self.model.save(os.path.join(self.best_model_save_path, "best_model"))
                self.best_mean_reward = mean_reward
                # Trigger callback if needed
                if self.callback is not None:
                    return self._on_event()

        return True
| StarcoderdataPython |
137632 | # -*- test-case-name: vumi.transports.smpp.tests.test_smpp -*-
from datetime import datetime
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks, returnValue
from vumi import log
from vumi.utils import get_operator_number
from vumi.transports.base import Transport
from vumi.transports.smpp.clientserver.client import (
EsmeTransceiverFactory, EsmeTransmitterFactory, EsmeReceiverFactory,
EsmeCallbacks)
from vumi.transports.smpp.clientserver.config import ClientConfig
from vumi.transports.failures import FailureMessage
from vumi.message import Message, TransportUserMessage
from vumi.persist.txredis_manager import TxRedisManager
class SmppTransport(Transport):
"""
An SMPP transport.
The SMPP transport has many configuration parameters. These are
divided up into sections below.
SMPP server account configuration options:
:type system_id: str
:param system_id:
User id used to connect to the SMPP server.
:type password: str
:param password:
Password for the system id.
:type system_type: str, optional
:param system_type:
Additional system metadata that is passed through to the SMPP server
on connect.
:type host: str
:param host:
Hostname of the SMPP server.
:type port: int
:param port:
Port the SMPP server is listening on.
:type initial_reconnect_delay: int, optional
:param initial_reconnect_delay:
Number of seconds to delay before reconnecting to the server after
being disconnected. Default is 5s. Some WASPs, e.g. Clickatell,
require a 30s delay before reconnecting. In these cases a 45s
initial_reconnect_delay is recommended.
:type split_bind_prefix: str, optional
:param split_bind_prefix:
This is the Redis prefix to use for storing things like sequence
numbers and message ids for delivery report handling.
It defaults to `<system_id>@<host>:<port>`.
*ONLY* if the connection is split into two separate binds for RX and TX
then make sure this is the same value for both binds.
This _only_ needs to be done for TX & RX since messages sent via the TX
bind are handled by the RX bind and they need to share the same prefix
for the lookup for message ids in delivery reports to work.
:type throttle_delay: float, optional
:param throttle_delay:
Delay (in seconds) before retrying a message after receiving
`ESME_RTHROTTLED`. Default 0.1
SMPP protocol configuration options:
:type interface_version: str, optional
:param interface_version:
SMPP protocol version. Default is '34' (i.e. version 3.4).
:type dest_addr_ton:
:param dest_addr_ton:
Destination TON (type of number). Default .
:type dest_addr_npi:
:param dest_addr_npi:
Destination NPI (number plan identifier). Default 1 (ISDN/E.164/E.163).
:type source_addr_ton:
:param source_addr_ton:
Source TON (type of number). Default is 0 (Unknown)
:type source_addr_npi:
:param source_addr_npi:
Source NPI (number plan identifier). Default is 0 (Unknown)
:type registered_delivery:
:param registered_delivery:
Whether to ask for delivery reports. Default 1 (request delivery
reports).
:param dict data_coding_overrides:
Overrides for data_coding character set mapping. This is useful for
setting the default encoding (0), adding additional undefined encodings
(such as 4 or 8) or overriding encodings in cases where the SMSC is
violating the spec (which happens a lot). Keys should be integers,
values should be strings containing valid Python character encoding
names.
:param bool send_long_messages:
If `True`, messages longer than 254 characters will be sent in the
`message_payload` optional field instead of the `short_message` field.
Default is `False`, simply because that maintains previous behaviour.
The list of SMPP protocol configuration options given above is not
exhaustive. Any other options specified are passed through to the
python-smpp library PDU (protocol data unit) builder.
Cellphone number routing options:
:type COUNTRY_CODE: str, optional
:param COUNTRY_CODE:
Used to translate a leading zero in a destination MSISDN into a
country code. Default '',
:type OPERATOR_PREFIX: str, optional
:param OPERATOR_PREFIX:
Nested dictionary of prefix to network name mappings. Default {} (set
network to 'UNKNOWN'). E.g. { '27': { '27761': 'NETWORK1' }}.
:type OPERATOR_NUMBER:
:param OPERATOR_NUMBER:
Dictionary of source MSISDN to use for each network listed in
OPERATOR_PREFIX. If a network is not listed, the source MSISDN
specified by the message sender is used. Default {} (always used the
from address specified by the message sender). E.g. { 'NETWORK1':
'27761234567'}.
"""
# We only want to start this after we finish connecting to SMPP.
start_message_consumer = False
callLater = reactor.callLater
def validate_config(self):
    """Parse the SMPP client config and the throttle retry delay (seconds)."""
    self.client_config = ClientConfig.from_config(self.config)
    # Default matches the documented throttle_delay of 0.1s.
    self.throttle_delay = float(self.config.get('throttle_delay', 0.1))
@inlineCallbacks
def setup_transport(self):
    """
    Set up Redis state, ESME callbacks, and (if not already connected)
    start the TCP connection to the SMPP server.
    """
    log.msg("Starting the SmppTransport with %s" % self.config)

    # How long third-party message-id mappings live in Redis.
    self.third_party_id_expiry = self.config.get(
        "third_party_id_expiry",
        60 * 60 * 24 * 7  # 1 week
    )

    r_config = self.config.get('redis_manager', {})
    # Default Redis key prefix: <system_id>@<host>:<port> (see class docs —
    # split TX/RX binds must share this prefix via split_bind_prefix).
    default_prefix = "%s@%s:%s" % (
        self.client_config.system_id,
        self.client_config.host,
        self.client_config.port,
    )
    r_prefix = self.config.get('split_bind_prefix', default_prefix)

    redis = yield TxRedisManager.from_config(r_config)
    self.redis = redis.sub_manager(r_prefix)

    self.r_message_prefix = "message_json"
    self.throttled = False

    # Wire protocol events from the ESME client back into this transport.
    self.esme_callbacks = EsmeCallbacks(
        connect=self.esme_connected,
        disconnect=self.esme_disconnected,
        submit_sm_resp=self.submit_sm_resp,
        delivery_report=self.delivery_report,
        deliver_sm=self.deliver_sm)

    if not hasattr(self, 'esme_client'):
        # start the Smpp transport (if we don't have one)
        self.factory = self.make_factory()
        reactor.connectTCP(
            self.client_config.host,
            self.client_config.port,
            self.factory)
@inlineCallbacks
def teardown_transport(self):
    """Stop reconnect attempts, drop the SMPP connection, and close Redis."""
    if hasattr(self, 'factory'):
        # Prevent the ReconnectingClientFactory from re-dialing.
        self.factory.stopTrying()
        self.factory.esme.transport.loseConnection()
    yield self.redis._close()
def make_factory(self):
    """Build the transceiver factory; subclasses override for TX/RX-only binds."""
    return EsmeTransceiverFactory(
        self.client_config, self.redis, self.esme_callbacks)
def esme_connected(self, client):
    """Record the connected ESME client and resume consuming outbound messages."""
    log.msg("ESME Connected, adding handlers")
    self.esme_client = client

    # Start the consumer
    self.unpause_connectors()
@inlineCallbacks
def handle_outbound_message(self, message):
    """Persist the outbound message in Redis, then submit it over SMPP."""
    log.debug("Consumed outgoing message %r" % (message,))
    log.debug("Unacknowledged message count: %s" % (
        (yield self.esme_client.get_unacked_count()),))
    # Store first so the message can be recovered on failure or throttling.
    yield self.r_set_message(message)
    yield self._submit_outbound_message(message)
@inlineCallbacks
def _submit_outbound_message(self, message):
    """Send the PDU and map its SMPP sequence number to the vumi message id."""
    sequence_number = yield self.send_smpp(message)
    # The mapping lets submit_sm_resp find the original message later.
    yield self.r_set_id_for_sequence(
        sequence_number, message.payload.get("message_id"))
def esme_disconnected(self):
    """Pause outbound consumption while the SMPP connection is down."""
    log.msg("ESME Disconnected")
    self.pause_connectors()
# Redis message storing methods
def r_message_key(self, message_id):
    """Redis key under which the JSON for a message id is stored."""
    return "{}#{}".format(self.r_message_prefix, message_id)
def r_set_message(self, message):
    """Store a message's JSON in Redis, keyed by its message_id."""
    key = self.r_message_key(message.payload['message_id'])
    return self.redis.set(key, message.to_json())
def r_get_message_json(self, message_id):
    """Fetch the raw stored JSON for a message id (deferred)."""
    key = self.r_message_key(message_id)
    return self.redis.get(key)
@inlineCallbacks
def r_get_message(self, message_id):
    """Fetch and deserialize a stored message, or None if not present."""
    raw = yield self.r_get_message_json(message_id)
    returnValue(Message.from_json(raw) if raw else None)
def r_delete_message(self, message_id):
    """Remove a stored message from Redis (deferred)."""
    key = self.r_message_key(message_id)
    return self.redis.delete(key)
# Redis sequence number storing methods
def r_get_id_for_sequence(self, sequence_number):
    """Look up the vumi message id stored for an SMPP sequence number."""
    key = str(sequence_number)
    return self.redis.get(key)
def r_delete_for_sequence(self, sequence_number):
    """Drop the sequence-number -> message-id mapping."""
    key = str(sequence_number)
    return self.redis.delete(key)
def r_set_id_for_sequence(self, sequence_number, id):
    """Record which vumi message id was sent with an SMPP sequence number."""
    key = str(sequence_number)
    return self.redis.set(key, id)
# Redis 3rd party id to vumi id mapping
def r_third_party_id_key(self, third_party_id):
    """Redis key for the SMSC (third-party) id -> vumi id mapping."""
    return "3rd_party_id#{}".format(third_party_id)
def r_get_id_for_third_party_id(self, third_party_id):
    """Look up the vumi message id for an SMSC-assigned message id."""
    key = self.r_third_party_id_key(third_party_id)
    return self.redis.get(key)
def r_delete_for_third_party_id(self, third_party_id):
    """Drop the SMSC id -> vumi id mapping."""
    key = self.r_third_party_id_key(third_party_id)
    return self.redis.delete(key)
@inlineCallbacks
def r_set_id_for_third_party_id(self, third_party_id, id):
    """Store the SMSC id -> vumi id mapping with the configured expiry."""
    key = self.r_third_party_id_key(third_party_id)
    yield self.redis.set(key, id)
    # Mappings are only needed until the delivery report arrives.
    yield self.redis.expire(key, self.third_party_id_expiry)
def _start_throttling(self):
    """Enter throttled state (idempotent) and pause outbound consumption."""
    if not self.throttled:
        log.err("Throttling outbound messages.")
        self.throttled = True
        self.pause_connectors()
def _stop_throttling(self):
    """Leave throttled state (idempotent) and resume outbound consumption."""
    if self.throttled:
        log.err("No longer throttling outbound messages.")
        self.throttled = False
        self.unpause_connectors()
@inlineCallbacks
def submit_sm_resp(self, *args, **kwargs):
    """
    Handle a submit_sm_resp PDU: map the SMSC message id back to the vumi
    message id and dispatch on the command status (ok / throttled / error).
    """
    transport_msg_id = kwargs['message_id']
    sent_sms_id = (
        yield self.r_get_id_for_sequence(kwargs['sequence_number']))
    if sent_sms_id is None:
        log.err("Sequence number lookup failed for:%s" % (
            kwargs['sequence_number'],))
    else:
        # Remember SMSC id -> vumi id so delivery reports can be matched,
        # then retire the per-request sequence-number mapping.
        yield self.r_set_id_for_third_party_id(
            transport_msg_id, sent_sms_id)
        yield self.r_delete_for_sequence(kwargs['sequence_number'])
        status = kwargs['command_status']
        if status == 'ESME_ROK':
            # The sms was submitted ok
            yield self.submit_sm_success(sent_sms_id, transport_msg_id)
            yield self._stop_throttling()
        elif status == 'ESME_RTHROTTLED':
            # SMSC asked us to slow down; message will be retried.
            yield self._start_throttling()
            yield self.submit_sm_throttled(sent_sms_id)
        else:
            # We have an error
            yield self.submit_sm_failure(sent_sms_id,
                                         status or 'Unspecified')
            yield self._stop_throttling()
@inlineCallbacks
def submit_sm_success(self, sent_sms_id, transport_msg_id):
    """Delete the stored message and publish an ack for a successful submit."""
    yield self.r_delete_message(sent_sms_id)
    log.debug("Mapping transport_msg_id=%s to sent_sms_id=%s" % (
        transport_msg_id, sent_sms_id))
    log.debug("PUBLISHING ACK: (%s -> %s)" % (
        sent_sms_id, transport_msg_id))
    self.publish_ack(
        user_message_id=sent_sms_id,
        sent_message_id=transport_msg_id)
@inlineCallbacks
def submit_sm_failure(self, sent_sms_id, reason, failure_code=None):
    """
    Handle a failed submit_sm: remove the stored message, publish a nack,
    and emit a FailureMessage on the failure publisher.

    :param sent_sms_id: vumi message id of the failed message.
    :param reason: human-readable failure reason (e.g. the SMPP status).
    :param failure_code: optional machine-readable failure code, forwarded
        into the FailureMessage.
    """
    error_message = yield self.r_get_message(sent_sms_id)
    if error_message is None:
        log.err("Could not retrieve failed message:%s" % (
            sent_sms_id))
    else:
        yield self.r_delete_message(sent_sms_id)
        yield self.publish_nack(sent_sms_id, reason)
        yield self.failure_publisher.publish_message(FailureMessage(
            message=error_message.payload,
            # BUG FIX: was hard-coded to None, silently dropping the
            # caller-supplied failure_code argument.
            failure_code=failure_code,
            reason=reason))
@inlineCallbacks
def submit_sm_throttled(self, sent_sms_id):
    """Re-schedule a throttled message for submission after throttle_delay."""
    message = yield self.r_get_message(sent_sms_id)
    if message is None:
        log.err("Could not retrieve throttled message:%s" % (
            sent_sms_id))
    else:
        # Retry the original submit after the configured delay.
        self.callLater(self.throttle_delay,
                       self._submit_outbound_message, message)
def delivery_status(self, state):
    """Map an SMPP delivery-report state string to a vumi status.

    Returns one of "delivered", "failed" or "pending" (the default for
    any unrecognised state).
    """
    # "0" is accepted for Yo! — see original TODO: investigate.
    delivered_states = ("DELIVRD", "0")
    if state in delivered_states:
        return "delivered"
    return "failed" if state == "REJECTD" else "pending"
@inlineCallbacks
def delivery_report(self, *args, **kwargs):
    """Translate an SMSC delivery report into a vumi delivery report.

    Our message id is looked up from the third-party (SMSC) id recorded
    in submit_sm_resp; reports for unknown ids are discarded.
    """
    transport_metadata = {
        "message": kwargs['delivery_report'],
        # 'done_date' is in the SMPP YYMMDDhhmmss format.
        "date": datetime.strptime(
            kwargs['delivery_report']['done_date'], "%y%m%d%H%M%S")
    }
    delivery_status = self.delivery_status(
        kwargs['delivery_report']['stat'])
    message_id = yield self.r_get_id_for_third_party_id(
        kwargs['delivery_report']['id'])
    if message_id is None:
        log.warning("Failed to retrieve message id for delivery report."
                    " Delivery report from %s discarded."
                    % self.transport_name)
        # Bare return ends the inlineCallbacks generator with None.
        return
    log.msg("PUBLISHING DELIV REPORT: %s %s" % (message_id,
                                                delivery_status))
    returnValue((yield self.publish_delivery_report(
        user_message_id=message_id,
        delivery_status=delivery_status,
        transport_metadata=transport_metadata)))
def deliver_sm(self, *args, **kwargs):
    """Publish an inbound (MO) message received from the SMSC.

    For USSD, the SMPP session event is translated into the matching
    TransportUserMessage session event and the session info is passed
    through in the transport metadata.
    """
    message_type = kwargs.get('message_type', 'sms')
    message = {
        'message_id': kwargs['message_id'],
        'to_addr': kwargs['destination_addr'],
        'from_addr': kwargs['source_addr'],
        'content': kwargs['short_message'],
        'transport_type': message_type,
        'transport_metadata': {},
    }
    if message_type == 'ussd':
        # Raises KeyError for unknown session states — presumably
        # intentional fail-fast; TODO confirm.
        session_event = {
            'new': TransportUserMessage.SESSION_NEW,
            'continue': TransportUserMessage.SESSION_RESUME,
            'close': TransportUserMessage.SESSION_CLOSE,
        }[kwargs['session_event']]
        message['session_event'] = session_event
        session_info = kwargs.get('session_info')
        message['transport_metadata']['session_info'] = session_info
    log.msg("PUBLISHING INBOUND: %s" % (message,))
    # Publishing can fail when the content cannot be serialized to JSON
    # (e.g. undecodable data_coding == 4 payloads); the errback logs the
    # failure instead of letting it propagate. TODO: handle such
    # messages properly instead of just logging.
    return self.publish_message(**message).addErrback(log.err)
def send_smpp(self, message):
    """Submit an outbound vumi message to the SMSC via the ESME client.

    The source address is resolved from the configured operator tables
    for the destination number, falling back to the message's from_addr.
    """
    log.debug("Sending SMPP message: %s" % (message))
    # first do a lookup in our YAML to see if we've got a source_addr
    # defined for the given MT number, if not, trust the from_addr
    # in the message
    to_addr = message['to_addr']
    from_addr = message['from_addr']
    text = message['content']
    # Any session event other than CLOSE keeps the (USSD) session open.
    continue_session = (
        message['session_event'] != TransportUserMessage.SESSION_CLOSE)
    route = get_operator_number(to_addr,
                                self.config.get('COUNTRY_CODE', ''),
                                self.config.get('OPERATOR_PREFIX', {}),
                                self.config.get('OPERATOR_NUMBER', {})) or from_addr
    return self.esme_client.submit_sm(
        short_message=text.encode('utf-8'),
        destination_addr=str(to_addr),
        source_addr=route,
        message_type=message['transport_type'],
        continue_session=continue_session,
        session_info=message['transport_metadata'].get('session_info'),
    )
def stopWorker(self):
    """Log shutdown and delegate to the base transport's stopWorker."""
    log.msg("Stopping the SMPPTransport")
    return super(SmppTransport, self).stopWorker()
def send_failure(self, message, exception, reason):
    """Send a failure report."""
    # Log locally, then let the base class publish the failure.
    log.msg("Failed to send: %s reason: %s" % (message, reason))
    return super(SmppTransport, self).send_failure(message,
                                                   exception, reason)
class SmppTxTransport(SmppTransport):
    """Transmit-only SMPP transport: binds with a transmitter ESME."""

    def make_factory(self):
        """Return a transmitter-bind ESME factory."""
        return EsmeTransmitterFactory(
            self.client_config, self.redis, self.esme_callbacks)
class SmppRxTransport(SmppTransport):
    """Receive-only SMPP transport: binds with a receiver ESME."""

    def make_factory(self):
        """Return a receiver-bind ESME factory."""
        return EsmeReceiverFactory(
            self.client_config, self.redis, self.esme_callbacks)
| StarcoderdataPython |
3282856 | <filename>secret.py
# OAuth-style client credentials imported by other modules.
# NOTE(review): this file's header names it ``secret.py`` but the
# original comment said "imported from secrets.py" — verify which module
# name callers actually import.
# SECURITY: hard-coded API credentials committed to source control; move
# them to environment variables or an ignored config file and rotate the
# exposed secret.
clientId = '<KEY>'
clientSecret = 'ba32982d56ad4398834210941df54ccc'
| StarcoderdataPython |
3308665 | <filename>Projects/2/Classes/iotJumpWay.py
############################################################################################
#
# Project: Peter Moss COVID-19 AI Research Project
# Repository: AI-Classification
# Repo Project: COVID-19 Tensorflow DenseNet Classifier
#
# Author: <NAME> (<EMAIL>)
# Contributors:
# Title: iotJumpWay Class
# Description: iotJumpWay functions for the COVID-19 Tensorflow DenseNet Classifier.
# License: MIT License
# Last Modified: 2020-06-10
#
############################################################################################
import inspect
import json
import os
import paho.mqtt.client as mqtt
from Classes.Helpers import Helpers
class Device():
    """iotJumpWay MQTT device client.

    Validates the device configuration, connects to the iotJumpWay
    broker over TLS and provides helpers for subscribing/publishing on
    the device's status and data channels.

    BUG FIX: this class previously contained two ``__init__``
    definitions. The first was dead code (silently overridden by the
    second) and was also syntactically invalid — it ended in an ``elif``
    with no body, which made the whole module fail to import. The dead
    definition has been removed; the surviving constructor is the one
    the rest of the class (which reads ``self._configs``) depends on.
    """

    def __init__(self, configs):
        """Validate *configs* and prepare (but do not open) the client.

        :param configs: dict with locationID, zoneID, deviceId,
            deviceName, username and password keys.
        :raises ConfigurationException: if a required key is None.
            NOTE(review): ConfigurationException is not defined or
            imported in this module — confirm it exists upstream.
        """
        print("-- Initiating JumpWayMQTT Device")
        self._configs = configs
        self.mqttClient = None
        # CA bundle shipped next to this module, used for TLS.
        self.mqttTLS = os.path.dirname(
            os.path.abspath(__file__)) + "/ca.pem"
        self.mqttHost = 'iot.techbubbletechnologies.com'
        self.mqttPort = 8883
        # Optional user-assigned callbacks for inbound channel traffic.
        self.deviceStatusCallback = None
        self.deviceCommandsCallback = None
        self.deviceKeysCallback = None
        self.deviceSSLsCallback = None
        if self._configs['locationID'] == None:
            raise ConfigurationException(
                "** Location ID (locationID) property is required")
        elif self._configs['zoneID'] == None:
            raise ConfigurationException(
                "** Application Name (zoneID) property is required")
        elif self._configs['deviceId'] == None:
            raise ConfigurationException(
                "** Device Name (deviceId) property is required")
        elif self._configs['deviceName'] == None:
            raise ConfigurationException(
                "** Device Name (deviceName) property is required")
        elif self._configs['username'] == None:
            raise ConfigurationException(
                "** MQTT UserName (username) property is required")
        elif self._configs['password'] == None:
            raise ConfigurationException(
                "** MQTT Password (password) property is required")
        print("-- JumpWayMQTT Device Initiated")

    def connectToDevice(self):
        """Open the TLS MQTT connection and start the network loop.

        Registers an MQTT last-will so the broker marks the device
        OFFLINE if the connection drops uncleanly.
        """
        print("-- JumpWayMQTT Device Connection Initiating")
        deviceStatusTopic = '%s/Devices/%s/%s/Status' % (
            self._configs['locationID'], self._configs['zoneID'], self._configs['deviceId'])
        self.mqttClient = mqtt.Client(
            client_id=self._configs['deviceName'], clean_session=False)
        self.mqttClient.will_set(deviceStatusTopic, "OFFLINE", 0, False)
        self.mqttClient.tls_set(self.mqttTLS, certfile=None, keyfile=None)
        self.mqttClient.on_connect = self.on_connect
        self.mqttClient.on_message = self.on_message
        self.mqttClient.on_publish = self.on_publish
        self.mqttClient.on_subscribe = self.on_subscribe
        self.mqttClient.username_pw_set(
            str(self._configs['username']), str(self._configs['password']))
        self.mqttClient.connect(self.mqttHost, self.mqttPort, 10)
        self.mqttClient.loop_start()
        print("-- JumpWayMQTT Device Connection Initiated")

    def on_connect(self, client, obj, flags, rc):
        """MQTT connect callback: announce the device as ONLINE."""
        print("-- JumpWayMQTT Device Connected")
        print("rc: "+str(rc))
        self.publishToDeviceStatus("ONLINE")

    def on_subscribe(self, client, obj, mid, granted_qos):
        """MQTT subscribe callback: log the subscription."""
        print("JumpWayMQTT Subscription: " +
              str(self._configs['deviceName']))

    def on_message(self, client, obj, msg):
        """MQTT message callback: dispatch to the per-channel callback.

        The channel name is the 5th topic segment
        (location/Devices/zone/device/<channel>).
        """
        print("JumpWayMQTT Message Received")
        splitTopic = msg.topic.split("/")
        if splitTopic[4] == 'Commands':
            if self.deviceCommandsCallback == None:
                print(
                    "** Device Commands Callback Required (deviceCommandsCallback)")
            else:
                self.deviceCommandsCallback(msg.topic, msg.payload)
        elif splitTopic[4] == 'Keys':
            if self.deviceKeysCallback == None:
                print("** Device Keys Callback Required (deviceKeysCallback)")
            else:
                self.deviceKeysCallback(msg.topic, msg.payload)
        elif splitTopic[4] == 'SSLs':
            if self.deviceSSLsCallback == None:
                print("** Device SSLs Callback Required (deviceSSLsCallback)")
            else:
                self.deviceSSLsCallback(msg.topic, msg.payload)

    def subscribeToDeviceChannel(self, channelID, qos=0):
        """Subscribe to one of this device's channels.

        :returns: True on subscription, False if configuration or the
            channel id is missing.
        """
        print("-- Subscribing JumpWayMQTT To Device Topic")
        if self._configs['locationID'] == None:
            print("** Device Location ID Required (locationID)")
            return False
        elif self._configs['zoneID'] == None:
            print("** Device Zone ID Required (zoneID)")
            return False
        elif self._configs['deviceId'] == None:
            print("** Device ID Required (deviceId)")
            return False
        elif channelID == None:
            print("** Device Channel ID Required (channelID)")
            return False
        else:
            deviceChannel = '%s/Devices/%s/%s/%s' % (
                self._configs['locationID'], self._configs['zoneID'], self._configs['deviceId'], channelID)
            self.mqttClient.subscribe(deviceChannel, qos=qos)
            print("-- Subscribed to Device " +
                  self._configs['deviceId']+" Channel "+channelID)
            return True

    def publishToDeviceStatus(self, data):
        """Publish *data* (e.g. "ONLINE"/"OFFLINE") to the Status topic."""
        if self._configs['locationID'] == None:
            print("** Device Location ID Required (locationID)")
            return False
        elif self._configs['zoneID'] == None:
            print("** Device Zone ID Required (zoneID)")
            return False
        elif self._configs['deviceId'] == None:
            print("** Device ID Required (deviceId)")
            return False
        else:
            deviceStatusTopic = '%s/Devices/%s/%s/Status' % (
                self._configs['locationID'], self._configs['zoneID'], self._configs['deviceId'])
            self.mqttClient.publish(deviceStatusTopic, data)
            print("-- Published to Device Status ")

    def publishToDeviceChannel(self, channelID, data):
        """JSON-encode *data* and publish it to the given channel."""
        if self._configs['locationID'] == None:
            print("** Device Location ID Required (locationID)")
            return False
        elif self._configs['zoneID'] == None:
            print("** Device Zone ID Required (zoneID)")
            return False
        elif self._configs['deviceId'] == None:
            print("** Device ID Required (deviceId)")
            return False
        elif channelID == None:
            print("** Device Channel ID Required (channelID)")
            return False
        else:
            deviceChannel = '%s/Devices/%s/%s/%s' % (
                self._configs['locationID'], self._configs['zoneID'], self._configs['deviceId'], channelID)
            self.mqttClient.publish(deviceChannel, json.dumps(data))
            print("-- Published to Device "+channelID+" Channel")

    def on_publish(self, client, obj, mid):
        """MQTT publish callback: log the message id."""
        print("-- Published: "+str(mid))

    def on_log(self, client, obj, level, string):
        """MQTT log callback: forward broker/client log lines."""
        print(string)

    def disconnectFromDevice(self):
        """Announce OFFLINE, disconnect and stop the network loop."""
        self.publishToDeviceStatus("OFFLINE")
        self.mqttClient.disconnect()
        self.mqttClient.loop_stop()
70338 | <filename>agents/archivist/archivist.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Zoe archivist
# https://github.com/rmed/zoe-archivist
#
# Copyright (c) 2015 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
sys.path.append('./lib')
import gettext
import threading
import zoe
from infocards.archive import Archive
from os import environ as env
from os.path import join as path
from zoe.deco import Agent, Message
from zoe.models.users import Users
# Install _() for the module's translatable strings.
gettext.install("archivist")

# The first line of the config file is the path to the archive database.
with open(path(env["ZOE_HOME"], "etc", "archivist.conf"), "r") as f:
    DB_PATH = f.readline().strip()

LOCALEDIR = path(env["ZOE_HOME"], "locale")
# BUG FIX: env["ZOE_LOCALE"] raised KeyError when the variable was not
# set at all; the ``or "en"`` fallback only covered the empty-string
# case. Use .get() so English is the default, as documented in
# Archivist.set_locale().
ZOE_LOCALE = env.get("ZOE_LOCALE") or "en"
# Serializes access to the archive across concurrent handlers.
LOCK = threading.Lock()
@Agent(name="archivist")
class Archivist:
@Message(tags=["add-section"])
def add_card_to_section(self, parser):
    """ Adds a card to the given section.

        cid* - card id
        sname* - card title
        sender - sender of the message
        src - channel by which the message was delivered
    """
    cid, sname, sender, src = self.multiparse(
        parser, ['cid', 'sname', 'sender', 'src'])
    self.set_locale(sender)
    if not self.has_permissions(sender):
        self.logger.info("%s cannot modify section relations" % sender)
        return self.feedback(_("You don't have permissions to do that"),
                             sender, src)
    # Serialize archive access across concurrent handlers.
    with LOCK:
        try:
            ar = self.connect()
            # int(cid) raises ValueError for non-numeric ids; the broad
            # except below turns that into user feedback.
            result = ar.add_card_to_section(cid=int(cid), sname=sname)
        except Exception as e:
            return self.feedback("Error: " + str(e), sender, src)
    if result:
        return self.feedback(
            _("Added card to section '%s'") % sname, sender, src)
    return self.feedback(
        _("Failed to add card to section '%s'") % sname, sender, src)
@Message(tags=["card-list"])
def card_list(self, parser):
    """ List all the cards in the archive.

        sender* - sender of the message
        src* - channel by which the message was delivered
    """
    sender, src = self.multiparse(parser, ['sender', 'src'])
    self.set_locale(sender)
    msg = ""
    # Serialize archive access across concurrent handlers.
    with LOCK:
        try:
            ar = self.connect()
            cards = ar.cards()
            for card in cards:
                # One "- [id] title: description" line per card.
                msg += "- [%d] %s: %s\n" % (
                    card.id, card.title, card.desc)
        except Exception as e:
            return self.feedback("Error: " + str(e), sender, src)
    if not msg:
        msg = _("No cards found")
    return self.feedback(msg, sender, src)
@Message(tags=["card-sections"])
def card_sections(self, parser):
    """ Show all the sections a card appears in.

        cid* - card id
        sender - sender of the message
        src - channel by which the message was delivered
    """
    cid, sender, src = self.multiparse(
        parser, ['cid', 'sender', 'src'])
    self.set_locale(sender)
    msg = ""
    # Serialize archive access across concurrent handlers.
    with LOCK:
        try:
            ar = self.connect()
            card = ar.get_card(cid=int(cid))
            if not card:
                return self.feedback(_("Card %s does not exist") % cid,
                                     sender, src)
            sections = card.sections()
            for section in sections:
                msg += "- %s\n" % section.name
        except Exception as e:
            return self.feedback("Error: " + str(e), sender, src)
    if not msg:
        msg = _("No sections found")
    return self.feedback(msg, sender, src)
@Message(tags=["delete-card"])
def delete_card(self, parser):
    """ Remove a card from the archive.

        cid* - card id
        sender - sender of the message
        src - channel by which the message was delivered
    """
    cid, sender, src = self.multiparse(
        parser, ['cid', 'sender', 'src'])
    self.set_locale(sender)
    if not self.has_permissions(sender):
        self.logger.info("%s cannot remove cards" % sender)
        return self.feedback(_("You don't have permissions to do that"),
                             sender, src)
    # Serialize archive access across concurrent handlers.
    with LOCK:
        try:
            ar = self.connect()
            result = ar.delete_card(cid=int(cid))
        except Exception as e:
            return self.feedback("Error: " + str(e), sender, src)
    if result:
        return self.feedback(
            _("Removed card '%s'") % cid, sender, src)
    return self.feedback(_("Failed to remove card '%s'") % cid, sender, src)
@Message(tags=["delete-section"])
def delete_section(self, parser):
    """ Remove a section from the archive.

        name* - section name
        sender - sender of the message
        src - channel by which the message was delivered
    """
    name, sender, src = self.multiparse(
        parser, ['name', 'sender', 'src'])
    self.set_locale(sender)
    if not self.has_permissions(sender):
        # BUG FIX: log message said "cannot remove cards" (copy-paste
        # from delete_card); this handler removes sections.
        self.logger.info("%s cannot remove sections" % sender)
        return self.feedback(_("You don't have permissions to do that"),
                             sender, src)
    # Serialize archive access across concurrent handlers.
    with LOCK:
        try:
            ar = self.connect()
            result = ar.delete_section(name=name)
        except Exception as e:
            return self.feedback("Error: " + str(e), sender, src)
    if result:
        return self.feedback(
            _("Removed section '%s'") % name, sender, src)
    return self.feedback(_("Failed to remove '%s'") % name, sender, src)
@Message(tags=["get-cards"])
def get_cards(self, parser):
    """ Obtain information from a list of cards and send it to the user
        through the chosen communication method.

        cids* - list of card ids
        method* - delivery method
        sender - sender of the message
        src - channel by which the message was delivered
        to - optional recipient of the cards
    """
    cids, method, sender, src, to = self.multiparse(
        parser, ['cids', 'method', 'sender', 'src', 'to'])
    self.set_locale(sender)
    # Serialize archive access across concurrent handlers.
    with LOCK:
        try:
            ar = self.connect()
            msg = ""
            # 'cids' is a space-separated list of numeric card ids.
            for cid in cids.split(" "):
                card = ar.get_card(cid=int(cid))
                if card:
                    msg += "%s\n\n" % self.build_card_msg(card)
                    continue
                msg += _("Card %s not found") % cid
                msg += "\n"
        except Exception as e:
            return self.feedback("Error: " + str(e), sender, src)
    # Default recipient is the requester.
    if not to:
        to = sender
    if method == "mail":
        # Immediate chat notice plus the full content by mail.
        return (
            self.feedback(_("Sending..."), sender, src),
            self.feedback(msg, to, subject="Archivist")
        )
    return self.feedback(msg, to, src)
@Message(tags=["get-section"])
def get_section(self, parser):
    """ Obtain information from the cards contained in a given section.

        sname* - section name
        method* - delivery method
        sender - sender of the message
        src - channel by which the message was delivered
        to - optional recipient of the cards
    """
    sname, method, sender, src, to = self.multiparse(
        parser, ['sname', 'method', 'sender', 'src', 'to'])
    self.set_locale(sender)
    # Serialize archive access across concurrent handlers.
    with LOCK:
        try:
            ar = self.connect()
            section = ar.get_section(name=sname)
            if not section:
                return self.feedback(
                    _("Section %s does not exist") % sname, sender, src)
            cards = section.cards()
            msg = ""
            for card in cards:
                msg += "%s\n\n" % self.build_card_msg(card)
        except Exception as e:
            return self.feedback("Error: " + str(e), sender, src)
    # Default recipient is the requester.
    if not to:
        to = sender
    if method == "mail":
        # Immediate chat notice plus the full content by mail.
        return (
            self.feedback(_("Sending..."), sender, src),
            self.feedback(msg, to, subject="Archivist")
        )
    return self.feedback(msg, to, src)
@Message(tags=["modify-card"])
def modify_card(self, parser):
    """ Modify an existing card. Omitted fields keep their previous
        values.

        cid* - card id
        title - unique title of the card
        desc - description of the card
        content - main content of the card
        tags - space separated tags
        sender - sender of the message
        src - channel by which the message was delivered
    """
    cid, title, desc, content, tags, sender, src = self.multiparse(
        parser, ['cid', 'title', 'desc', 'content', 'tags',
                 'sender', 'src'])
    self.set_locale(sender)
    if not self.has_permissions(sender):
        # BUG FIX: log message said "cannot create sections" (copy-paste
        # from new_section); this handler modifies cards.
        self.logger.info("%s cannot modify cards" % sender)
        return self.feedback(_("You don't have permissions to do that"),
                             sender, src)
    # Serialize archive access across concurrent handlers.
    with LOCK:
        try:
            ar = self.connect()
            # Obtain current information so omitted fields fall back to
            # the stored values.
            card = ar.get_card(cid=int(cid))
            # BUG FIX: 'content' previously had no fallback — a missing
            # content argument crashed on None.replace() instead of
            # keeping the card's existing content like the other fields.
            # '_NL_' is the transport-safe newline placeholder.
            newcard = ar.modify_card(
                cid=int(cid),
                title=title or card.title,
                desc=desc or card.desc,
                content=(content.replace('_NL_', '\n') if content
                         else card.content),
                tags=tags or card.tags,
                author=sender or "UNKNOWN"
            )
        except Exception as e:
            return self.feedback("Error: " + str(e), sender, src)
    if newcard:
        return self.feedback(_("Modified card '%s'") % cid, sender, src)
    return self.feedback(_("Failed to modify card '%s'") % cid,
                         sender, src)
@Message(tags=["new-card"])
def new_card(self, parser):
    """ Add a new card to the archive. Cards are added by sending
        an email with a specific format.

        Timestamp is obtained automatically.

        title* - unique title of the card
        desc* - description of the card
        content* - main content of the card
        tags* - space separated tags
        sender - sender of the message
    """
    title, desc, content, tags, sender = self.multiparse(
        parser, ['title', 'desc', 'content', 'tags', 'sender'])
    self.set_locale(sender)
    dst = None
    subject = None
    if sender:
        # Reply over the sender's preferred channel (mail by default).
        dst = Users().subject(sender).get("preferred", "mail")
        if dst == "mail":
            subject = "Archivist"
    if not self.has_permissions(sender):
        self.logger.info("%s cannot add cards" % sender)
        return self.feedback(_("You don't have permissions to do that"),
                             sender, dst, subject=subject)
    # Serialize archive access across concurrent handlers.
    with LOCK:
        try:
            ar = self.connect()
            # '_NL_' is the transport-safe newline placeholder.
            newcard = ar.new_card(
                title,
                desc,
                content.replace('_NL_', '\n'),
                tags,
                sender or "UNKNOWN"
            )
        except Exception as e:
            return self.feedback("Error: " + str(e), sender, dst,
                                 subject=subject)
    if newcard:
        return self.feedback(
            _("Created new card [%d]") % newcard.id, sender, dst,
            subject=subject)
    return self.feedback(_("Failed to create card"), sender, dst,
                         subject=subject)
@Message(tags=["new-section"])
def new_section(self, parser):
    """ Create a new section in the archive.

        name* - unique name for the section
        sender - sender of the message
        src - channel by which the message was delivered
    """
    name, sender, src = self.multiparse(
        parser, ['name', 'sender', 'src'])
    self.set_locale(sender)
    if not self.has_permissions(sender):
        self.logger.info("%s cannot create sections" % sender)
        return self.feedback(_("You don't have permissions to do that"),
                             sender, src)
    # Serialize archive access across concurrent handlers.
    with LOCK:
        try:
            ar = self.connect()
            result = ar.new_section(name)
        except Exception as e:
            return self.feedback("Error: " + str(e), sender, src)
    if result:
        return self.feedback(_("Created section '%s'") % name,
                             sender, src)
    return self.feedback(_("Could not create section '%s'") % name,
                         sender, src)
@Message(tags=["remove-section"])
def remove_card_from_section(self, parser):
    """ Remove a card from a given section.

        cid* - card id
        sname* - section name
        sender - sender of the message
        src - channel by which the message was delivered
    """
    cid, sname, sender, src = self.multiparse(
        parser, ['cid', 'sname', 'sender', 'src'])
    self.set_locale(sender)
    if not self.has_permissions(sender):
        self.logger.info("%s cannot modify section relations" % sender)
        return self.feedback(_("You don't have permissions to do that"),
                             sender, src)
    # Serialize archive access across concurrent handlers.
    with LOCK:
        try:
            ar = self.connect()
            result = ar.remove_card_from_section(cid=int(cid), sname=sname)
        except Exception as e:
            return self.feedback("Error: " + str(e), sender, src)
    if result:
        return self.feedback(
            _("Removed card"), sender, src)
    return self.feedback(
        _("Could not remove card"), sender, src)
@Message(tags=["rename-section"])
def rename_section(self, parser):
    """ Rename a section of the archive.

        name* - original section name
        newname* - new section name
        sender - sender of the message
        src - channel by which the message was delivered
    """
    name, newname, sender, src = self.multiparse(
        parser, ['name', 'newname', 'sender', 'src'])
    self.set_locale(sender)
    if not self.has_permissions(sender):
        self.logger.info("%s cannot modify section relations" % sender)
        return self.feedback(_("You don't have permissions to do that"),
                             sender, src)
    # Serialize archive access across concurrent handlers.
    with LOCK:
        try:
            ar = self.connect()
            result = ar.rename_section(newname, oldname=name)
        except Exception as e:
            return self.feedback("Error: " + str(e), sender, src)
    if result:
        return self.feedback(
            _("Renamed section"), sender, src)
    return self.feedback(
        _("Could not rename"), sender, src)
@Message(tags=["search"])
def search(self, parser):
    """ Traverse a section and find cards relevant to the query.

        query* - search query
        sender* - sender of the message
        section - narrow search results to the specified section
        src - channel by which the message was delivered
    """
    query, sender, section, src = self.multiparse(
        parser, ['query', 'sender', 'section', 'src'])
    self.set_locale(sender)
    if not query:
        return self.feedback(_("No query specified"), sender, src)
    result = ""
    # Serialize archive access across concurrent handlers.
    with LOCK:
        try:
            ar = self.connect()
            # A falsy 'section' searches the whole archive — presumably
            # infocards treats sname=None as "all"; TODO confirm.
            cards = ar.search(query, sname=section)
            for card in cards:
                result += "- [%d] %s: %s\n" % (
                    card.id, card.title, card.desc)
        except Exception as e:
            return self.feedback("Error: " + str(e), sender, src)
    if not result:
        result = _("No cards found")
    return self.feedback(result, sender, src)
@Message(tags=["section-list"])
def section_list(self, parser):
    """ Show all the sections in the archive.

        sender* - sender of the message
        src* - channel by which the message was delivered
    """
    sender, src = self.multiparse(parser, ['sender', 'src'])
    self.set_locale(sender)
    # Serialize archive access across concurrent handlers.
    with LOCK:
        try:
            ar = self.connect()
            sections = ar.sections()
            msg = ""
            for section in sections:
                msg += "- %s\n" % section.name
        except Exception as e:
            return self.feedback("Error: " + str(e), sender, src)
    if not msg:
        msg = _("No sections found")
    return self.feedback(msg, sender, src)
@Message(tags=["section-cards"])
def section_cards(self, parser):
    """ Show all the cards present in a section.

        name* - section name
        sender* - sender of the message
        src* - channel by which the message was delivered
    """
    name, sender, src = self.multiparse(
        parser, ['name', 'sender', 'src'])
    self.set_locale(sender)
    msg = ""
    # Serialize archive access across concurrent handlers.
    with LOCK:
        try:
            ar = self.connect()
            section = ar.get_section(name=name)
            if not section:
                return self.feedback(
                    _("Section %s does not exist") % name, sender, src)
            cards = section.cards()
            for card in cards:
                msg += "- [%d] %s: %s\n" % (
                    card.id, card.title, card.desc)
        except Exception as e:
            return self.feedback("Error: " + str(e), sender, src)
    if not msg:
        msg = _("No cards found")
    return self.feedback(msg, sender, src)
def build_card_msg(self, card):
    """Render a card as a plain-text block for delivery to a user.

    Layout: ruled header with id and title, description, modification
    footer, tags, then the full content.
    """
    rule = "--------------------"
    header = "\n%s\n[%d] %s\n%s\n\n" % (rule, card.id, card.title, rule)
    details = "%s\n\nLast modified <%s> - %s\nTags: %s\n\n" % (
        card.desc, str(card.modified), card.modified_by, card.tags)
    return header + details + card.content
def connect(self):
    """Open a connection to the sqlite-backed infocards archive."""
    return Archive(db_type='sqlite', db_name=DB_PATH)
def feedback(self, msg, user, dst=None, subject=None, att=None):
    """ Send a message or mail to a given user.

        msg - message text or attachment
        user - user to send the feedback to
        subject - if using mail feedback, subject for the mail
        dst - destination of the message: 'jabber' or 'tg'
        att - mail attachment
    """
    # No recipient: nothing to deliver (implicitly returns None).
    if not user:
        return
    to_send = {
        "dst": "relay",
        "to": user
    }
    if not subject:
        to_send["relayto"] = dst
        to_send["msg"] = msg
    else:
        # A subject forces delivery by mail.
        to_send["relayto"] = "mail"
        if att:
            # NOTE(review): assumes the attachment object exposes a
            # ``str()`` method (zoe attachment API) — confirm.
            to_send["att"] = att.str()
        to_send["txt"] = msg or ""
        to_send["subject"] = subject
    return zoe.MessageBuilder(to_send)
def has_permissions(self, user):
    """ Check if the user has permissions necessary to interact with the
        agent manager (belongs to group 'archivists').

        A falsy user means a manual command from the terminal and is
        always allowed.
    """
    return not user or user in Users().membersof("archivists")
def multiparse(self, parser, keys):
    """ Obtain several elements from the parser, identified by the
        list of keys.

        Values are returned in the order specified by the keys list
        (None for keys the parser does not have).
    """
    return [parser.get(key) for key in keys]
def set_locale(self, user):
    """ Set the locale for messages based on the locale of the sender.

        If no locale is provided, Zoe's default locale is used or
        English (en) is used by default.
    """
    if not user:
        locale = ZOE_LOCALE
    else:
        conf = Users().subject(user)
        locale = conf.get("locale", ZOE_LOCALE)
    # (Re)install the translation for the chosen locale so subsequent
    # _() calls in this handler use it.
    lang = gettext.translation("archivist", localedir=LOCALEDIR,
                               languages=[locale, ])
    lang.install()
| StarcoderdataPython |
3328184 | <gh_stars>1-10
from .base import Interface
class Betriebsstellen(Interface):
    """Wrapper for Deutsche Bahn's Betriebsstellen API.

    Documentation at:
    https://developer.deutschebahn.com/store/apis/info?name=BahnPark&version=v1&provider=DBOpenData

    NOTE(review): the URL above references the BahnPark API and looks
    like a copy-paste from a sibling wrapper — confirm the correct
    Betriebsstellen documentation link.
    """

    def __init__(self, token=None, key=None, secret=None, config=None):
        """Initialise the base Interface and append the Betriebsstellen
        endpoint root to its base address."""
        super(Betriebsstellen, self).__init__(key=key, secret=secret,
                                              token=token, config=config)
        self.address += 'betriebsstellen/v1/'

    def request(self, endpoint, verb=None, **req_kwargs):
        """Returns Data from Betriebsstellen endpoint as python object.

        Querys API using a super() call to Interface.request(), checks the
        HTTP status code and returns the response's json data
        as a python object.

        :param endpoint: str
        :param verb: str
        :param req_kwargs: kwargs accepted by requests.Request()
        :return: Dict or list
        """
        # Bearer-token auth; request JSON explicitly.
        req_kwargs['headers'] = {'Authorization': 'Bearer ' + self.token,
                                 'Accept': 'application/json'}
        resp = super(Betriebsstellen, self).request(endpoint, verb=verb,
                                                    **req_kwargs)
        # Surface HTTP errors to the caller instead of returning bodies.
        resp.raise_for_status()
        return resp.json()

    def betriebsstellen(self, station_name, is_abbreviation=False):
        """Returns data on a operation station.

        :param station_name:
        :param is_abbreviation:
        :return:
        """
        endpoint = 'betriebsstellen'
        if is_abbreviation:
            # Abbreviations are path parameters; full names go in the
            # 'name' query parameter.
            endpoint += '/' + station_name
            return self.request(endpoint)
        else:
            return self.request(endpoint, params={'name': station_name})
| StarcoderdataPython |
78763 | <gh_stars>10-100
import treeano.nodes as tn
from treeano.sandbox.nodes import unbiased_nesterov_momentum as unm
def test_unbiased_nesterov_momentum_node_serialization():
    """Round-trip a momentum node through treeano's serialization check."""
    tn.check_serialization(
        unm.UnbiasedNesterovMomentumNode("a", tn.IdentityNode("i")))
def test_unbiased_nesterov_momentum_node():
    """Check the momentum node produces valid updates when wrapped in SGD."""
    def unbiased_nag(name, children):
        # Build an SGD update tree whose subtree is wrapped with
        # unbiased Nesterov momentum; treeano's test utility drives it.
        return tn.SGDNode(name,
                          {"cost": children["cost"],
                           "subtree": unm.UnbiasedNesterovMomentumNode(
                               name + "_momentum",
                               children["subtree"])},
                          learning_rate=0.01)
    tn.test_utils.check_updates_node(unbiased_nag)
| StarcoderdataPython |
142260 | <gh_stars>1-10
import argparse
import json
# Print the "claim" field of every JSON-lines record in --in-file.
parser = argparse.ArgumentParser()
parser.add_argument("--in-file")
args = parser.parse_args()

# BUG FIX: json.loads() was called with encoding='utf8'; that keyword
# was deprecated since Python 3.1 and removed in 3.9, so this raised
# TypeError on modern interpreters. The file is opened as UTF-8 text,
# so the lines are already decoded. Also close the file deterministically.
with open(args.in_file, encoding="utf-8") as claims_file:
    for claim in claims_file:
        print(json.loads(claim)["claim"])
| StarcoderdataPython |
150017 | <reponame>omnivector-solutions/license-manager
from fastapi import APIRouter
from lm_backend.api.booking import router as router_booking
from lm_backend.api.config import router as router_config
from lm_backend.api.license import router as router_license
# Aggregate v1 router: mounts the license, booking and config sub-routers
# under their respective URL prefixes with OpenAPI tags.
api_v1 = APIRouter()

api_v1.include_router(router_license, prefix="/license", tags=["License"])
api_v1.include_router(router_booking, prefix="/booking", tags=["Booking"])
api_v1.include_router(router_config, prefix="/config", tags=["Config"])
| StarcoderdataPython |
29950 | #!/usr/bin/env python
from __future__ import unicode_literals
import os
import sys
import tarfile
import shutil
import tempfile
from contextlib import contextmanager
from pymatgen.io.gaussian import GaussianInput, GaussianOutput
from tinydb import TinyDB
@contextmanager
def cd(run_path, cleanup=lambda: True):
    """
    Temporarily work in another directory, creating it if necessary.

    On exit the previous working directory is restored and *cleanup*
    is invoked (its return value is ignored).
    """
    home = os.getcwd()
    target = os.path.expanduser(run_path)
    # BUG FIX: the docstring promised "creating it if necessary" but the
    # directory was never created, so cd() raised FileNotFoundError for
    # new paths. exist_ok keeps existing directories working as before.
    os.makedirs(target, exist_ok=True)
    os.chdir(target)
    try:
        yield
    finally:
        os.chdir(home)
        cleanup()
@contextmanager
def tempdir():
    """
    Create a temporary directory, chdir into it and yield its path; on
    exit restore the previous working directory and delete the
    directory.
    """
    dirpath = tempfile.mkdtemp()
    previous = os.getcwd()
    os.chdir(os.path.expanduser(dirpath))
    try:
        yield dirpath
    finally:
        os.chdir(previous)
        shutil.rmtree(dirpath)
def extract_data_from_tar_file(tar_file):
    """Unpack one calculation archive and collect excitation/NICS data.

    Expects *tar_file* (``<name>.tar.gz``) in the current working directory,
    containing a ``<name>/`` folder with Gaussian logs ``td.log``,
    ``tda.log``, ``nics_singlet.log`` and ``nics_triplet.log``.  Returns a
    dict of extracted values, or False when either NICS job did not
    terminate properly.
    """
    # Extract in place; the archive is assumed to expand to a folder whose
    # name is the archive name without the .tar.gz suffix.
    with tarfile.open(tar_file, 'r:gz') as tar:
        tar.extractall()
    folder = tar_file.replace('.tar.gz', '')
    with cd(folder):
        # TD-DFT excitations: first triplet and first singlet energies (eV).
        tdout = GaussianOutput('td.log')
        td_exit = tdout.read_excitation_energies()
        td_triplet = [e for e in td_exit if 'triplet' in e[3].lower()][0][0]
        td_singlet = [e for e in td_exit if 'singlet' in e[3].lower()][0][0]
        # Same extraction for the TDA run.
        tdaout = GaussianOutput('tda.log')
        tda_exit = tdaout.read_excitation_energies()
        tda_triplet = [e for e in tda_exit if 'triplet' in e[3].lower()][0][0]
        tda_singlet = [e for e in tda_exit if 'singlet' in e[3].lower()][0][0]
        nicssout = GaussianOutput('nics_singlet.log')
        # occasionally some jobs fail here
        if not nicssout.properly_terminated:
            return False
        nicss_mag = nicssout.read_magnetic_shielding()
        # NICS values: average |isotropic shielding| over pairs of ghost
        # atoms.  The negative indices presumably address the last eight
        # ghost atoms in a fixed order (six-ring above/below, five-ring
        # above/below) -- TODO confirm against the input geometry.
        nicss_six_ring_above = (abs(nicss_mag[-8]['isotropic']) +
                                abs(nicss_mag[-6]['isotropic']))/2
        nicss_six_ring_below = (abs(nicss_mag[-7]['isotropic']) +
                                abs(nicss_mag[-5]['isotropic']))/2
        nicss_five_ring_above = (abs(nicss_mag[-4]['isotropic']) +
                                 abs(nicss_mag[-2]['isotropic']))/2
        nicss_five_ring_below = (abs(nicss_mag[-3]['isotropic']) +
                                 abs(nicss_mag[-1]['isotropic']))/2
        nicstout = GaussianOutput('nics_triplet.log')
        if not nicstout.properly_terminated:
            return False
        nicst_mag = nicstout.read_magnetic_shielding()
        # Same ghost-atom averaging for the triplet-state NICS run.
        nicst_six_ring_above = (abs(nicst_mag[-8]['isotropic']) +
                                abs(nicst_mag[-6]['isotropic']))/2
        nicst_six_ring_below = (abs(nicst_mag[-7]['isotropic']) +
                                abs(nicst_mag[-5]['isotropic']))/2
        nicst_five_ring_above = (abs(nicst_mag[-4]['isotropic']) +
                                 abs(nicst_mag[-2]['isotropic']))/2
        nicst_five_ring_below = (abs(nicst_mag[-3]['isotropic']) +
                                 abs(nicst_mag[-1]['isotropic']))/2
        data = {'td_singlet': td_singlet, 'td_triplet': td_triplet,
                'tda_singlet': tda_singlet, 'tda_triplet': tda_triplet,
                'nicss_six_ring_above': nicss_six_ring_above,
                'nicss_six_ring_below': nicss_six_ring_below,
                'nicss_five_ring_above': nicss_five_ring_above,
                'nicss_five_ring_below': nicss_five_ring_below,
                'nicst_six_ring_above': nicst_six_ring_above,
                'nicst_six_ring_below': nicst_six_ring_below,
                'nicst_five_ring_above': nicst_five_ring_above,
                'nicst_five_ring_below': nicst_five_ring_below}
        return data
# Walk every structure in the database, pull calculation results out of its
# archive (if present), and write the collected rows to a second TinyDB file.
data_to_write = []
db = TinyDB(os.path.join('..', 'data', 'structures.json'))
systems = list(db.all())
done = 0
for i, system in enumerate(systems):
    # The archive is named after the Gaussian input's title line.
    input_file = GaussianInput.from_dict(system['input'])
    directory = input_file.title
    tar_name = '{}.tar.gz'.format(directory)
    tar_file = os.path.abspath(os.path.join('..', 'data', 'calculations', tar_name))
    if os.path.isfile(tar_file):
        # extract the data in a temp directory to avoid clobbering any data
        with tempdir() as tmp_dir:
            shutil.copy(tar_file, tmp_dir)
            data = extract_data_from_tar_file(tar_name)
            if not data:
                print('{} did not finish correctly, skipping'.format(directory))
                continue
            # Carry the structure's identifying metadata along with the
            # extracted numeric results.
            data.update({'x_sub': system['x_sub'], 'y_sub': system['y_sub'],
                         'z_sub': system['z_sub'], 'nx': system['nx'],
                         'ny': system['ny'], 'title': system['title']})
            data_to_write.append(data)
    # Progress: +5% every 500 systems -- assumes roughly 10,000 systems in
    # total; the printed percentage is wrong for other dataset sizes (TODO).
    if i % 500 == 0:
        done += 5
        print('{}% completed'.format(done))
print('writing data')
# Rebind `db` to the output database before the bulk insert.
db = TinyDB(os.path.join('..', 'data', 'calculated-data.json'))
db.insert_multiple(data_to_write)
| StarcoderdataPython |
196836 | <reponame>mmore500/hstrat
import random
import unittest
from hstrat import hstrat
# Seed the global RNG so stochastic retention predicates behave reproducibly.
random.seed(1)
class TestStratumRetentionDripPlot(unittest.TestCase):
    """Smoke tests: drip plots render without error for every predicate."""

    # tests can run independently
    _multiprocess_can_split_ = True

    def test(self):
        """Render a drip plot for each predicate at two generation counts."""
        predicates = (
            hstrat.StratumRetentionPredicateDepthProportionalResolution(),
            hstrat.StratumRetentionPredicateFixedResolution(),
            hstrat.StratumRetentionPredicateNominalResolution(),
            hstrat.StratumRetentionPredicatePerfectResolution(),
            hstrat.StratumRetentionPredicateRecencyProportionalResolution(),
            hstrat.StratumRetentionPredicateStochastic(),
            hstrat.StratumRetentionPredicateTaperedDepthProportionalResolution(),
        )
        for predicate in predicates:
            for num_generations in (100, 10):
                hstrat.stratum_retention_drip_plot(
                    predicate, num_generations, do_show=False)
hstrat.stratum_retention_drip_plot(predicate, 10, do_show=False)
if __name__ == '__main__':
    # Allow running this test module directly with `python <file>`.
    unittest.main()
| StarcoderdataPython |
1755016 | <gh_stars>0
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import re
from google.appengine.ext import testbed
import webapp2
import webtest
from handlers import build_failure
from handlers import handlers_util
from handlers import result_status
from model.wf_analysis import WfAnalysis
from model import analysis_status
from model.wf_analysis import WfAnalysis
from waterfall import buildbot
from waterfall.test import wf_testcase
# Root directory appengine/findit.
ROOT_DIR = os.path.join(os.path.dirname(__file__),
                        os.path.pardir, os.path.pardir)
# Canned try-job results keyed by 'master/builder/build_number'.  setUp()
# below patches handlers_util.GetAllTryJobResults to serve from this map:
# m/b/119 exercises a swarming test step with reliable, flaky and pending
# entries; m/b/120 exercises a compile step with a found culprit.
SAMPLE_TRY_JOB_INFO = {
    'm/b/119': {
        'step1 on platform':{
            'try_jobs': [
                {
                    'ref_name': 'step1',
                    'try_job_key': 'm/b/119',
                    'task_id': 'task1',
                    'task_url': 'url/task1',
                    'status': analysis_status.COMPLETED,
                    'try_job_url': (
                        'http://build.chromium.org/p/tryserver.chromium.'
                        'linux/builders/linux_variable/builds/121'),
                    'try_job_build_number': 121,
                    'tests': ['test3'],
                    'culprit': {}
                },
                {
                    'ref_name': 'step1',
                    'try_job_key': 'm/b/119',
                    'task_id': 'task1',
                    'task_url': 'url/task1',
                    'status': analysis_status.COMPLETED,
                    'try_job_url': (
                        'http://build.chromium.org/p/tryserver.chromium.'
                        'linux/builders/linux_variable/builds/121'),
                    'try_job_build_number': 121,
                    'culprit': {
                        'revision': 'rev2',
                        'commit_position': '2',
                        'review_url': 'url_2'
                    },
                    'tests': ['test2']
                },
                {
                    'ref_name': 'step1',
                    'try_job_key': 'm/b/119',
                    'status': result_status.FLAKY,
                    'task_id': 'task1',
                    'task_url': 'url/task1',
                    'tests': ['test4']
                },
                {
                    'ref_name': 'step1',
                    'try_job_key': 'm/b/120',
                    'status': result_status.NO_TRY_JOB_REASON_MAP[
                        analysis_status.PENDING],
                    'task_id': 'task2',
                    'task_url': 'url/task2',
                    'tests': ['test1']
                }
            ]
        }
    },
    'm/b/120': {
        'compile': {
            'try_jobs': [
                {
                    'try_job_key': 'm/b/120',
                    'status': analysis_status.COMPLETED,
                    'try_job_build_number': 120,
                    'try_job_url': (
                        'http://build.chromium.org/p/tryserver.chromium.'
                        'linux/builders/linux_variable/builds/120'),
                    'culprit': {
                        'revision': 'rev2',
                        'commit_position': '2',
                        'review_url': 'url_2'
                    }
                }
            ]
        }
    }
}
class BuildFailureTest(wf_testcase.WaterfallTestCase):
  """Tests for the /build-failure handler and its result-shaping helpers."""
  app_module = webapp2.WSGIApplication([
      ('/build-failure', build_failure.BuildFailure),
  ], debug=True)
  def setUp(self):
    """Installs clean task queues and stubs out try-job result lookups."""
    super(BuildFailureTest, self).setUp()
    # Setup clean task queues.
    self.testbed.init_taskqueue_stub(root_path=ROOT_DIR)
    self.taskqueue_stub = self.testbed.get_stub(testbed.TASKQUEUE_SERVICE_NAME)
    for queue in self.taskqueue_stub.GetQueues():
      self.taskqueue_stub.FlushQueue(queue['name'])
    # Serve canned try-job results from SAMPLE_TRY_JOB_INFO instead of
    # hitting real data stores.
    def MockedGetAllTryJobResults(master_name, builder_name, build_number):
      build_key = '%s/%s/%d' % (master_name, builder_name, build_number)
      return SAMPLE_TRY_JOB_INFO.get(build_key, None)
    self.mock(handlers_util, 'GetAllTryJobResults', MockedGetAllTryJobResults)
  def testGetTriageHistoryWhenUserIsNotAdmin(self):
    """Non-admin users must not receive the triage history."""
    analysis = WfAnalysis.Create('m', 'b', 1)
    analysis.status = analysis_status.COMPLETED
    analysis.triage_history = [
        {
            'triage_timestamp': 1438380761,
            'user_name': 'test',
            'result_status': 'dummy status',
            'version': 'dummy version',
        }
    ]
    self.assertIsNone(build_failure._GetTriageHistory(analysis))
  def testGetTriageHistoryWhenUserIsAdmin(self):
    """Admin users get the full triage history."""
    analysis = WfAnalysis.Create('m', 'b', 1)
    analysis.status = analysis_status.COMPLETED
    analysis.triage_history = [
        {
            'triage_timestamp': 1438380761,
            'user_name': 'test',
            'result_status': 'dummy status',
            'version': 'dummy version',
        }
    ]
    self.mock_current_user(user_email='<EMAIL>', is_admin=True)
    self.assertEqual(1, len(build_failure._GetTriageHistory(analysis)))
  def testInvalidBuildUrl(self):
    """A URL that does not point at a build yields 501 Not Implemented."""
    build_url = 'abc'
    self.assertRaisesRegexp(
        webtest.app.AppError,
        re.compile('.*501 Not Implemented.*Url "%s" '
                   'is not pointing to a build.*' % build_url,
                   re.MULTILINE | re.DOTALL),
        self.test_app.get, '/build-failure', params={'url': build_url})
  def testNonAdminCanViewAnalysisOfFailureOnUnsupportedMaster(self):
    """Existing analyses are viewable even on unsupported masters."""
    master_name = 'm2'
    builder_name = 'b 1'
    build_number = 123
    build_url = buildbot.CreateBuildUrl(
        master_name, builder_name, build_number)
    analysis = WfAnalysis.Create(master_name, builder_name, build_number)
    analysis.status = analysis_status.COMPLETED
    analysis.put()
    response = self.test_app.get('/build-failure',
                                 params={'url': build_url})
    self.assertEquals(200, response.status_int)
    # No new analysis task should have been scheduled.
    self.assertEqual(0, len(self.taskqueue_stub.get_filtered_tasks()))
  def testNonAdminCannotRequestAnalysisOfFailureOnUnsupportedMaster(self):
    """Requesting a fresh analysis on an unsupported master is rejected."""
    master_name = 'm2'
    builder_name = 'b 1'
    build_number = 123
    build_url = buildbot.CreateBuildUrl(
        master_name, builder_name, build_number)
    self.assertRaisesRegexp(
        webtest.app.AppError,
        re.compile('.*501 Not Implemented.*Master "%s" '
                   'is not supported yet.*' % master_name,
                   re.MULTILINE | re.DOTALL),
        self.test_app.get, '/build-failure', params={'url': build_url})
  def testAdminCanRequestAnalysisOfFailureOnUnsupportedMaster(self):
    """Admins may force an analysis on an unsupported master."""
    master_name = 'm2'
    builder_name = 'b'
    build_number = 123
    build_url = buildbot.CreateBuildUrl(
        master_name, builder_name, build_number)
    self.mock_current_user(user_email='<EMAIL>', is_admin=True)
    response = self.test_app.get('/build-failure', params={'url': build_url})
    self.assertEquals(200, response.status_int)
    # One analysis task scheduled.
    self.assertEqual(1, len(self.taskqueue_stub.get_filtered_tasks()))
  def testAnyoneCanRequestAnalysisOfFailureOnSupportedMaster(self):
    """Any user may request an analysis on a supported master."""
    master_name = 'm'
    builder_name = 'b 1'
    build_number = 123
    build_url = buildbot.CreateBuildUrl(
        master_name, builder_name, build_number)
    response = self.test_app.get('/build-failure', params={'url': build_url})
    self.assertEquals(200, response.status_int)
    self.assertEqual(1, len(self.taskqueue_stub.get_filtered_tasks()))
  def testGetOrganizedAnalysisResultBySuspectedCLNonSwarming(self):
    """A non-swarming step's result is grouped under its step name."""
    analysis_result = {
        'failures': [
            {
                'step_name': 'a',
                'first_failure': 98,
                'last_pass': None,
                'supported': True,
                'suspected_cls': [
                    {
                        'build_number': 99,
                        'repo_name': 'chromium',
                        'revision': 'r99_2',
                        'commit_position': None,
                        'url': None,
                        'score': 2,
                        'hints': {
                            'modified f99_2.cc (and it was in log)': 2,
                        },
                    }
                ],
            }
        ]
    }
    result = build_failure._GetOrganizedAnalysisResultBySuspectedCL(
        analysis_result)
    expected_result = {
        'a': [
            {
                'first_failure': 98,
                'last_pass': None,
                'supported': True,
                'suspected_cls': [
                    {
                        'build_number': 99,
                        'repo_name': 'chromium',
                        'revision': 'r99_2',
                        'commit_position': None,
                        'url': None,
                        'score': 2,
                        'hints': {
                            'modified f99_2.cc (and it was in log)': 2,
                        },
                    }
                ],
                'tests': []
            }
        ]
    }
    self.assertEqual(expected_result, result)
  def testGetOrganizedAnalysisResultBySuspectedCLSwarming(self):
    """Swarming tests are grouped by their (shared) suspected CLs."""
    analysis_result = {
        'failures': [
            {
                'step_name': 'b',
                'first_failure': 98,
                'last_pass': 96,
                'supported': True,
                'suspected_cls': [
                    {
                        'build_number': 98,
                        'repo_name': 'chromium',
                        'revision': 'r98_1',
                        'commit_position': None,
                        'url': None,
                        'score': 4,
                        'hints': {
                            'modified f98.cc[123, 456] (and it was in log)': 4,
                        },
                    }
                ],
                'tests': [
                    {
                        'test_name': 'Unittest2.Subtest1',
                        'first_failure': 98,
                        'last_pass': 97,
                        'suspected_cls': [
                            {
                                'build_number': 98,
                                'repo_name': 'chromium',
                                'revision': 'r98_1',
                                'commit_position': None,
                                'url': None,
                                'score': 4,
                                'hints': {
                                    ('modified f98.cc[123] '
                                     '(and it was in log)'): 4,
                                },
                            }
                        ]
                    },
                    {
                        'test_name': 'Unittest3.Subtest2',
                        'first_failure': 98,
                        'last_pass': 96,
                        'suspected_cls': [
                            {
                                'build_number': 98,
                                'repo_name': 'chromium',
                                'revision': 'r98_1',
                                'commit_position': None,
                                'url': None,
                                'score': 4,
                                'hints': {
                                    ('modified f98.cc[456] '
                                     '(and it was in log)'): 4,
                                },
                            }
                        ]
                    },
                    {
                        'test_name': 'Unittest3.Subtest3',
                        'first_failure': 98,
                        'last_pass': 96,
                        'suspected_cls': []
                    }
                ]
            }
        ]
    }
    result = build_failure._GetOrganizedAnalysisResultBySuspectedCL(
        analysis_result)
    expected_result = {
        'b': [
            {
                'supported': True,
                'first_failure': 98,
                'last_pass': 97,
                'suspected_cls': [
                    {
                        'build_number': 98,
                        'repo_name': 'chromium',
                        'revision': 'r98_1',
                        'commit_position': None,
                        'url': None,
                        'score': 4,
                        'hints': {
                            'modified f98.cc[123, 456] (and it was in log)': 4,
                        },
                    }
                ],
                'tests': ['Unittest2.Subtest1', 'Unittest3.Subtest2']
            },
            {
                'first_failure': 98,
                'last_pass': 96,
                'supported': True,
                'suspected_cls': [],
                'tests': ['Unittest3.Subtest3']
            }
        ]
    }
    self.assertEqual(expected_result, result)
  def testGetAnalysisResultWithTryJobInfo(self):
    """Heuristic results are merged with try-job info per reliability bucket."""
    master_name = 'm'
    builder_name = 'b'
    build_number = 119
    organized_results = {
        'step1 on platform': [
            {
                'supported': True,
                'first_failure': 119,
                'last_pass': 118,
                'suspected_cls': [
                    {
                        'build_number': 119,
                        'repo_name': 'chromium',
                        'revision': 'r98_1',
                        'commit_position': None,
                        'url': None,
                        'score': 4,
                        'hints': {
                            'modified f98.cc[123, 456] (and it was in log)': 4,
                        },
                    }
                ],
                'tests': ['test2', 'test3']
            },
            {
                'first_failure': 119,
                'last_pass': 118,
                'supported': True,
                'suspected_cls': [],
                'tests': ['test4']
            },
            {
                'first_failure': 120,
                'last_pass': 119,
                'supported': True,
                'suspected_cls': [],
                'tests': ['test1']
            }
        ]
    }
    updated_result = build_failure._GetAnalysisResultWithTryJobInfo(
        organized_results, master_name, builder_name, build_number)
    expected_result = {
        'step1 on platform':{
            'results': {
                'reliable_failures': [
                    {
                        'try_job':{
                            'ref_name': 'step1',
                            'try_job_key': 'm/b/119',
                            'task_id': 'task1',
                            'task_url': 'url/task1',
                            'status': analysis_status.COMPLETED,
                            'try_job_url': (
                                'http://build.chromium.org/p/tryserver.chromium'
                                '.linux/builders/linux_variable/builds/121'),
                            'try_job_build_number': 121,
                            'tests': ['test3'],
                            'culprit': {}
                        },
                        'heuristic_analysis': {
                            'suspected_cls': [
                                {
                                    'build_number': 119,
                                    'repo_name': 'chromium',
                                    'revision': 'r98_1',
                                    'commit_position': None,
                                    'url': None,
                                    'score': 4,
                                    'hints': {
                                        ('modified f98.cc[123, 456] '
                                         '(and it was in log)'): 4,
                                    },
                                }
                            ]
                        },
                        'tests': ['test3'],
                        'first_failure': 119,
                        'last_pass': 118,
                        'supported': True
                    },
                    {
                        'try_job':{
                            'ref_name': 'step1',
                            'try_job_key': 'm/b/119',
                            'task_id': 'task1',
                            'task_url': 'url/task1',
                            'status': analysis_status.COMPLETED,
                            'try_job_url': (
                                'http://build.chromium.org/p/tryserver.chromium'
                                '.linux/builders/linux_variable/builds/121'),
                            'try_job_build_number': 121,
                            'culprit': {
                                'revision': 'rev2',
                                'commit_position': '2',
                                'review_url': 'url_2'
                            },
                            'tests': ['test2']
                        },
                        'heuristic_analysis': {
                            'suspected_cls': [
                                {
                                    'build_number': 119,
                                    'repo_name': 'chromium',
                                    'revision': 'r98_1',
                                    'commit_position': None,
                                    'url': None,
                                    'score': 4,
                                    'hints': {
                                        ('modified f98.cc[123, 456] '
                                         '(and it was in log)'): 4,
                                    },
                                }
                            ]
                        },
                        'tests': ['test2'],
                        'first_failure': 119,
                        'last_pass': 118,
                        'supported': True
                    }
                ],
                'flaky_failures': [
                    {
                        'try_job':{
                            'ref_name': 'step1',
                            'try_job_key': 'm/b/119',
                            'status': result_status.FLAKY,
                            'task_id': 'task1',
                            'task_url': 'url/task1',
                            'tests': ['test4']
                        },
                        'heuristic_analysis': {
                            'suspected_cls': []
                        },
                        'tests': ['test4'],
                        'first_failure': 119,
                        'last_pass': 118,
                        'supported': True
                    }
                ],
                'unclassified_failures': [
                    {
                        'try_job':{
                            'ref_name': 'step1',
                            'try_job_key': 'm/b/120',
                            'status': result_status.NO_TRY_JOB_REASON_MAP[
                                analysis_status.PENDING],
                            'task_id': 'task2',
                            'task_url': 'url/task2',
                            'tests': ['test1']
                        },
                        'heuristic_analysis': {
                            'suspected_cls': []
                        },
                        'tests': ['test1'],
                        'first_failure': 120,
                        'last_pass': 119,
                        'supported': True
                    }
                ]
            }
        }
    }
    self.assertEqual(expected_result, updated_result)
  def testGetAnalysisResultWithTryJobInfoNoTryJobInfo(self):
    """Steps with no matching try-job info are dropped from the result."""
    organized_results = {
        'step1 on platform':{}
    }
    result = build_failure._GetAnalysisResultWithTryJobInfo(
        organized_results, 'n', 'b', 120)
    self.assertEqual({}, result)
  def testGetAnalysisResultWithTryJobInfoCompile(self):
    """Compile failures merge with compile try-job results (no test list)."""
    organized_results = {
        'compile': [
            {
                'first_failure': 120,
                'last_pass': 119,
                'supported': True,
                'suspected_cls': [
                    {
                        'build_number': 120,
                        'repo_name': 'chromium',
                        'revision': 'rev2',
                        'commit_position': None,
                        'url': None,
                        'score': 2,
                        'hints': {
                            'modified f99_2.cc (and it was in log)': 2,
                        },
                    }
                ],
                'tests': []
            }
        ]
    }
    result = build_failure._GetAnalysisResultWithTryJobInfo(
        organized_results, 'm', 'b', 120)
    expected_result = {
        'compile':{
            'results': {
                'reliable_failures': [
                    {
                        'try_job': {
                            'try_job_key': 'm/b/120',
                            'status': analysis_status.COMPLETED,
                            'try_job_build_number': 120,
                            'try_job_url': (
                                'http://build.chromium.org/p/tryserver.chromium'
                                '.linux/builders/linux_variable/builds/120'),
                            'culprit': {
                                'revision': 'rev2',
                                'commit_position': '2',
                                'review_url': 'url_2'
                            }
                        },
                        'heuristic_analysis': {
                            'suspected_cls': [
                                {
                                    'build_number': 120,
                                    'repo_name': 'chromium',
                                    'revision': 'rev2',
                                    'commit_position': None,
                                    'url': None,
                                    'score': 2,
                                    'hints': {('modified f99_2.cc '
                                               '(and it was in log)'): 2
                                    },
                                }
                            ]
                        },
                        'tests': [],
                        'first_failure': 120,
                        'last_pass': 119,
                        'supported': True
                    }
                ]
            }
        }
    }
    self.assertEqual(expected_result, result)
| StarcoderdataPython |
146804 | from maya.app.general.mayaMixin import MayaQWidgetDockableMixin
import pymel.core as pm
import PySide2.QtCore as QtCore
import PySide2.QtUiTools as QtUiTools
import PySide2.QtWidgets as QtWidgets
class FlottiWindow(QtWidgets.QDialog):
    """Base dialog for FlottiTools windows with a vertical root layout."""

    window_title = "FlottiTools Window"
    object_name = None

    def __init__(self, parent=None):
        super(FlottiWindow, self).__init__(parent=parent)
        # Only set an object name when a subclass provides one.
        if self.object_name is not None:
            self.setObjectName(self.object_name)
        self.setWindowTitle(self.window_title)
        self.setLayout(QtWidgets.QVBoxLayout())

    @staticmethod
    def clear_layout(layout):
        """Detach every widget in *layout* and schedule it for deletion."""
        for index in reversed(range(layout.count())):
            child = layout.itemAt(index).widget()
            child.setParent(None)
            child.deleteLater()
class FlottiMayaWindow(MayaQWidgetDockableMixin, FlottiWindow):
    """Dockable-in-Maya variant of FlottiWindow; subclasses override these."""
    window_title = "FlottiTools Maya Window"
    object_name = None
class FlottiWindowDesignerUI(FlottiWindow):
    """FlottiWindow whose contents are loaded from a Qt Designer .ui file.

    Subclasses must set ``ui_designer_file_path`` to the .ui file on disk;
    the loaded widget is stored on ``self.ui`` and added to the root layout.

    Raises:
        NotImplementedError: if a subclass did not set ``ui_designer_file_path``.
    """
    ui_designer_file_path = None

    def __init__(self, parent=None):
        super(FlottiWindowDesignerUI, self).__init__(parent=parent)
        if self.ui_designer_file_path is None:
            raise NotImplementedError()
        loader = QtUiTools.QUiLoader()
        uifile = QtCore.QFile(self.ui_designer_file_path)
        uifile.open(QtCore.QFile.ReadOnly)
        try:
            self.ui = loader.load(uifile)
        finally:
            # Close the QFile even if QUiLoader.load raises; the original
            # code leaked the open file handle on a failed load.
            uifile.close()
        self.layout().setContentsMargins(0, 0, 0, 0)
        self.layout().addWidget(self.ui)
class FlottiMayaWindowDesignerUI(MayaQWidgetDockableMixin, FlottiWindowDesignerUI):
    """Dockable-in-Maya variant of FlottiWindowDesignerUI; subclasses set the .ui path."""
    window_title = "FlottiTools Maya Window"
    object_name = None
class QHLine(QtWidgets.QFrame):
    """A thin sunken horizontal separator line."""

    def __init__(self):
        super(QHLine, self).__init__()
        self.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.setFrameShape(QtWidgets.QFrame.HLine)
class QVLine(QtWidgets.QFrame):
    """A thin sunken vertical separator line."""

    def __init__(self):
        super(QVLine, self).__init__()
        self.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.setFrameShape(QtWidgets.QFrame.VLine)
class NonScrollFocusedQComboBox(QtWidgets.QComboBox):
    """Combo box that ignores the mouse wheel, so scrolling a form cannot
    accidentally change its value."""
    def __init__(self, *args, **kwargs):
        super(NonScrollFocusedQComboBox, self).__init__(*args, **kwargs)
        # StrongFocus: focusable by click and tab (wheel input is discarded
        # by the wheelEvent override below).
        self.setFocusPolicy(QtCore.Qt.StrongFocus)
    def wheelEvent(self, *args, **kwargs):
        # Deliberately swallow all wheel events.
        pass
class RotatedButton(QtWidgets.QPushButton):
    """Push button painted rotated so its text/icon run vertically."""
    def paintEvent(self, event):
        # Rotate the painter 270 degrees and shift the origin so the
        # transposed style options are drawn along the vertical axis.
        painter = QtWidgets.QStylePainter(self)
        painter.rotate(270)
        painter.translate(-1 * self.height(), 0)
        painter.drawControl(QtWidgets.QStyle.CE_PushButton, self.getSyleOptions())
    def getSyleOptions(self):
        # NOTE(review): method name has a typo ("Syle") but is referenced by
        # paintEvent above; renaming would require updating all call sites.
        options = QtWidgets.QStyleOptionButton()
        options.initFrom(self)
        # Swap width/height so the option rect matches the rotated painting.
        size = options.rect.size()
        size.transpose()
        options.rect.setSize(size)
        # Rebuild the feature/state flags from the button's current state.
        options.features = QtWidgets.QStyleOptionButton.None_
        if self.isFlat():
            options.features |= QtWidgets.QStyleOptionButton.Flat
        if self.menu():
            options.features |= QtWidgets.QStyleOptionButton.HasMenu
        if self.autoDefault() or self.isDefault():
            options.features |= QtWidgets.QStyleOptionButton.AutoDefaultButton
        if self.isDefault():
            options.features |= QtWidgets.QStyleOptionButton.DefaultButton
        if self.isDown() or (self.menu() and self.menu().isVisible()):
            options.state |= QtWidgets.QStyle.State_Sunken
        if self.isChecked():
            options.state |= QtWidgets.QStyle.State_On
        if not self.isFlat() and not self.isDown():
            options.state |= QtWidgets.QStyle.State_Raised
        options.text = self.text()
        options.icon = self.icon()
        options.iconSize = self.iconSize()
        return options
class GroupBoxVisibilityToggle(QtWidgets.QGroupBox):
    """Checkable group box whose checkbox shows/hides an inner content widget."""

    def __init__(self, *group_box_args):
        super(GroupBoxVisibilityToggle, self).__init__(*group_box_args)
        self.setCheckable(True)
        self.setChecked(True)
        inner_layout = QtWidgets.QVBoxLayout()
        self.setLayout(inner_layout)
        self.visibility_widget = VisibilityToggleWidget()
        inner_layout.addWidget(self.visibility_widget)
        self.toggled.connect(self._sync_visibility)

    def _sync_visibility(self, _checked=None):
        """Slot: mirror the group box's checked state onto the content widget."""
        self.visibility_widget.setVisible(self.isChecked())
class VisibilityToggleWidget(QtWidgets.QWidget):
    """Plain container widget with a margin-free vertical layout."""

    def __init__(self):
        super(VisibilityToggleWidget, self).__init__()
        self.setLayout(QtWidgets.QVBoxLayout())
        self.layout().setContentsMargins(0, 0, 0, 0)
class MayaProgressBar(QtWidgets.QProgressBar):
    """Progress bar that forces a Maya viewport refresh on updates and can
    advance by pre-declared 'chunks' of work."""
    def __init__(self):
        super(MayaProgressBar, self).__init__()
        self.chunks = []              # per-chunk increments set via set_chunks()
        self.current_chunk_index = 0  # next entry of self.chunks to consume
    def reset(self):
        super(MayaProgressBar, self).reset()
        self.chunks = []
        self.current_chunk_index = 0
        # Repaint Maya so the cleared bar is visible immediately.
        pm.refresh()
    def update_value(self, value):
        # Set an absolute value and force a repaint.
        self.setValue(value)
        pm.refresh()
    def iterate_value(self, step_size=1):
        # Advance by step_size without forcing a repaint.
        self.setValue(self.value()+step_size)
    def iterate_chunk(self):
        # Advance by the next declared chunk's size (no repaint).
        self.setValue(self.value() + self.chunks[self.current_chunk_index])
        self.current_chunk_index += 1
    def update_iterate_value(self, step_size=1):
        self.update_value(self.value()+step_size)
    def update_iterate_chunk(self):
        self.iterate_chunk()
        pm.refresh()
    def set_chunks(self, chunk_max_values):
        # NOTE(review): the leading 0.01 chunk makes the first iterate_chunk()
        # nudge the bar off 0% (see comment below) but is deliberately
        # excluded from the maximum, so completing all real chunks still
        # reaches 100%.
        chunks = [0.01] # This gets our progress bar started reading 0% rather than just blank
        chunks.extend(chunk_max_values)
        self.chunks = chunks
        self.setMaximum(sum(chunk_max_values))
class ProgressBarWithLabel(QtWidgets.QWidget):
    """A MayaProgressBar stacked above a status label; most methods are thin
    delegations to the bar, optionally updating the label and repainting."""
    def __init__(self):
        super(ProgressBarWithLabel, self).__init__()
        layout = QtWidgets.QVBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        self.setLayout(layout)
        self.progress_bar = MayaProgressBar()
        self.label = QtWidgets.QLabel()
        layout.addWidget(self.progress_bar)
        layout.addWidget(self.label)
    def reset(self):
        # Clear both the label text and the bar state.
        self.label.setText('')
        self.progress_bar.reset()
    def set_maximum(self, maximum):
        self.progress_bar.setMaximum(maximum)
    def update_value(self, value):
        self.progress_bar.update_value(value)
    def iterate_value(self, step_size=1):
        self.progress_bar.iterate_value(step_size)
    def update_iterate_value(self, step_size=1):
        self.progress_bar.update_iterate_value(step_size)
    def update_label_and_iter_val(self, text):
        # Advance by one, set the label, and force a Maya repaint.
        self.progress_bar.setValue(self.progress_bar.value()+1)
        self.label.setText(text)
        pm.refresh()
    def update_label_and_add_val(self, text, value):
        # Advance by `value`, set the label, and force a Maya repaint.
        self.progress_bar.setValue(self.progress_bar.value()+value)
        self.label.setText(text)
        pm.refresh()
    def update_label(self, text):
        self.label.setText(text)
        pm.refresh()
    def update_label_and_value(self, text, value):
        # Set an absolute value plus label, then repaint.
        self.progress_bar.setValue(value)
        self.label.setText(text)
        pm.refresh()
    def iterate_chunk(self):
        self.progress_bar.iterate_chunk()
    def set_chunks(self, chunk_max_values):
        self.progress_bar.set_chunks(chunk_max_values)
    def update_iterate_chunk(self):
        self.iterate_chunk()
        pm.refresh()
    def update_label_and_iter_chunk(self, text):
        self.iterate_chunk()
        self.label.setText(text)
        pm.refresh()
| StarcoderdataPython |
129709 | <filename>yandex_checkout/domain/models/confirmation/confirmation_class_map.py
from yandex_checkout.domain.common.confirmation_type import ConfirmationType
from yandex_checkout.domain.common.data_context import DataContext
from yandex_checkout.domain.models.confirmation.request.confirmation_embedded import \
ConfirmationEmbedded as RequestConfirmationEmbedded
from yandex_checkout.domain.models.confirmation.request.confirmation_external import \
ConfirmationExternal as RequestConfirmationExternal
from yandex_checkout.domain.models.confirmation.request.confirmation_redirect import \
ConfirmationRedirect as RequestConfirmationRedirect
from yandex_checkout.domain.models.confirmation.request.confirmation_qr import \
ConfirmationQr as RequestConfirmationQr
from yandex_checkout.domain.models.confirmation.response.confirmation_embedded import \
ConfirmationEmbedded as ResponseConfirmationEmbedded
from yandex_checkout.domain.models.confirmation.response.confirmation_external import \
ConfirmationExternal as ResponseConfirmationExternal
from yandex_checkout.domain.models.confirmation.response.confirmation_redirect import \
ConfirmationRedirect as ResponseConfirmationRedirect
from yandex_checkout.domain.models.confirmation.response.confirmation_qr import \
ConfirmationQr as ResponseConfirmationQr
class ConfirmationClassMap(DataContext):
    """Maps each ConfirmationType to its request/response model class."""
    def __init__(self):
        # Register the two data contexts this map can resolve.
        super(ConfirmationClassMap, self).__init__(('request', 'response'))
    @property
    def request(self):
        # Confirmation classes used when building API requests.
        return {
            ConfirmationType.REDIRECT: RequestConfirmationRedirect,
            ConfirmationType.EXTERNAL: RequestConfirmationExternal,
            ConfirmationType.EMBEDDED: RequestConfirmationEmbedded,
            ConfirmationType.QR: RequestConfirmationQr
        }
    @property
    def response(self):
        # Confirmation classes used when parsing API responses.
        return {
            ConfirmationType.REDIRECT: ResponseConfirmationRedirect,
            ConfirmationType.EXTERNAL: ResponseConfirmationExternal,
            ConfirmationType.EMBEDDED: ResponseConfirmationEmbedded,
            ConfirmationType.QR: ResponseConfirmationQr
        }
| StarcoderdataPython |
4826264 | <filename>CompetitiveProgramming/CodingBat/Python/WarmUp-1/monkey_trouble.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
We have two monkeys, a and b, and the parameters a_smile and b_smile indicate if each is smiling.
We are in trouble if they are both smiling or if neither of them is smiling. Return True if we are in trouble.
monkey_trouble(True, True) → True
monkey_trouble(False, False) → True
monkey_trouble(True, False) → False
"""
from utils.args_to_r import validate
def main(a_smile: bool, b_smile: bool) -> bool:
    """Return True when the monkeys are in trouble.

    Trouble means both monkeys are smiling or neither is -- i.e. their
    smile states are equal, which collapses the original three-branch
    if-chain into a single comparison.
    """
    return a_smile == b_smile
# (args, expected) pairs mirroring the CodingBat examples in the docstring;
# validate() runs main against each case.
tests = [
    ((True, True), True),
    ((False, False), True),
    ((True, False), False)
]
validate(main, tests)
| StarcoderdataPython |
1706398 | from collections import Counter
from consts import NodeRoles
from tests.base_test import BaseTest
class TestRoleSelection(BaseTest):
    """End-to-end checks that master/worker roles end up as 3 masters + 2 workers."""

    def test_automatic_role_assignment(self, api_client, nodes, cluster):
        """Let the system automatically assign all roles in a satisfying environment."""
        new_cluster_id = cluster().id
        self.setup_hosts(cluster_id=new_cluster_id,
                         api_client=api_client,
                         nodes=nodes)
        self.set_network_params(cluster_id=new_cluster_id,
                                api_client=api_client,
                                controller=nodes.controller)
        self.expect_ready_to_install(cluster_id=new_cluster_id,
                                     api_client=api_client)
        assignments = self.start_installation(cluster_id=new_cluster_id,
                                              api_client=api_client)
        assert Counter(assignments.values()) == Counter(master=3, worker=2)

    def test_partial_role_assignment(self, api_client, nodes, cluster):
        """Let the system semi-automatically assign roles in a satisfying environment."""
        new_cluster_id = cluster().id
        cluster_hosts = self.setup_hosts(cluster_id=new_cluster_id,
                                         api_client=api_client,
                                         nodes=nodes)
        self.set_network_params(cluster_id=new_cluster_id,
                                api_client=api_client,
                                controller=nodes.controller)
        self.expect_ready_to_install(cluster_id=new_cluster_id,
                                     api_client=api_client)
        manual_roles = self.assign_roles(cluster_id=new_cluster_id,
                                         api_client=api_client,
                                         hosts=cluster_hosts,
                                         requested_roles=Counter(master=1, worker=1))
        assignments = self.start_installation(cluster_id=new_cluster_id,
                                              api_client=api_client)
        assert Counter(assignments.values()) == Counter(master=3, worker=2)
        # Every manually chosen (host, role) pair must survive into the
        # final assignment.
        assert set(tuple(r.values()) for r in manual_roles) <= set(assignments.items())
| StarcoderdataPython |
191303 | """
Textko platform for notify component.
For more details about this platform, please refer to the documentation at
https://github.com/textko/hass-notify
"""
# Import dependencies.
import logging
import requests
import json
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.notify import (
PLATFORM_SCHEMA, BaseNotificationService)
# Get logger instance.
_LOGGER = logging.getLogger(__name__)
# Set platform parameters.
CONF_API_URL_MSG = 'https://textko.com/api/v2/messages'
# NOTE(review): '<PASSWORD>' looks like a redaction artifact from the data
# pipeline; the original config key was presumably 'api_token' -- confirm
# before relying on this constant.
CONF_API_TOKEN = 'api_<PASSWORD>'
CONF_TO_NO = 'to_no'
# Validate parameter schema.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_API_TOKEN): cv.string,
    vol.Required(CONF_TO_NO): cv.string,
})
# Define service instance.
def get_service(hass, config, discovery_info=None):
    """Build the Textko notification service from the validated config."""
    api_token = config[CONF_API_TOKEN]
    recipient = config[CONF_TO_NO]
    return TextkoNotificationService(api_token, recipient)
# Implement the notification service.
class TextkoNotificationService(BaseNotificationService):
    """Notification service that POSTs messages to the Textko API."""

    def __init__(self, access_token, to_no):
        # Bearer token and destination number for the Textko messages API.
        self.access_token = access_token
        self.to_no = to_no

    def send_message(self, message="", **kwargs):
        """Send *message* to the configured number; log any non-200 response."""
        payload = json.dumps({'to_no': self.to_no, 'text': message})
        request_headers = {
            'Content-type': 'application/json',
            'Authorization': 'Bearer ' + self.access_token,
        }
        resp = requests.post(CONF_API_URL_MSG, data=payload,
                             headers=request_headers)
        if resp.status_code == 200:
            return
        # Surface the API's own error message and code alongside HTTP status.
        body = json.loads(resp.text)
        _LOGGER.error("Error %s : %s (Code %s)", resp.status_code,
                      body['response_msg'], body['http_code'])
3282697 | <gh_stars>1-10
# model parameters
BATCH_SIZE = 32      # samples per gradient update
EPOCHS = 30          # full passes over the training set
TRAIN_SIZE = 0.70    # fraction of data used for training (rest held out)
IMAGE_SIZE = 32      # input image side length -- presumably square images; confirm
| StarcoderdataPython |
3207336 | <reponame>ninanshoulewozaizhe/ShopAccount
from app.database.models import SalesVolumes
from app.database import db
from app.log import logger
def create_new_record(record):
    """Insert a SalesVolumes row from *record* and return its new id.

    *record* must supply 'pid', 'sid', 'pname', 'date' and 'sales' keys.
    """
    with db.auto_commit_db():
        row = SalesVolumes(pid=record['pid'], sid=record['sid'],
                           pname=record['pname'], date=record['date'],
                           sales=record['sales'])
        db.session.add(row)
        # Flush so the autogenerated primary key is populated before commit.
        db.session.flush()
        return row.id
def get_record_one_day(pid, date):
    """Return the sales record for product *pid* on *date*, or None."""
    return SalesVolumes.query.filter_by(pid=pid, date=date).first()
def get_shop_records_one_day(sid, date):
    """Return all sales records for shop *sid* on *date*."""
    return SalesVolumes.query.filter_by(sid=sid, date=date).all()
def get_shop_records_by_period(sid, start, end):
    """Return shop *sid*'s records with start <= date <= end, ordered by date."""
    query = SalesVolumes.query.filter_by(sid=sid) \
        .filter((SalesVolumes.date <= end) & (SalesVolumes.date >= start)) \
        .order_by(SalesVolumes.date)
    return query.all()
def get_records_by_period(pid, start, end):
    """Return product *pid*'s records with start <= date <= end, ordered by date."""
    query = SalesVolumes.query.filter_by(pid=pid) \
        .filter((SalesVolumes.date <= end) & (SalesVolumes.date >= start)) \
        .order_by(SalesVolumes.date)
    return query.all()
def update_record_sales(pid, date, sales):
    """Set the sales figure on (pid, date); return True if a row was found."""
    record = SalesVolumes.query.filter_by(pid=pid, date=date).first()
    if record is None:
        return False
    record.sales = sales
    db.session.commit()
    return True
def delete_record(pid, date):
    """Delete the record for (pid, date); return True if one existed."""
    record = SalesVolumes.query.filter_by(pid=pid, date=date).first()
    if record is None:
        logger.info(f'delete record (pid:{pid}, date:{date}) failed, record not exists')
        return False
    db.session.delete(record)
    db.session.commit()
    logger.info(f'delete record (pid:{pid}, date:{date}) succeed')
    return True
def delete_records_by_date(date):
    """Bulk-delete all sales records dated *date*; always returns True."""
    matching = SalesVolumes.query.filter_by(date=date)
    matching.delete()
    db.session.commit()
    logger.info(f'delete records (date:{date}) succeed')
    return True
def delete_records_by_pid(pid):
    """Bulk-delete all sales records for product *pid*; always returns True."""
    matching = SalesVolumes.query.filter_by(pid=pid)
    matching.delete()
    db.session.commit()
    logger.info(f'delete records (pid:{pid}) succeed')
    return True
def delete_records_by_sid(sid):
    """Bulk-delete all sales records for shop *sid*; always returns True."""
    matching = SalesVolumes.query.filter_by(sid=sid)
    matching.delete()
    db.session.commit()
    logger.info(f'delete records (sid:{sid}) succeed')
    return True
70201 | import argparse
import torch as t
import torch.nn as nn
import torchvision.transforms as transforms
# from tensorboardX import SummaryWriter
from torch.autograd import Variable
from torch.optim import Adam
from torchvision import datasets
from models import *
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='train')
parser.add_argument('--num-epochs', type=int, default=60, metavar='NI',
help='num epochs (default: 10)')
parser.add_argument('--batch-size', type=int, default=70, metavar='BS',
help='batch size (default: 70)')
parser.add_argument('--use-cuda', type=bool, default=False, metavar='CUDA',
help='use cuda (default: False)')
parser.add_argument('--learning-rate', type=float, default=0.0005, metavar='LR',
help='learning rate (default: 0.0005)')
parser.add_argument('--mode', type=str, default='vardropout', metavar='M',
help='training mode (default: simple)')
args = parser.parse_args()
# writer = SummaryWriter(args.mode)
assert args.mode in ['simple', 'dropout', 'vardropout'] # Invalid mode, should be in [simple, dropout, vardropout]
Model = {
'simple': SimpleModel,
'dropout': DropoutModel,
'vardropout': VariationalDropoutModel
}
Model = Model[args.mode]
dataset = datasets.MNIST(root='data/',
transform=transforms.Compose([
transforms.ToTensor()]),
download=True,
train=True)
train_dataloader = t.utils.data.DataLoader(dataset, batch_size=args.batch_size, shuffle=True)
dataset = datasets.MNIST(root='data/',
transform=transforms.Compose([
transforms.ToTensor()]),
download=True,
train=False)
test_dataloader = t.utils.data.DataLoader(dataset, batch_size=args.batch_size, shuffle=True, drop_last=True)
model = Model()
if args.use_cuda:
model.cuda()
optimizer = Adam(model.parameters(), args.learning_rate, eps=1e-6)
cross_enropy_averaged = nn.CrossEntropyLoss(size_average=True)
for epoch in range(args.num_epochs):
for iteration, (input, target) in enumerate(train_dataloader):
input = Variable(input).view(-1, 784)
target = Variable(target)
if args.use_cuda:
input, target = input.cuda(), target.cuda()
optimizer.zero_grad()
loss = None
model.train()
if args.mode == 'simple':
loss = model.loss(input=input, target=target, average=True)
elif args.mode == 'dropout':
loss = model.loss(input=input, target=target, p=0.4, average=True)
else:
likelihood, kld = model.loss(input=input, target=target, average=True)
coef = min(epoch / 40., 1.)
loss = likelihood + kld * coef
loss.backward()
optimizer.step()
if iteration % 50 == 0:
print('train epoch {}, iteration {}, loss {}'.format(epoch, iteration, loss.cpu().data.numpy()))
if iteration % 100 == 0:
model.eval()
loss = 0
for input, target in test_dataloader:
input = Variable(input).view(-1, 784)
target = Variable(target)
if args.use_cuda:
input, target = input.cuda(), target.cuda()
if args.mode == 'simple':
loss += model.loss(input=input, target=target, average=False).cpu().data.numpy()
elif args.mode == 'dropout':
loss += model.loss(input=input, target=target, p=0., average=False).cpu().data.numpy()
else:
loss += model.loss(input=input, target=target, average=False).cpu().data.numpy()
loss = loss / (args.batch_size * len(test_dataloader))
print('valid epoch {}, iteration {}, loss {}'.format(epoch, iteration, loss))
print('_____________')
# writer.add_scalar('data/loss', loss, epoch * len(train_dataloader) + iteration)
# writer.close()
| StarcoderdataPython |
4813942 | <reponame>longwangjhu/LeetCode
# https://leetcode.com/problems/kth-smallest-instructions/
# Bob is standing at cell (0, 0), and he wants to reach destination: (row,
# column). He can only travel right and down. You are going to help Bob by
# providing instructions for him to reach destination.
# The instructions are represented as a string, where each character is either:
# Multiple instructions will lead Bob to destination. For example, if destination
# is (2, 3), both "HHHVV" and "HVHVH" are valid instructions.
# However, Bob is very picky. Bob has a lucky number k, and he wants the kth
# lexicographically smallest instructions that will lead him to destination. k is
# 1-indexed.
# Given an integer array destination and an integer k, return the kth
# lexicographically smallest instructions that will take Bob to destination.
################################################################################
# find the kth element in full combination of "H" and "V"
# assume append "V" and count how many elements would be skipped ([H|...])
from math import comb
from typing import List
class Solution:
    def kthSmallestPath(self, destination: List[int], k: int) -> str:
        """Return the k-th lexicographically smallest H/V path to `destination`.

        Greedy digit-by-digit construction: at each step, appending "V" skips
        all strings that start with "H" here, i.e. C(slots_left, V_remaining)
        of them; append "V" only when k exceeds that count.
        """
        v_remaining, h_total = destination[0], destination[1]
        total_steps = v_remaining + h_total
        path = []
        for step in range(total_steps):
            if v_remaining == 0:
                # Only horizontal moves remain.
                path.append("H")
                continue
            slots_left = total_steps - step - 1
            skipped = comb(slots_left, v_remaining)
            if skipped < k:
                # All `skipped` H-first candidates rank below k: commit to "V".
                k -= skipped
                v_remaining -= 1
                path.append("V")
            else:
                path.append("H")
        return "".join(path)
| StarcoderdataPython |
1639296 | <filename>gazer/ensembler.py
from __future__ import print_function
import os, sys, time, copy, glob, random, warnings
from operator import itemgetter
import numpy as np
from sklearn.externals import joblib
from tqdm import tqdm_notebook as tqdm
from .metrics import get_scorer
from .sampling import Loguniform
from .core import GazerMetaLearner
from .library import library_config
from .optimization import param_search
def single_fit(estimator, scorer, X, y, path, i, **kwargs):
    """Fit `estimator` on (X, y), pickle it under `path`, score it on the
    training data.

    Returns (modelfile, train_score) on success, (None, -Inf) on failure
    (a warning is emitted with the failure reason).
    """
    modelfile = os.path.join(path, "model_{:04d}train.pkl".format(i))
    try:
        estimator.set_params(**kwargs).fit(X, y)
        joblib.dump(estimator, modelfile)
        return (modelfile, scorer(estimator.predict(X), y))
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; fit failures stay best-effort.
        _, desc, _ = sys.exc_info()
        warnings.warn("Could not fit and save: {}".format(desc))
        return (None, float("-Inf"))
def _sklearn_score_fitted(path, X, y, scorer):
    """Load a pickled scikit-learn model from `path`, predict on X and score
    against y.

    Returns (path, predictions, score), or None when loading/scoring fails.
    """
    try:
        model = joblib.load(path)
        yhat = model.predict(X)
        score = scorer(yhat, y)
        return (path, yhat, score)
    except Exception:
        # Narrowed from a bare `except:`; a missing/corrupt model file simply
        # yields None so the caller can filter it out.
        return None
def _keras_score_fitted(path, X, y, scorer):
    """ Load previously fitted keras model. Then predict
    on `X` and return score based on comparison to `y`.

    Returns (path, predictions, score), or None when loading/scoring fails.
    A private TF graph/session is used so concurrent scoring jobs do not
    clash on the default graph.
    """
    from keras.models import load_model
    import tensorflow as tf

    config = tf.ConfigProto()
    graph = tf.Graph()
    with graph.as_default():
        sess = tf.Session(graph=graph, config=config)
        with sess.as_default():
            try:
                model = load_model(path)
                yhat = model.predict_classes(X)
                score = scorer(yhat, y)
                return (path, yhat, score)
            except Exception:
                # Narrowed from a bare `except:` so process-level interrupts
                # still propagate.
                return None
class GazerMetaEnsembler(object):
    """
    Ensembler class.

    Parameters:
    ------------
        learner : instance of GazerMetaLearner class
            Used to infer which algorithms to include in the
            ensembling procedure

        data_shape : tuple, length 2
            Should specify input data dimensions according to
            (X.shape[0], X.shape[1]) where `X` is the canonical data-matrix
            with shape (n_samples, n_features)

        models : optional, dict, default: None
            Only used when instantiating from a pre-existing state.
            Activated by from_state = True (see below).
            - Note: automatically computed by classmethod 'from_state'
            and passed into the constructor. You should never set
            this variable manually: use 'GazerMetaEnsembler.from_state()'
            and pass the top-directory wherein your model files are located.

        from_state : bool, default: False
            Instantiate an ensembler from pre-existing files when True.
            The default behavior is to build a new ensemble from scratch
            by calling the internal '_build()' method.

    Notes:
    ------
    >>> ensembler = GazerMetaEnsembler.from_state(files)
    # No need to perform fitting of models at this point
    # since we are loading from a state where this is taken care of.
    >>> ensembler.hillclimb()
    # Instead, dive straight into hillclimbing: make sure that there is consistency
    # between the data you have previously trained on, and the validation set you
    # pass into the hillclimbing method.
    """
    def __init__(self, learner, data_shape, models=None, from_state=False):
        self.learner = learner
        # Fixed: the original check `not isinstance(...) and ...` combined the
        # clauses with `and`, so invalid inputs slipped through silently.
        if learner is not None and not isinstance(learner, GazerMetaLearner):
            raise TypeError("learner must be a GazerMetaLearner.")

        self.data_shape = data_shape
        if data_shape is not None and not (
                isinstance(data_shape, tuple) and len(data_shape) == 2):
            raise TypeError("data_shape must be a 2-tuple.")

        if not from_state:
            # Fresh ensembler: derive the candidate pool from the learner.
            self.ensemble = self._build()
            self.models = {}
            self.allow_train = True
        else:
            # Restored from disk: models are already fitted, training disabled.
            self.ensemble = None
            self.models = models
            self.allow_train = False
@classmethod
def from_state(cls, topdir):
kwargs = {'learner': None, 'data_shape': None, 'from_state': True}
kwargs.update({'models': cls.fetch_state_dict(topdir)})
return cls(**kwargs)
@staticmethod
def fetch_state_dict(topdir):
d = {}
assert os.path.isdir(topdir)
search_tree = os.walk(topdir)
_ = next(search_tree)
for dirpath, dirnames, dirfiles in search_tree:
if dirnames:
raise Exception("Tree is too deep. Remove subdirs: {}".format(dirnames))
if dirfiles:
key = os.path.basename(dirpath)
d[key] = dirfiles
else:
warnings.warn("Empty dir: {} (skipping)".format(dirpath))
return d
def _build(self):
""" Build ensemble from base learners
contained in the `learner` object.
"""
lib = library_config(self.learner.names, *self.data_shape)
build = {}
for name, grid in lib:
# Check metadata to determine if external
info = self.learner.clf[name].get_info()
is_external = info.get('external', False)
# Here we take care of external packages with their
# own api
if is_external:
if name=='neuralnet':
build[name] = grid
else:
build[name] = self._gen_templates(name, grid)
return build
def _gen_templates(self, name, params):
""" Here we generate estimators to later fit.
"""
clf = self.learner._get_algorithm(name)
estimators = []
for param in params:
par = param['param']
premise = param['config']
values = self._gen_grid(param['grid'])
for value in values:
estimator = copy.deepcopy(clf.estimator)
pars = {par:value}
pars.update(premise)
try:
estimator.set_params(**pars)
except:
warnings.warn("Failed to set {}".format(par))
continue
estimators.append(estimator)
del estimator
return estimators
def _gen_grid(self, grid):
""" Generate a config grid.
"""
method = grid.get('method', None)
assert method in ('take', 'sample')
if method=='take':
return grid['values']
elif method=='sample':
category = grid.get('category', None)
assert category in ('discrete', 'continuous')
low, high, points, prior = (
grid['low'], grid['high'], grid['numval'], grid['prior'])
if category=='discrete':
raise NotImplementedError('Discrete sampling not implemented yet.')
elif category=='continuous':
if prior=='loguniform':
return Loguniform(low=low, high=high, size=points).range()
else:
return np.linspace(low, high, points, endpoint=True)
    def fit(self, X, y, save_dir, scoring='accuracy', n_jobs=1, verbose=0, **kwargs):
        """
        Fit an ensemble of algorithms.

        - Models are pickled under the `save_dir`
          folder (each algorithm will have a separate folder in the tree)

        - If directory does not exist, we attempt to create it.

        Parameters:
        -----------
            X : matrix-like
                2D matrix of shape (n_samples, n_features)

            y : array-like
                Label vector of shape (n_samples,)

            save_dir : str
                A valid folder wherein pickled algorithms will be saved

            scoring : str or callable
                Used when obtaining training data score
                Fetches get_scorer() from local metrics.py module

            n_jobs : integer, default: 1
                If n_jobs > 1 we use parallel processing to fit and save
                scikit-learn models.
                Note: it is not used when training the neural network.

            verbose : integer, default: 0
                Control verbosity during training process.

            **kwargs:
                Variables related to scikit-learn estimator.
                Used to alter estimator parameters if needed (such as e.g. n_jobs)

                Example:
                    - Use e.g. {'random_forest': {'n_jobs': 4}} to use parallel
                    processing when fitting the random forest algorithm.
                    - Note that the key needs to match the a key in the `ensemble` dict
                    to take effect.
                    - The change takes place through estimator.set_params()

        Returns:
        --------
        Dictionary with paths to fitted and pickled learners, as well as scores on
        training data. Note that joblib is used to pickle the data.

        """
        # Ensemblers restored via `from_state()` are read-only: refuse training.
        if not self.allow_train:
            raise Exception("Loaded from existing state: training not possible. "+\
                            "Try calling .hillclimb(X, y,..) method instead.")

        if not save_dir:
            raise Exception("'{}' is not a valid directory.".format(save_dir))
        # Reuse an existing folder (warning only) or create a fresh one.
        if os.path.exists(save_dir):
            warnings.warn("Warning: overwriting existing folder {}.".format(save_dir))
        else:
            os.makedirs(save_dir)

        # Delegate the actual work; `self.models` maps name -> fitted artifacts.
        self.models = self._fit(X=X, y=y, save_dir=save_dir,
                                scorer=get_scorer(scoring),
                                n_jobs=n_jobs, verbose=verbose,
                                **kwargs)
    def _fit(self, X, y, save_dir, scorer, n_jobs, verbose, **kwargs):
        """ Implement fitting.

        Trains the neural network (if present) via random parameter search,
        then fits every scikit-learn candidate — optionally in parallel —
        pickling each model under `save_dir/<name>/`.
        """
        # Keep track of model and score
        # All relevant data is available in `history`
        history = {}

        names = list(self.ensemble.keys())
        for name in names:
            os.makedirs(os.path.join(save_dir, name))

        # The neural net is handled separately (own API) and popped from the
        # ensemble so the sklearn loop below does not see it.
        name = 'neuralnet'
        if name in names:
            args, param_grid = self.ensemble.pop(name)
            n_iter = args['n_iter']
            data = {'train': (X, y), 'val': None}
            modelfiles = [os.path.join(save_dir, name, file) for file in args['modelfiles']]
            _, df = param_search(
                self.learner, param_grid,
                data=data,
                type_of_search='random',
                n_iter=n_iter,
                name=name,
                modelfiles=modelfiles)
            # NOTE(review): this stores a lazy `zip` object while the sklearn
            # branch stores a sorted list — confirm downstream consumers cope.
            history[name] = zip(modelfiles, df.head(len(modelfiles))['train_score'].values)

        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            for name, estimators in self.ensemble.items():
                path = os.path.join(save_dir, name)
                kwarg = kwargs.get(name, {})
                if n_jobs != 1:
                    # Threaded fan-out; single_fit returns (modelfile, score).
                    models = joblib.Parallel(n_jobs=n_jobs, verbose=verbose, backend="threading")(
                        joblib.delayed(single_fit)(estimator, scorer, X, y, path, i, **kwarg)
                        for i, estimator in enumerate(estimators, start=1))
                else:
                    models = []
                    for i, estimator in enumerate(tqdm(estimators, desc="{}".format(name), ncols=120)):
                        this_modelfile, this_score = single_fit(estimator, scorer, X, y, path, i, **kwarg)
                        models.append((this_modelfile, this_score))
                # Best (highest) training score first.
                history[name] = sorted(list(models), key=lambda x: -x[1])

        # Purge any failed fits
        # NOTE(review): the rebuilt tuples are (name, file) — the score is
        # dropped here; callers iterating `self.models` should expect that.
        for name, models in history.items():
            history[name] = [(name, file) for file, _ in models if file is not None]

        return history
    def _add_networks(self, clf, X, y, path):
        """Add to ensemble repository a set of keras neural network
        models

        Trains `clf` with checkpointing enabled so weight snapshots land in
        `path`, then re-loads every snapshot and records its training loss.
        Returns (clf, [(weightfile, loss), ...]) sorted by ascending loss.
        """
        y_ = clf.y_check(y, verbose=0)

        # Prepare for ensembling
        os.makedirs(path)
        clf.set_param('chkpnt_dir', path)

        # When 'ensemble' is set to True,
        # checkpointing to the 'path' folder is enabled
        clf.set_param('ensemble', True)

        # Train
        print("Training neural net..")
        start = time.time()
        clf.fit(X, y_, verbose=0)
        print("Train time: {:.2f} min"
              .format((time.time()-start)/60.))
        time.sleep(1)

        # Evaluate and save
        patterns = ('*.hdf5','*.h5','*.h5py')
        weightfiles = []
        for pattern in patterns:
            weightfiles += glob.glob(os.path.join(path, pattern))

        model = clf.estimator
        models = []
        for weightfile in tqdm(weightfiles, desc="Net (save wts)", ncols=120):
            model.load_weights(weightfile)
            # NOTE(review): `score` from evaluate() is computed but unused —
            # only the loss is kept for ranking; confirm that is intended.
            loss, score = model.evaluate(X, y_, verbose=0)
            models.append((weightfile, np.round(loss, decimals=4)))
        del y_, model

        # We sort according to loss: lower is better
        return (clf, sorted(models, key=lambda x: x[1]))
    def hillclimb(self, X, y, n_best=0.1, p=0.3, iterations=10, scoring='accuracy',
                  greater_is_better=True, n_jobs=1, verbose=0):
        """
        Perform hillclimbing on the validation data

        Parameters:
        ------------
            X : validation data, shape (n_samples, n_features)

            y : validation labels, shape (n_samples,)

            n_best : int or float, default: 0.1
                Specify number (int) or fraction (float) of classifiers
                to use as initial ensemble. The best will be chosen.

            p : float, default: 0.3
                Fraction of classifiers to select for bootstrap

            iterations : int, default: 10
                Number of separate hillclimb loop iterations to perform
                Due to the stochastic nature of the ensemble selection
                we try 'iterations' times to find the best one

            scoring : str, default: 'accuracy'
                The metric to use when hillclimbing

            greater_is_better : boolean, default: True
                If True then a higher score on the validation
                set is better.

            n_jobs : int, default: 1
                Parallel processing of files.

            verbose : int, default: 0
                Whether to output extra information or not.
                - Set verbose = 1 to get info.

        """
        # Resolve `n_best` into an absolute count of seed classifiers.
        # NOTE(review): `self.models` is a dict after _fit(); len() counts
        # algorithm names, not individual model files — verify intent.
        if isinstance(n_best, float):
            grab = int(n_best*len(self.models))
        elif isinstance(n_best, int):
            grab = n_best
        else:
            raise TypeError("n_best should be int or float.")

        # NOTE(review): iterating a dict yields keys only, so `for name, path
        # in self.models` unpacks key strings — this looks like it expects a
        # list of (name, path) tuples; confirm the shape of self.models here.
        nets = [path for name, path in self.models if (name == 'neuralnet')]
        clfs = [path for name, path in self.models if (name != 'neuralnet')]

        scorer = get_scorer(scoring)
        parallel = joblib.Parallel(n_jobs=n_jobs,
                                   verbose=verbose,
                                   backend="threading")
        # Score every saved model on the validation set (warnings silenced).
        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            sklearn = parallel(joblib.delayed(_sklearn_score_fitted)(path, X, y, scorer)
                               for path in clfs) if clfs else []
            time.sleep(1)
            external = parallel(joblib.delayed(_keras_score_fitted)(path, X, y, scorer)
                                for path in nets) if nets else []

        # Drop failed loads (None) and sort by validation score.
        pooled = sorted([clf for clf in sklearn+external if not clf is None],
                        key=itemgetter(2))
        del sklearn
        del external

        if verbose > 0:
            # NOTE(review): max_score is a (path, preds, score) tuple here;
            # np.round on it will fail — probably meant max(...)[2].
            max_score = max(pooled, key=itemgetter(2))
            print("Single model max validation score = {}".format(np.round(max_score, 4)))

        # Re-key the pool as (str_index, classifier_path, predictions).
        pooled = [(str(idx), clf, preds) for idx, (clf, preds, _) in enumerate(pooled)]
        ensemble = pooled[:grab]

        # Seed weights: 1 for members of the initial ensemble, 0 otherwise.
        weights = {idx: 0. for idx, *_ in pooled}
        for idx, *_ in ensemble: weights[idx] = 1.

        if verbose > 0:
            print("Best model: {}".format(ensemble[0][1]))

        # Repeat the stochastic climb; keep every successful run.
        # NOTE(review): `greater_is_better` is not forwarded to
        # _hillclimb_loop even though the loop's ranking depends on it.
        all_ensembles = []
        for _ in range(iterations):
            this_ensemble = self._hillclimb_loop(X = X, y = y, scorer = scorer, ensemble = ensemble,
                                                 weights = weights, pooled = pooled, p = p, verbose = verbose)
            if this_ensemble:
                all_ensembles.append(this_ensemble)

        # Each run returns [(path, weight), ..., final_score]; split and pick best.
        scores = []
        ensembles = []
        for ensemble in all_ensembles:
            scores.append(ensemble[-1])
            ensembles.append(ensemble[:-1])
        max_score = max(scores)
        return max_score, ensembles[scores.index(max_score)]
def _hillclimb_loop(self, X, y, scorer, ensemble, weights, pooled, p, verbose, seed=None):
""" Execute hillclimb loop.
"""
max_iter = 100
val_scores = []
best_score = float("-Inf") if greater_is_better else float("Inf")
if seed is not None:
np.random.seed(seed)
scargs = {'greater_is_better': greater_is_better}
hc_weights = weights.copy()
hc_ensemble = ensemble.copy()
hc_pool = self.sample_algorithms(p, pooled)
curr_score = self.score(hc_ensemble, hc_weights, y, scorer)
if verbose > 0:
print("Initial ensemble score = {:.4f}".format(curr_score))
for i in range(1, max_iter):
for algorithm in hc_pool:
idx = algorithm[0]
local_ensemble = hc_ensemble.copy()
local_ensemble.append(algorithm)
local_weights = hc_weights.copy()
local_weights[idx] += 1
this_score = self.score(local_ensemble, local_weights, y, scorer)
if rank_scores(this_score, best_score, **scargs)
best_idx = idx
best_score = this_score
best_algorithm = [algorithm]
if rank_scores(curr_score, best_score, strict=False, **scargs)
print("Failed to improve. Updated score was: {:.4f}".format(best_score))
break
elif rank_scores(best_score, curr_score, **scargs)
curr_score = best_score
hc_weights[best_idx] += 1
if not best_idx in self.get_idx(hc_ensemble):
hc_ensemble += best_algorithm
val_scores.append((i, curr_score))
if verbose > 0:
print("Loop iter: {} \tScore: {:.4f}".format(*val_scores[-1]))
weighted_ensemble = [(path, hc_weights[idx]) for idx, path, _ in hc_ensemble]
weighted_ensemble.append(val_scores[-1][-1])
return weighted_ensemble
@staticmethod
def rank_scores(score, score_to_compare, greater_is_better, strict=True):
if strict:
op = operator.gt if greater_is_better else operator.lt
else:
op = operator.ge if greater_is_better else operator.le
return op(score, score_to_compare)
    def score(self, ensemble, weights, y, scorer):
        """ Compute weighted majority vote.

        `ensemble` is a list of (idx, path, predictions); `weights` maps idx
        to the member's vote weight. Columns of `preds` line up with `wts`.
        """
        wts = np.zeros(len(ensemble))
        preds = np.zeros((len(y), len(ensemble)), dtype=int)
        for col, (idx, _, pred) in enumerate(ensemble):
            wts[col] = weights[idx]
            preds[:, col] = pred
        return self.weighted_vote_score(wts, preds, y, scorer)
    def weighted_vote_score(self, weights, preds, y, scorer):
        """ Score an ensemble of classifiers using weighted voting.

        For each class, sums the weights of the members voting for it
        (one "belief" row per class), then picks the argmax class per sample.
        NOTE(review): when only a single class appears in `preds`, `weighted`
        stays 1-D and `np.argmax(weighted.T, axis=1)` will fail — confirm this
        edge case cannot occur upstream.
        """
        classes = np.unique(preds)
        classmapper = {}
        for i, cls in enumerate(classes):
            classmapper[i] = cls
            # Weighted vote mass for class `cls`, one entry per sample.
            belief = np.matmul(preds[:,:]==cls, weights)
            weighted = belief if i==0 else np.vstack((weighted, belief))
        predicted_class = np.array(
            list(map(lambda idx: classmapper[idx], np.argmax(weighted.T, axis=1))))
        return scorer(predicted_class, y)
def sample_algorithms(self, p, pool):
""" Sample algorithms from repository
"""
idxmapper = {idx: (idx, clf, pr) for idx, clf, pr in pool}
if isinstance(p, float):
size = int(p * float(len(pool)))
elif isinstance(p, int):
size = p
return list(map(lambda idx: idxmapper[idx],
np.random.choice(self.get_idx(pool),
size=size, replace=False)))
def get_idx(self, item):
return [idx for idx, *_ in item] | StarcoderdataPython |
1660454 | <reponame>lanfis/Spider
#!/usr/bin/env python
# license removed for brevity
# Driver script for Facebook_Finder: logs in and parses one personal page.
import requests
from bs4 import BeautifulSoup
import sys
import os
# Make the script's own folder importable so `modules.*` resolves regardless
# of the working directory it is launched from.
current_folder = os.path.dirname(os.path.realpath(__file__))
sys.path.append(current_folder)
import time
from modules.Facebook_Finder import Facebook_Finder

ff = Facebook_Finder(is_cookies_clear=True, is_debug=True, is_window=True)
# NOTE(review): hard-coded account credentials below — move them to
# environment variables or a secrets store before committing/deploying.
ff.login("<EMAIL>", "f2mPqDDG")
ff.link("https://www.facebook.com/sukuze?__tn__=%2Cd-]-h-R&eid=ARCKyYNC5j4oE78j13w8HaycmOJLSU_TQUlAHl50Yfl2jW9KB65c3Nf4Xjp9vwJNaZWUModv5YkidnO5")
ff.parse_personal_page()
# The blocks below are alternative flows kept for reference (user search,
# group post parsing, other profiles).
#search_user_list = ff.user_search("吳音寧")
#for search_user in search_user_list:
    #ff.link(search_user)
    #ff.parse_personal_page()
#ff.link("https://www.facebook.com/groups/WuYinlingFanGroup/")
#ff.post_parser()
#ff.link("https://www.facebook.com/profile.php?id=100001277912128&__tn__=%2Cd-]-h-R&eid=ARBo_xeaJ8T0r8X6IQFxWM99sqIXjOpxCdTxL9g5s1dVhTKT1kJj44yQKvXMy1QNnx7pNQ6mK57MzBdk")
#ff.link("https://www.facebook.com/profile.php?id=100022934512189")
#ff.link("https://www.facebook.com/groups/451357468329757/?jazoest=2651001208210110412052665652120821001147665108731081051021078111868715776110715210810852651197711411010566768910065586510012079120113814597119578010410472116896948114861065253116104979811212210612210649121104102881201047611210511111065")
#ff.parse_personal_page()
# Keep the browser window alive long enough to inspect the result.
time.sleep(20)
| StarcoderdataPython |
18007 | '''
Leetcode problem No 862 Shortest Subarray with Sum at Least K
Solution written by <NAME> on 1 July, 2018
'''
import collections
class Solution(object):
    def shortestSubarray(self, A, K):
        """
        Return the length of the shortest contiguous subarray of A whose sum
        is at least K, or -1 if none exists.

        Monotonic-deque sweep over prefix sums: the deque holds candidate
        start indices with strictly increasing prefix values.

        :type A: List[int]
        :type K: int
        :rtype: int
        """
        prefix = [0]
        for value in A:
            prefix.append(prefix[-1] + value)

        best = len(A) + 1
        window = collections.deque()
        for idx, total in enumerate(prefix):
            # Pop starts that already satisfy the sum constraint — any later
            # end index would only give a longer subarray from them.
            while window and total - prefix[window[0]] >= K:
                best = min(best, idx - window.popleft())
            # Drop starts with prefix >= current: the current index is both
            # later and at least as small, hence strictly better.
            while window and total <= prefix[window[-1]]:
                window.pop()
            window.append(idx)

        return best if best <= len(A) else -1
def main():
    """Smoke-test shortestSubarray against known cases (prints results)."""
    s = Solution()
    print(s.shortestSubarray([2,-1,2], 3))
    print(s.shortestSubarray([1,2], 4))
    print(s.shortestSubarray([1], 1))
    print(s.shortestSubarray([1,2,3,-5,4,-7,5,-8,6,-9,7,8,-4], 5)) #1
    print(s.shortestSubarray([1,2,-5,3,-5,4,-7,5,-8,6,-9,7,8,-4], 5))


# Guarded so importing this module no longer runs the demo as a side effect;
# executing the file as a script behaves exactly as before.
if __name__ == "__main__":
    main()
| StarcoderdataPython |
4837416 | import os
import imageio
import numpy as np
import tensorflow as tf
from PIL import Image
from ..utils import facenet
from ..utils import detect_face
# Set allow_pickle=True
# NOTE(review): this process-wide monkey-patch forces allow_pickle=True for
# EVERY np.load call — pickled arrays from untrusted files are a code-execution
# risk; prefer passing allow_pickle=True explicitly at the call sites needing it.
np_load_old = np.load
np.load = lambda *a, **k: np_load_old(*a, allow_pickle=True, **k)
class AlignImgDB:
    """Aligns a face-image dataset with an MTCNN detector (facenet pipeline)."""

    def __init__(self, datadir, output_dir_path, mtcnn_model_dir):
        """Load MTCNN networks and prepare the output directory.

        datadir: root of the input image dataset (one folder per identity).
        output_dir_path: destination for aligned crops (created if missing).
        mtcnn_model_dir: directory holding the pretrained MTCNN weights.
        """
        # Config variables
        self.minsize = 20  # minimum size of face
        self.threshold = [0.6, 0.7, 0.7]  # three steps's threshold
        self.factor = 0.709  # scale factor
        self.margin = 44
        self.image_size = 182
        self.datadir = datadir
        # Make sure output_dir exists, if not create it
        self.output_dir = os.path.expanduser(output_dir_path)
        if not os.path.exists(self.output_dir):
            os.makedirs(self.output_dir)
        print('Creating networks and loading parameters')
        # TF1-style graph/session; cap GPU memory at 50% so other jobs can share.
        with tf.Graph().as_default():
            gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.5)
            sess = tf.Session(config=tf.ConfigProto(
                gpu_options=gpu_options, log_device_placement=False))
            with sess.as_default():
                self.pnet, self.rnet, self.onet = detect_face.create_mtcnn(sess, mtcnn_model_dir)
    def perform_alignment(self):
        """Detect, crop and resize one face per input image.

        Writes aligned PNGs mirroring the class-folder layout under
        `self.output_dir`, plus a bounding-box log file. Images that already
        have an aligned output are skipped.
        """
        # Load the dataset
        self.dataset = facenet.get_dataset(self.datadir)

        # Add a random key to the filename to allow alignment using multiple processes
        random_key = np.random.randint(0, high=99999)
        bounding_boxes_filename = os.path.join(
            self.output_dir, 'bounding_boxes_%05d.txt' % random_key)

        with open(bounding_boxes_filename, "w") as text_file:
            nrof_images_total = 0
            nrof_successfully_aligned = 0
            for cls in self.dataset:
                output_class_dir = os.path.join(self.output_dir, cls.name)
                if not os.path.exists(output_class_dir):
                    os.makedirs(output_class_dir)
                for image_path in cls.image_paths:
                    nrof_images_total += 1
                    filename = os.path.splitext(os.path.split(image_path)[1])[0]
                    output_filename = os.path.join(output_class_dir, filename + '.png')
                    print("Source Image: %s" % image_path)
                    if not os.path.exists(output_filename):
                        try:
                            img = imageio.imread(image_path)
                            print('Read data dimension: ', img.ndim)
                        except (IOError, ValueError, IndexError) as e:
                            errorMessage = '{}: {}'.format(image_path, e)
                            print(errorMessage)
                        else:
                            # Unusable image (scalar/1-D): log and move on.
                            if img.ndim < 2:
                                print('Error! Unable to align "%s"' % image_path)
                                text_file.write('%s\n' % (output_filename))
                                continue
                            # Grayscale -> RGB, then drop any alpha channel.
                            if img.ndim == 2:
                                img = facenet.to_rgb(img)
                                print('to_rgb data dimension: ', img.ndim)
                            img = img[:, :, 0:3]
                            print('After data dimension: ', img.ndim)

                            bounding_boxes, _ = detect_face.detect_face(
                                img, self.minsize, self.pnet, self.rnet, self.onet, self.threshold, self.factor)
                            nrof_faces = bounding_boxes.shape[0]
                            print('Number of Detected Face(s): %d' % nrof_faces)
                            if nrof_faces > 0:
                                det = bounding_boxes[:, 0:4]
                                img_size = np.asarray(img.shape)[0:2]
                                if nrof_faces > 1:
                                    # Several faces: keep the largest one,
                                    # penalised by its distance from center.
                                    bounding_box_size = (
                                        det[:, 2] - det[:, 0]) * (det[:, 3] - det[:, 1])
                                    img_center = img_size / 2
                                    offsets = np.vstack([(det[:, 0] + det[:, 2]) / 2 - img_center[1],
                                                         (det[:, 1] + det[:, 3]) / 2 - img_center[0]])
                                    offset_dist_squared = np.sum(
                                        np.power(offsets, 2.0), 0)
                                    # some extra weight on the centering
                                    index = np.argmax(
                                        bounding_box_size - offset_dist_squared * 2.0)
                                    det = det[index, :]

                                det = np.squeeze(det)
                                bb_temp = np.zeros(4, dtype=np.int32)

                                bb_temp[0] = det[0]
                                bb_temp[1] = det[1]
                                bb_temp[2] = det[2]
                                bb_temp[3] = det[3]

                                try:
                                    cropped_temp = img[bb_temp[1]:bb_temp[3], bb_temp[0]:bb_temp[2], :]
                                    # scaled_temp = misc.imresize(
                                    #     cropped_temp, (image_size, image_size), interp='bilinear')
                                    scaled_temp = np.array(Image.fromarray(cropped_temp).resize(
                                        (self.image_size, self.image_size), resample=Image.BILINEAR))

                                    nrof_successfully_aligned += 1
                                    imageio.imsave(output_filename, scaled_temp)
                                    text_file.write('%s %d %d %d %d\n' % (
                                        output_filename, bb_temp[0], bb_temp[1], bb_temp[2], bb_temp[3]))
                                except Exception as e:
                                    # NOTE(review): a failed crop/resize DELETES the
                                    # source image — destructive; confirm intended.
                                    os.remove(image_path)
                            else:
                                print('Error! Unable to align "%s"' % image_path)
                                text_file.write('%s\n' % (output_filename))

        print('Total number of images: %d' % nrof_images_total)
        print('Number of successfully aligned images: %d' % nrof_successfully_aligned)
| StarcoderdataPython |
71085 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
class BaseEmitter(object):
    '''Base class for *data-migrator* emitters.

    Attributes:
        manager (BaseManager): the manager whose objects this emitter exports
        model_class (Model): model linked to that manager
        extension (str): file extension used for the output file
        meta (Options): the model's meta options (table/file naming)

    note: :attr:`~.model_class` and :attr:`~.manager` are linked together
    '''

    def __init__(self, extension=None, manager=None):
        # Keep a back-reference to the calling manager and its model meta.
        self.manager = manager
        self.model_class = manager.model_class
        self.meta = self.model_class._meta
        # Explicit argument wins; otherwise a subclass-level `extension`
        # attribute; otherwise fall back to '.txt'.
        default_ext = getattr(self.__class__, 'extension', '.txt')
        self.extension = extension or default_ext

    def emit(self, l):
        '''Transform one object into its output representation.

        Args:
            l (Model): object to transform

        Returns:
            list: generated strings
        '''
        raise NotImplementedError

    def filename(self):
        '''Build the output filename for this emitter.

        Uses :attr:`~.Options.file_name` when set, otherwise combines
        :attr:`~.Options.table_name` with the (dot-prefixed) extension.

        Returns:
            str: filename
        '''
        ext = self.extension
        if not ext.startswith('.'):
            ext = '.' + ext
        return self.meta.file_name or (self.meta.table_name + ext)

    def preamble(self, headers):
        '''Produce the leading lines of the output file.

        Args:
            headers (list): additional header lines supplied by the caller
                (e.g. statistics)

        Returns:
            list: preamble lines
        '''
        raise NotImplementedError

    def postamble(self):
        '''Produce the trailing lines of the output file.

        Returns:
            list: postamble lines (empty by default)
        '''
        return []
| StarcoderdataPython |
198791 | from background_task import background
from .models import Post
@background(schedule=10)
def reset_post_upvotes():
    """Background job: zero the upvote counter on every Post (saved row by row)."""
    for post in Post.objects.all():
        post.amount_of_upvotes = 0
        post.save()
| StarcoderdataPython |
3203818 | import numpy as np
class LinearRegressionPy:
    # Placeholder for a pure-Python (non-NumPy) implementation — not yet written.
    pass
class LinearRegressionNp:
    """Linear regression with interchangeable NumPy solvers.

    Supported solvers: 'normal_eq' (closed form), 'pseudo_inv', 'ols'
    (least squares), 'gd' (batch gradient descent), 'sgd' (stochastic GD),
    and 'bgd' (reserved, not implemented). An unknown solver name prints a
    message and leaves the model unfitted.
    """

    def __init__(self, solver="normal_eq"):
        self.solver = solver
        self.theta = None        # full parameter vector, bias term first
        self.intercept_ = None
        self.coef_ = None

    def fit(self, X, y):
        """Fit the model on X (n_samples, n_features) and targets y."""
        dispatch = {
            "normal_eq": self._fit_normal,
            "pseudo_inv": self._fit_pseudo_inv,
            "ols": self._fit_ols,
            "gd": self._fit_gd,
            "sgd": self._fit_sgd,
            "bgd": self._fit_bgd,
        }
        solve = dispatch.get(self.solver)
        if solve is None:
            print(f"Solver {self.solver} non reconnu")
            return
        solve(X, y)
        self._update_parameters()

    def predict(self, X):
        """Return predictions for X (bias column added automatically)."""
        return self._add_constant(X).dot(self.theta)

    def _add_constant(self, X):
        """Prepend a column of ones (the bias feature) to X."""
        return np.c_[np.ones((X.shape[0], 1)), X]

    # ---- solvers -------------------------------------------------------

    def _fit_normal(self, X, y):
        """Closed-form normal equation: theta = (X'X)^-1 X'y."""
        design = self._add_constant(X)
        gram = design.T.dot(design)
        self.theta = np.linalg.inv(gram).dot(design.T.dot(y))

    def _fit_pseudo_inv(self, X, y):
        """Moore-Penrose pseudo-inverse solution (robust to singular X'X)."""
        self.theta = np.linalg.pinv(self._add_constant(X)).dot(y)

    def _fit_ols(self, X, y):
        """Least-squares via np.linalg.lstsq."""
        design = self._add_constant(X)
        self.theta, *_ = np.linalg.lstsq(design, y, rcond=1e-6)

    def _fit_gd(self, X, y, learning_rate=0.01, n_iter=10000):
        """Full-batch gradient descent from a random start."""
        design = self._add_constant(X)
        targets = y.reshape(-1, 1)
        n_samples = design.shape[0]
        self.theta = np.random.randn(design.shape[1], 1)
        for _ in range(n_iter):
            residual = design.dot(self.theta) - targets
            grad = (2 / n_samples) * design.T.dot(residual)
            self.theta = self.theta - learning_rate * grad
        self.theta = self.theta.flatten()

    def _fit_sgd(self, X, y, t0=800, lr0=0.1, n_epochs=500):
        """Stochastic gradient descent with a decaying learning-rate schedule."""
        design = self._add_constant(X)
        targets = y.reshape(-1, 1)
        self.theta = np.random.randn(design.shape[1], 1)
        for epoch in range(n_epochs):
            pick = np.random.randint(design.shape[0])
            X_i = design[pick:pick + 1]
            y_i = targets[pick:pick + 1]
            grad = 2 * X_i.T.dot(X_i.dot(self.theta) - y_i)
            lr = lr0 * (t0 / (t0 + epoch))
            self.theta = self.theta - lr * grad
        self.theta = self.theta.flatten()

    def _fit_bgd(self, X, y, learning_rate=0.01, n_iter=10000):
        # Batch gradient descent: intentionally not implemented yet.
        pass

    def _update_parameters(self):
        """Split the fitted theta into intercept_ and coef_."""
        self.intercept_ = self.theta[0]
        self.coef_ = self.theta[1:]
class RidgeNp:
    """Ridge (L2-regularised) linear regression, closed form only.

    Solvers 'gd'/'sgd'/'bgd' are accepted but not implemented; an unknown
    solver name prints a message and leaves the model unfitted.

    NOTE(review): the closed form penalises the intercept as well (the
    identity matrix covers the bias column) — confirm that is intended.
    """

    def __init__(self, solver="normal_eq", alpha=1):
        self.solver = solver
        self.alpha = alpha       # L2 regularisation strength
        self.theta = None        # full parameter vector, bias term first
        self.intercept_ = None
        self.coef_ = None

    def fit(self, X, y):
        """Fit on X (n_samples, n_features) and targets y."""
        if self.solver == "normal_eq":
            self._fit_normal(X, y)
        elif self.solver in ("gd", "sgd", "bgd"):
            pass  # iterative solvers: placeholders, nothing fitted
        else:
            print(f"Solver {self.solver} non reconnu")
            return
        self._update_parameters()

    def predict(self, X):
        """Return predictions for X (bias column added automatically)."""
        return self._add_constant(X).dot(self.theta)

    def _fit_normal(self, X, y):
        """Closed-form ridge: theta = (X'X + alpha*I)^-1 X'y."""
        design = self._add_constant(X)
        penalty = self.alpha * np.identity(design.shape[1])
        self.theta = np.linalg.inv(design.T.dot(design) + penalty).dot(design.T.dot(y))

    def _add_constant(self, X):
        """Prepend a column of ones (the bias feature) to X."""
        return np.c_[np.ones((X.shape[0], 1)), X]

    def _update_parameters(self):
        """Split the fitted theta into intercept_ and coef_."""
        self.intercept_ = self.theta[0]
        self.coef_ = self.theta[1:]
124494 | from re import S
from numpy.core.numeric import NaN
import streamlit as st
import pandas as pd
import numpy as np
st.title('world gdp')
@st.cache
def load_data(path):
    """Read the GDP csv at *path* with lower-cased column names (Streamlit-cached)."""
    frame = pd.read_csv(path)
    frame.columns = frame.columns.str.lower()
    return frame
# Load the whole GDP table once (cached inside load_data).
data = load_data("data/gdp.csv")
if st.checkbox('show raw data'):
    st.write(data)
if st.checkbox('Show all gdp'):
    st.subheader('all(color is too much, so the id is not useful)')
    # all_data = pd.DataFrame(data.values.T, index=data.columns, columns=data["country name"].unique())[4:]
    # Transpose so years become the index; slicing [4:] skips the metadata rows.
    all_data = pd.DataFrame(data.values.T, index=data.columns, columns=data.index)[4:]
    st.line_chart(all_data)
# Two sidebar selectors so the user can pick a pair of countries to compare.
product_list = data["country name"].unique()
product_type = st.sidebar.selectbox(
    "Which kind of event do you want to compare?",
    product_list,
    key = 'ada'
)
product_type_2 = st.sidebar.selectbox(
    "Which kind of event do you want to compare?",
    product_list,
    key = 'ava'
)
if(product_type != product_type_2):
    st.title(f"{product_type} vs {product_type_2} GDP")
    sub_data = data[(data["country name"] == product_type) | (data["country name"] == product_type_2)]
    sub_data2 = pd.DataFrame(sub_data.values.T, index=sub_data.columns, columns=[product_type, product_type_2])[4:]
    st.line_chart(sub_data2)
else:
    # Same country picked twice: draw a single-series chart instead.
    st.title(f"{product_type}的GDP折线图")
    sub_data = data[(data["country name"] == product_type)]
    sub_data2 = pd.DataFrame(sub_data.values.T, index=sub_data.columns, columns=[product_type])[4:]
    st.line_chart(sub_data2)
3384826 | <filename>source/utils.py
import os
import copy
import sys
import matplotlib.pyplot as plt
import numpy as np
import torch
from torch import nn
import torch.optim as optim
import torch.backends.cudnn as cudnn
from torch.utils.data.dataloader import DataLoader
from tqdm import tqdm
import yaml
from source.models import ESPCN
def printconfig(config_dict):
    """ Print configuration dictionary to console

    Configuration values in yaml file (default= config.yaml) passed as a dictionary and printed to the console for convenient inspection. Command line arg is (-pc, --print-config). Terminates execution after printing.

    :param config_dict: Nested dictionary of configuration values for using ESPCN
    :return: None
    """
    print('\nConfiguration parameters-\n')
    for section, params in config_dict.items():
        print(section, ':')
        for key in params:
            print(' ', key, ':', params[key])
        print()
    sys.exit()
def visualize_filters(dict_vis):
    """ Visualize and save filters of all the convolutional layers

    Plot filters of the conv layers using matplotlib. Weights are loaded, after which the function extracts the weights to 'model_weights'. Filters visuals are plotted for each layer and saved in data/visualize_filters. Command line arg is (-f, --filter-vis). Terminates execution after plotting and saving.

    :param dict_vis: dictionary containing scale value and path to weights file
    :return: None
    """
    weights_file= dict_vis['weights file']
    scale= dict_vis['scale']
    # NOTE(review): `device` is assigned but never used below -- the model
    # stays on its default (CPU) device anyway.
    device = torch.device('cpu')
    model = ESPCN(scale_factor=scale)
    state_dict = model.state_dict()
    # Copy each saved tensor into the freshly built model's state dict.
    for n, p in torch.load(weights_file, map_location=lambda storage, loc: storage).items():
        if n in state_dict.keys():
            state_dict[n].copy_(p)
        else:
            raise KeyError(n)
    model_weights= [] # To store weights
    conv_layers= [] # To store the conv2d layers
    model_children= list(model.children())
    counter = 0
    # Collect every nn.Conv2d (and its weights) from the model's submodules.
    for i in range(len(model_children)):
        for j in range(len(model_children[i])):
            child= model_children[i][j]
            if type(child) == nn.Conv2d:
                counter += 1
                model_weights.append(child.weight)
                conv_layers.append(child)
    out_path= 'data/visualize_filters'
    if not os.path.exists(out_path):
        os.makedirs(out_path)
    # Subplot grid sizes per layer (rows, cols); assumes the 3-layer ESPCN.
    sizes= [(8,8), (4,8), (3,3)]
    k_sizes= [5,3,3]
    plt.figure(figsize=(20, 17))
    for n in range(len(model_weights)):
        # Draw the first input channel of each filter of layer n in grayscale.
        for i, filter in enumerate(model_weights[n]):
            plt.subplot(sizes[n][0], sizes[n][1], i+1)
            plt.imshow(filter[0, :, :].detach(), cmap='gray')
            plt.axis('off')
        plt.suptitle('Convolutional Layer ' + str(n+1) + ': Filter visualization', fontsize=40)
        plt.savefig('data/visualize_filters/filter'+str(n+1)+'.png')
        plt.clf()
    print('Filter images saved to data/visualize_filters')
    sys.exit()
def is_image_file(filename):
    """ Check if file is an image (case-insensitive extension match)

    :param filename: file name string
    :return: Boolean toggle
    """
    # The original list mixed cases ('.jpg' and '.JPG' but only '.bmp');
    # lower-casing once covers every capitalization consistently.
    return filename.lower().endswith(('.bmp', '.png', '.jpg', '.jpeg'))
def is_video_file(filename):
    """ Check if file is a video (case-insensitive extension match)

    :param filename: file name string
    :return: Boolean toggle
    """
    # Case-insensitive for consistency with is_image_file; the original
    # only accepted lower-case video extensions.
    return filename.lower().endswith(('.mp4', '.avi', '.mpg', '.mkv', '.wmv', '.flv'))
def convert_rgb_to_y(img, dim_order='hwc'):
    """ Get Y(CbCr) value from RGB image (standard conversion)

    :param img: input image array in RGB form, channel-last ('hwc') or channel-first
    :return: array of Y values
    """
    if dim_order == 'hwc':
        r, g, b = img[..., 0], img[..., 1], img[..., 2]
    else:
        r, g, b = img[0], img[1], img[2]
    return 16. + (64.738 * r + 129.057 * g + 25.064 * b) / 256.
def convert_rgb_to_ycbcr(img, dim_order='hwc'):
    """ Convert to YCbCr from RGB (standard conversion)

    :param img: input image array in RGB form ('hwc' channel-last or channel-first)
    :return: out image array in YCbCr form, always channel-last (H, W, 3)
    """
    if dim_order == 'hwc':
        r, g, b = img[..., 0], img[..., 1], img[..., 2]
    else:
        r, g, b = img[0], img[1], img[2]
    y = 16. + (64.738 * r + 129.057 * g + 25.064 * b) / 256.
    cb = 128. + (-37.945 * r - 74.494 * g + 112.439 * b) / 256.
    cr = 128. + (112.439 * r - 94.154 * g - 18.285 * b) / 256.
    return np.array([y, cb, cr]).transpose([1, 2, 0])
def convert_ycbcr_to_rgb(img, dim_order='hwc'):
    """ Convert to RGB from YCbCr (standard conversion)

    :param img: input image array in YCbCr form ('hwc' channel-last or channel-first)
    :return: out image array in RGB form, always channel-last (H, W, 3)
    """
    if dim_order == 'hwc':
        y, cb, cr = img[..., 0], img[..., 1], img[..., 2]
    else:
        y, cb, cr = img[0], img[1], img[2]
    r = 298.082 * y / 256. + 408.583 * cr / 256. - 222.921
    g = 298.082 * y / 256. - 100.291 * cb / 256. - 208.120 * cr / 256. + 135.576
    b = 298.082 * y / 256. + 516.412 * cb / 256. - 276.836
    return np.array([r, g, b]).transpose([1, 2, 0])
def preprocess(img, device):
    """ Process image into torch tensor

    :param img: input image array in RGB form
    :return: (Y-channel tensor of shape (1, 1, H, W) scaled to [0, 1], YCbCr array)
    """
    arr = np.array(img).astype(np.float32)
    ycbcr = convert_rgb_to_ycbcr(arr)
    luma = ycbcr[..., 0]
    luma /= 255.  # in-place: this also rescales the Y channel inside ``ycbcr``
    tensor = torch.from_numpy(luma).to(device)
    tensor = tensor.unsqueeze(0).unsqueeze(0)
    return tensor, ycbcr
def calc_psnr(img1, img2):
    """ PSNR calculator (assumes pixel values scaled to [0, 1])

    :param img1: true/estimated image
    :param img2: estimated/true image
    :return: PSNR value
    """
    mse = torch.mean((img1 - img2) ** 2)
    return 10. * torch.log10(1. / mse)
class AverageMeter(object):
    """ Simple object to keep track of a parameter average over time

    Object initialized to zero, and stores the average, count, sum and last value variables. Used in training to track best average PSNR.
    """
    def __init__(self):
        """ Constructor
        """
        self.reset()
    def reset(self):
        """ Reset to zero
        """
        self.val = 0
        self.sum = 0
        self.count = 0
        self.avg = 0
    def update(self, val, n=1):
        """ Update and compute val, sum, count, avg
        """
        self.val = val
        self.count += n
        self.sum += val * n
        self.avg = self.sum / self.count
115516 | # minqlx - Extends Quake Live's dedicated server with extra functionality and scripting.
# Copyright (C) 2015 Mino <<EMAIL>>
# This file is part of minqlx.
# minqlx is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# minqlx is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with minqlx. If not, see <http://www.gnu.org/licenses/>.
import minqlx
import re
# Canned userinfo string used to construct dummy (non-connected) players,
# e.g. AbstractDummyPlayer below; backslash-separated key\value pairs.
_DUMMY_USERINFO = ("ui_singlePlayerActive\\0\\cg_autoAction\\1\\cg_autoHop\\0"
                   "\\cg_predictItems\\1\\model\\bitterman/sport_blue\\headmodel\\crash/red"
                   "\\handicap\\100\\cl_anonymous\\0\\color1\\4\\color2\\23\\sex\\male"
                   "\\teamtask\\0\\rate\\25000\\country\\NO")
class NonexistentPlayerError(Exception):
    """An exception that is raised when a player that disconnected is being used
    as if the player were still present.
    """
    # The docstring is a sufficient class body; the redundant ``pass`` was removed.
class Player():
    """A class that represents a player on the server. As opposed to minqlbot,
    attributes are all the values from when the class was instantiated. This
    means for instance if a player is on the blue team when you check, but
    then moves to red, it will still be blue when you check a second time.
    To update it, use :meth:`~.Player.update`. Note that if you update it
    and the player has disconnected, it will raise a
    :exc:`minqlx.NonexistentPlayerError` exception.

    """
    def __init__(self, client_id, info=None):
        self._valid = True
        # Can pass own info for efficiency when getting all players and to allow dummy players.
        if info:
            self._id = client_id
            self._info = info
        else:
            self._id = client_id
            self._info = minqlx.player_info(client_id)
            if not self._info:
                self._invalidate("Tried to initialize a Player instance of nonexistant player {}."
                                 .format(client_id))
        # Parsed lazily by the ``cvars`` property unless needed right away.
        self._userinfo = None
        self._steam_id = self._info.steam_id
        # When a player connects, a the name field in the client struct has yet to be initialized,
        # so we fall back to the userinfo and try parse it ourselves to get the name if needed.
        if self._info.name:
            self._name = self._info.name
        else:
            self._userinfo = minqlx.parse_variables(self._info.userinfo, ordered=True)
            if "name" in self._userinfo:
                self._name = self._userinfo["name"]
            else: # No name at all. Weird userinfo during connection perhaps?
                self._name = ""
    def __repr__(self):
        if not self._valid:
            return "{}(INVALID:'{}':{})".format(self.__class__.__name__,
                                                self.clean_name, self.steam_id)
        return "{}({}:'{}':{})".format(self.__class__.__name__, self._id,
                                       self.clean_name, self.steam_id)
    def __str__(self):
        return self.name
    # Dict-style access (``"name" in player`` / ``player["name"]``) delegates
    # to the parsed userinfo cvars.
    def __contains__(self, key):
        return key in self.cvars
    def __getitem__(self, key):
        return self.cvars[key]
    # Equality is by Steam ID, so a Player compares equal to another Player
    # instance or to a raw Steam ID value.
    def __eq__(self, other):
        if isinstance(other, type(self)):
            return self.steam_id == other.steam_id
        else:
            return self.steam_id == other
    def __ne__(self, other):
        return not self.__eq__(other)
    def update(self):
        """Update the player information with the latest data. If the player
        disconnected it will raise an exception and invalidates a player.
        The player's name and Steam ID can still be accessed after being
        invalidated, but anything else will make it throw an exception too.

        :raises: minqlx.NonexistentPlayerError

        """
        self._info = minqlx.player_info(self._id)
        if not self._info or self._steam_id != self._info.steam_id:
            self._invalidate()
        if self._info.name:
            self._name = self._info.name
        else:
            self._userinfo = minqlx.parse_variables(self._info.userinfo, ordered=True)
            if "name" in self._userinfo:
                self._name = self._userinfo["name"]
            else:
                self._name = ""
    def _invalidate(self, e="The player does not exist anymore. Did the player disconnect?"):
        self._valid = False
        raise NonexistentPlayerError(e)
    @property
    def cvars(self):
        """A copy of the player's parsed userinfo key/value pairs."""
        if not self._valid:
            self._invalidate()
        if not self._userinfo:
            self._userinfo = minqlx.parse_variables(self._info.userinfo, ordered=True)
        return self._userinfo.copy()
    @cvars.setter
    def cvars(self, new_cvars):
        new = "".join(["\\{}\\{}".format(key, new_cvars[key]) for key in new_cvars])
        minqlx.client_command(self.id, "userinfo \"{}\"".format(new))
    @property
    def steam_id(self):
        return self._steam_id
    @property
    def id(self):
        return self._id
    @property
    def ip(self):
        # The userinfo "ip" value is "address:port"; return only the address.
        if "ip" in self:
            return self["ip"].split(":")[0]
        else:
            return ""
    @property
    def clan(self):
        """The clan tag. Not actually supported by QL, but it used to be and
        fortunately the scoreboard still properly displays it if we manually
        set the configstring to use clan tags."""
        try:
            return minqlx.parse_variables(minqlx.get_configstring(529 + self._id))["cn"]
        except KeyError:
            return ""
    @clan.setter
    def clan(self, tag):
        # Per-player configstrings start at index 529.
        index = self.id + 529
        cs = minqlx.parse_variables(minqlx.get_configstring(index), ordered=True)
        cs["xcn"] = tag
        cs["cn"] = tag
        new_cs = "".join(["\\{}\\{}".format(key, cs[key]) for key in cs])
        minqlx.set_configstring(index, new_cs)
    @property
    def name(self):
        # "^7" resets any trailing color code left in the raw name.
        return self._name + "^7"
    @name.setter
    def name(self, value):
        new = self.cvars
        new["name"] = value
        self.cvars = new
    @property
    def clean_name(self):
        """Removes color tags from the name."""
        return re.sub(r"\^[0-9]", "", self.name)
    @property
    def qport(self):
        if "qport" in self:
            return int(self["qport"])
        else:
            return -1
    @property
    def team(self):
        return minqlx.TEAMS[self._info.team]
    @team.setter
    def team(self, new_team):
        self.put(new_team)
    @property
    def colors(self):
        # Float because they can occasionally be floats for some reason.
        return float(self["color1"]), float(self["color2"])
    @colors.setter
    def colors(self, value):
        new = self.cvars
        c1, c2 = value
        new["color1"] = c1
        new["color2"] = c2
        self.cvars = new
    @property
    def model(self):
        return self["model"]
    @model.setter
    def model(self, value):
        new = self.cvars
        new["model"] = value
        self.cvars = new
    @property
    def headmodel(self):
        return self["headmodel"]
    @headmodel.setter
    def headmodel(self, value):
        new = self.cvars
        new["headmodel"] = value
        self.cvars = new
    @property
    def handicap(self):
        return self["handicap"]
    @handicap.setter
    def handicap(self, value):
        new = self.cvars
        new["handicap"] = value
        self.cvars = new
    @property
    def autohop(self):
        return bool(int(self["cg_autoHop"]))
    @autohop.setter
    def autohop(self, value):
        # NOTE(review): the getter reads "cg_autoHop" but this writes "autohop"
        # -- confirm whether the asymmetric key is intentional.
        new = self.cvars
        new["autohop"] = int(value)
        self.cvars = new
    @property
    def autoaction(self):
        return bool(int(self["cg_autoAction"]))
    @autoaction.setter
    def autoaction(self, value):
        new = self.cvars
        new["cg_autoAction"] = int(value)
        self.cvars = new
    @property
    def predictitems(self):
        return bool(int(self["cg_predictItems"]))
    @predictitems.setter
    def predictitems(self, value):
        new = self.cvars
        new["cg_predictItems"] = int(value)
        self.cvars = new
    @property
    def connection_state(self):
        """A string describing the connection state of a player.

        Possible values:
        - *free* -- The player has disconnected and the slot is free to be used by someone else.
        - *zombie* -- The player disconnected and his/her slot will be available to other players shortly.
        - *connected* -- The player connected, but is currently loading the game.
        - *primed* -- The player was sent the necessary information to play, but has yet to send commands.
        - *active* -- The player finished loading and is actively sending commands to the server.

        In other words, if you need to make sure a player is in-game, check if ``player.connection_state == "active"``.

        """
        return minqlx.CONNECTION_STATES[self._info.connection_state]
    @property
    def state(self):
        return minqlx.player_state(self.id)
    @property
    def privileges(self):
        if self._info.privileges == minqlx.PRIV_NONE:
            return None
        elif self._info.privileges == minqlx.PRIV_MOD:
            return "mod"
        elif self._info.privileges == minqlx.PRIV_ADMIN:
            return "admin"
        elif self._info.privileges == minqlx.PRIV_ROOT:
            return "root"
        elif self._info.privileges == minqlx.PRIV_BANNED:
            return "banned"
    @privileges.setter
    def privileges(self, value):
        if not value or value == "none":
            minqlx.set_privileges(self.id, minqlx.PRIV_NONE)
        elif value == "mod":
            minqlx.set_privileges(self.id, minqlx.PRIV_MOD)
        elif value == "admin":
            minqlx.set_privileges(self.id, minqlx.PRIV_ADMIN)
        else:
            raise ValueError("Invalid privilege level.")
    @property
    def country(self):
        return self["country"]
    @country.setter
    def country(self, value):
        new = self.cvars
        new["country"] = value
        self.cvars = new
    @property
    def valid(self):
        return self._valid
    @property
    def stats(self):
        return minqlx.player_stats(self.id)
    @property
    def ping(self):
        return self.stats.ping
    # The following getter/setter hybrids return the current state when called
    # with no keyword arguments; otherwise they merge the given kwargs with the
    # current (or reset) values and write the result back to the engine.
    def position(self, reset=False, **kwargs):
        if reset:
            pos = minqlx.Vector3((0, 0, 0))
        else:
            pos = self.state.position
        if not kwargs:
            return pos
        x = pos.x if "x" not in kwargs else kwargs["x"]
        y = pos.y if "y" not in kwargs else kwargs["y"]
        z = pos.z if "z" not in kwargs else kwargs["z"]
        return minqlx.set_position(self.id, minqlx.Vector3((x, y, z)))
    def velocity(self, reset=False, **kwargs):
        if reset:
            vel = minqlx.Vector3((0, 0, 0))
        else:
            vel = self.state.velocity
        if not kwargs:
            return vel
        x = vel.x if "x" not in kwargs else kwargs["x"]
        y = vel.y if "y" not in kwargs else kwargs["y"]
        z = vel.z if "z" not in kwargs else kwargs["z"]
        return minqlx.set_velocity(self.id, minqlx.Vector3((x, y, z)))
    def weapons(self, reset=False, **kwargs):
        if reset:
            weaps = minqlx.Weapons(((False,)*15))
        else:
            weaps = self.state.weapons
        if not kwargs:
            return weaps
        g = weaps.g if "g" not in kwargs else kwargs["g"]
        mg = weaps.mg if "mg" not in kwargs else kwargs["mg"]
        sg = weaps.sg if "sg" not in kwargs else kwargs["sg"]
        gl = weaps.gl if "gl" not in kwargs else kwargs["gl"]
        rl = weaps.rl if "rl" not in kwargs else kwargs["rl"]
        lg = weaps.lg if "lg" not in kwargs else kwargs["lg"]
        rg = weaps.rg if "rg" not in kwargs else kwargs["rg"]
        pg = weaps.pg if "pg" not in kwargs else kwargs["pg"]
        bfg = weaps.bfg if "bfg" not in kwargs else kwargs["bfg"]
        gh = weaps.gh if "gh" not in kwargs else kwargs["gh"]
        ng = weaps.ng if "ng" not in kwargs else kwargs["ng"]
        pl = weaps.pl if "pl" not in kwargs else kwargs["pl"]
        cg = weaps.cg if "cg" not in kwargs else kwargs["cg"]
        hmg = weaps.hmg if "hmg" not in kwargs else kwargs["hmg"]
        hands = weaps.hands if "hands" not in kwargs else kwargs["hands"]
        return minqlx.set_weapons(self.id,
                                  minqlx.Weapons((g, mg, sg, gl, rl, lg, rg, pg, bfg, gh, ng, pl, cg, hmg, hands)))
    def weapon(self, new_weapon=None):
        # Accepts either a weapon index or a weapon name; no argument reads
        # the currently held weapon.
        if new_weapon is None:
            return self.state.weapon
        elif new_weapon in minqlx.WEAPONS:
            pass
        elif new_weapon in minqlx.WEAPONS.values():
            new_weapon = tuple(minqlx.WEAPONS.values()).index(new_weapon)
        return minqlx.set_weapon(self.id, new_weapon)
    def ammo(self, reset=False, **kwargs):
        if reset:
            a = minqlx.Weapons(((0,)*15))
        else:
            a = self.state.ammo
        if not kwargs:
            return a
        g = a.g if "g" not in kwargs else kwargs["g"]
        mg = a.mg if "mg" not in kwargs else kwargs["mg"]
        sg = a.sg if "sg" not in kwargs else kwargs["sg"]
        gl = a.gl if "gl" not in kwargs else kwargs["gl"]
        rl = a.rl if "rl" not in kwargs else kwargs["rl"]
        lg = a.lg if "lg" not in kwargs else kwargs["lg"]
        rg = a.rg if "rg" not in kwargs else kwargs["rg"]
        pg = a.pg if "pg" not in kwargs else kwargs["pg"]
        bfg = a.bfg if "bfg" not in kwargs else kwargs["bfg"]
        gh = a.gh if "gh" not in kwargs else kwargs["gh"]
        ng = a.ng if "ng" not in kwargs else kwargs["ng"]
        pl = a.pl if "pl" not in kwargs else kwargs["pl"]
        cg = a.cg if "cg" not in kwargs else kwargs["cg"]
        hmg = a.hmg if "hmg" not in kwargs else kwargs["hmg"]
        hands = a.hands if "hands" not in kwargs else kwargs["hands"]
        return minqlx.set_ammo(self.id,
                               minqlx.Weapons((g, mg, sg, gl, rl, lg, rg, pg, bfg, gh, ng, pl, cg, hmg, hands)))
    def powerups(self, reset=False, **kwargs):
        if reset:
            pu = minqlx.Powerups(((0,)*6))
        else:
            pu = self.state.powerups
        if not kwargs:
            return pu
        # kwargs are in seconds; the engine expects milliseconds.
        quad = pu.quad if "quad" not in kwargs else round(kwargs["quad"]*1000)
        bs = pu.battlesuit if "battlesuit" not in kwargs else round(kwargs["battlesuit"]*1000)
        haste = pu.haste if "haste" not in kwargs else round(kwargs["haste"]*1000)
        invis = pu.invisibility if "invisibility" not in kwargs else round(kwargs["invisibility"]*1000)
        regen = pu.regeneration if "regeneration" not in kwargs else round(kwargs["regeneration"]*1000)
        invul = pu.invulnerability if "invulnerability" not in kwargs else round(kwargs["invulnerability"]*1000)
        return minqlx.set_powerups(self.id,
                                   minqlx.Powerups((quad, bs, haste, invis, regen, invul)))
    @property
    def holdable(self):
        return self.state.holdable
    @holdable.setter
    def holdable(self, value):
        # The numeric constants are the engine's item indices.
        if not value:
            minqlx.set_holdable(self.id, 0)
        elif value == "teleporter":
            minqlx.set_holdable(self.id, 27)
        elif value == "medkit":
            minqlx.set_holdable(self.id, 28)
        elif value == "flight":
            minqlx.set_holdable(self.id, 34)
            self.flight(reset=True)
        elif value == "kamikaze":
            minqlx.set_holdable(self.id, 37)
        elif value == "portal":
            minqlx.set_holdable(self.id, 38)
        elif value == "invulnerability":
            minqlx.set_holdable(self.id, 39)
        else:
            raise ValueError("Invalid holdable item.")
    def drop_holdable(self):
        minqlx.drop_holdable(self.id)
    def flight(self, reset=False, **kwargs):
        state = self.state
        if state.holdable != "flight":
            self.holdable = "flight"
            reset = True
        if reset:
            # Set to defaults on reset.
            fl = minqlx.Flight((16000, 16000, 1200, 0))
        else:
            fl = state.flight
        fuel = fl.fuel if "fuel" not in kwargs else kwargs["fuel"]
        max_fuel = fl.max_fuel if "max_fuel" not in kwargs else kwargs["max_fuel"]
        thrust = fl.thrust if "thrust" not in kwargs else kwargs["thrust"]
        refuel = fl.refuel if "refuel" not in kwargs else kwargs["refuel"]
        return minqlx.set_flight(self.id, minqlx.Flight((fuel, max_fuel, thrust, refuel)))
    @property
    def noclip(self):
        return self.state.noclip
    @noclip.setter
    def noclip(self, value):
        minqlx.noclip(self.id, bool(value))
    @property
    def health(self):
        return self.state.health
    @health.setter
    def health(self, value):
        minqlx.set_health(self.id, value)
    @property
    def armor(self):
        return self.state.armor
    @armor.setter
    def armor(self, value):
        minqlx.set_armor(self.id, value)
    @property
    def is_alive(self):
        return self.state.is_alive
    @is_alive.setter
    def is_alive(self, value):
        if not isinstance(value, bool):
            raise ValueError("is_alive needs to be a boolean.")
        cur = self.is_alive
        if cur and value is False:
            # TODO: Proper death and not just setting health to 0.
            self.health = 0
        elif not cur and value is True:
            minqlx.player_spawn(self.id)
    @property
    def is_frozen(self):
        return self.state.is_frozen
    @property
    def score(self):
        return self.stats.score
    @score.setter
    def score(self, value):
        return minqlx.set_score(self.id, value)
    @property
    def air_control(self):
        return self.state.air_control
    @air_control.setter
    def air_control(self, value):
        minqlx.set_air_control(self.id, value)
    @property
    def channel(self):
        return minqlx.TellChannel(self)
    def center_print(self, msg):
        minqlx.send_server_command(self.id, "cp \"{}\"".format(msg))
    # Convenience wrappers that forward to the corresponding minqlx.Plugin
    # admin/utility actions for this player.
    def tell(self, msg, **kwargs):
        return minqlx.Plugin.tell(msg, self, **kwargs)
    def kick(self, reason=""):
        return minqlx.Plugin.kick(self, reason)
    def ban(self):
        return minqlx.Plugin.ban(self)
    def tempban(self):
        return minqlx.Plugin.tempban(self)
    def addadmin(self):
        return minqlx.Plugin.addadmin(self)
    def addmod(self):
        return minqlx.Plugin.addmod(self)
    def demote(self):
        return minqlx.Plugin.demote(self)
    def mute(self):
        return minqlx.Plugin.mute(self)
    def unmute(self):
        return minqlx.Plugin.unmute(self)
    def put(self, team):
        return minqlx.Plugin.put(self, team)
    def addscore(self, score):
        return minqlx.Plugin.addscore(self, score)
    def switch(self, other_player):
        return minqlx.Plugin.switch(self, other_player)
    def slap(self, damage=0):
        return minqlx.Plugin.slap(self, damage)
    def slay(self):
        return minqlx.Plugin.slay(self)
    def slay_with_mod(self, mod):
        return minqlx.slay_with_mod(self.id, mod)
    @classmethod
    def all_players(cls):
        """Return a Player instance for every occupied client slot."""
        return [cls(i, info=info) for i, info in enumerate(minqlx.players_info()) if info]
class AbstractDummyPlayer(Player):
    """Base class for fake players that are not backed by a real client slot.

    Subclasses must implement ``steam_id``, ``channel`` and ``tell``.
    """
    def __init__(self, name="DummyPlayer"):
        # Client ID -1 and a canned userinfo mark this as a non-real player.
        info = minqlx.PlayerInfo((-1, name, minqlx.CS_CONNECTED,
                                  _DUMMY_USERINFO, -1, minqlx.TEAM_SPECTATOR, minqlx.PRIV_NONE))
        super().__init__(-1, info=info)
    @property
    def id(self):
        raise AttributeError("Dummy players do not have client IDs.")
    @property
    def steam_id(self):
        raise NotImplementedError("steam_id property needs to be implemented.")
    def update(self):
        # Nothing to refresh: dummy players have no live engine state.
        pass
    @property
    def channel(self):
        raise NotImplementedError("channel property needs to be implemented.")
    def tell(self, msg):
        raise NotImplementedError("tell() needs to be implemented.")
class RconDummyPlayer(AbstractDummyPlayer):
    """Dummy player representing the server owner acting through RCON/console."""
    def __init__(self):
        super().__init__(name=self.__class__.__name__)
    @property
    def steam_id(self):
        # Impersonates the server owner's Steam ID.
        return minqlx.owner()
    @property
    def channel(self):
        return minqlx.CONSOLE_CHANNEL
    def tell(self, msg):
        # Messages to the RCON player go to the server console.
        self.channel.reply(msg)
| StarcoderdataPython |
103619 | from django.shortcuts import render,get_object_or_404,redirect
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.contrib import auth
from django.contrib.auth import authenticate, login, logout
from django.conf import settings
from django.db.models import Count,Max, Q
from axes.models import AccessAttempt
from apps.paginacion import paginacion
from apps.reportes.models import historial
from .forms import formPerfil, LoginForm, formUsuario, formModulo,formSubModulo,formEditUsuario
from .models import modulos, permisos, perfil, User
import datetime, time
# NOTE(review): these values are evaluated once at import time, so on a
# long-running server they go stale; handlers should compute timestamps
# per request instead of reusing these module-level constants.
today = datetime.datetime.now()
fecha = today.strftime("%Y-%m-%d")
hora = time.strftime("%H:%M:%S")
#from django.utils.decorators import method_decorator
# Create your views here.
def historiales(request, mod):
    """Persist an audit-trail row (``historial``) for the current request.

    :param request: the Django request; supplies user id, IP and user agent.
    :param mod: 3-item sequence [module name, action, affected object id].
    """
    # Compute the timestamp per call: the module-level ``fecha``/``hora``
    # are frozen at import time and would go stale on a long-lived server.
    now = datetime.datetime.now()
    a = historial()
    a.idusuario_id = request.user.id
    a.fecha = now.strftime("%Y-%m-%d")
    a.hora = now.strftime("%H:%M:%S")
    a.equipo = request.META['HTTP_USER_AGENT']
    a.ip = request.META['REMOTE_ADDR']
    a.modulo = mod[0]
    a.accion = mod[1]
    a.idaccion = mod[2]
    a.save()
def Login(request):
    """Authenticate a user from the login form and start a session.

    GET renders the empty form; POST validates credentials. Users who are
    already authenticated are redirected straight to /sistema.
    """
    u = request.user
    if not u.is_anonymous():
        return redirect('/sistema')
    if request.method == 'POST':
        formulario = LoginForm(request.POST)
        msm = "DATOS INCORRECTOS"
        if formulario.is_valid():
            username = formulario.cleaned_data['username']
            password = formulario.cleaned_data['password']
            user = authenticate(username=username, password=password)
            if user is not None:
                if user.is_active:
                    auth.login(request, user)
                    return redirect("/sistema")
                # Bug fix: this message used to be unconditionally overwritten
                # by the generic "DATOS INCORRECTOS" before rendering.
                msm = "cuenta desactivada"
        login = LoginForm()
        return render(request, 'seguridad/cuentas/login.html', {'login': login, 'msm': msm})
    else:
        login = LoginForm()
        return render(request, 'seguridad/cuentas/login.html', {'login': login, 'msm': ''})
def LogOut(request):
    """End the current session and return to the login page."""
    logout(request)
    return redirect('/')
@login_required(login_url='/')
def index(request):
    """Render the main menu with the active modules the user's profile may access."""
    idp = request.user.idperfil_id
    mod = permisos.objects.filter(idperfil_id=idp, idmodulo__estado=True).values('idmodulo_id','idmodulo__padre','idmodulo__descripcion','idmodulo__icon','idmodulo__url','idperfil_id','buscar','eliminar','editar','insertar','imprimir','ver')
    return render(request,'seguridad/index.html',{'mod':mod})
def permi(request,url):
    """Return the CRUD permission flags of the current user's profile for the
    active module registered under *url* (a values() queryset)."""
    idp = request.user.idperfil_id
    mod = permisos.objects.filter(idmodulo__url=url, idperfil_id=idp, idmodulo__estado=True).values('idmodulo__url','buscar','eliminar','editar','insertar','imprimir','ver')
    return mod
@login_required(login_url='/')
def registrar_perfil(request):
    """Create a profile: AJAX POST saves it and seeds one ``permisos`` row per
    module; GET renders the full page with the profile list."""
    perfiles = perfil.objects.all().order_by('id')
    estado = permi(request, "registro_perfil")
    if request.method == 'POST' and request.is_ajax():
        formu = formPerfil(request.POST)
        listaMod = [(con.id) for con in modulos.objects.all()]
        if formu.is_valid():
            formu.save()
            idp = perfil.objects.latest('id')
            # Give the new profile a (default) permission row for every module.
            for x in listaMod:
                m = permisos()
                m.idmodulo_id = x
                m.idperfil_id = idp.id
                m.save()
            historiales(request,["perfil","registrar",idp.id])
            return render(request,'seguridad/perfil/ajax_perfil.html',{'perfil':perfiles,'n':'perfilU','estado':estado})
        else:
            return render(request,'seguridad/perfil/form_per.html',{'formu':formu})
    else:
        formu = formPerfil()
        return render(request,'seguridad/perfil/perfil.html',{'formu':formu,'perfil':perfiles, 'url':'registro_perfil/','n':'perfilU','estado':estado})
@login_required(login_url='/')
def eliminar_perfil(request):
    """Hard-delete a profile (and its permission rows) via AJAX GET."""
    perfiles = perfil.objects.all().order_by('id')
    estado = permi(request, "registro_perfil")
    if request.method == 'GET' and request.is_ajax():
        idb = request.GET.get("id","")
        # Log before deleting so the audited id still exists at log time.
        historiales(request,["perfil","eliminar",idb])
        get_object_or_404(perfil,pk=idb).delete()
        permisos.objects.filter(idperfil=idb).delete()
        return render(request,'seguridad/perfil/ajax_perfil.html',{'perfil':perfiles,'n':'perfilU','estado':estado})
@login_required(login_url='/')
def actualizar_perfil(request):
    """Update a profile: AJAX POST saves the edited form; GET returns the edit modal."""
    perfiles = perfil.objects.all().order_by('id')
    estado = permi(request, "registro_perfil")
    if request.method == 'POST' and request.is_ajax():
        idp = request.POST.get("id","")
        a=get_object_or_404(perfil,pk=idp)
        form=formPerfil(request.POST, instance=a)
        if form.is_valid():
            form.save()
            historiales(request,["perfil","actualizar",idp])
            return render(request,'seguridad/perfil/ajax_perfil.html',{'perfil':perfiles,'n':'perfilU','estado':estado})
        else:
            return render(request,'seguridad/perfil/form_per.html',{'formu':form})
    else:
        idp = request.GET.get("id","")
        a=get_object_or_404(perfil,pk=idp)
        form= formPerfil(instance=a)
        return render(request,'seguridad/modal.html',{'nombre':form,'url':'actualizar_perfil/','n':'perfilU','u':'perfilU','estado':estado})
@login_required(login_url='/')
def registro_usuario(request):
    """Create a user: AJAX POST saves the form; GET renders the full user page."""
    usuarios = User.objects.all().order_by('id')
    estado = permi(request, "registro_usuario")
    if request.method == 'POST' and request.is_ajax():
        formu = formUsuario(request.POST)
        if formu.is_valid():
            formu.save()
            idp = User.objects.latest('id')
            historiales(request,["usuario","registrar",idp.id])
            return render(request,'seguridad/usuario/ajax_usuario.html',{'usuario':usuarios,'n':'UserU','estado':estado})
        else:
            return render(request,'seguridad/usuario/form_user.html',{'formu':formu})
    else:
        estado = (permi(request, "registro_usuario"))  # recomputed; redundant with the value above
        formu = formUsuario()
        return render(request,'seguridad/usuario/usuario.html',{'formu':formu,'usuario':usuarios, 'url':'registro_usuario/','n':'UserU','estado':estado})
@login_required(login_url='/')
def passDefault(request):
    """Reset a user's password field to the project default value (AJAX)."""
    idp = request.GET.get("id","")
    estado = (permi(request, "registro_usuario"))
    u = User.objects.get(pk=idp)
    # NOTE(review): assigns the raw ``password`` column directly; Django expects
    # a hashed value (User.set_password) -- confirm the stored literal is a hash.
    u.password = "<PASSWORD>="
    u.save()
    historiales(request,["usuario","cambio contraseña",idp])
    usuarios = User.objects.all().order_by('id')
    return render(request,'seguridad/usuario/ajax_usuario.html',{'usuario':usuarios,'n':'UserU','estado':estado})
@login_required(login_url='/')
def eliminar_usuario(request):
    """Soft-delete a user (sets ``estado`` to False) via AJAX GET."""
    usuarios = User.objects.all().order_by('id')
    estado = permi(request, "registro_usuario")
    if request.method == 'GET' and request.is_ajax():
        idb = request.GET.get("id","")
        a= User.objects.get(pk=idb)
        a.estado = False
        a.save()
        historiales(request,["usuario","eliminar",idb])
        return render(request,'seguridad/usuario/ajax_usuario.html',{'usuario':usuarios,'n':'UserU','estado':estado})
@login_required(login_url='/')
def actualizar_usuario(request):
    """Update a user: AJAX POST saves the edited form; GET returns the edit modal."""
    usuarios = User.objects.all().order_by('id')
    estado = permi(request, "registro_usuario")
    if request.method == 'POST' and request.is_ajax():
        idp = request.POST.get("id","")
        a=get_object_or_404(User,pk=idp)
        form=formUsuario(request.POST, instance=a)
        if form.is_valid():
            form.save()
            historiales(request,["usuario","actualizar",idp])
            return render(request,'seguridad/usuario/ajax_usuario.html',{'usuario':usuarios,'n':'UserU','estado':estado})
        else:
            return render(request,'seguridad/usuario/form_user.html',{'formu':form})
    else:
        idp = request.GET.get("id","")
        a=get_object_or_404(User,pk=idp)
        form= formUsuario(instance=a)
        return render(request,'seguridad/modal.html',{'nombre':form,'url':'actualizar_usuario/','n':'UserU','u':'UserU','estado':estado})
@login_required(login_url='/')
def actualizar_info_usuario(request):
    """Let the logged-in user edit his/her own account info."""
    usuarios = User.objects.all().order_by('id')
    if request.method == 'POST' and request.is_ajax():
        idp = request.user.id
        a=get_object_or_404(User,pk=idp)
        form=formEditUsuario(request.POST, instance=a)
        if form.is_valid():
            form.save()
            historiales(request,["usuario","actualizar perfil",idp])
            return redirect('/')
        else:
            return render(request,'seguridad/usuario/form_user.html',{'formu':form})
    else:
        idp = request.user.id
        a=get_object_or_404(User,pk=idp)
        form= formEditUsuario(instance=a)
        return render(request,'seguridad/usuario/edit_info_user.html',{'form':form})
@login_required(login_url='/')
def profile(request):
    """Render the static account page for the logged-in user."""
    return render(request,'seguridad/usuario/cuenta.html',{'f':'ff'})
@login_required(login_url='/')
def registro_modulo(request):
    """Create a module: AJAX POST saves the form; GET renders the module page
    with both the module and sub-module forms."""
    modulo = modulos.objects.all().order_by('id')
    estado = permi(request, "registro_modulo")
    if request.method == 'POST' and request.is_ajax():
        formu = formModulo(request.POST)
        if formu.is_valid():
            formu.save()
            idp = modulos.objects.latest('id')
            historiales(request,["modulo","registrar",idp.id])
            return render(request,'seguridad/modulo/ajax_modulo.html',{'modulo':modulo,'n':'ModuloU','estado':estado})
        else:
            return render(request,'seguridad/modulo/form_modulo.html',{'formu':formu})
    else:
        formu = formModulo()
        formu2 = formSubModulo()
        return render(request,'seguridad/modulo/modulo.html',{'pa':'1','formu':formu,'formu2':formu2,'modulo':modulo, 'url':'registro_modulo/','n':'ModuloU','nm':'SubModuloU','estado':estado})
@login_required(login_url='/')
def eliminar_modulo(request):
    """Soft-delete a module (AJAX GET) and return the refreshed module table."""
    modulo = modulos.objects.all().order_by('id')
    estado = permi(request, "registro_modulo")
    if request.method == 'GET' and request.is_ajax():
        idb = request.GET.get("id","")
        a= modulos.objects.get(pk=idb)
        a.estado = False  # soft delete instead of removing the row
        a.save()
        #get_object_or_404(modulos,pk=idb).delete()
        historiales(request,["modulo","eliminar",idb])  # audit-trail entry
    return render(request,'seguridad/modulo/ajax_modulo.html',{'modulo':modulo,'n':'ModuloU','estado':estado})
@login_required(login_url='/')
def actualizar_modulo(request):
    """Edit an existing module.

    AJAX POST: validate and save, log the change, and return the refreshed
    table fragment (or the form with validation errors).
    Otherwise: return the populated edit form inside the shared modal.
    """
    modulo = modulos.objects.all().order_by('id')
    estado = permi(request, "registro_modulo")
    if request.method == 'POST' and request.is_ajax():
        idp = request.POST.get("id","")
        a=get_object_or_404(modulos,pk=idp)
        form=formModulo(request.POST, instance=a)
        if form.is_valid():
            form.save()
            historiales(request,["modulo","actualizar",idp])  # audit-trail entry
            return render(request,'seguridad/modulo/ajax_modulo.html',{'modulo':modulo,'n':'ModuloU','estado':estado})
        else:
            return render(request,'seguridad/modulo/form_modulo.html',{'formu':form})
    else:
        idp = request.GET.get("id","")
        a=get_object_or_404(modulos,pk=idp)
        form= formModulo(instance=a)
        return render(request,'seguridad/modal.html',{'nombre':form,'url':'actualizar_modulo/','n':'ModuloU','u':'ModuloU','estado':estado})
@login_required(login_url='/')
def eliminar_submodulo(request):
    """Soft-delete a sub-module (AJAX GET) and return the refreshed fragment.

    Fixes two defects of the previous version:

    * the target row was fetched twice (once via ``filter()`` only to read
      ``padre``, once via ``get()``) — it is now fetched a single time, and
    * a non-AJAX/non-GET request crashed with ``NameError`` because ``padre``
      (and ``idb``) were only bound inside the AJAX branch; the fragment is
      now rendered with an empty parent in that case instead of crashing.
    """
    modulo = modulos.objects.all().order_by('id')
    estado = permi(request, "registro_modulo")
    if request.method == 'GET' and request.is_ajax():
        idb = request.GET.get("id", "")
        a = modulos.objects.get(pk=idb)  # single fetch instead of filter()+get()
        padre = a.padre                  # remember the parent before flagging
        a.estado = False                 # soft delete: keep the row, hide it
        a.save()
        historiales(request, ["submodulo", "eliminar", idb])  # audit-trail entry
        return render(request, 'seguridad/modulo/ajax_submodulo.html',
                      {'modulo': modulo, 'nm': 'SubModuloU', 'padre': str(padre), 'estado': estado})
    # Previously this path raised NameError ('padre' unbound). Render the same
    # fragment with no parent filter instead of producing a 500.
    return render(request, 'seguridad/modulo/ajax_submodulo.html',
                  {'modulo': modulo, 'nm': 'SubModuloU', 'padre': '', 'estado': estado})
@login_required(login_url='/')
def actualizar_submodulo(request):
    """Edit an existing sub-module.

    AJAX POST: validate and save, log the change, and return the refreshed
    sub-module fragment. On an invalid form, re-render the form with its
    errors — the old code fell through without a return here, so Django
    raised on the ``None`` response (this matches ``registro_submodulo``).
    Otherwise: return the populated edit form inside the shared modal.

    Also removes the redundant ``filter()`` loop that was used only to read
    ``padre`` from a row already fetched with ``get_object_or_404``.
    """
    modulo = modulos.objects.all().order_by('id')
    estado = permi(request, "registro_modulo")
    if request.method == 'POST' and request.is_ajax():
        idp = request.POST.get("id", "")
        a = get_object_or_404(modulos, pk=idp)
        padre = a.padre  # read before the form save mutates the instance
        form = formSubModulo(request.POST, instance=a)
        if form.is_valid():
            form.save()
            historiales(request, ["submodulo", "actualizar", idp])  # audit-trail entry
            return render(request, 'seguridad/modulo/ajax_submodulo.html',
                          {'padre': str(padre), 'modulo': modulo, 'nm': 'SubModuloU', 'estado': estado})
        # Missing in the original: surface validation errors instead of
        # returning None.
        return render(request, 'seguridad/modulo/form_submodulo.html', {'formu': form})
    else:
        idp = request.GET.get("id", "")
        a = get_object_or_404(modulos, pk=idp)
        form = formSubModulo(instance=a)
        return render(request, 'seguridad/modal.html',
                      {'nombre': form, 'url': 'actualizar_submodulo/', 'n': 'SubModuloU', 'u': 'SubModuloU', 'estado': estado})
@login_required(login_url='/')
def registro_submodulo(request):
    """Sub-module creation/listing for one parent module.

    AJAX POST: create the sub-module, log it, and return the refreshed
    fragment (or the form with validation errors).
    GET: list only the children of the module given by the ``id`` parameter.
    """
    estado = permi(request, "registro_modulo")
    modulo = modulos.objects.all().order_by('id')
    if request.method == 'POST' and request.is_ajax():
        formu = formSubModulo(request.POST)
        padre = request.POST.get("padre","")  # parent module id, echoed back to the template
        if formu.is_valid():
            formu.save()
            idp = modulos.objects.latest('id')  # the row just created, for the audit log
            historiales(request,["submodulo","registrar",idp.id])
            return render(request,'seguridad/modulo/ajax_submodulo.html',{'modulo':modulo,'nm':'SubModuloU','padre':padre,'estado':estado})
        else:
            return render(request,'seguridad/modulo/form_submodulo.html',{'formu':formu})
    else:
        idp = request.GET.get("id","")
        modulo = modulos.objects.filter(padre=idp)  # only children of this parent
        #print(modulo.query)  # prints the generated SQL query to the terminal
        return render(request,'seguridad/modulo/ajax_submodulo.html',{'modulo':modulo,'nm':'SubModuloU','padre':idp,'estado':estado})
def _permisos_de_perfil(idperfil):
    """Return the permission rows (as value dicts) for one profile.

    Extracted so the AJAX branch and the initial page load share a single
    query definition instead of two hand-maintained copies of the same
    long ``values(...)`` call.
    """
    return permisos.objects.select_related('idmodulo').filter(idperfil_id=idperfil).values(
        'id', 'idmodulo_id', 'idmodulo__padre', 'idmodulo__descripcion', 'idperfil_id',
        'buscar', 'eliminar', 'editar', 'insertar', 'imprimir', 'ver')

@login_required(login_url='/')
def registro_permisos(request):
    """Permission-matrix screen.

    AJAX POST: return the permission fragment for the selected profile.
    Otherwise: render the full page, defaulting to profile 2, plus the
    list of profiles with their permission counts.
    """
    if request.method == 'POST' and request.is_ajax():
        idb = request.POST.get("id", "")
        return render(request, 'seguridad/permisos/ajax_permisos.html',
                      {'permisos': _permisos_de_perfil(idb)})
    else:
        # Initial page load: profile 2 is the historical default selection.
        permiso1 = permisos.objects.values('idperfil__descripcion', 'idperfil_id').annotate(Count('idperfil'))
        return render(request, 'seguridad/permisos/permisos.html',
                      {'permisos': _permisos_de_perfil(2), 'permisos1': permiso1,
                       'url': 'registro_permisos/', 'n': 'PermisosU'})
@login_required(login_url='/')
def cambiarEstadoPermiso(request):
    """Toggle a single permission flag of one ``permisos`` row (AJAX GET).

    The client sends the row id, a short column code in ``url`` and the
    *current* state in ``e``; the stored value becomes its inverse.

    Defect fixed: the original mixed two bare ``if`` statements ("v", "e")
    with an ``elif`` chain — behaviour was correct only by accident. A
    lookup table plus ``setattr`` removes both the inconsistency and the
    six-way repetition.
    """
    # URL code -> model field holding that permission flag.
    _FLAGS = {'v': 'ver', 'e': 'editar', 'b': 'buscar',
              'i': 'insertar', 'el': 'eliminar', 'im': 'imprimir'}
    if request.method == 'GET' and request.is_ajax():
        idp = request.GET.get("id", "")
        u = request.GET.get("url", "")
        e = request.GET.get("e", "")
        nuevo = e != 'true'  # client reports the current state; store its inverse
        a = permisos.objects.get(pk=idp)
        if u in _FLAGS:
            setattr(a, _FLAGS[u], nuevo)
        a.save()
        historiales(request, ["permisos", "modificar", idp])  # audit-trail entry
        return HttpResponse('ok')
@login_required(login_url='/')
def cambiarEstadoPermiso2(request):
    """Set every permission flag of one ``permisos`` row at once (AJAX GET).

    The client sends the row id and the *current* state in ``e``; all six
    flags are written with the inverse of that state.
    """
    if request.method == 'GET' and request.is_ajax():
        idp = request.GET.get("id", "")
        estado_actual = request.GET.get("e", "")
        nuevo = estado_actual != 'true'  # invert the state reported by the client
        fila = permisos.objects.get(pk=idp)
        for campo in ('ver', 'editar', 'buscar', 'insertar', 'eliminar', 'imprimir'):
            setattr(fila, campo, nuevo)
        fila.save()
        historiales(request, ["permisos", "modificar", idp])  # audit-trail entry
        return HttpResponse('ok')
@login_required(login_url='/')
def user_block(request):
    """Manage AccessAttempt rows (failed/blocked logins; presumably django-axes — confirm).

    AJAX POST: overwrite the failure counter of one attempt (e.g. to
    unblock a user) and return the refreshed table fragment.
    Otherwise: render the paginated listing.
    """
    userBlock = AccessAttempt.objects.all()
    estado = permi(request, "registro_modulo")
    if request.method == 'POST' and request.is_ajax():
        idp = request.POST.get("id","")
        u = AccessAttempt.objects.get(pk=idp)
        u.failures_since_start = request.POST["ni"]  # new failure count from the form
        u.save()
        historiales(request,["userBlock","modificar",idp])  # audit-trail entry
        return render(request,'seguridad/userBlock/ajax_user_block.html',{'lista':userBlock,'estado':estado})
    else:
        modulo = {'estado':estado,'url':'user_block/'}
        return paginacion(request,userBlock, modulo, 'seguridad/userBlock/user_block.html' )
def busq_ajax_us(request):
    """AJAX search over the login-attempt (AccessAttempt) table.

    ``d`` selects the column: ``"v"`` searches by IP substring, ``"b"`` by
    exact failure count; results are capped at ten rows.

    Defect fixed: any other value of ``d`` left ``e`` unbound and the view
    crashed with ``NameError``; unknown selectors now yield an empty list.

    NOTE(review): unlike the sibling views this one has no @login_required
    decorator — confirm whether that is intentional.
    """
    dat = request.GET.get('datos')
    estado = permi(request, "user_block")
    d = request.GET.get('d')
    if d == "v":
        e = AccessAttempt.objects.filter(Q(ip_address__contains=dat))[:10]
    elif d == "b":
        e = AccessAttempt.objects.filter(failures_since_start=dat)[:10]
    else:
        e = []  # unknown selector: show nothing instead of raising NameError
    modulo = {'lista': e, 'estado': estado}
    return render(request, 'seguridad/userBlock/ajax_user_block.html', modulo)
def manual(request):
    """Render the static user-manual page."""
    return render(request,'seguridad/manual/manual.html')
3210189 | <reponame>vfxetc/sgcache
#from shotgun_api3_registry import connect
#sg = connect()

import os

# Pick a Shotgun connection. The hard-coded False keeps the real
# (registry-based) connection disabled; by default we talk to the local
# sgcache/sgmock test server instead.
if False:
    from shotgun_api3_registry import connect
    sg = connect(use_cache=False)
else:
    from tests import Shotgun
    url = 'http://127.0.0.1:8010'
    sg = Shotgun(url,
        os.environ.get('SGCACHE_SHOTGUN_SCRIPT_name', 'script_name'),
        os.environ.get('SGCACHE_SHOTGUN_API_KEY', 'api_key'),
    )

if sg.server_info.get('sgcache') or sg.server_info.get('sgmock'):
    # Throwaway test server: wipe it and build the fixture entities fresh.
    sg.clear()
    SHOT = sg.create('Shot', {'code': 'multi_entity_test'})
    USER = sg.create('HumanUser', {'first_name': 'multi_entity_user'})
    GRP1 = sg.create('Group', {'code': 'multi_entity_group1'})
    GRP2 = sg.create('Group', {'code': 'multi_entity_group2'})
    sg.create('Task', {'entity': SHOT, 'content': 'both', 'task_assignees': [USER, GRP1]})
    sg.create('Task', {'entity': SHOT, 'content': 'user', 'task_assignees': [USER]})
    sg.create('Task', {'entity': SHOT, 'content': 'group', 'task_assignees': [GRP1]})
    sg.create('Task', {'entity': SHOT, 'content': 'none', 'task_assignees': []})
else:
    # Live server: hard-coded ids of pre-existing fixture entities.
    SHOT = {'type': 'Shot', 'id': 10891}
    AA = {'type': 'Asset', 'id': 1008}
    AB = {'type': 'Asset', 'id': 1009}
    AC = {'type': 'Asset', 'id': 1010}
    USER = {'type': 'HumanUser', 'id': 108}
    GRP1 = {'type': 'Group', 'id': 11}
    # Fixed: the original assigned GRP1 twice (copy-paste typo), leaving
    # GRP2 undefined on this branch while the test branch defines it.
    GRP2 = {'type': 'Group', 'id': 13}
def find(filters):
    """Run a Task find scoped to the fixture SHOT, fetching 'content'."""
    scoped = list(filters) + [('entity', 'is', SHOT)]
    return sg.find('Task', scoped, ['content'])
def test(filters):
    """Debug helper: print the filters and the tasks they match (Python 2 prints)."""
    print '%d filters:' % len(filters)
    for f in filters:
        print '    %r' % (f, )
    entities = find(filters)
    print '%d entities:' % (len(entities))
    for e in entities:
        print '    {id} {content}'.format(**e)
    print
def assertTasks(filters, expected, message=''):
    """Check that *filters* matches exactly the tasks named in *expected*.

    Prints "Ok." or an "ERROR!" diff rather than raising (Python 2 prints);
    ordering is ignored by sorting both sides.
    """
    tasks = find(filters)
    found = sorted(t['content'] for t in tasks)
    expected = sorted(expected)
    if found == expected:
        print '%s%sOk.' % (message or '', ': ' if message else '')
    else:
        print '%s%sERROR! Expected %s, found %s' % (message or '', ': ' if message else '', expected, found)
'''
HOLY SHIT!
>>> sg.find_one('Task', [('sg_assets.Task_sg_assets_Connection.asset.Asset.code', 'contains', 'Dummy')])
>>> sg.find_one('Task', [('sg_assets.Asset.code', 'contains', 'Dummy')])
'''
# Smoke checks for the multi-entity name_contains / name_not_contains
# filter operators against the fixture tasks created above.
print '=== name_CONTAINS ==='
assertTasks([
    ('task_assignees', 'name_contains', 'Mike'),
], ['both', 'user'])
assertTasks([
    ('task_assignees', 'name_contains', 'GRP1'),
], ['both', 'group'])
print '=== name_NOT_CONTAINS ==='
assertTasks([
    ('task_assignees', 'name_not_contains', 'GRP1'),
], ['user', 'none'])
| StarcoderdataPython |
3385040 | <filename>ascension/testrun/anim.py
from ascension.game import Ascension
from ascension.window import MainWindowManager
from ascension.ascsprite import SpriteManager, Sprite, UNIT_GROUP
from math import ceil, floor
from ascension.settings import AscensionConf as conf
# Horizontal/vertical padding (pixels) added around each animation cell.
BUFFER = (20, 20)
class RepeatCallback(object):
def __init__(self, sprite, animation):
self.sprite = sprite
self.animation = animation
def __call__(self, extra_time):
self.sprite.start_animation(self.animation, extra_time, end_callback=self)
class AnimTest(Ascension):
    """Debug harness that tiles the selected animations on screen, looping each."""

    def initialize(self, *animation_names):
        """Set up the window and sprites; a first argument of "list" only prints names."""
        MainWindowManager.set_background_color(0.5, 0.5, 0.5)
        self.animation_names = animation_names
        self.animation_sprites = []
        if self.animation_names and animation_names[0] == "list":
            self.list_animations()
            import sys
            sys.exit(0)
        else:
            self.find_animations()
            self.determine_cell_size()
            self.add_sprites()

    def list_animations(self):
        # Print every registered animation name, sorted (Python 2 print).
        animations = [a.name for a in SpriteManager.animations.values()]
        animations.sort()
        for animation in animations:
            print animation

    def find_animations(self):
        """Collect the animations to display, filtered by the requested names."""
        self.animations = []
        for animation in SpriteManager.animations.values():
            if not self.animation_names or animation.name in self.animation_names:
                self.animations.append(animation)
        self.animations.sort(key=lambda x: x.name)

    def determine_cell_size(self):
        """Cell size = largest animation bounding box plus BUFFER padding on each side."""
        max_width = 0
        max_height = 0
        for animation in self.animations:
            max_width = max(max_width, animation.width)
            max_height = max(max_height, animation.height)
        self.cell_width = max_width + BUFFER[0] * 2.0
        self.cell_height = max_height + BUFFER[1] * 2.0

    def add_sprites(self):
        """Lay the animations out in a centred grid, filling left-to-right, top-down."""
        window_width = MainWindowManager.width / conf.sprite_scale
        window_height = MainWindowManager.height / conf.sprite_scale
        start_x = ceil((-window_width + self.cell_width) / 2)
        x = start_x
        y = floor((window_height - self.cell_height) / 2)
        for animation in self.animations:
            self.add_sprite(animation, x, y)
            x += self.cell_width
            # Wrap to the next row when the next cell would pass the right edge.
            if x + self.cell_width / 2 > window_width / 2:
                x = start_x
                y -= self.cell_height

    def add_sprite(self, animation, x, y):
        """Create one sprite at (x, y) and start its animation on an endless loop."""
        newsprite = Sprite(x=x, y=y)
        newsprite.start_animation(animation, end_callback=RepeatCallback(newsprite, animation))
        SpriteManager.add_sprite(newsprite)
| StarcoderdataPython |
146641 | <gh_stars>0
import numpy as np
import pandas as pd
import sys
import os
def readcsv(filepath):
    """Load *filepath* as a CSV with pandas, print the frame, and report success.

    Defect fixed: the old version special-cased Windows by prepending
    ``os.getcwd()`` with a hard-coded backslash, which broke absolute paths
    and added nothing for relative ones — pandas already resolves relative
    paths against the current working directory on every platform. The
    path is now passed straight through.
    """
    csvFrame = pd.read_csv(filepath)
    print(csvFrame)
    print("Success")

if __name__ == "__main__":
    readcsv(sys.argv[1])
| StarcoderdataPython |
1798508 | from PyQt5.QtCore import QObject
from PyQt5.QtCore import QByteArray
from PyQt5.QtCore import pyqtSlot
from PyQt5.QtNetwork import QTcpSocket
from PyQt5.QtNetwork import QAbstractSocket
from settings.netSettings import NetSettings
class _OutcomingConnection:
def __init__(self):
self.socketDescriptor = 0
self.socket = None
self.remoteAddress = ""
self.remotePort = 0
self.connected = False
self.dataPackets = list()
class ResenderEngine(QObject):
    """Maintains outgoing TCP connections to peer nodes and forwards packets.

    Connections are created lazily per remote address and cached in
    ``__outcomingConnections``; packets queued while a connection is still
    being established are flushed once it connects. Wire format per packet:
    a 4-byte little-endian length prefix followed by the encoded payload.
    """

    def __init__(self, parent=None, port=NetSettings.nodeDataPort):
        super().__init__(parent)
        self.__outcomingConnections = dict()  # address (str) -> _OutcomingConnection
        self.__remoteAddresses = list()  # peer IPs eligible for flooding
        self.__hostAddress = '127.0.0.1'  # our own address, skipped when flooding
        self.__remotePort = port

    def __del__(self):
        # Intentionally a no-op: Qt owns the sockets' lifetime.
        pass
        # self.stop()

    @pyqtSlot(list)
    def setRemoteAddresses(self, addressList: list):
        """Replace the peer list, dropping every existing connection first."""
        self.stop()
        #print(addressList, "SET ADDDRESES")
        self.__remoteAddresses = addressList

    @pyqtSlot(str)
    def setHostAddress(self, address: str):
        """Set our own address so floodPacket() never sends to ourselves."""
        self.__hostAddress = address

    @pyqtSlot()
    def stop(self):
        """Disconnect every live connection and forget all of them."""
        for outcomingConnection in self.__outcomingConnections.values():
            # outcomingConnection.socket.connected.disconnect()
            # outcomingConnection.socket.disconnected.disconnect()
            # outcomingConnection.socket.error.disconnect()
            if outcomingConnection.connected:
                outcomingConnection.socket.disconnectFromHost()
        self.__outcomingConnections.clear()

    @pyqtSlot(str, str)
    def floodPacket(self, packet: str, addressToOmit=str()):
        """Send *packet* to every known peer except ourselves and *addressToOmit*.

        NOTE(review): an empty entry aborts the whole loop (``return``), not
        just that entry — confirm whether ``continue`` was intended.
        """
        for address in self.__remoteAddresses:
            #print("FLOOD", address, self.__hostAddress, addressToOmit)
            if not len(address):
                return
            if address != addressToOmit and address != self.__hostAddress:
                self.sendPacket(address, packet)

    @pyqtSlot(str, str)
    def sendPacket(self, address: str, packet: str):
        """Queue *packet* for *address*, opening or re-opening the link as needed."""
        #print("SEND_PACKET:", address, packet)
        if not address:
            return
        outcomingConnection = self.__outcomingConnections.get(address, None)
        if not outcomingConnection:
            # First packet for this peer: create the socket, queue the packet,
            # and start connecting; __newConnection flushes the queue later.
            print("NO IN OUTCOME", address, len(address))
            outcomingConnection = _OutcomingConnection()
            outcomingConnection.dataPackets.append(packet.encode())
            outcomingConnection.socket = QTcpSocket(self)
            outcomingConnection.socket.setSocketOption(QAbstractSocket.LowDelayOption, 1)
            outcomingConnection.socket.setSocketOption(QAbstractSocket.KeepAliveOption, 0)
            outcomingConnection.socket.setObjectName(address)
            outcomingConnection.socket.connected.connect(self.__newConnection)
            outcomingConnection.socket.disconnected.connect(self.__disconnected)
            outcomingConnection.socket.error.connect(self.__error)
            outcomingConnection.remoteAddress = address
            outcomingConnection.remotePort = self.__remotePort
            outcomingConnection.socket.connectToHost(address, self.__remotePort)
            self.__outcomingConnections[address] = outcomingConnection
            #print("STARTED CON")
        else:
            if outcomingConnection.socket.state() == QAbstractSocket.ConnectedState:
                # Live link: queue and flush immediately.
                outcomingConnection.dataPackets.append(packet.encode())
                self._sendPackets(outcomingConnection)
            else:
                # Dead link (and not mid-handshake): force a reconnect.
                # NOTE(review): the packet is dropped on this path — confirm.
                if outcomingConnection.socket.state() != QAbstractSocket.ConnectingState:
                    outcomingConnection.socket.disconnectFromHost()
                    outcomingConnection.socket.connectToHost(outcomingConnection.remoteAddress, self.__remotePort)

    @pyqtSlot()
    def __newConnection(self):
        """Qt slot: a pending connection completed; flush its queued packets."""
        #print("NEW CONNECT TO")
        socket = self.sender()
        outcomingConnection = self.__outcomingConnections[socket.peerAddress().toString()]
        # outcomingConnection.socketDescriptor = socket.socketDescriptor()
        outcomingConnection.connected = True
        socket.disconnected.connect(self.__disconnected)
        self._sendPackets(outcomingConnection)

    def _sendPackets(self, outcomingConnection: _OutcomingConnection):
        """Write every queued packet (length-prefixed) and clear the queue."""
        #print("SEND PACKET 2")
        if outcomingConnection.socket.state() == QAbstractSocket.ConnectedState:
            #print("SEND PACKET 2 2")
            packets = outcomingConnection.dataPackets
            for packet in packets:
                packetLength = len(packet)
                # 4-byte little-endian length prefix, retried until fully written.
                bytesWritten = 0
                while bytesWritten != 4:
                    bytesWritten += outcomingConnection.socket.writeData(packetLength.to_bytes(4, byteorder="little"))
                bytesWritten = 0
                while bytesWritten != packetLength:
                    bytesWritten += outcomingConnection.socket.writeData(packet)
                outcomingConnection.socket.flush()
            outcomingConnection.dataPackets.clear()

    @pyqtSlot()
    def __disconnected(self):
        """Qt slot: peer went away; mark the link down and drop its queue."""
        socket = self.sender()
        outcomingConnection = self.__outcomingConnections.get(socket.objectName(), None)
        if outcomingConnection:
            outcomingConnection.connected = False
            outcomingConnection.dataPackets.clear()
            # outcomingConnection.commandPackets.clear()
            # outcomingConnection.dSocket.disconnected.disconnect()
            # outcomingConnection.cSocket.disconnected.disconnect()

    @pyqtSlot()
    def __error(self):
        """Qt slot: log socket errors with the offending peer's identity."""
        socket = self.sender()
        print("ERROR", socket.errorString(), socket.objectName(), socket.peerAddress().toString())
| StarcoderdataPython |
1672591 | <reponame>TobiasPrt/Smartphoniker-shop<filename>project/tests/conftest.py
# -*- coding: utf-8 -*-
"""Defines fixtures available to all tests."""
import logging
from project.server.config import TestingConfig
import pytest
from webtest import TestApp
from project.server import create_app
from project.server import db as _db
from project.server.models import User, Manufacturer, Color, Device, Customer, Shop, Repair, Image, DeviceSeries, Order
@pytest.fixture
def app():
    """Create the Flask application (TestingConfig) inside a pushed request context."""
    _app = create_app("project.server.config.TestingConfig")
    _app.logger.setLevel(logging.CRITICAL)  # keep pytest output quiet
    ctx = _app.test_request_context()
    ctx.push()
    yield _app
    ctx.pop()  # teardown: drop the request context
@pytest.fixture
def app_prod(app):
    """Switch the app fixture to ProductionConfig while keeping the test DB URI."""
    app.config.from_object("project.server.config.ProductionConfig")
    # Keep pointing at the testing database so production settings never
    # touch a real one.
    app.config['SQLALCHEMY_DATABASE_URI'] = TestingConfig.SQLALCHEMY_DATABASE_URI
    ctx = app.test_request_context()
    ctx.push()
    yield app
    ctx.pop()
@pytest.fixture
def testapp(app):
    """WebTest wrapper around the testing-configured app."""
    return TestApp(app)
@pytest.fixture
def prodapp(app_prod):
    """WebTest wrapper around the production-configured app."""
    return TestApp(app_prod)
@pytest.fixture
def devapp(app):
    """Switch the app fixture to DevelopmentConfig inside a fresh request context."""
    app.config.from_object("project.server.config.DevelopmentConfig")
    ctx = app.test_request_context()
    ctx.push()
    yield app
    ctx.pop()
@pytest.fixture
def db(app):
    """Create all tables for the test and drop them again afterwards."""
    _db.app = app
    with app.app_context():
        _db.create_all()
    yield _db
    # Explicitly close DB connection
    _db.session.close()
    _db.drop_all()
@pytest.fixture
def user(db):
    """Create an admin user for the tests."""
    # NOTE(review): the credential literals look like anonymisation
    # placeholders from the repo export — confirm the intended values.
    user = User.create(email="<EMAIL>", password="<PASSWORD>", admin=True)
    return user
@pytest.fixture
def sample_manufacturer(db):
    """Create a sample manufacturer"""
    return Manufacturer.create(name="Apple")
@pytest.fixture
def sample_color(db):
    """Create a sample color"""
    return Color.create(name="Black", color_code="#000000", internal_name="TEEESST")
@pytest.fixture
def sample_device(sample_series, sample_color):
    """ Create a sample device """
    return Device.create(name="iPhone 6S", colors=[sample_color], series=sample_series)
@pytest.fixture
def another_device(sample_series, sample_color):
    """ Create a second, distinct sample device """
    return Device.create(name="iPhone 6S Plus", colors=[sample_color], series=sample_series)
@pytest.fixture
def sample_series(sample_manufacturer):
    """ Sample Series """
    return DeviceSeries.create(name="iPhone", manufacturer=sample_manufacturer)
@pytest.fixture
def sample_customer(db):
    """ Return a sample customer """
    return Customer.create(first_name="Test", last_name="Kunde", street="Eine Straße 1", zip_code="11233", city="Kiel", tel="+49 113455665 45", email="<EMAIL>")
@pytest.fixture
def sample_shop(db):
    """ Return a sample Shop """
    return Shop.create(name="Zentrale")
@pytest.fixture
def sample_repair(sample_device):
    """ Return a sample repair (display repair on sample_device) """
    return Repair.create(name="Display", price=69, device=sample_device)
@pytest.fixture
def another_repair(another_device):
    """ Return a second repair, attached to the other device """
    return Repair.create(name="Battery", price=49, device=another_device)
@pytest.fixture
def some_devices(sample_series, sample_color):
    """ Return ten devices sharing one series and color, for list/search tests """
    return [
        Device.create(name="iPhone 6S Plus", colors=[sample_color], series=sample_series),
        Device.create(name="iPhone 6S +", colors=[sample_color], series=sample_series),
        Device.create(name="iPhone 9", colors=[sample_color], series=sample_series),
        Device.create(name="iPhone 7", colors=[sample_color], series=sample_series),
        Device.create(name="iPhone SE", colors=[sample_color], series=sample_series),
        Device.create(name="iPhone XS Max", colors=[sample_color], series=sample_series),
        Device.create(name="iPhone XS", colors=[sample_color], series=sample_series),
        Device.create(name="iPhone X", colors=[sample_color], series=sample_series),
        Device.create(name="iPhone 11", colors=[sample_color], series=sample_series),
        Device.create(name="iPhone Pro", colors=[sample_color], series=sample_series),
    ]
@pytest.fixture
def sample_image(db):
    """ Return a sample image """
    return Image.create(name="iPhone Picture", path="phone-frames/Apple/iphone678.svg")
@pytest.fixture
def sample_order(db, sample_color, sample_repair):
    """ Return a sample order combining the sample color and repair """
    return Order.create(color=sample_color, repairs=sample_repair)
| StarcoderdataPython |
import FWCore.ParameterSet.Config as cms

#
# module to make the MaxSumPtWMass jet combination
#
findTtSemiLepJetCombMaxSumPtWMass = cms.EDProducer("TtSemiLepJetCombMaxSumPtWMass",
    ## jet input
    jets  = cms.InputTag("selectedPatJets"),
    ## lepton input
    leps  = cms.InputTag("selectedPatMuons"),
    ## maximum number of jets to be considered
    maxNJets = cms.int32(4),
    ## nominal WMass parameter (in GeV)
    wMass    = cms.double(80.4),
    ## use b-tagging to distinguish between light and b jets
    useBTagging = cms.bool(False),
    ## choose algorithm for b-tagging
    bTagAlgorithm = cms.string("trackCountingHighEffBJetTags"),
    ## minimum b discriminator value required for b jets and
    ## maximum b discriminator value allowed for non-b jets
    minBDiscBJets     = cms.double(1.0),
    maxBDiscLightJets = cms.double(3.0)
)
| StarcoderdataPython |
123584 | <reponame>mkm99/TeamProject_StatsCalculator
# Generate a list of N random numbers with a seed and between a range of numbers - Both Integer and Decimal
from numpy.random import seed
import random
class RandomList():
    """Generate reproducible lists of random numbers within a given range.

    Fixes over the original:

    * the float fallback in ``list_Of_Ints`` called a bare ``list_Of_Floats``
      name — static methods have no implicit class scope, so that was a
      guaranteed ``NameError`` at runtime; it now goes through the class,
    * seeding used ``numpy.random.seed``, which does not affect the
      ``random`` module actually used to draw numbers, so the "seed"
      argument had no effect; ``random.seed`` is used instead, and
    * the float fallback now triggers when *either* bound is a float
      (``random.randint`` rejects non-integer bounds).
    """

    @staticmethod
    def list_Of_Ints(num1, num2, length, theSeed):
        """Return *length* random ints in [num1, num2], seeded by *theSeed*.

        Falls back to ``list_Of_Floats`` when either bound is a float.
        """
        if isinstance(num1, float) or isinstance(num2, float):
            return RandomList.list_Of_Floats(num1, num2, length, theSeed)
        random.seed(theSeed)  # seed the generator we actually draw from
        return [random.randint(num1, num2) for _ in range(length)]

    @staticmethod
    def list_Of_Floats(num1, num2, length, theSeed):
        """Return *length* random floats in [num1, num2], seeded by *theSeed*."""
        random.seed(theSeed)
        return [random.uniform(num1, num2) for _ in range(length)]
| StarcoderdataPython |
3373887 | <filename>lucene-experiment/output.py
def output(topic, result, run_id, output_file):
for rank, (docid, score) in enumerate(result.most_common()):
print(topic.num, 0, docid, rank, score, run_id, sep='\t', file=output_file)
| StarcoderdataPython |
3378063 | import bs4
import ClientConstants as CC
import ClientData
import ClientDefaults
import ClientGUICommon
import ClientGUIDialogs
import ClientGUIMenus
import ClientGUIControls
import ClientGUIListBoxes
import ClientGUIListCtrl
import ClientGUIScrolledPanels
import ClientGUISerialisable
import ClientGUITopLevelWindows
import ClientNetworkingJobs
import ClientParsing
import ClientPaths
import ClientSerialisable
import ClientThreading
import HydrusConstants as HC
import HydrusData
import HydrusExceptions
import HydrusGlobals as HG
import HydrusSerialisable
import HydrusTags
import json
import os
import sys
import threading
import traceback
import time
import wx
# Custom wx command event fired whenever a string converter is edited via its button.
( StringConverterEvent, EVT_STRING_CONVERTER ) = wx.lib.newevent.NewCommandEvent()
class StringConverterButton( ClientGUICommon.BetterButton ):
    """Button summarising a StringConverter; clicking opens its edit dialog.

    Fires a StringConverterEvent after a successful edit so parents can react.
    """

    def __init__( self, parent, string_converter ):
        
        ClientGUICommon.BetterButton.__init__( self, parent, 'edit string converter', self._Edit )
        
        self._string_converter = string_converter
        
        self._example_string_override = None  # optional example text forwarded to the edit panel
        
        self._UpdateLabel()
        
    
    def _Edit( self ):
        """Open the modal edit dialog; on OK, adopt the new converter and notify."""
        
        with ClientGUITopLevelWindows.DialogEdit( self, 'edit string converter', frame_key = 'deeply_nested_dialog' ) as dlg:
            
            panel = EditStringConverterPanel( dlg, self._string_converter, example_string_override = self._example_string_override )
            
            dlg.SetPanel( panel )
            
            if dlg.ShowModal() == wx.ID_OK:
                
                self._string_converter = panel.GetValue()
                
                self._UpdateLabel()
                
                wx.QueueEvent( self.GetEventHandler(), StringConverterEvent( -1 ) )
                
            
        
    
    def _UpdateLabel( self ):
        # Label shows how many transformation rules the converter holds.
        
        num_rules = len( self._string_converter.transformations )
        
        if num_rules == 0:
            
            label = 'no string transformations'
            
        else:
            
            label = HydrusData.ConvertIntToPrettyString( num_rules ) + ' string transformations'
            
        
        self.SetLabelText( label )
        
    
    def GetValue( self ):
        """Return the current StringConverter."""
        
        return self._string_converter
        
    
    def SetExampleString( self, example_string ):
        """Set the example text shown in the edit panel's test area."""
        
        self._example_string_override = example_string
        
    
    def SetValue( self, string_converter ):
        """Replace the converter and refresh the label."""
        
        self._string_converter = string_converter
        
        self._UpdateLabel()
class StringMatchButton( ClientGUICommon.BetterButton ):
    """Button summarising a StringMatch; clicking opens its edit dialog."""

    def __init__( self, parent, string_match ):
        
        ClientGUICommon.BetterButton.__init__( self, parent, 'edit string match', self._Edit )
        
        self._string_match = string_match
        
        self._UpdateLabel()
        
    
    def _Edit( self ):
        """Open the modal edit dialog; on OK, adopt the new match and refresh."""
        
        with ClientGUITopLevelWindows.DialogEdit( self, 'edit string match', frame_key = 'deeply_nested_dialog' ) as dlg:
            
            panel = EditStringMatchPanel( dlg, self._string_match )
            
            dlg.SetPanel( panel )
            
            if dlg.ShowModal() == wx.ID_OK:
                
                self._string_match = panel.GetValue()
                
                self._UpdateLabel()
                
            
        
    
    def _UpdateLabel( self ):
        # The match renders its own human-readable summary.
        
        label = self._string_match.ToUnicode()
        
        self.SetLabelText( label )
        
    
    def GetValue( self ):
        """Return the current StringMatch."""
        
        return self._string_match
        
    
    def SetValue( self, string_match ):
        """Replace the match and refresh the label."""
        
        self._string_match = string_match
        
        self._UpdateLabel()
class EditCompoundFormulaPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for a compound parse formula.

    Lets the user manage an ordered list of sub-formulae, a substitution
    phrase, and the post-parse string match/converter, with a live test
    area on the right.
    """

    def __init__( self, parent, formula, test_context ):
        
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        
        # help button linking to the compound-formula docs
        
        menu_items = []
        
        page_func = HydrusData.Call( ClientPaths.LaunchPathInWebBrowser, os.path.join( HC.HELP_DIR, 'downloader_parsers_formulae.html#compound_formula' ) )
        
        menu_items.append( ( 'normal', 'open the compound formula help', 'Open the help page for compound formulae in your web browesr.', page_func ) )
        
        help_button = ClientGUICommon.MenuBitmapButton( self, CC.GlobalBMPs.help, menu_items )
        
        help_hbox = ClientGUICommon.WrapInText( help_button, self, 'help for this panel -->', wx.Colour( 0, 0, 255 ) )
        
        # left-hand edit panel: the list of sub-formulae plus its controls
        
        edit_panel = ClientGUICommon.StaticBox( self, 'edit' )
        
        edit_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )
        
        self._formulae = wx.ListBox( edit_panel, style = wx.LB_SINGLE )
        self._formulae.Bind( wx.EVT_LEFT_DCLICK, self.EventEdit )
        
        self._add_formula = ClientGUICommon.BetterButton( edit_panel, 'add', self.Add )
        
        self._edit_formula = ClientGUICommon.BetterButton( edit_panel, 'edit', self.Edit )
        
        self._move_formula_up = ClientGUICommon.BetterButton( edit_panel, u'\u2191', self.MoveUp )
        
        self._delete_formula = ClientGUICommon.BetterButton( edit_panel, 'X', self.Delete )
        
        self._move_formula_down = ClientGUICommon.BetterButton( edit_panel, u'\u2193', self.MoveDown )
        
        self._sub_phrase = wx.TextCtrl( edit_panel )
        
        ( formulae, sub_phrase, string_match, string_converter ) = formula.ToTuple()
        
        self._string_match_button = StringMatchButton( edit_panel, string_match )
        
        self._string_converter_button = StringConverterButton( edit_panel, string_converter )
        
        # right-hand live test panel
        
        test_panel = ClientGUICommon.StaticBox( self, 'test' )
        
        test_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )
        
        self._test_panel = TestPanel( test_panel, self.GetValue, test_context = test_context )
        
        # populate from the incoming formula
        
        for formula in formulae:
            
            pretty_formula = formula.ToPrettyString()
            
            self._formulae.Append( pretty_formula, formula )
            
        
        self._sub_phrase.SetValue( sub_phrase )
        
        # layout
        
        udd_button_vbox = wx.BoxSizer( wx.VERTICAL )
        
        udd_button_vbox.Add( ( 20, 20 ), CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
        udd_button_vbox.Add( self._move_formula_up, CC.FLAGS_VCENTER )
        udd_button_vbox.Add( self._delete_formula, CC.FLAGS_VCENTER )
        udd_button_vbox.Add( self._move_formula_down, CC.FLAGS_VCENTER )
        udd_button_vbox.Add( ( 20, 20 ), CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
        
        formulae_hbox = wx.BoxSizer( wx.HORIZONTAL )
        
        formulae_hbox.Add( self._formulae, CC.FLAGS_EXPAND_BOTH_WAYS )
        formulae_hbox.Add( udd_button_vbox, CC.FLAGS_VCENTER )
        
        ae_button_hbox = wx.BoxSizer( wx.HORIZONTAL )
        
        ae_button_hbox.Add( self._add_formula, CC.FLAGS_VCENTER )
        ae_button_hbox.Add( self._edit_formula, CC.FLAGS_VCENTER )
        
        rows = []
        
        rows.append( ( 'substitution phrase:', self._sub_phrase ) )
        
        gridbox = ClientGUICommon.WrapInGrid( edit_panel, rows )
        
        edit_panel.Add( formulae_hbox, CC.FLAGS_EXPAND_BOTH_WAYS )
        edit_panel.Add( ae_button_hbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        edit_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        edit_panel.Add( self._string_match_button, CC.FLAGS_EXPAND_PERPENDICULAR )
        edit_panel.Add( self._string_converter_button, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        #
        
        test_panel.Add( self._test_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        #
        
        hbox = wx.BoxSizer( wx.HORIZONTAL )
        
        hbox.Add( edit_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        hbox.Add( test_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        vbox.Add( help_hbox, CC.FLAGS_BUTTON_SIZER )
        vbox.Add( hbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
        
        self.SetSizer( vbox )
        
    
    def Add( self ):
        """Open an edit dialog on a fresh HTML formula and append the result."""
        
        existing_formula = ClientParsing.ParseFormulaHTML()
        
        with ClientGUITopLevelWindows.DialogEdit( self, 'edit formula', frame_key = 'deeply_nested_dialog' ) as dlg:
            
            panel = EditFormulaPanel( dlg, existing_formula, self._test_panel.GetTestContext )
            
            dlg.SetPanel( panel )
            
            if dlg.ShowModal() == wx.ID_OK:
                
                new_formula = panel.GetValue()
                
                pretty_formula = new_formula.ToPrettyString()
                
                self._formulae.Append( pretty_formula, new_formula )
                
            
        
    
    def Delete( self ):
        """Remove the selected sub-formula, refusing to empty the list entirely."""
        
        selection = self._formulae.GetSelection()
        
        if selection != wx.NOT_FOUND:
            
            if self._formulae.GetCount() == 1:
                
                wx.MessageBox( 'A compound formula needs at least one sub-formula!' )
                
            else:
                
                self._formulae.Delete( selection )
                
            
        
    
    def Edit( self ):
        """Open an edit dialog on the selected sub-formula and store the result."""
        
        selection = self._formulae.GetSelection()
        
        if selection != wx.NOT_FOUND:
            
            old_formula = self._formulae.GetClientData( selection )
            
            with ClientGUITopLevelWindows.DialogEdit( self, 'edit formula', frame_key = 'deeply_nested_dialog' ) as dlg:
                
                panel = EditFormulaPanel( dlg, old_formula, self._test_panel.GetTestContext )
                
                dlg.SetPanel( panel )
                
                if dlg.ShowModal() == wx.ID_OK:
                    
                    new_formula = panel.GetValue()
                    
                    pretty_formula = new_formula.ToPrettyString()
                    
                    self._formulae.SetString( selection, pretty_formula )
                    self._formulae.SetClientData( selection, new_formula )
                    
                
            
        
    
    def EventEdit( self, event ):
        # Double-click on a list entry edits it.
        
        self.Edit()
        
    
    def GetValue( self ):
        """Assemble and return the ParseFormulaCompound from the current controls."""
        
        formulae = [ self._formulae.GetClientData( i ) for i in range( self._formulae.GetCount() ) ]
        
        sub_phrase = self._sub_phrase.GetValue()
        
        string_match = self._string_match_button.GetValue()
        
        string_converter = self._string_converter_button.GetValue()
        
        formula = ClientParsing.ParseFormulaCompound( formulae, sub_phrase, string_match, string_converter )
        
        return formula
        
    
    def MoveDown( self ):
        """Swap the selected sub-formula with the one below it."""
        
        selection = self._formulae.GetSelection()
        
        if selection != wx.NOT_FOUND and selection + 1 < self._formulae.GetCount():
            
            pretty_rule = self._formulae.GetString( selection )
            rule = self._formulae.GetClientData( selection )
            
            self._formulae.Delete( selection )
            
            self._formulae.Insert( pretty_rule, selection + 1, rule )
            
        
    
    def MoveUp( self ):
        """Swap the selected sub-formula with the one above it."""
        
        selection = self._formulae.GetSelection()
        
        if selection != wx.NOT_FOUND and selection > 0:
            
            pretty_rule = self._formulae.GetString( selection )
            rule = self._formulae.GetClientData( selection )
            
            self._formulae.Delete( selection )
            
            self._formulae.Insert( pretty_rule, selection - 1, rule )
class EditContextVariableFormulaPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for a ClientParsing.ParseFormulaContextVariable.

    Shows an 'edit' column (variable name, string match, string converter)
    beside a live 'test' column that re-runs GetValue against test data.
    """

    def __init__( self, parent, formula, test_context ):

        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )

        #

        # help button opening the local help page for this formula type
        menu_items = []

        page_func = HydrusData.Call( ClientPaths.LaunchPathInWebBrowser, os.path.join( HC.HELP_DIR, 'downloader_parsers_formulae.html#context_variable_formula' ) )

        menu_items.append( ( 'normal', 'open the context variable formula help', 'Open the help page for context variable formulae in your web browesr.', page_func ) )

        help_button = ClientGUICommon.MenuBitmapButton( self, CC.GlobalBMPs.help, menu_items )

        help_hbox = ClientGUICommon.WrapInText( help_button, self, 'help for this panel -->', wx.Colour( 0, 0, 255 ) )

        #

        # left-hand 'edit' column
        edit_panel = ClientGUICommon.StaticBox( self, 'edit' )

        edit_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )

        self._variable_name = wx.TextCtrl( edit_panel )

        ( variable_name, string_match, string_converter ) = formula.ToTuple()

        self._string_match_button = StringMatchButton( edit_panel, string_match )

        self._string_converter_button = StringConverterButton( edit_panel, string_converter )

        #

        # right-hand 'test' column; TestPanel calls self.GetValue to fetch the
        # current formula whenever the user runs a test
        test_panel = ClientGUICommon.StaticBox( self, 'test' )

        test_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )

        self._test_panel = TestPanel( test_panel, self.GetValue, test_context = test_context )

        #

        self._variable_name.SetValue( variable_name )

        #

        # lay out the edit column
        rows = []

        rows.append( ( 'variable name:', self._variable_name ) )

        gridbox = ClientGUICommon.WrapInGrid( edit_panel, rows )

        edit_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        edit_panel.Add( self._string_match_button, CC.FLAGS_EXPAND_PERPENDICULAR )
        edit_panel.Add( self._string_converter_button, CC.FLAGS_EXPAND_PERPENDICULAR )

        #

        test_panel.Add( self._test_panel, CC.FLAGS_EXPAND_BOTH_WAYS )

        #

        # edit and test columns side by side, help row on top
        hbox = wx.BoxSizer( wx.HORIZONTAL )

        hbox.Add( edit_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        hbox.Add( test_panel, CC.FLAGS_EXPAND_BOTH_WAYS )

        vbox = wx.BoxSizer( wx.VERTICAL )

        vbox.Add( help_hbox, CC.FLAGS_BUTTON_SIZER )
        vbox.Add( hbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )

        self.SetSizer( vbox )

    def GetValue( self ):
        """Build a ParseFormulaContextVariable from the current control values."""

        variable_name = self._variable_name.GetValue()

        string_match = self._string_match_button.GetValue()

        string_converter = self._string_converter_button.GetValue()

        formula = ClientParsing.ParseFormulaContextVariable( variable_name, string_match, string_converter )

        return formula
class EditFormulaPanel( ClientGUIScrolledPanels.EditPanel ):
    """Wrapper panel that shows a formula's pretty description and lets the
    user either edit it in a type-specific dialog or replace it with a fresh
    formula of a different type.

    The current formula may be None, in which case the buttons are disabled.
    """

    def __init__( self, parent, formula, test_context_callable ):
        """parent: wx parent window.
        formula: the current ClientParsing formula object, or None.
        test_context_callable: no-arg callable returning the test context to
        hand to child edit dialogs."""

        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )

        self._current_formula = formula
        self._test_context_callable = test_context_callable

        #

        my_panel = ClientGUICommon.StaticBox( self, 'formula' )

        # read-only multiline summary of the current formula
        self._formula_description = ClientGUICommon.SaneMultilineTextCtrl( my_panel )

        ( width, height ) = ClientGUICommon.ConvertTextToPixels( self._formula_description, ( 90, 8 ) )

        self._formula_description.SetInitialSize( ( width, height ) )

        self._formula_description.Disable()

        self._edit_formula = ClientGUICommon.BetterButton( my_panel, 'edit formula', self._EditFormula )

        self._change_formula_type = ClientGUICommon.BetterButton( my_panel, 'change formula type', self._ChangeFormulaType )

        #

        self._UpdateControls()

        #

        button_hbox = wx.BoxSizer( wx.HORIZONTAL )

        button_hbox.Add( self._edit_formula, CC.FLAGS_EXPAND_BOTH_WAYS )
        button_hbox.Add( self._change_formula_type, CC.FLAGS_EXPAND_BOTH_WAYS )

        my_panel.Add( self._formula_description, CC.FLAGS_EXPAND_BOTH_WAYS )
        my_panel.Add( button_hbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )

        #

        vbox = wx.BoxSizer( wx.VERTICAL )

        vbox.Add( my_panel, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )

        self.SetSizer( vbox )

    def _ChangeFormulaType( self ):
        """Offer to replace the current formula with a fresh one of another type."""

        # if the current formula fetches raw separated content, seed the html/json
        # replacements with the matching fetch mode so the behaviour carries over
        if self._current_formula.ParsesSeparatedContent():

            new_html = ClientParsing.ParseFormulaHTML( content_to_fetch = ClientParsing.HTML_CONTENT_HTML )
            new_json = ClientParsing.ParseFormulaJSON( content_to_fetch = ClientParsing.JSON_CONTENT_JSON )

        else:

            new_html = ClientParsing.ParseFormulaHTML()
            new_json = ClientParsing.ParseFormulaJSON()

        new_compound = ClientParsing.ParseFormulaCompound()
        new_context_variable = ClientParsing.ParseFormulaContextVariable()

        # offer every formula type except the current one.
        # FIX: the context-variable branch previously offered 'context_variable'
        # (its own type) as well, inconsistent with the other branches.
        if isinstance( self._current_formula, ClientParsing.ParseFormulaHTML ):

            order = ( 'json', 'compound', 'context_variable' )

        elif isinstance( self._current_formula, ClientParsing.ParseFormulaJSON ):

            order = ( 'html', 'compound', 'context_variable' )

        elif isinstance( self._current_formula, ClientParsing.ParseFormulaCompound ):

            order = ( 'html', 'json', 'context_variable' )

        elif isinstance( self._current_formula, ClientParsing.ParseFormulaContextVariable ):

            order = ( 'html', 'json', 'compound' )

        else:

            # FIX: previously an unrecognised formula type left 'order' unbound
            # and raised NameError below; fall back to offering everything
            order = ( 'html', 'json', 'compound', 'context_variable' )

        choice_tuples = []

        for formula_type in order:

            if formula_type == 'html':

                choice_tuples.append( ( 'change to a new HTML formula', new_html ) )

            elif formula_type == 'json':

                choice_tuples.append( ( 'change to a new JSON formula', new_json ) )

            elif formula_type == 'compound':

                choice_tuples.append( ( 'change to a new COMPOUND formula', new_compound ) )

            elif formula_type == 'context_variable':

                choice_tuples.append( ( 'change to a new CONTEXT VARIABLE formula', new_context_variable ) )

        with ClientGUIDialogs.DialogSelectFromList( self, 'select formula type', choice_tuples ) as dlg:

            if dlg.ShowModal() == wx.ID_OK:

                self._current_formula = dlg.GetChoice()

                self._UpdateControls()

    def _EditFormula( self ):
        """Open the type-appropriate edit dialog for the current formula."""

        if isinstance( self._current_formula, ClientParsing.ParseFormulaHTML ):

            panel_class = EditHTMLFormulaPanel

        elif isinstance( self._current_formula, ClientParsing.ParseFormulaJSON ):

            panel_class = EditJSONFormulaPanel

        elif isinstance( self._current_formula, ClientParsing.ParseFormulaCompound ):

            panel_class = EditCompoundFormulaPanel

        elif isinstance( self._current_formula, ClientParsing.ParseFormulaContextVariable ):

            panel_class = EditContextVariableFormulaPanel

        test_context = self._test_context_callable()

        dlg_title = 'edit formula'

        with ClientGUITopLevelWindows.DialogEdit( self, dlg_title, frame_key = 'deeply_nested_dialog' ) as dlg:

            panel = panel_class( dlg, self._current_formula, test_context )

            dlg.SetPanel( panel )

            if dlg.ShowModal() == wx.ID_OK:

                self._current_formula = panel.GetValue()

                self._UpdateControls()

    def _UpdateControls( self ):
        """Refresh the description text and enable/disable the buttons."""

        if self._current_formula is None:

            self._formula_description.SetValue( '' )

            self._edit_formula.Disable()
            self._change_formula_type.Disable()

        else:

            self._formula_description.SetValue( self._current_formula.ToPrettyMultilineString() )

            self._edit_formula.Enable()
            self._change_formula_type.Enable()

    def GetValue( self ):
        """Return the current formula object (may be None)."""

        return self._current_formula
class EditHTMLTagRulePanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for a single ClientParsing.ParseRuleHTML tag rule.

    A rule either searches descendants (by tag name/attributes/index) or walks
    back up ancestors (by tag name/depth); the irrelevant controls are disabled
    for whichever mode is not selected. A live one-line description of the
    rule is kept at the top of the panel.
    """

    def __init__( self, parent, tag_rule ):

        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )

        ( rule_type, tag_name, tag_attributes, tag_index, tag_depth, should_test_tag_string, tag_string_string_match ) = tag_rule.ToTuple()

        # normalise optional members of the tuple into control-friendly defaults
        if tag_name is None:

            tag_name = ''

        if tag_attributes is None:

            tag_attributes = {}

        if tag_depth is None:

            tag_depth = 1

        self._current_description = ClientGUICommon.BetterStaticText( self )

        self._rule_type = ClientGUICommon.BetterChoice( self )

        self._rule_type.Append( 'search descendents', ClientParsing.HTML_RULE_TYPE_DESCENDING )
        self._rule_type.Append( 'walk back up ancestors', ClientParsing.HTML_RULE_TYPE_ASCENDING )

        self._tag_name = wx.TextCtrl( self )

        self._tag_attributes = ClientGUIControls.EditStringToStringDictControl( self, tag_attributes )

        self._tag_index = ClientGUICommon.NoneableSpinCtrl( self, 'index to fetch', none_phrase = 'get all', min = 0, max = 255 )

        self._tag_depth = wx.SpinCtrl( self, min = 1, max = 255 )

        self._should_test_tag_string = wx.CheckBox( self )

        self._tag_string_string_match = StringMatchButton( self, tag_string_string_match )

        #

        # initialise controls from the incoming rule
        self._rule_type.SelectClientData( rule_type )
        self._tag_name.SetValue( tag_name )
        self._tag_index.SetValue( tag_index )
        self._tag_depth.SetValue( tag_depth )
        self._should_test_tag_string.SetValue( should_test_tag_string )

        self._UpdateTypeControls()

        #

        vbox = wx.BoxSizer( wx.VERTICAL )

        rows = []

        rows.append( ( 'rule type: ', self._rule_type ) )
        rows.append( ( 'tag name: ', self._tag_name ) )

        gridbox_1 = ClientGUICommon.WrapInGrid( self, rows )

        rows = []

        rows.append( ( 'index to fetch: ', self._tag_index ) )
        rows.append( ( 'depth to climb: ', self._tag_depth ) )

        gridbox_2 = ClientGUICommon.WrapInGrid( self, rows )

        rows = []

        rows.append( ( 'should test tag string: ', self._should_test_tag_string ) )
        rows.append( ( 'tag string match: ', self._tag_string_string_match ) )

        gridbox_3 = ClientGUICommon.WrapInGrid( self, rows )

        vbox.Add( self._current_description, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.Add( gridbox_1, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        vbox.Add( self._tag_attributes, CC.FLAGS_EXPAND_BOTH_WAYS )
        vbox.Add( gridbox_2, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        vbox.Add( gridbox_3, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )

        self.SetSizer( vbox )

        self._UpdateShouldTest()

        #

        # bindings are made after layout so the initial SetValue calls above
        # do not trigger premature description updates
        self._rule_type.Bind( wx.EVT_CHOICE, self.EventTypeChanged )
        self._tag_name.Bind( wx.EVT_TEXT, self.EventVariableChanged )
        self._tag_attributes.Bind( ClientGUIListCtrl.EVT_LIST_CTRL, self.EventVariableChanged)
        self._tag_index.Bind( wx.EVT_SPINCTRL, self.EventVariableChanged )
        self._tag_depth.Bind( wx.EVT_SPINCTRL, self.EventVariableChanged )
        self._should_test_tag_string.Bind( wx.EVT_CHECKBOX, self.EventShouldTestChanged )

    def _UpdateShouldTest( self ):
        # the string match button only matters when tag-string testing is on

        if self._should_test_tag_string.GetValue():

            self._tag_string_string_match.Enable()

        else:

            self._tag_string_string_match.Disable()

    def _UpdateTypeControls( self ):
        # enable the controls relevant to the selected rule type

        rule_type = self._rule_type.GetChoice()

        if rule_type == ClientParsing.HTML_RULE_TYPE_DESCENDING:

            self._tag_attributes.Enable()
            self._tag_index.Enable()

            self._tag_depth.Disable()

        else:

            self._tag_attributes.Disable()
            self._tag_index.Disable()

            self._tag_depth.Enable()

        self._UpdateDescription()

    def _UpdateDescription( self ):
        # regenerate the one-line summary from the current control state

        tag_rule = self.GetValue()

        label = tag_rule.ToString()

        self._current_description.SetLabelText( label )

    def EventShouldTestChanged( self, event ):

        self._UpdateShouldTest()

    def EventTypeChanged( self, event ):

        self._UpdateTypeControls()

        event.Skip()

    def EventVariableChanged( self, event ):

        self._UpdateDescription()

        event.Skip()

    def GetValue( self ):
        """Build a ParseRuleHTML from the current control values.

        An empty tag name is converted back to None (match any tag)."""

        rule_type = self._rule_type.GetChoice()

        tag_name = self._tag_name.GetValue()

        if tag_name == '':

            tag_name = None

        should_test_tag_string = self._should_test_tag_string.GetValue()

        tag_string_string_match = self._tag_string_string_match.GetValue()

        if rule_type == ClientParsing.HTML_RULE_TYPE_DESCENDING:

            tag_attributes = self._tag_attributes.GetValue()
            tag_index = self._tag_index.GetValue()

            tag_rule = ClientParsing.ParseRuleHTML( rule_type = rule_type, tag_name = tag_name, tag_attributes = tag_attributes, tag_index = tag_index, should_test_tag_string = should_test_tag_string, tag_string_string_match = tag_string_string_match )

        elif rule_type == ClientParsing.HTML_RULE_TYPE_ASCENDING:

            tag_depth = self._tag_depth.GetValue()

            tag_rule = ClientParsing.ParseRuleHTML( rule_type = rule_type, tag_name = tag_name, tag_depth = tag_depth, should_test_tag_string = should_test_tag_string, tag_string_string_match = tag_string_string_match )

        return tag_rule
class EditHTMLFormulaPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for a ClientParsing.ParseFormulaHTML.

    Left column: an ordered list of tag rules with add/edit/delete/reorder
    buttons, plus the content-to-fetch choice and string match/converter.
    Right column: a live test panel driven by GetValue.
    """

    def __init__( self, parent, formula, test_context ):

        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )

        #

        # help button opening the local help page for this formula type
        menu_items = []

        page_func = HydrusData.Call( ClientPaths.LaunchPathInWebBrowser, os.path.join( HC.HELP_DIR, 'downloader_parsers_formulae.html#html_formula' ) )

        menu_items.append( ( 'normal', 'open the html formula help', 'Open the help page for html formulae in your web browesr.', page_func ) )

        help_button = ClientGUICommon.MenuBitmapButton( self, CC.GlobalBMPs.help, menu_items )

        help_hbox = ClientGUICommon.WrapInText( help_button, self, 'help for this panel -->', wx.Colour( 0, 0, 255 ) )

        #

        edit_panel = ClientGUICommon.StaticBox( self, 'edit' )

        edit_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )

        # ordered tag-rule list; double-click edits
        self._tag_rules = wx.ListBox( edit_panel, style = wx.LB_SINGLE )

        self._tag_rules.Bind( wx.EVT_LEFT_DCLICK, self.EventEdit )

        self._add_rule = ClientGUICommon.BetterButton( edit_panel, 'add', self.Add )

        self._edit_rule = ClientGUICommon.BetterButton( edit_panel, 'edit', self.Edit )

        self._move_rule_up = ClientGUICommon.BetterButton( edit_panel, u'\u2191', self.MoveUp )

        self._delete_rule = ClientGUICommon.BetterButton( edit_panel, 'X', self.Delete )

        self._move_rule_down = ClientGUICommon.BetterButton( edit_panel, u'\u2193', self.MoveDown )

        self._content_to_fetch = ClientGUICommon.BetterChoice( edit_panel )

        self._content_to_fetch.Append( 'attribute', ClientParsing.HTML_CONTENT_ATTRIBUTE )
        self._content_to_fetch.Append( 'string', ClientParsing.HTML_CONTENT_STRING )
        self._content_to_fetch.Append( 'html', ClientParsing.HTML_CONTENT_HTML )

        self._content_to_fetch.Bind( wx.EVT_CHOICE, self.EventContentChoice )

        self._attribute_to_fetch = wx.TextCtrl( edit_panel )

        ( tag_rules, content_to_fetch, attribute_to_fetch, string_match, string_converter ) = formula.ToTuple()

        self._string_match_button = StringMatchButton( edit_panel, string_match )

        self._string_converter_button = StringConverterButton( edit_panel, string_converter )

        #

        test_panel = ClientGUICommon.StaticBox( self, 'test' )

        test_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )

        self._test_panel = TestPanel( test_panel, self.GetValue, test_context = test_context )

        #

        # populate controls from the incoming formula
        for rule in tag_rules:

            pretty_rule = rule.ToString()

            self._tag_rules.Append( pretty_rule, rule )

        self._content_to_fetch.SelectClientData( content_to_fetch )

        self._attribute_to_fetch.SetValue( attribute_to_fetch )

        self._UpdateControls()

        #

        # up/delete/down button column beside the rule list
        udd_button_vbox = wx.BoxSizer( wx.VERTICAL )

        udd_button_vbox.Add( ( 20, 20 ), CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
        udd_button_vbox.Add( self._move_rule_up, CC.FLAGS_VCENTER )
        udd_button_vbox.Add( self._delete_rule, CC.FLAGS_VCENTER )
        udd_button_vbox.Add( self._move_rule_down, CC.FLAGS_VCENTER )
        udd_button_vbox.Add( ( 20, 20 ), CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )

        tag_rules_hbox = wx.BoxSizer( wx.HORIZONTAL )

        tag_rules_hbox.Add( self._tag_rules, CC.FLAGS_EXPAND_BOTH_WAYS )
        tag_rules_hbox.Add( udd_button_vbox, CC.FLAGS_VCENTER )

        ae_button_hbox = wx.BoxSizer( wx.HORIZONTAL )

        ae_button_hbox.Add( self._add_rule, CC.FLAGS_VCENTER )
        ae_button_hbox.Add( self._edit_rule, CC.FLAGS_VCENTER )

        rows = []

        rows.append( ( 'content to fetch:', self._content_to_fetch ) )
        rows.append( ( 'attribute to fetch: ', self._attribute_to_fetch ) )

        gridbox = ClientGUICommon.WrapInGrid( edit_panel, rows )

        edit_panel.Add( tag_rules_hbox, CC.FLAGS_EXPAND_BOTH_WAYS )
        edit_panel.Add( ae_button_hbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        edit_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        edit_panel.Add( self._string_match_button, CC.FLAGS_EXPAND_PERPENDICULAR )
        edit_panel.Add( self._string_converter_button, CC.FLAGS_EXPAND_PERPENDICULAR )

        #

        test_panel.Add( self._test_panel, CC.FLAGS_EXPAND_BOTH_WAYS )

        #

        hbox = wx.BoxSizer( wx.HORIZONTAL )

        hbox.Add( edit_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        hbox.Add( test_panel, CC.FLAGS_EXPAND_BOTH_WAYS )

        vbox = wx.BoxSizer( wx.VERTICAL )

        vbox.Add( help_hbox, CC.FLAGS_BUTTON_SIZER )
        vbox.Add( hbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )

        self.SetSizer( vbox )

    def _UpdateControls( self ):
        # the attribute field only applies when fetching an attribute

        if self._content_to_fetch.GetChoice() == ClientParsing.HTML_CONTENT_ATTRIBUTE:

            self._attribute_to_fetch.Enable()

        else:

            self._attribute_to_fetch.Disable()

    def Add( self ):
        """Create a new default tag rule via the edit dialog and append it."""

        dlg_title = 'edit tag rule'

        with ClientGUITopLevelWindows.DialogEdit( self, dlg_title, frame_key = 'deeply_nested_dialog' ) as dlg:

            new_rule = ClientParsing.ParseRuleHTML()

            panel = EditHTMLTagRulePanel( dlg, new_rule )

            dlg.SetPanel( panel )

            if dlg.ShowModal() == wx.ID_OK:

                rule = panel.GetValue()

                pretty_rule = rule.ToString()

                self._tag_rules.Append( pretty_rule, rule )

    def Delete( self ):
        """Delete the selected tag rule, if any."""

        selection = self._tag_rules.GetSelection()

        if selection != wx.NOT_FOUND:

            self._tag_rules.Delete( selection )

    def Edit( self ):
        """Edit the selected tag rule in a dialog and store the result."""

        selection = self._tag_rules.GetSelection()

        if selection != wx.NOT_FOUND:

            rule = self._tag_rules.GetClientData( selection )

            dlg_title = 'edit tag rule'

            with ClientGUITopLevelWindows.DialogEdit( self, dlg_title, frame_key = 'deeply_nested_dialog' ) as dlg:

                panel = EditHTMLTagRulePanel( dlg, rule )

                dlg.SetPanel( panel )

                if dlg.ShowModal() == wx.ID_OK:

                    rule = panel.GetValue()

                    pretty_rule = rule.ToString()

                    # keep the visible label in sync with the stored rule
                    self._tag_rules.SetString( selection, pretty_rule )
                    self._tag_rules.SetClientData( selection, rule )

    def EventContentChoice( self, event ):

        self._UpdateControls()

    def EventEdit( self, event ):
        # double-click acts like the 'edit' button

        self.Edit()

    def GetValue( self ):
        """Build a ParseFormulaHTML from the current control values.

        Raises HydrusExceptions.VetoException if fetching an attribute but no
        attribute name has been entered."""

        tags_rules = [ self._tag_rules.GetClientData( i ) for i in range( self._tag_rules.GetCount() ) ]

        content_to_fetch = self._content_to_fetch.GetChoice()

        attribute_to_fetch = self._attribute_to_fetch.GetValue()

        if content_to_fetch == ClientParsing.HTML_CONTENT_ATTRIBUTE and attribute_to_fetch == '':

            raise HydrusExceptions.VetoException( 'Please enter an attribute to fetch!' )

        string_match = self._string_match_button.GetValue()

        string_converter = self._string_converter_button.GetValue()

        formula = ClientParsing.ParseFormulaHTML( tags_rules, content_to_fetch, attribute_to_fetch, string_match, string_converter )

        return formula

    def MoveDown( self ):
        """Move the selected tag rule one place down, if possible."""

        selection = self._tag_rules.GetSelection()

        if selection != wx.NOT_FOUND and selection + 1 < self._tag_rules.GetCount():

            pretty_rule = self._tag_rules.GetString( selection )
            rule = self._tag_rules.GetClientData( selection )

            # wx.ListBox has no native 'move', so delete and re-insert
            self._tag_rules.Delete( selection )

            self._tag_rules.Insert( pretty_rule, selection + 1, rule )

    def MoveUp( self ):
        """Move the selected tag rule one place up, if possible."""

        selection = self._tag_rules.GetSelection()

        if selection != wx.NOT_FOUND and selection > 0:

            pretty_rule = self._tag_rules.GetString( selection )
            rule = self._tag_rules.GetClientData( selection )

            # wx.ListBox has no native 'move', so delete and re-insert
            self._tag_rules.Delete( selection )

            self._tag_rules.Insert( pretty_rule, selection - 1, rule )
class EditJSONParsingRulePanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for one step of a JSON parse path.

    A rule is encoded as: a str (fetch that dictionary entry), an int (fetch
    that list index), or None (fetch all list items). The type choice controls
    which of the key/index inputs is enabled.
    """

    # local UI-only constants for the three rule flavours
    DICT_ENTRY = 0
    ALL_LIST_ITEMS = 1
    INDEXED_LIST_ITEM = 2

    def __init__( self, parent, rule ):

        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )

        self._type = ClientGUICommon.BetterChoice( self )

        self._type.Append( 'dictionary entry', self.DICT_ENTRY )
        self._type.Append( 'all list items', self.ALL_LIST_ITEMS )
        self._type.Append( 'indexed list item', self.INDEXED_LIST_ITEM)

        self._key = wx.TextCtrl( self )

        self._index = wx.SpinCtrl( self, min = 0, max = 65535 )

        #

        # decode the incoming rule: None -> all items, int -> index, str -> key
        if rule is None:

            self._type.SelectClientData( self.ALL_LIST_ITEMS )

        elif isinstance( rule, int ):

            self._type.SelectClientData( self.INDEXED_LIST_ITEM )

            self._index.SetValue( rule )

        else:

            self._type.SelectClientData( self.DICT_ENTRY )

            self._key.SetValue( rule )

        self._UpdateHideShow()

        #

        vbox = wx.BoxSizer( wx.VERTICAL )

        rows = []

        rows.append( ( 'dict entry: ', self._key ) )
        rows.append( ( 'list index: ', self._index ) )

        gridbox = ClientGUICommon.WrapInGrid( self, rows )

        vbox.Add( self._type, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.Add( gridbox, CC.FLAGS_EXPAND_PERPENDICULAR )

        self.SetSizer( vbox )

        #

        self._type.Bind( wx.EVT_CHOICE, self.EventChoice )

    def _UpdateHideShow( self ):
        # disable both inputs, then re-enable the one the chosen type uses

        self._key.Disable()
        self._index.Disable()

        choice = self._type.GetChoice()

        if choice == self.DICT_ENTRY:

            self._key.Enable()

        elif choice == self.INDEXED_LIST_ITEM:

            self._index.Enable()

    def EventChoice( self, event ):

        self._UpdateHideShow()

    def GetValue( self ):
        """Return the rule in its encoded form: str, int, or None."""

        choice = self._type.GetChoice()

        if choice == self.DICT_ENTRY:

            rule = self._key.GetValue()

        elif choice == self.INDEXED_LIST_ITEM:

            rule = self._index.GetValue()

        else:

            rule = None

        return rule
class EditJSONFormulaPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for a ClientParsing.ParseFormulaJSON.

    Left column: an ordered list of JSON parse rules with add/edit/delete/
    reorder buttons, plus the content-to-fetch choice and string
    match/converter. Right column: a live test panel driven by GetValue.
    """

    def __init__( self, parent, formula, test_context ):

        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )

        #

        # help button opening the local help page for this formula type
        menu_items = []

        page_func = HydrusData.Call( ClientPaths.LaunchPathInWebBrowser, os.path.join( HC.HELP_DIR, 'downloader_parsers_formulae.html#json_formula' ) )

        menu_items.append( ( 'normal', 'open the json formula help', 'Open the help page for json formulae in your web browesr.', page_func ) )

        help_button = ClientGUICommon.MenuBitmapButton( self, CC.GlobalBMPs.help, menu_items )

        help_hbox = ClientGUICommon.WrapInText( help_button, self, 'help for this panel -->', wx.Colour( 0, 0, 255 ) )

        #

        edit_panel = ClientGUICommon.StaticBox( self, 'edit' )

        edit_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )

        # ordered parse-rule list; double-click edits
        self._parse_rules = wx.ListBox( edit_panel, style = wx.LB_SINGLE )

        self._parse_rules.Bind( wx.EVT_LEFT_DCLICK, self.EventEdit )

        self._add_rule = ClientGUICommon.BetterButton( edit_panel, 'add', self.Add )

        self._edit_rule = ClientGUICommon.BetterButton( edit_panel, 'edit', self.Edit )

        self._move_rule_up = ClientGUICommon.BetterButton( edit_panel, u'\u2191', self.MoveUp )

        self._delete_rule = ClientGUICommon.BetterButton( edit_panel, 'X', self.Delete )

        self._move_rule_down = ClientGUICommon.BetterButton( edit_panel, u'\u2193', self.MoveDown )

        self._content_to_fetch = ClientGUICommon.BetterChoice( edit_panel )

        self._content_to_fetch.Append( 'string', ClientParsing.JSON_CONTENT_STRING )
        self._content_to_fetch.Append( 'json', ClientParsing.JSON_CONTENT_JSON )

        ( parse_rules, content_to_fetch, string_match, string_converter ) = formula.ToTuple()

        self._string_match_button = StringMatchButton( edit_panel, string_match )

        self._string_converter_button = StringConverterButton( edit_panel, string_converter )

        #

        test_panel = ClientGUICommon.StaticBox( self, 'test' )

        test_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )

        self._test_panel = TestPanel( test_panel, self.GetValue, test_context = test_context )

        #

        # populate controls from the incoming formula
        for rule in parse_rules:

            pretty_rule = ClientParsing.RenderJSONParseRule( rule )

            self._parse_rules.Append( pretty_rule, rule )

        self._content_to_fetch.SelectClientData( content_to_fetch )

        #

        # up/delete/down button column beside the rule list
        udd_button_vbox = wx.BoxSizer( wx.VERTICAL )

        udd_button_vbox.Add( ( 20, 20 ), CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
        udd_button_vbox.Add( self._move_rule_up, CC.FLAGS_VCENTER )
        udd_button_vbox.Add( self._delete_rule, CC.FLAGS_VCENTER )
        udd_button_vbox.Add( self._move_rule_down, CC.FLAGS_VCENTER )
        udd_button_vbox.Add( ( 20, 20 ), CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )

        parse_rules_hbox = wx.BoxSizer( wx.HORIZONTAL )

        parse_rules_hbox.Add( self._parse_rules, CC.FLAGS_EXPAND_BOTH_WAYS )
        parse_rules_hbox.Add( udd_button_vbox, CC.FLAGS_VCENTER )

        ae_button_hbox = wx.BoxSizer( wx.HORIZONTAL )

        ae_button_hbox.Add( self._add_rule, CC.FLAGS_VCENTER )
        ae_button_hbox.Add( self._edit_rule, CC.FLAGS_VCENTER )

        rows = []

        rows.append( ( 'content to fetch:', self._content_to_fetch ) )

        gridbox = ClientGUICommon.WrapInGrid( edit_panel, rows )

        edit_panel.Add( parse_rules_hbox, CC.FLAGS_EXPAND_BOTH_WAYS )
        edit_panel.Add( ae_button_hbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        edit_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        edit_panel.Add( self._string_match_button, CC.FLAGS_EXPAND_PERPENDICULAR )
        edit_panel.Add( self._string_converter_button, CC.FLAGS_EXPAND_PERPENDICULAR )

        #

        test_panel.Add( self._test_panel, CC.FLAGS_EXPAND_BOTH_WAYS )

        #

        hbox = wx.BoxSizer( wx.HORIZONTAL )

        hbox.Add( edit_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        hbox.Add( test_panel, CC.FLAGS_EXPAND_BOTH_WAYS )

        vbox = wx.BoxSizer( wx.VERTICAL )

        vbox.Add( help_hbox, CC.FLAGS_BUTTON_SIZER )
        vbox.Add( hbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )

        self.SetSizer( vbox )

    def Add( self ):
        """Create a new parse rule via the edit dialog and append it."""

        dlg_title = 'edit parse rule'

        with ClientGUITopLevelWindows.DialogEdit( self, dlg_title, frame_key = 'deeply_nested_dialog' ) as dlg:

            # seed the dialog with a placeholder dict-entry rule
            new_rule = 'post'

            panel = EditJSONParsingRulePanel( dlg, new_rule )

            dlg.SetPanel( panel )

            if dlg.ShowModal() == wx.ID_OK:

                rule = panel.GetValue()

                pretty_rule = ClientParsing.RenderJSONParseRule( rule )

                self._parse_rules.Append( pretty_rule, rule )

    def Delete( self ):
        """Delete the selected parse rule, if any."""

        selection = self._parse_rules.GetSelection()

        if selection != wx.NOT_FOUND:

            self._parse_rules.Delete( selection )

    def Edit( self ):
        """Edit the selected parse rule in a dialog and store the result."""

        selection = self._parse_rules.GetSelection()

        if selection != wx.NOT_FOUND:

            rule = self._parse_rules.GetClientData( selection )

            dlg_title = 'edit parse rule'

            with ClientGUITopLevelWindows.DialogEdit( self, dlg_title, frame_key = 'deeply_nested_dialog' ) as dlg:

                panel = EditJSONParsingRulePanel( dlg, rule )

                dlg.SetPanel( panel )

                if dlg.ShowModal() == wx.ID_OK:

                    rule = panel.GetValue()

                    pretty_rule = ClientParsing.RenderJSONParseRule( rule )

                    # keep the visible label in sync with the stored rule
                    self._parse_rules.SetString( selection, pretty_rule )
                    self._parse_rules.SetClientData( selection, rule )

    def EventEdit( self, event ):
        # double-click acts like the 'edit' button

        self.Edit()

    def GetValue( self ):
        """Build a ParseFormulaJSON from the current control values."""

        parse_rules = [ self._parse_rules.GetClientData( i ) for i in range( self._parse_rules.GetCount() ) ]

        content_to_fetch = self._content_to_fetch.GetChoice()

        string_match = self._string_match_button.GetValue()

        string_converter = self._string_converter_button.GetValue()

        formula = ClientParsing.ParseFormulaJSON( parse_rules, content_to_fetch, string_match, string_converter )

        return formula

    def MoveDown( self ):
        """Move the selected parse rule one place down, if possible."""

        selection = self._parse_rules.GetSelection()

        if selection != wx.NOT_FOUND and selection + 1 < self._parse_rules.GetCount():

            pretty_rule = self._parse_rules.GetString( selection )
            rule = self._parse_rules.GetClientData( selection )

            # wx.ListBox has no native 'move', so delete and re-insert
            self._parse_rules.Delete( selection )

            self._parse_rules.Insert( pretty_rule, selection + 1, rule )

    def MoveUp( self ):
        """Move the selected parse rule one place up, if possible."""

        selection = self._parse_rules.GetSelection()

        if selection != wx.NOT_FOUND and selection > 0:

            pretty_rule = self._parse_rules.GetString( selection )
            rule = self._parse_rules.GetClientData( selection )

            # wx.ListBox has no native 'move', so delete and re-insert
            self._parse_rules.Delete( selection )

            self._parse_rules.Insert( pretty_rule, selection - 1, rule )
class EditContentParserPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for a ClientParsing.ContentParser.

    A content parser pairs a formula with a content type (urls, tags, hash,
    timestamp, title, veto) and type-specific 'additional info'. One sub-panel
    exists per content type; only the one matching the current choice is shown.
    """

    def __init__( self, parent, content_parser, test_context ):

        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )

        #

        # help button opening the local help page for content parsers
        menu_items = []

        page_func = HydrusData.Call( ClientPaths.LaunchPathInWebBrowser, os.path.join( HC.HELP_DIR, 'downloader_parsers_content_parsers.html#content_parsers' ) )

        menu_items.append( ( 'normal', 'open the content parsers help', 'Open the help page for content parsers in your web browesr.', page_func ) )

        help_button = ClientGUICommon.MenuBitmapButton( self, CC.GlobalBMPs.help, menu_items )

        help_hbox = ClientGUICommon.WrapInText( help_button, self, 'help for this panel -->', wx.Colour( 0, 0, 255 ) )

        #

        self._edit_panel = ClientGUICommon.StaticBox( self, 'edit' )

        self._edit_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )

        self._name = wx.TextCtrl( self._edit_panel )

        self._content_panel = ClientGUICommon.StaticBox( self._edit_panel, 'content type' )

        self._content_type = ClientGUICommon.BetterChoice( self._content_panel )

        self._content_type.Append( 'urls', HC.CONTENT_TYPE_URLS )
        self._content_type.Append( 'tags', HC.CONTENT_TYPE_MAPPINGS )
        self._content_type.Append( 'file hash', HC.CONTENT_TYPE_HASH )
        self._content_type.Append( 'timestamp', HC.CONTENT_TYPE_TIMESTAMP )
        self._content_type.Append( 'watcher page title', HC.CONTENT_TYPE_TITLE )
        self._content_type.Append( 'veto', HC.CONTENT_TYPE_VETO )

        self._content_type.Bind( wx.EVT_CHOICE, self.EventContentTypeChange )

        # one sub-panel per content type; EventContentTypeChange shows the
        # panel matching the current choice and hides the rest

        self._urls_panel = wx.Panel( self._content_panel )

        self._url_type = ClientGUICommon.BetterChoice( self._urls_panel )

        self._url_type.Append( 'url to download/pursue (file/post url)', HC.URL_TYPE_DESIRED )
        self._url_type.Append( 'url to associate (source url)', HC.URL_TYPE_SOURCE )
        self._url_type.Append( 'next gallery page', HC.URL_TYPE_NEXT )

        self._file_priority = wx.SpinCtrl( self._urls_panel, min = 0, max = 100 )

        self._mappings_panel = wx.Panel( self._content_panel )

        self._namespace = wx.TextCtrl( self._mappings_panel )

        self._hash_panel = wx.Panel( self._content_panel )

        self._hash_type = ClientGUICommon.BetterChoice( self._hash_panel )

        for hash_type in ( 'md5', 'sha1', 'sha256', 'sha512' ):

            self._hash_type.Append( hash_type, hash_type )

        self._timestamp_panel = wx.Panel( self._content_panel )

        self._timestamp_type = ClientGUICommon.BetterChoice( self._timestamp_panel )

        self._timestamp_type.Append( 'source time', HC.TIMESTAMP_TYPE_SOURCE )

        self._title_panel = wx.Panel( self._content_panel )

        self._title_priority = wx.SpinCtrl( self._title_panel, min = 0, max = 100 )

        self._veto_panel = wx.Panel( self._content_panel )

        self._veto_if_matches_found = wx.CheckBox( self._veto_panel )
        self._string_match = EditStringMatchPanel( self._veto_panel )

        ( name, content_type, formula, additional_info ) = content_parser.ToTuple()

        self._formula = EditFormulaPanel( self._edit_panel, formula, self.GetTestContext )

        #

        test_panel = ClientGUICommon.StaticBox( self, 'test' )

        test_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )

        self._test_panel = TestPanel( test_panel, self.GetValue, test_context = test_context )

        #

        # populate controls; additional_info's shape depends on content_type
        self._name.SetValue( name )

        self._content_type.SelectClientData( content_type )

        if content_type == HC.CONTENT_TYPE_URLS:

            ( url_type, priority ) = additional_info

            self._url_type.SelectClientData( url_type )
            self._file_priority.SetValue( priority )

        elif content_type == HC.CONTENT_TYPE_MAPPINGS:

            namespace = additional_info

            self._namespace.SetValue( namespace )

        elif content_type == HC.CONTENT_TYPE_HASH:

            hash_type = additional_info

            self._hash_type.SelectClientData( hash_type )

        elif content_type == HC.CONTENT_TYPE_TIMESTAMP:

            timestamp_type = additional_info

            self._timestamp_type.SelectClientData( timestamp_type )

        elif content_type == HC.CONTENT_TYPE_TITLE:

            priority = additional_info

            self._title_priority.SetValue( priority )

        elif content_type == HC.CONTENT_TYPE_VETO:

            ( veto_if_matches_found, string_match ) = additional_info

            self._veto_if_matches_found.SetValue( veto_if_matches_found )
            self._string_match.SetValue( string_match )

        #

        rows = []

        rows.append( ( 'url type: ', self._url_type ) )
        rows.append( ( 'file url quality precedence (higher is better): ', self._file_priority ) )

        gridbox = ClientGUICommon.WrapInGrid( self._urls_panel, rows )

        self._urls_panel.SetSizer( gridbox )

        #

        rows = []

        rows.append( ( 'namespace: ', self._namespace ) )

        gridbox = ClientGUICommon.WrapInGrid( self._mappings_panel, rows )

        self._mappings_panel.SetSizer( gridbox )

        #

        rows = []

        rows.append( ( 'hash type: ', self._hash_type ) )

        gridbox = ClientGUICommon.WrapInGrid( self._hash_panel, rows )

        self._hash_panel.SetSizer( gridbox )

        #

        rows = []

        rows.append( ( 'timestamp type: ', self._timestamp_type ) )

        gridbox = ClientGUICommon.WrapInGrid( self._timestamp_panel, rows )

        self._timestamp_panel.SetSizer( gridbox )

        #

        rows = []

        rows.append( ( 'title precedence (higher is better): ', self._title_priority ) )

        gridbox = ClientGUICommon.WrapInGrid( self._title_panel, rows )

        self._title_panel.SetSizer( gridbox )

        #

        vbox = wx.BoxSizer( wx.VERTICAL )

        rows = []

        rows.append( ( 'veto if match found (OFF means \'veto if match not found\'): ', self._veto_if_matches_found ) )

        gridbox = ClientGUICommon.WrapInGrid( self._veto_panel, rows )

        vbox.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        vbox.Add( self._string_match, CC.FLAGS_EXPAND_BOTH_WAYS )

        self._veto_panel.SetSizer( vbox )

        #

        rows = []

        rows.append( ( 'content type: ', self._content_type ) )

        gridbox = ClientGUICommon.WrapInGrid( self._content_panel, rows )

        self._content_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        self._content_panel.Add( self._urls_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        self._content_panel.Add( self._mappings_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        self._content_panel.Add( self._hash_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        self._content_panel.Add( self._timestamp_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        self._content_panel.Add( self._title_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        self._content_panel.Add( self._veto_panel, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )

        #

        vbox = wx.BoxSizer( wx.VERTICAL )

        rows = []

        rows.append( ( 'name or description (optional): ', self._name ) )

        gridbox = ClientGUICommon.WrapInGrid( self._edit_panel, rows )

        self._edit_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        self._edit_panel.Add( self._content_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        self._edit_panel.Add( self._formula, CC.FLAGS_EXPAND_BOTH_WAYS )

        #

        test_panel.Add( self._test_panel, CC.FLAGS_EXPAND_BOTH_WAYS )

        #

        hbox = wx.BoxSizer( wx.HORIZONTAL )

        hbox.Add( self._edit_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        hbox.Add( test_panel, CC.FLAGS_EXPAND_BOTH_WAYS )

        vbox = wx.BoxSizer( wx.VERTICAL )

        vbox.Add( help_hbox, CC.FLAGS_BUTTON_SIZER )
        vbox.Add( hbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )

        self.SetSizer( vbox )

        # show the sub-panel for the initial content type
        self.EventContentTypeChange( None )

    def EventContentTypeChange( self, event ):
        """Show only the sub-panel for the currently selected content type."""

        choice = self._content_type.GetChoice()

        self._urls_panel.Hide()
        self._mappings_panel.Hide()
        self._hash_panel.Hide()
        self._timestamp_panel.Hide()
        self._title_panel.Hide()
        self._veto_panel.Hide()

        if choice == HC.CONTENT_TYPE_URLS:

            self._urls_panel.Show()

        elif choice == HC.CONTENT_TYPE_MAPPINGS:

            self._mappings_panel.Show()

        elif choice == HC.CONTENT_TYPE_HASH:

            self._hash_panel.Show()

        elif choice == HC.CONTENT_TYPE_TIMESTAMP:

            self._timestamp_panel.Show()

        elif choice == HC.CONTENT_TYPE_TITLE:

            self._title_panel.Show()

        elif choice == HC.CONTENT_TYPE_VETO:

            self._veto_panel.Show()

        self._content_panel.Layout()
        self._edit_panel.Layout()

    def GetTestContext( self ):
        # handed to the child EditFormulaPanel so its dialogs share test data

        return self._test_panel.GetTestContext()

    def GetValue( self ):
        """Build a ContentParser from the current control values.

        additional_info's shape mirrors the unpacking in __init__ above."""

        name = self._name.GetValue()

        content_type = self._content_type.GetChoice()

        formula = self._formula.GetValue()

        if content_type == HC.CONTENT_TYPE_URLS:

            url_type = self._url_type.GetChoice()
            priority = self._file_priority.GetValue()

            additional_info = ( url_type, priority )

        elif content_type == HC.CONTENT_TYPE_MAPPINGS:

            namespace = self._namespace.GetValue()

            additional_info = namespace

        elif content_type == HC.CONTENT_TYPE_HASH:

            hash_type = self._hash_type.GetChoice()

            additional_info = hash_type

        elif content_type == HC.CONTENT_TYPE_TIMESTAMP:

            timestamp_type = self._timestamp_type.GetChoice()

            additional_info = timestamp_type

        elif content_type == HC.CONTENT_TYPE_TITLE:

            priority = self._title_priority.GetValue()

            additional_info = priority

        elif content_type == HC.CONTENT_TYPE_VETO:

            veto_if_matches_found = self._veto_if_matches_found.GetValue()
            string_match = self._string_match.GetValue()

            additional_info = ( veto_if_matches_found, string_match )

        content_parser = ClientParsing.ContentParser( name = name, content_type = content_type, formula = formula, additional_info = additional_info )

        return content_parser
class EditContentParsersPanel( ClientGUICommon.StaticBox ):
    """Static box that manages a list of ContentParsers.
    
    Offers add/edit/delete plus clipboard/png import-export. test_context_callable
    is polled each time a child edit dialog opens so the content parser editor is
    seeded with the caller's current example data.
    """
    
    def __init__( self, parent, test_context_callable ):
        
        ClientGUICommon.StaticBox.__init__( self, parent, 'content parsers' )
        
        # called lazily for ( example_parsing_context, example_data )
        self._test_context_callable = test_context_callable
        
        content_parsers_panel = ClientGUIListCtrl.BetterListCtrlPanel( self )
        
        self._content_parsers = ClientGUIListCtrl.BetterListCtrl( content_parsers_panel, 'content_parsers', 10, 24, [ ( 'name', -1 ), ( 'produces', 40 ) ], self._ConvertContentParserToListCtrlTuples, delete_key_callback = self._Delete, activation_callback = self._Edit )
        
        content_parsers_panel.SetListCtrl( self._content_parsers )
        
        content_parsers_panel.AddButton( 'add', self._Add )
        content_parsers_panel.AddButton( 'edit', self._Edit, enabled_only_on_selection = True )
        content_parsers_panel.AddButton( 'delete', self._Delete, enabled_only_on_selection = True )
        content_parsers_panel.AddSeparator()
        content_parsers_panel.AddImportExportButtons( ( ClientParsing.ContentParser, ), self._AddContentParser )
        
        #
        
        self.Add( content_parsers_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
    
    def _Add( self ):
        """Open an edit dialog for a fresh content parser and add it on OK."""
        
        # (an unused 'dlg_title' local was removed here)
        
        content_parser = ClientParsing.ContentParser( 'new content parser' )
        
        with ClientGUITopLevelWindows.DialogEdit( self, 'edit content parser', frame_key = 'deeply_nested_dialog' ) as dlg_edit:
            
            test_context = self._test_context_callable()
            
            panel = EditContentParserPanel( dlg_edit, content_parser, test_context )
            
            dlg_edit.SetPanel( panel )
            
            if dlg_edit.ShowModal() == wx.ID_OK:
                
                new_content_parser = panel.GetValue()
                
                self._AddContentParser( new_content_parser )
                
            
        
    
    def _AddContentParser( self, content_parser ):
        """Add a parser, de-duplicating its name against existing entries, and re-sort."""
        
        HydrusSerialisable.SetNonDupeName( content_parser, self._GetExistingNames() )
        
        self._content_parsers.AddDatas( ( content_parser, ) )
        
        self._content_parsers.Sort()
        
    
    def _ConvertContentParserToListCtrlTuples( self, content_parser ):
        """Return ( display_tuple, sort_tuple ) for the list control."""
        
        name = content_parser.GetName()
        
        produces = list( content_parser.GetParsableContent() )
        
        pretty_name = name
        
        pretty_produces = ClientParsing.ConvertParsableContentToPrettyString( produces, include_veto = True )
        
        display_tuple = ( pretty_name, pretty_produces )
        sort_tuple = ( name, produces )
        
        return ( display_tuple, sort_tuple )
        
    
    def _Delete( self ):
        """Delete selected parsers after a yes/no confirmation."""
        
        with ClientGUIDialogs.DialogYesNo( self, 'Remove all selected?' ) as dlg:
            
            if dlg.ShowModal() == wx.ID_YES:
                
                self._content_parsers.DeleteSelected()
                
            
        
    
    def _Edit( self ):
        """Edit each selected parser in turn; stop early if a dialog is cancelled."""
        
        content_parsers = self._content_parsers.GetData( only_selected = True )
        
        for content_parser in content_parsers:
            
            with ClientGUITopLevelWindows.DialogEdit( self, 'edit content parser', frame_key = 'deeply_nested_dialog' ) as dlg:
                
                test_context = self._test_context_callable()
                
                panel = EditContentParserPanel( dlg, content_parser, test_context )
                
                dlg.SetPanel( panel )
                
                if dlg.ShowModal() == wx.ID_OK:
                    
                    edited_content_parser = panel.GetValue()
                    
                    # replace the old object with the edited one, keeping names unique
                    self._content_parsers.DeleteDatas( ( content_parser, ) )
                    
                    HydrusSerialisable.SetNonDupeName( edited_content_parser, self._GetExistingNames() )
                    
                    self._content_parsers.AddDatas( ( edited_content_parser, ) )
                    
                else:
                    
                    break
                    
                
            
        
        self._content_parsers.Sort()
        
    
    def _GetExistingNames( self ):
        """Return the set of names currently in the list."""
        
        names = { content_parser.GetName() for content_parser in self._content_parsers.GetData() }
        
        return names
        
    
    def GetData( self ):
        """Return all content parsers currently held."""
        
        return self._content_parsers.GetData()
        
    
    def AddDatas( self, content_parsers ):
        """Bulk-add parsers (no name de-duplication) and re-sort."""
        
        self._content_parsers.AddDatas( content_parsers )
        
        self._content_parsers.Sort()
        
class EditNodes( wx.Panel ):
    """Editor panel for a list of legacy parse nodes (content parsers and link nodes).
    
    Supports add/copy/paste/duplicate/edit/delete. referral_url_callable and
    example_data_callable are polled whenever a child edit dialog opens so a node
    is tested against its parent's current data.
    """
    
    def __init__( self, parent, nodes, referral_url_callable, example_data_callable ):
        
        wx.Panel.__init__( self, parent )
        
        self._referral_url_callable = referral_url_callable
        self._example_data_callable = example_data_callable
        
        self._nodes = ClientGUIListCtrl.SaneListCtrlForSingleObject( self, 200, [ ( 'name', 120 ), ( 'node type', 80 ), ( 'produces', -1 ) ], delete_key_callback = self.Delete, activation_callback = self.Edit )
        
        menu_items = []
        
        menu_items.append( ( 'normal', 'content node', 'A node that parses the given data for content.', self.AddContentNode ) )
        menu_items.append( ( 'normal', 'link node', 'A node that parses the given data for a link, which it then pursues.', self.AddLinkNode ) )
        
        self._add_button = ClientGUICommon.MenuButton( self, 'add', menu_items )
        
        self._copy_button = ClientGUICommon.BetterButton( self, 'copy', self.Copy )
        
        self._paste_button = ClientGUICommon.BetterButton( self, 'paste', self.Paste )
        
        self._duplicate_button = ClientGUICommon.BetterButton( self, 'duplicate', self.Duplicate )
        
        self._edit_button = ClientGUICommon.BetterButton( self, 'edit', self.Edit )
        
        self._delete_button = ClientGUICommon.BetterButton( self, 'delete', self.Delete )
        
        #
        
        for node in nodes:
            
            ( display_tuple, sort_tuple ) = self._ConvertNodeToTuples( node )
            
            self._nodes.Append( display_tuple, sort_tuple, node )
            
        
        #
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        button_hbox = wx.BoxSizer( wx.HORIZONTAL )
        
        button_hbox.Add( self._add_button, CC.FLAGS_VCENTER )
        button_hbox.Add( self._copy_button, CC.FLAGS_VCENTER )
        button_hbox.Add( self._paste_button, CC.FLAGS_VCENTER )
        button_hbox.Add( self._duplicate_button, CC.FLAGS_VCENTER )
        button_hbox.Add( self._edit_button, CC.FLAGS_VCENTER )
        button_hbox.Add( self._delete_button, CC.FLAGS_VCENTER )
        
        vbox.Add( self._nodes, CC.FLAGS_EXPAND_BOTH_WAYS )
        vbox.Add( button_hbox, CC.FLAGS_BUTTON_SIZER )
        
        self.SetSizer( vbox )
        
    
    def _ConvertNodeToTuples( self, node ):
        """Return ( display_tuple, sort_tuple ) for the list control; both are the node's pretty strings."""
        
        ( name, node_type, produces ) = node.ToPrettyStrings()
        
        return ( ( name, node_type, produces ), ( name, node_type, produces ) )
        
    
    def _GetExportObject( self ):
        """Return the selected node, a SerialisableList of them, or None if nothing is selected."""
        
        to_export = HydrusSerialisable.SerialisableList()
        
        for node in self._nodes.GetObjects( only_selected = True ):
            
            to_export.append( node )
            
        
        if len( to_export ) == 0:
            
            return None
            
        elif len( to_export ) == 1:
            
            return to_export[0]
            
        else:
            
            return to_export
            
        
    
    def _ImportObject( self, obj ):
        """Recursively unpack a deserialised clipboard object and append any valid nodes."""
        
        if isinstance( obj, HydrusSerialisable.SerialisableList ):
            
            for sub_obj in obj:
                
                self._ImportObject( sub_obj )
                
            
        else:
            
            if isinstance( obj, ( ClientParsing.ContentParser, ClientParsing.ParseNodeContentLink ) ):
                
                node = obj
                
                ( display_tuple, sort_tuple ) = self._ConvertNodeToTuples( node )
                
                self._nodes.Append( display_tuple, sort_tuple, node )
                
            else:
                
                # NOTE: message text says 'script', but this list holds nodes
                wx.MessageBox( 'That was not a script--it was a: ' + type( obj ).__name__ )
                
            
        
    
    def AddContentNode( self ):
        """Add a new content parser node via an edit dialog."""
        
        dlg_title = 'edit content node'
        
        empty_node = ClientParsing.ContentParser()
        
        panel_class = EditContentParserPanel
        
        self.AddNode( dlg_title, empty_node, panel_class )
        
    
    def AddLinkNode( self ):
        """Add a new link-pursuing node via an edit dialog."""
        
        dlg_title = 'edit link node'
        
        empty_node = ClientParsing.ParseNodeContentLink()
        
        panel_class = EditParseNodeContentLinkPanel
        
        self.AddNode( dlg_title, empty_node, panel_class )
        
    
    def AddNode( self, dlg_title, empty_node, panel_class ):
        """Open panel_class in an edit dialog seeded with fresh parent data; append the result on OK."""
        
        with ClientGUITopLevelWindows.DialogEdit( self, dlg_title, frame_key = 'deeply_nested_dialog' ) as dlg_edit:
            
            referral_url = self._referral_url_callable()
            example_data = self._example_data_callable()
            
            # content parsers take a test context tuple; link nodes take url + data
            if isinstance( empty_node, ClientParsing.ContentParser ):
                
                panel = panel_class( dlg_edit, empty_node, ( {}, example_data ) )
                
            else:
                
                panel = panel_class( dlg_edit, empty_node, referral_url, example_data )
                
            
            dlg_edit.SetPanel( panel )
            
            if dlg_edit.ShowModal() == wx.ID_OK:
                
                new_node = panel.GetValue()
                
                ( display_tuple, sort_tuple ) = self._ConvertNodeToTuples( new_node )
                
                self._nodes.Append( display_tuple, sort_tuple, new_node )
                
            
        
    
    def Copy( self ):
        """Serialise the selection to JSON and publish it to the clipboard."""
        
        export_object = self._GetExportObject()
        
        if export_object is not None:
            
            json = export_object.DumpToString()
            
            HG.client_controller.pub( 'clipboard', 'text', json )
            
        
    
    def Delete( self ):
        """Delete selected nodes after a yes/no confirmation."""
        
        with ClientGUIDialogs.DialogYesNo( self, 'Remove all selected?' ) as dlg:
            
            if dlg.ShowModal() == wx.ID_YES:
                
                self._nodes.RemoveAllSelected()
                
            
        
    
    def Duplicate( self ):
        """Append a duplicate of every selected node."""
        
        nodes_to_dupe = self._nodes.GetObjects( only_selected = True )
        
        for node in nodes_to_dupe:
            
            dupe_node = node.Duplicate()
            
            ( display_tuple, sort_tuple ) = self._ConvertNodeToTuples( dupe_node )
            
            self._nodes.Append( display_tuple, sort_tuple, dupe_node )
            
        
    
    def Edit( self ):
        """Edit each selected node in place with the appropriate panel class."""
        
        for i in self._nodes.GetAllSelected():
            
            node = self._nodes.GetObject( i )
            
            with ClientGUITopLevelWindows.DialogEdit( self, 'edit node', frame_key = 'deeply_nested_dialog' ) as dlg:
                
                referral_url = self._referral_url_callable()
                example_data = self._example_data_callable()
                
                if isinstance( node, ClientParsing.ContentParser ):
                    
                    panel = EditContentParserPanel( dlg, node, ( {}, example_data ) )
                    
                elif isinstance( node, ClientParsing.ParseNodeContentLink ):
                    
                    panel = EditParseNodeContentLinkPanel( dlg, node, example_data = example_data )
                    
                
                dlg.SetPanel( panel )
                
                if dlg.ShowModal() == wx.ID_OK:
                    
                    edited_node = panel.GetValue()
                    
                    ( display_tuple, sort_tuple ) = self._ConvertNodeToTuples( edited_node )
                    
                    self._nodes.UpdateRow( i, display_tuple, sort_tuple, edited_node )
                    
                
            
        
    
    def GetValue( self ):
        """Return all nodes currently held, in list order."""
        
        return self._nodes.GetObjects()
        
    
    def Paste( self ):
        """Import node(s) serialised as JSON from the clipboard."""
        
        raw_text = HG.client_controller.GetClipboardText()
        
        try:
            
            obj = HydrusSerialisable.CreateFromString( raw_text )
            
            self._ImportObject( obj )
            
        except Exception:
            
            # was a bare 'except:', which also swallowed KeyboardInterrupt/SystemExit
            wx.MessageBox( 'I could not understand what was in the clipboard' )
            
        
class EditParseNodeContentLinkPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for a ParseNodeContentLink: a node that parses urls out of its
    input data, fetches each one, and hands the fetched pages to child nodes.
    
    Has an 'edit' tab (name, url formula, child nodes), a 'test' tab (parse the
    example data, optionally fetch the first parsed url), and an 'info' tab.
    """
    
    def __init__( self, parent, node, referral_url = None, example_data = None ):
        # node: the ClientParsing.ParseNodeContentLink being edited
        # referral_url: sent as the Referer when test-fetching parsed urls
        # example_data: initial text for the test tab
        
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        
        if referral_url is None:
            
            # placeholder so test fetches always have some referral url
            referral_url = 'test-url.com/test_query'
            
        
        self._referral_url = referral_url
        
        if example_data is None:
            
            example_data = ''
            
        
        # set by EventTestParse when at least one url is parsed; enables the fetch button
        self._my_example_url = None
        
        notebook = wx.Notebook( self )
        
        ( name, formula, children ) = node.ToTuple()
        
        #
        
        edit_panel = wx.Panel( notebook )
        
        edit_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )
        
        self._name = wx.TextCtrl( edit_panel )
        
        # NOTE(review): this lambda appears unused -- self.GetTestContext is passed
        # to the formula panel instead; verify before removing
        get_example_parsing_context = lambda: {}
        
        self._formula = EditFormulaPanel( edit_panel, formula, self.GetTestContext )
        
        children_panel = ClientGUICommon.StaticBox( edit_panel, 'content parsing children' )
        
        # children receive this node's fetched data/url via the two callables
        self._children = EditNodes( children_panel, children, self.GetExampleURL, self.GetExampleData )
        
        #
        
        test_panel = wx.Panel( notebook )
        
        test_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )
        
        self._example_data = ClientGUICommon.SaneMultilineTextCtrl( test_panel )
        
        self._example_data.SetMinSize( ( -1, 200 ) )
        
        self._example_data.SetValue( example_data )
        
        self._test_parse = wx.Button( test_panel, label = 'test parse' )
        self._test_parse.Bind( wx.EVT_BUTTON, self.EventTestParse )
        
        self._results = ClientGUICommon.SaneMultilineTextCtrl( test_panel )
        
        self._results.SetMinSize( ( -1, 200 ) )
        
        self._test_fetch_result = wx.Button( test_panel, label = 'try fetching the first result' )
        self._test_fetch_result.Bind( wx.EVT_BUTTON, self.EventTestFetchResult )
        # disabled until a test parse produces at least one url
        self._test_fetch_result.Disable()
        
        self._my_example_data = ClientGUICommon.SaneMultilineTextCtrl( test_panel )
        
        #
        
        info_panel = wx.Panel( notebook )
        
        message = '''This node looks for one or more urls in the data it is given, requests each in turn, and gives the results to its children for further parsing.
If your previous query result responds with links to where the actual content is, use this node to bridge the gap.
The formula should attempt to parse full or relative urls. If the url is relative (like href="/page/123"), it will be appended to the referral url given by this node's parent. It will then attempt to GET them all.'''
        
        info_st = wx.StaticText( info_panel, label = message )
        
        info_st.Wrap( 400 )
        
        #
        
        self._name.SetValue( name )
        
        #
        
        children_panel.Add( self._children, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        #
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        rows = []
        
        rows.append( ( 'name or description (optional): ', self._name ) )
        
        gridbox = ClientGUICommon.WrapInGrid( edit_panel, rows )
        
        vbox.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        vbox.Add( self._formula, CC.FLAGS_EXPAND_BOTH_WAYS )
        vbox.Add( children_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        edit_panel.SetSizer( vbox )
        
        #
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        vbox.Add( self._example_data, CC.FLAGS_EXPAND_BOTH_WAYS )
        vbox.Add( self._test_parse, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.Add( self._results, CC.FLAGS_EXPAND_BOTH_WAYS )
        vbox.Add( self._test_fetch_result, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.Add( self._my_example_data, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        test_panel.SetSizer( vbox )
        
        #
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        vbox.Add( info_st, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        info_panel.SetSizer( vbox )
        
        #
        
        notebook.AddPage( edit_panel, 'edit', select = True )
        notebook.AddPage( test_panel, 'test', select = False )
        notebook.AddPage( info_panel, 'info', select = False )
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        vbox.Add( notebook, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
        
        self.SetSizer( vbox )
        
    
    def EventTestFetchResult( self, event ):
        """Fetch the first url parsed by the last test parse and display the result."""
        
        # this should be published to a job key panel or something so user can see it and cancel if needed
        
        network_job = ClientNetworkingJobs.NetworkJob( 'GET', self._my_example_url, referral_url = self._referral_url )
        
        network_job.OverrideBandwidth()
        
        HG.client_controller.network_engine.AddJob( network_job )
        
        try:
            
            # blocks the UI thread until the job completes
            network_job.WaitUntilDone()
            
        except HydrusExceptions.CancelledException:
            
            self._my_example_data.SetValue( 'fetch cancelled' )
            
            return
            
        except HydrusExceptions.NetworkException as e:
            
            self._my_example_data.SetValue( 'fetch failed' )
            
            raise
            
        
        example_data = network_job.GetContent()
        
        try:
            
            # NOTE(review): errors above go to self._my_example_data but success
            # goes to self._example_data (overwriting the test input) -- possibly
            # this should be self._my_example_data; verify intent
            self._example_data.SetValue( example_data )
            
        except UnicodeDecodeError:
            
            self._example_data.SetValue( 'The fetched data, which had length ' + HydrusData.ConvertIntToBytes( len( example_data ) ) + ', did not appear to be displayable text.' )
            
        
    
    def EventTestParse( self, event ):
        """Run the url formula against the example data on a worker thread and show the results."""
        
        def wx_code( parsed_urls ):
            # back on the UI thread: display results and arm the fetch button
            
            if not self:
                
                # panel was destroyed while the worker ran
                return
                
            
            if len( parsed_urls ) > 0:
                
                self._my_example_url = parsed_urls[0]
                
                self._test_fetch_result.Enable()
                
            
            result_lines = [ '*** ' + HydrusData.ConvertIntToPrettyString( len( parsed_urls ) ) + ' RESULTS BEGIN ***' ]
            
            result_lines.extend( parsed_urls )
            
            result_lines.append( '*** RESULTS END ***' )
            
            results_text = os.linesep.join( result_lines )
            
            self._results.SetValue( results_text )
            
        
        def do_it( node, data, referral_url ):
            # worker thread: parse with a 30s cancellable job key
            
            try:
                
                stop_time = HydrusData.GetNow() + 30
                
                job_key = ClientThreading.JobKey( cancellable = True, stop_time = stop_time )
                
                parsed_urls = node.ParseURLs( job_key, data, referral_url )
                
                wx.CallAfter( wx_code, parsed_urls )
                
            except Exception as e:
                
                HydrusData.ShowException( e )
                
                message = 'Could not parse!'
                
                wx.CallAfter( wx.MessageBox, message )
                
            
        
        node = self.GetValue()
        data = self._example_data.GetValue()
        referral_url = self._referral_url
        
        HG.client_controller.CallToThread( do_it, node, data, referral_url )
        
    
    def GetExampleData( self ):
        """Return the current example data text (used by child node editors)."""
        
        return self._example_data.GetValue()
        
    
    def GetExampleURL( self ):
        """Return the last successfully parsed url, or '' if none yet."""
        
        if self._my_example_url is not None:
            
            return self._my_example_url
            
        else:
            
            return ''
            
        
    
    def GetTestContext( self ):
        """Return ( parsing context, example data ) for the formula editor."""
        
        return ( {}, self._example_data.GetValue() )
        
    
    def GetValue( self ):
        """Build a ParseNodeContentLink from the current UI state."""
        
        name = self._name.GetValue()
        
        formula = self._formula.GetValue()
        
        children = self._children.GetValue()
        
        node = ClientParsing.ParseNodeContentLink( name = name, formula = formula, children = children )
        
        return node
        
class EditPageParserPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for a ClientParsing.PageParser.
    
    When formula is not None, this edits a *subsidiary* page parser: the formula
    tab (which splits the parent document into posts) is shown instead of the
    example-urls/url-fetch UI. test_context, when given, seeds the test panel;
    otherwise it is built from the parser's stored example parsing context.
    """
    
    def __init__( self, parent, parser, formula = None, test_context = None ):
        
        # kept so GetValue can preserve the original parser key
        self._original_parser = parser
        
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        
        #
        
        menu_items = []
        
        page_func = HydrusData.Call( ClientPaths.LaunchPathInWebBrowser, os.path.join( HC.HELP_DIR, 'downloader_parsers_page_parsers.html#page_parsers' ) )
        
        # fixed typo in the tooltip: 'browesr' -> 'browser'
        menu_items.append( ( 'normal', 'open the page parser help', 'Open the help page for page parsers in your web browser.', page_func ) )
        
        help_button = ClientGUICommon.MenuBitmapButton( self, CC.GlobalBMPs.help, menu_items )
        
        help_hbox = ClientGUICommon.WrapInText( help_button, self, 'help for this panel -->', wx.Colour( 0, 0, 255 ) )
        
        #
        
        edit_panel = ClientGUICommon.StaticBox( self, 'edit' )
        
        edit_notebook = wx.Notebook( edit_panel )
        
        #
        
        main_panel = wx.Panel( edit_notebook )
        
        main_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )
        
        self._name = wx.TextCtrl( main_panel )
        
        #
        
        conversion_panel = ClientGUICommon.StaticBox( main_panel, 'pre-parsing conversion' )
        
        string_converter = parser.GetStringConverter()
        
        self._string_converter = EditStringConverterPanel( conversion_panel, string_converter )
        
        #
        
        example_urls_panel = ClientGUICommon.StaticBox( main_panel, 'example urls' )
        
        self._example_urls = ClientGUIListBoxes.AddEditDeleteListBox( example_urls_panel, 6, HydrusData.ToUnicode, self._AddExampleURL, self._EditExampleURL )
        
        #
        
        formula_panel = wx.Panel( edit_notebook )
        
        self._formula = EditFormulaPanel( formula_panel, formula, self.GetTestContext )
        
        #
        
        sub_page_parsers_notebook_panel = wx.Panel( edit_notebook )
        
        sub_page_parsers_notebook_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )
        
        #
        
        sub_page_parsers_panel = ClientGUIListCtrl.BetterListCtrlPanel( sub_page_parsers_notebook_panel )
        
        self._sub_page_parsers = ClientGUIListCtrl.BetterListCtrl( sub_page_parsers_panel, 'sub_page_parsers', 4, 36, [ ( 'name', 24 ), ( '\'post\' separation formula', 24 ), ( 'produces', -1 ) ], self._ConvertSubPageParserToListCtrlTuple, delete_key_callback = self._DeleteSubPageParser, activation_callback = self._EditSubPageParser )
        
        sub_page_parsers_panel.SetListCtrl( self._sub_page_parsers )
        
        sub_page_parsers_panel.AddButton( 'add', self._AddSubPageParser )
        sub_page_parsers_panel.AddButton( 'edit', self._EditSubPageParser, enabled_only_on_selection = True )
        sub_page_parsers_panel.AddButton( 'delete', self._DeleteSubPageParser, enabled_only_on_selection = True )
        
        #
        
        content_parsers_panel = wx.Panel( edit_notebook )
        
        content_parsers_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )
        
        #
        
        self._content_parsers = EditContentParsersPanel( content_parsers_panel, self.GetTestContext )
        
        #
        
        test_panel = ClientGUICommon.StaticBox( self, 'test' )
        
        test_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )
        
        test_url_fetch_panel = ClientGUICommon.StaticBox( test_panel, 'fetch test data from url' )
        
        self._test_url = wx.TextCtrl( test_url_fetch_panel )
        
        self._test_referral_url = wx.TextCtrl( test_url_fetch_panel )
        
        self._fetch_example_data = ClientGUICommon.BetterButton( test_url_fetch_panel, 'fetch test data from url', self._FetchExampleData )
        
        self._test_network_job_control = ClientGUIControls.NetworkJobControl( test_url_fetch_panel )
        
        if test_context is None:
            
            example_parsing_context = parser.GetExampleParsingContext()
            example_data = ''
            
            test_context = ( example_parsing_context, example_data )
            
        
        # subsidiary parsers get the two-formula test panel
        if formula is None:
            
            self._test_panel = TestPanel( test_panel, self.GetValue, test_context = test_context )
            
        else:
            
            self._test_panel = TestPanelSubsidiary( test_panel, self.GetValue, self.GetFormula, test_context = test_context )
            
        
        #
        
        name = parser.GetName()
        
        ( sub_page_parsers, content_parsers ) = parser.GetContentParsers()
        
        example_urls = parser.GetExampleURLs()
        
        if len( example_urls ) > 0:
            
            # pre-fill the test fetch with the first known example url
            self._test_url.SetValue( example_urls[0] )
            
        
        self._name.SetValue( name )
        
        self._sub_page_parsers.AddDatas( sub_page_parsers )
        
        self._sub_page_parsers.Sort()
        
        self._content_parsers.AddDatas( content_parsers )
        
        self._example_urls.AddDatas( example_urls )
        
        #
        
        conversion_panel.Add( self._string_converter, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        example_urls_panel.Add( self._example_urls, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        #
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        rows = []
        
        rows.append( ( 'name or description (optional): ', self._name ) )
        
        gridbox = ClientGUICommon.WrapInGrid( main_panel, rows )
        
        vbox.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        vbox.Add( conversion_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        vbox.Add( example_urls_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        main_panel.SetSizer( vbox )
        
        #
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        vbox.Add( self._formula, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        formula_panel.SetSizer( vbox )
        
        #
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        vbox.Add( sub_page_parsers_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        sub_page_parsers_notebook_panel.SetSizer( vbox )
        
        #
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        vbox.Add( self._content_parsers, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        content_parsers_panel.SetSizer( vbox )
        
        #
        
        rows = []
        
        rows.append( ( 'url: ', self._test_url ) )
        rows.append( ( 'referral url (optional): ', self._test_referral_url ) )
        
        gridbox = ClientGUICommon.WrapInGrid( test_url_fetch_panel, rows )
        
        test_url_fetch_panel.Add( gridbox, CC.FLAGS_EXPAND_PERPENDICULAR )
        test_url_fetch_panel.Add( self._fetch_example_data, CC.FLAGS_EXPAND_PERPENDICULAR )
        test_url_fetch_panel.Add( self._test_network_job_control, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        test_panel.Add( test_url_fetch_panel, CC.FLAGS_EXPAND_PERPENDICULAR )
        test_panel.Add( self._test_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        # subsidiary parsers inherit their data from the parent, so no url fetch
        if formula is not None:
            
            test_url_fetch_panel.Hide()
            
        
        #
        
        if formula is None:
            
            formula_panel.Hide()
            
        else:
            
            example_urls_panel.Hide()
            
            edit_notebook.AddPage( formula_panel, 'separation formula', select = False )
            
        
        edit_notebook.AddPage( main_panel, 'main', select = True )
        edit_notebook.AddPage( sub_page_parsers_notebook_panel, 'subsidiary page parsers', select = False )
        edit_notebook.AddPage( content_parsers_panel, 'content parsers', select = False )
        
        edit_panel.Add( edit_notebook, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        #
        
        hbox = wx.BoxSizer( wx.HORIZONTAL )
        
        hbox.Add( edit_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        hbox.Add( test_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        vbox.Add( help_hbox, CC.FLAGS_BUTTON_SIZER )
        vbox.Add( hbox, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
        
        self.SetSizer( vbox )
        
    
    def _AddExampleURL( self ):
        """Prompt for a new example url; return ( success, url )."""
        
        message = 'Enter example URL.'
        
        with ClientGUIDialogs.DialogTextEntry( self, message ) as dlg:
            
            if dlg.ShowModal() == wx.ID_OK:
                
                return ( True, dlg.GetValue() )
                
            else:
                
                return ( False, '' )
                
            
        
    
    def _AddSubPageParser( self ):
        """Open a nested editor for a new ( separation formula, sub page parser ) pair."""
        
        # sensible default: descend into div.thumb and keep the html
        formula = ClientParsing.ParseFormulaHTML( tag_rules = [ ClientParsing.ParseRuleHTML( rule_type = ClientParsing.HTML_RULE_TYPE_DESCENDING, tag_name = 'div', tag_attributes = { 'class' : 'thumb' } ) ], content_to_fetch = ClientParsing.HTML_CONTENT_HTML )
        
        page_parser = ClientParsing.PageParser( 'new sub page parser' )
        
        with ClientGUITopLevelWindows.DialogEdit( self, 'edit sub page parser', frame_key = 'deeply_nested_dialog' ) as dlg:
            
            panel = EditPageParserPanel( dlg, page_parser, formula = formula, test_context = self._test_panel.GetTestContext() )
            
            dlg.SetPanel( panel )
            
            if dlg.ShowModal() == wx.ID_OK:
                
                new_page_parser = panel.GetValue()
                
                new_formula = panel.GetFormula()
                
                new_sub_page_parser = ( new_formula, new_page_parser )
                
                self._sub_page_parsers.AddDatas( ( new_sub_page_parser, ) )
                
                self._sub_page_parsers.Sort()
                
            
        
    
    def _ConvertSubPageParserToListCtrlTuple( self, sub_page_parser ):
        """Return ( display_tuple, sort_tuple ) for a ( formula, page_parser ) pair."""
        
        ( formula, page_parser ) = sub_page_parser
        
        name = page_parser.GetName()
        
        produces = page_parser.GetParsableContent()
        
        produces = list( produces )
        
        produces.sort()
        
        pretty_name = name
        
        pretty_formula = formula.ToPrettyString()
        
        pretty_produces = ClientParsing.ConvertParsableContentToPrettyString( produces )
        
        display_tuple = ( pretty_name, pretty_formula, pretty_produces )
        sort_tuple = ( name, pretty_formula, produces )
        
        return ( display_tuple, sort_tuple )
        
    
    def _DeleteSubPageParser( self ):
        """Delete selected sub page parsers after a yes/no confirmation."""
        
        with ClientGUIDialogs.DialogYesNo( self, 'Remove all selected?' ) as dlg:
            
            if dlg.ShowModal() == wx.ID_YES:
                
                self._sub_page_parsers.DeleteSelected()
                
            
        
    
    def _EditExampleURL( self, example_url ):
        """Prompt to edit an existing example url; return ( success, url )."""
        
        message = 'Enter example URL.'
        
        with ClientGUIDialogs.DialogTextEntry( self, message, default = example_url ) as dlg:
            
            if dlg.ShowModal() == wx.ID_OK:
                
                return ( True, dlg.GetValue() )
                
            else:
                
                return ( False, '' )
                
            
        
    
    def _EditSubPageParser( self ):
        """Edit each selected sub page parser in turn; stop early on cancel."""
        
        selected_data = self._sub_page_parsers.GetData( only_selected = True )
        
        for sub_page_parser in selected_data:
            
            ( formula, page_parser ) = sub_page_parser
            
            # fixed corrupted frame_key string; it now matches every other nested dialog here
            with ClientGUITopLevelWindows.DialogEdit( self, 'edit sub page parser', frame_key = 'deeply_nested_dialog' ) as dlg:
                
                panel = EditPageParserPanel( dlg, page_parser, formula = formula, test_context = self._test_panel.GetTestContext() )
                
                dlg.SetPanel( panel )
                
                if dlg.ShowModal() == wx.ID_OK:
                    
                    self._sub_page_parsers.DeleteDatas( ( sub_page_parser, ) )
                    
                    new_page_parser = panel.GetValue()
                    
                    new_formula = panel.GetFormula()
                    
                    new_sub_page_parser = ( new_formula, new_page_parser )
                    
                    self._sub_page_parsers.AddDatas( ( new_sub_page_parser, ) )
                    
                else:
                    
                    break
                    
                
            
        
        self._sub_page_parsers.Sort()
        
    
    def _FetchExampleData( self ):
        """Fetch the test url on a worker thread and feed the result into the test panel."""
        
        def wait_and_do_it( network_job ):
            # worker thread: block on the job, then hand off to the UI thread
            
            def wx_tidy_up( example_data ):
                
                if not self:
                    
                    # panel was destroyed while the fetch ran
                    return
                    
                
                self._test_panel.SetExampleData( example_data )
                
                self._test_network_job_control.ClearNetworkJob()
                
            
            try:
                
                network_job.WaitUntilDone()
                
                example_data = network_job.GetContent()
                
            except HydrusExceptions.CancelledException:
                
                example_data = 'fetch cancelled'
                
            except Exception as e:
                
                example_data = 'fetch failed:' + os.linesep * 2 + HydrusData.ToUnicode( e )
                
                HydrusData.ShowException( e )
                
            
            wx.CallAfter( wx_tidy_up, example_data )
            
        
        url = self._test_url.GetValue()
        
        referral_url = self._test_referral_url.GetValue()
        
        if referral_url == '':
            
            referral_url = None
            
        
        network_job = ClientNetworkingJobs.NetworkJob( 'GET', url, referral_url = referral_url )
        
        self._test_network_job_control.SetNetworkJob( network_job )
        
        network_job.OverrideBandwidth()
        
        HG.client_controller.network_engine.AddJob( network_job )
        
        HG.client_controller.CallToThread( wait_and_do_it, network_job )
        
    
    def GetTestContext( self ):
        """Return the test panel's current ( parsing context, example data )."""
        
        return self._test_panel.GetTestContext()
        
    
    def GetFormula( self ):
        """Return the separation formula (only meaningful for subsidiary parsers)."""
        
        return self._formula.GetValue()
        
    
    def GetValue( self ):
        """Build a PageParser from the current UI state, preserving the original parser key."""
        
        name = self._name.GetValue()
        
        parser_key = self._original_parser.GetParserKey()
        
        string_converter = self._string_converter.GetValue()
        
        sub_page_parsers = self._sub_page_parsers.GetData()
        
        content_parsers = self._content_parsers.GetData()
        
        example_urls = self._example_urls.GetData()
        
        example_parsing_context = self._test_panel.GetExampleParsingContext()
        
        parser = ClientParsing.PageParser( name, parser_key = parser_key, string_converter = string_converter, sub_page_parsers = sub_page_parsers, content_parsers = content_parsers, example_urls = example_urls, example_parsing_context = example_parsing_context )
        
        return parser
        
class EditParsersPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel that manages the full list of PageParsers.
    
    Offers add/edit/delete, clipboard/png import-export, and a button that
    re-adds the client's default parsers.
    """
    
    def __init__( self, parent, parsers ):
        # parsers: iterable of ClientParsing.PageParser to pre-populate the list
        
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        
        parsers_panel = ClientGUIListCtrl.BetterListCtrlPanel( self )
        
        self._parsers = ClientGUIListCtrl.BetterListCtrl( parsers_panel, 'parsers', 20, 24, [ ( 'name', -1 ), ( 'example urls', 40 ), ( 'produces', 40 ) ], self._ConvertParserToListCtrlTuple, delete_key_callback = self._Delete, activation_callback = self._Edit )
        
        parsers_panel.SetListCtrl( self._parsers )
        
        parsers_panel.AddButton( 'add', self._Add )
        parsers_panel.AddButton( 'edit', self._Edit, enabled_only_on_selection = True )
        parsers_panel.AddButton( 'delete', self._Delete, enabled_only_on_selection = True )
        parsers_panel.AddSeparator()
        parsers_panel.AddImportExportButtons( ( ClientParsing.PageParser, ), self._AddParser )
        parsers_panel.AddSeparator()
        parsers_panel.AddDefaultsButton( ClientDefaults.GetDefaultParsers, self._AddParser )
        
        #
        
        self._parsers.AddDatas( parsers )
        
        #
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        vbox.Add( parsers_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        self.SetSizer( vbox )
        
    
    def _Add( self ):
        """Open an edit dialog for a fresh page parser and add it on OK."""
        
        new_parser = ClientParsing.PageParser( 'new page parser' )
        
        with ClientGUITopLevelWindows.DialogEdit( self, 'edit parser', frame_key = 'deeply_nested_dialog' ) as dlg_edit:
            
            panel = EditPageParserPanel( dlg_edit, new_parser )
            
            dlg_edit.SetPanel( panel )
            
            if dlg_edit.ShowModal() == wx.ID_OK:
                
                new_parser = panel.GetValue()
                
                self._AddParser( new_parser )
                
            
        
    
    def _AddParser( self, parser ):
        """Add a parser, de-duplicating its name and regenerating its key so imports do not clash."""
        
        HydrusSerialisable.SetNonDupeName( parser, self._GetExistingNames() )
        
        parser.RegenerateParserKey()
        
        self._parsers.AddDatas( ( parser, ) )
        
    
    def _ConvertParserToListCtrlTuple( self, parser ):
        """Return ( display_tuple, sort_tuple ) for the list control."""
        
        name = parser.GetName()
        
        example_urls = list( parser.GetExampleURLs() )
        example_urls.sort()
        
        produces = list( parser.GetParsableContent() )
        
        produces.sort()
        
        pretty_name = name
        
        pretty_example_urls = ', '.join( example_urls )
        
        pretty_produces = ClientParsing.ConvertParsableContentToPrettyString( produces )
        
        display_tuple = ( pretty_name, pretty_example_urls, pretty_produces )
        sort_tuple = ( name, example_urls, produces )
        
        return ( display_tuple, sort_tuple )
        
    
    def _Delete( self ):
        """Delete selected parsers after a yes/no confirmation."""
        
        with ClientGUIDialogs.DialogYesNo( self, 'Remove all selected?' ) as dlg:
            
            if dlg.ShowModal() == wx.ID_YES:
                
                self._parsers.DeleteSelected()
                
            
        
    
    def _Edit( self ):
        """Edit each selected parser in turn; stop early if a dialog is cancelled."""
        
        parsers = self._parsers.GetData( only_selected = True )
        
        for parser in parsers:
            
            with ClientGUITopLevelWindows.DialogEdit( self, 'edit parser', frame_key = 'deeply_nested_dialog' ) as dlg:
                
                panel = EditPageParserPanel( dlg, parser )
                
                dlg.SetPanel( panel )
                
                if dlg.ShowModal() == wx.ID_OK:
                    
                    edited_parser = panel.GetValue()
                    
                    # replace the old object with the edited one, keeping names unique
                    self._parsers.DeleteDatas( ( parser, ) )
                    
                    HydrusSerialisable.SetNonDupeName( edited_parser, self._GetExistingNames() )
                    
                    self._parsers.AddDatas( ( edited_parser, ) )
                    
                else:
                    
                    break
                    
                
            
        
        self._parsers.Sort()
        
    
    def _GetExistingNames( self ):
        """Return the set of parser names currently in the list."""
        
        names = { parser.GetName() for parser in self._parsers.GetData() }
        
        return names
        
    
    def GetValue( self ):
        """Return all parsers currently held."""
        
        return self._parsers.GetData()
        
class EditParsingScriptFileLookupPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for a ClientParsing.ParseRootFileLookup script.
    
    The script runs a GET/POST query keyed on a single file identifier and hands
    the response to child nodes for content parsing. The panel shows three tabs:
    'edit' (name, query setup, children), 'test' (live fetch/parse), and 'info'.
    """
    
    def __init__( self, parent, script ):
        # script: the ParseRootFileLookup whose values seed the controls
        
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        
        ( name, url, query_type, file_identifier_type, file_identifier_string_converter, file_identifier_arg_name, static_args, children ) = script.ToTuple()
        
        #
        
        notebook = wx.Notebook( self )
        
        # edit tab
        
        edit_panel = wx.Panel( notebook )
        
        edit_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )
        
        self._name = wx.TextCtrl( edit_panel )
        
        query_panel = ClientGUICommon.StaticBox( edit_panel, 'query' )
        
        self._url = wx.TextCtrl( query_panel )
        
        self._url.SetValue( url )
        
        self._query_type = ClientGUICommon.BetterChoice( query_panel )
        
        self._query_type.Append( 'GET', HC.GET )
        self._query_type.Append( 'POST', HC.POST )
        
        self._file_identifier_type = ClientGUICommon.BetterChoice( query_panel )
        
        for t in [ ClientParsing.FILE_IDENTIFIER_TYPE_FILE, ClientParsing.FILE_IDENTIFIER_TYPE_MD5, ClientParsing.FILE_IDENTIFIER_TYPE_SHA1, ClientParsing.FILE_IDENTIFIER_TYPE_SHA256, ClientParsing.FILE_IDENTIFIER_TYPE_SHA512, ClientParsing.FILE_IDENTIFIER_TYPE_USER_INPUT ]:
            
            self._file_identifier_type.Append( ClientParsing.file_identifier_string_lookup[ t ], t )
            
        
        # converts the raw identifier (e.g. hash bytes) into the form the url wants
        self._file_identifier_string_converter = StringConverterButton( query_panel, file_identifier_string_converter )
        
        self._file_identifier_arg_name = wx.TextCtrl( query_panel )
        
        static_args_panel = ClientGUICommon.StaticBox( query_panel, 'static arguments' )
        
        self._static_args = ClientGUIControls.EditStringToStringDictControl( static_args_panel, static_args )
        
        children_panel = ClientGUICommon.StaticBox( edit_panel, 'content parsing children' )
        
        # the child editor pulls the example url/data from this panel via the two callables
        self._children = EditNodes( children_panel, children, self.GetExampleURL, self.GetExampleData )
        
        # test tab
        
        test_panel = wx.Panel( notebook )
        
        test_panel.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )
        
        self._test_script_management = ScriptManagementControl( test_panel )
        
        self._test_arg = wx.TextCtrl( test_panel )
        
        self._test_arg.SetValue( 'enter example file path, hex hash, or raw user input here' )
        
        self._fetch_data = wx.Button( test_panel, label = 'fetch response' )
        self._fetch_data.Bind( wx.EVT_BUTTON, self.EventFetchData )
        
        self._example_data = ClientGUICommon.SaneMultilineTextCtrl( test_panel )
        
        self._example_data.SetMinSize( ( -1, 200 ) )
        
        self._test_parsing = wx.Button( test_panel, label = 'test parse (note if you have \'link\' nodes, they will make their requests)' )
        self._test_parsing.Bind( wx.EVT_BUTTON, self.EventTestParse )
        
        self._results = ClientGUICommon.SaneMultilineTextCtrl( test_panel )
        
        self._results.SetMinSize( ( -1, 200 ) )
        
        # info tab
        
        info_panel = wx.Panel( notebook )
        
        message = '''This script looks up tags for a single file.
It will download the result of a query that might look something like this:
http://www.file-lookup.com/form.php?q=getsometags&md5=[md5-in-hex]
And pass that html to a number of 'parsing children' that will each look through it in turn and try to find tags.'''
        
        info_st = wx.StaticText( info_panel )
        
        info_st.SetLabelText( message )
        
        info_st.Wrap( 400 )
        
        # populate controls from the script's current values
        
        self._name.SetValue( name )
        
        self._query_type.SelectClientData( query_type )
        self._file_identifier_type.SelectClientData( file_identifier_type )
        self._file_identifier_arg_name.SetValue( file_identifier_arg_name )
        
        self._results.SetValue( 'Successfully parsed results will be printed here.' )
        
        # layout: query static box
        
        rows = []
        
        rows.append( ( 'url', self._url ) )
        rows.append( ( 'query type: ', self._query_type ) )
        rows.append( ( 'file identifier type: ', self._file_identifier_type ) )
        rows.append( ( 'file identifier conversion (typically to hex): ', self._file_identifier_string_converter ) )
        rows.append( ( 'file identifier GET/POST argument name: ', self._file_identifier_arg_name ) )
        
        gridbox = ClientGUICommon.WrapInGrid( query_panel, rows )
        
        static_args_panel.Add( self._static_args, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        query_message = 'This query will be executed first.'
        
        query_panel.Add( wx.StaticText( query_panel, label = query_message ), CC.FLAGS_EXPAND_PERPENDICULAR )
        query_panel.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        query_panel.Add( static_args_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        children_message = 'The data returned by the query will be passed to each of these children for content parsing.'
        
        children_panel.Add( wx.StaticText( children_panel, label = children_message ), CC.FLAGS_EXPAND_PERPENDICULAR )
        children_panel.Add( self._children, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        # layout: edit tab
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        rows = []
        
        rows.append( ( 'script name: ', self._name ) )
        
        gridbox = ClientGUICommon.WrapInGrid( edit_panel, rows )
        
        vbox.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
        vbox.Add( query_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        vbox.Add( children_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        edit_panel.SetSizer( vbox )
        
        # layout: test tab
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        vbox.Add( self._test_script_management, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.Add( self._test_arg, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.Add( self._fetch_data, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.Add( self._example_data, CC.FLAGS_EXPAND_BOTH_WAYS )
        vbox.Add( self._test_parsing, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.Add( self._results, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        test_panel.SetSizer( vbox )
        
        # layout: info tab
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        vbox.Add( info_st, CC.FLAGS_EXPAND_BOTH_WAYS )
        
        info_panel.SetSizer( vbox )
        
        # layout: assemble the notebook
        
        notebook.AddPage( edit_panel, 'edit', select = True )
        notebook.AddPage( test_panel, 'test', select = False )
        notebook.AddPage( info_panel, 'info', select = False )
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        vbox.Add( notebook, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )
        
        self.SetSizer( vbox )
        
    
    def EventFetchData( self, event ):
        """Run the current script's query for the identifier in the test box and display the response."""
        
        script = self.GetValue()
        
        test_arg = self._test_arg.GetValue()
        
        file_identifier_type = self._file_identifier_type.GetChoice()
        
        if file_identifier_type == ClientParsing.FILE_IDENTIFIER_TYPE_FILE:
            
            # the identifier is a path on disk; it must exist
            if not os.path.exists( test_arg ):
                
                wx.MessageBox( 'That file does not exist!' )
                
                return
                
            
            file_identifier = test_arg
            
        elif file_identifier_type == ClientParsing.FILE_IDENTIFIER_TYPE_USER_INPUT:
            
            file_identifier = test_arg
            
        else:
            
            # hash types: the box holds hex, convert to raw bytes (py2 str 'hex' codec)
            file_identifier = test_arg.decode( 'hex' )
            
        
        try:
            
            # give the network job a 30s budget
            stop_time = HydrusData.GetNow() + 30
            
            job_key = ClientThreading.JobKey( cancellable = True, stop_time = stop_time )
            
            self._test_script_management.SetJobKey( job_key )
            
            example_data = script.FetchData( job_key, file_identifier )
            
            try:
                
                self._example_data.SetValue( example_data )
                
            except UnicodeDecodeError:
                
                # non-text response--cannot show it in a text control
                self._example_data.SetValue( 'The fetched data, which had length ' + HydrusData.ConvertIntToBytes( len( example_data ) ) + ', did not appear to be displayable text.' )
                
            
        except Exception as e:
            
            HydrusData.ShowException( e )
            
            message = 'Could not fetch data!'
            message += os.linesep * 2
            message += HydrusData.ToUnicode( e )
            
            wx.MessageBox( message )
            
        finally:
            
            # always mark the job done so the management control stops tracking it
            job_key.Finish()
            
        
    
    def EventTestParse( self, event ):
        """Parse the example data with the current script on a background thread, printing results to the results box."""
        
        def wx_code( results ):
            # runs back on the ui thread via wx.CallAfter; the panel may have died meanwhile
            
            if not self:
                
                return
                
            
            result_lines = [ '*** ' + HydrusData.ConvertIntToPrettyString( len( results ) ) + ' RESULTS BEGIN ***' ]
            
            result_lines.extend( ( ClientParsing.ConvertParseResultToPrettyString( result ) for result in results ) )
            
            result_lines.append( '*** RESULTS END ***' )
            
            results_text = os.linesep.join( result_lines )
            
            self._results.SetValue( results_text )
            
        
        def do_it( script, job_key, data ):
            # worker-thread body
            
            try:
                
                results = script.Parse( job_key, data )
                
                wx.CallAfter( wx_code, results )
                
            except Exception as e:
                
                HydrusData.ShowException( e )
                
                message = 'Could not parse!'
                
                wx.CallAfter( wx.MessageBox, message )
                
            finally:
                
                job_key.Finish()
                
            
        
        script = self.GetValue()
        
        stop_time = HydrusData.GetNow() + 30
        
        job_key = ClientThreading.JobKey( cancellable = True, stop_time = stop_time )
        
        self._test_script_management.SetJobKey( job_key )
        
        data = self._example_data.GetValue()
        
        HG.client_controller.CallToThread( do_it, script, job_key, data )
        
    
    def GetExampleData( self ):
        """Return the current test-tab example data (callback for the child node editor)."""
        
        return self._example_data.GetValue()
        
    
    def GetExampleURL( self ):
        """Return the current query url (callback for the child node editor)."""
        
        return self._url.GetValue()
        
    
    def GetValue( self ):
        """Assemble and return a ParseRootFileLookup from the current control values."""
        
        name = self._name.GetValue()
        
        url = self._url.GetValue()
        
        query_type = self._query_type.GetChoice()
        
        file_identifier_type = self._file_identifier_type.GetChoice()
        
        file_identifier_string_converter = self._file_identifier_string_converter.GetValue()
        
        file_identifier_arg_name = self._file_identifier_arg_name.GetValue()
        
        static_args = self._static_args.GetValue()
        
        children = self._children.GetValue()
        
        script = ClientParsing.ParseRootFileLookup( name, url = url, query_type = query_type, file_identifier_type = file_identifier_type, file_identifier_string_converter = file_identifier_string_converter, file_identifier_arg_name = file_identifier_arg_name, static_args = static_args, children = children )
        
        return script
class EditStringConverterPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for a ClientParsing.StringConverter.
    
    Rows in the list control are ( number, transformation_type, data ) tuples,
    numbered from 1 in application order. The 'result' column previews the
    example string converted up to and including that row's step.
    """
    
    def __init__( self, parent, string_converter, example_string_override = None ):
        # string_converter: the StringConverter to edit
        # example_string_override: optional text to preview with instead of the converter's saved example
        
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        
        transformations_panel = ClientGUIListCtrl.BetterListCtrlPanel( self )
        
        self._transformations = ClientGUIListCtrl.BetterListCtrl( transformations_panel, 'string_converter_transformations', 7, 35, [ ( '#', 3 ), ( 'transformation', 30 ), ( 'result', -1 ) ], self._ConvertTransformationToListCtrlTuple, delete_key_callback = self._DeleteTransformation, activation_callback = self._EditTransformation )
        
        transformations_panel.SetListCtrl( self._transformations )
        
        transformations_panel.AddButton( 'add', self._AddTransformation )
        transformations_panel.AddButton( 'edit', self._EditTransformation, enabled_only_on_selection = True )
        transformations_panel.AddButton( 'delete', self._DeleteTransformation, enabled_only_on_selection = True )
        
        transformations_panel.AddSeparator()
        
        transformations_panel.AddButton( 'move up', self._MoveUp, enabled_check_func = self._CanMoveUp )
        transformations_panel.AddButton( 'move down', self._MoveDown, enabled_check_func = self._CanMoveDown )
        
        self._example_string = wx.TextCtrl( self )
        
        #
        
        # number the stored transformations 1..n in their application order
        self._transformations.AddDatas( [ ( i + 1, transformation_type, data ) for ( i, ( transformation_type, data ) ) in enumerate( string_converter.transformations ) ] )
        
        if example_string_override is None:
            
            self._example_string.SetValue( string_converter.example_string )
            
        else:
            
            self._example_string.SetValue( example_string_override )
            
        
        self._transformations.UpdateDatas() # to refresh, now they are all in the list
        
        self._transformations.Sort( 0 )
        
        #
        
        rows = []
        
        rows.append( ( 'example string: ', self._example_string ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        vbox.Add( transformations_panel, CC.FLAGS_EXPAND_BOTH_WAYS )
        vbox.Add( gridbox, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        self.SetSizer( vbox )
        
        #
        
        # re-run the conversion preview whenever the example text changes
        self._example_string.Bind( wx.EVT_TEXT, self.EventUpdate )
        
    
    def _AddTransformation( self ):
        """Open the transformation edit dialog with a default append-text transformation; on OK, append the result to the list."""
        
        transformation_type = ClientParsing.STRING_TRANSFORMATION_APPEND_TEXT
        data = ' extra text'
        
        with ClientGUITopLevelWindows.DialogEdit( self, 'edit transformation', frame_key = 'deeply_nested_dialog' ) as dlg:
            
            panel = self._TransformationPanel( dlg, transformation_type, data )
            
            dlg.SetPanel( panel )
            
            if dlg.ShowModal() == wx.ID_OK:
                
                # the new row goes at the end of the current ordering
                number = self._transformations.GetItemCount() + 1
                
                ( transformation_type, data ) = panel.GetValue()
                
                enumerated_transformation = ( number, transformation_type, data )
                
                self._transformations.AddDatas( ( enumerated_transformation, ) )
                
                self._transformations.UpdateDatas() # need to refresh string after the insertion, so the new row can be included in the parsing calcs
                
                self._transformations.Sort()
                
            
        
    
    def _CanMoveDown( self ):
        """True if exactly one row is selected and it is not the last step."""
        
        selected_data = self._transformations.GetData( only_selected = True )
        
        if len( selected_data ) == 1:
            
            ( number, transformation_type, data ) = selected_data[0]
            
            if number < self._transformations.GetItemCount():
                
                return True
                
            
        
        return False
        
    
    def _CanMoveUp( self ):
        """True if exactly one row is selected and it is not the first step."""
        
        selected_data = self._transformations.GetData( only_selected = True )
        
        if len( selected_data ) == 1:
            
            ( number, transformation_type, data ) = selected_data[0]
            
            if number > 1:
                
                return True
                
            
        
        return False
        
    
    def _ConvertTransformationToListCtrlTuple( self, transformation ):
        """Build the ( display, sort ) row for one enumerated transformation, previewing the conversion up to its step."""
        
        ( number, transformation_type, data ) = transformation
        
        pretty_number = HydrusData.ConvertIntToPrettyString( number )
        pretty_transformation = ClientParsing.StringConverter.TransformationToUnicode( ( transformation_type, data ) )
        
        string_converter = self._GetValue()
        
        try:
            
            # convert the example string through the first 'number' steps for the result column
            pretty_result = ClientParsing.MakeParsedTextPretty( string_converter.Convert( self._example_string.GetValue(), number ) )
            
        except HydrusExceptions.StringConvertException as e:
            
            pretty_result = str( e )
            
        
        display_tuple = ( pretty_number, pretty_transformation, pretty_result )
        # all columns sort on the step number, preserving application order
        sort_tuple = ( number, number, number )
        
        return ( display_tuple, sort_tuple )
        
    
    def _DeleteTransformation( self ):
        """Delete the selected transformations after confirmation, then close any gaps in the numbering."""
        
        if len( self._transformations.GetData( only_selected = True ) ) > 0:
            
            with ClientGUIDialogs.DialogYesNo( self, 'Delete all selected?' ) as dlg:
                
                if dlg.ShowModal() == wx.ID_YES:
                    
                    self._transformations.DeleteSelected()
                    
                
            
        
        # now we need to shuffle up any missing numbers
        # deletions can leave e.g. rows 1, 3, 5; renumber them 1, 2, 3
        
        num_rows = self._transformations.GetItemCount()
        
        i = 1
        search_i = i
        
        while i <= num_rows:
            
            try:
                
                transformation = self._GetTransformation( search_i )
                
                if search_i != i:
                    
                    # found a row past a gap--renumber it down to i
                    self._transformations.DeleteDatas( ( transformation, ) )
                    
                    ( search_i, transformation_type, data ) = transformation
                    
                    transformation = ( i, transformation_type, data )
                    
                    self._transformations.AddDatas( ( transformation, ) )
                    
                
                i += 1
                search_i = i
                
            except HydrusExceptions.DataMissing:
                
                # no row with this number--keep scanning upward
                search_i += 1
                
            
        
        self._transformations.UpdateDatas()
        
        self._transformations.Sort()
        
    
    def _EditTransformation( self ):
        """Offer an edit dialog for each selected transformation; a cancel stops the walk."""
        
        selected_data = self._transformations.GetData( only_selected = True )
        
        for enumerated_transformation in selected_data:
            
            ( number, transformation_type, data ) = enumerated_transformation
            
            with ClientGUITopLevelWindows.DialogEdit( self, 'edit transformation', frame_key = 'deeply_nested_dialog' ) as dlg:
                
                panel = self._TransformationPanel( dlg, transformation_type, data )
                
                dlg.SetPanel( panel )
                
                if dlg.ShowModal() == wx.ID_OK:
                    
                    # replace the row in place, keeping its step number
                    self._transformations.DeleteDatas( ( enumerated_transformation, ) )
                    
                    ( transformation_type, data ) = panel.GetValue()
                    
                    enumerated_transformation = ( number, transformation_type, data )
                    
                    self._transformations.AddDatas( ( enumerated_transformation, ) )
                    
                else:
                    
                    break
                    
                
            
        
        self._transformations.UpdateDatas()
        
        self._transformations.Sort()
        
    
    def _GetTransformation( self, desired_number ):
        """Return the enumerated transformation with the given step number.
        
        Raises HydrusExceptions.DataMissing if no row has that number.
        """
        
        for transformation in self._transformations.GetData():
            
            ( number, transformation_type, data ) = transformation
            
            if number == desired_number:
                
                return transformation
                
            
        
        raise HydrusExceptions.DataMissing()
        
    
    def _GetValue( self ):
        """Assemble a StringConverter from the current rows, in step-number order."""
        
        enumerated_transformations = list( self._transformations.GetData() )
        
        enumerated_transformations.sort()
        
        # strip the numbering--the converter stores plain ( type, data ) pairs
        transformations = [ ( transformation_type, data ) for ( number, transformation_type, data ) in enumerated_transformations ]
        
        example_string = self._example_string.GetValue()
        
        string_converter = ClientParsing.StringConverter( transformations, example_string )
        
        return string_converter
        
    
    def _MoveDown( self ):
        """Swap the single selected transformation with the one after it."""
        
        selected_transformation = self._transformations.GetData( only_selected = True )[0]
        
        ( number, transformation_type, data ) = selected_transformation
        
        swap_transformation = self._GetTransformation( number + 1 )
        
        self._SwapTransformations( selected_transformation, swap_transformation )
        
        self._transformations.UpdateDatas()
        
        self._transformations.Sort()
        
    
    def _MoveUp( self ):
        """Swap the single selected transformation with the one before it."""
        
        selected_transformation = self._transformations.GetData( only_selected = True )[0]
        
        ( number, transformation_type, data ) = selected_transformation
        
        swap_transformation = self._GetTransformation( number - 1 )
        
        self._SwapTransformations( selected_transformation, swap_transformation )
        
        self._transformations.UpdateDatas()
        
        self._transformations.Sort()
        
    
    def _SwapTransformations( self, one, two ):
        """Exchange the step numbers of two rows, preserving which of them was selected."""
        
        selected_data = self._transformations.GetData( only_selected = True )
        
        # remember selection state before we delete/re-add the rows
        one_selected = one in selected_data
        two_selected = two in selected_data
        
        self._transformations.DeleteDatas( ( one, two ) )
        
        ( number_1, transformation_type_1, data_1 ) = one
        ( number_2, transformation_type_2, data_2 ) = two
        
        # swap the numbers, keep each row's payload
        one = ( number_2, transformation_type_1, data_1 )
        two = ( number_1, transformation_type_2, data_2 )
        
        self._transformations.AddDatas( ( one, two ) )
        
        if one_selected:
            
            self._transformations.SelectDatas( ( one, ) )
            
        
        if two_selected:
            
            self._transformations.SelectDatas( ( two, ) )
            
        
    
    def EventUpdate( self, event ):
        # example text changed--refresh every row's conversion preview
        
        self._transformations.UpdateDatas()
        
    
    def GetValue( self ):
        """Return the edited StringConverter.
        
        Raises HydrusExceptions.VetoException if the example string cannot be converted.
        """
        
        string_converter = self._GetValue()
        
        try:
            
            string_converter.Convert( self._example_string.GetValue() )
            
        except HydrusExceptions.StringConvertException:
            
            raise HydrusExceptions.VetoException( 'Please enter an example text that can be converted!' )
            
        
        return string_converter
        
    
    class _TransformationPanel( ClientGUIScrolledPanels.EditPanel ):
        """Sub-panel for editing a single ( transformation_type, data ) pair.
        
        Shows every possible data control and enables only those relevant to the
        currently-selected transformation type.
        """
        
        def __init__( self, parent, transformation_type, data ):
            
            ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
            
            self._transformation_type = ClientGUICommon.BetterChoice( self )
            
            for t_type in ( ClientParsing.STRING_TRANSFORMATION_REMOVE_TEXT_FROM_BEGINNING, ClientParsing.STRING_TRANSFORMATION_REMOVE_TEXT_FROM_END, ClientParsing.STRING_TRANSFORMATION_CLIP_TEXT_FROM_BEGINNING, ClientParsing.STRING_TRANSFORMATION_CLIP_TEXT_FROM_END, ClientParsing.STRING_TRANSFORMATION_PREPEND_TEXT, ClientParsing.STRING_TRANSFORMATION_APPEND_TEXT, ClientParsing.STRING_TRANSFORMATION_ENCODE, ClientParsing.STRING_TRANSFORMATION_DECODE, ClientParsing.STRING_TRANSFORMATION_REVERSE, ClientParsing.STRING_TRANSFORMATION_REGEX_SUB, ClientParsing.STRING_TRANSFORMATION_DATE_DECODE, ClientParsing.STRING_TRANSFORMATION_INTEGER_ADDITION ):
                
                self._transformation_type.Append( ClientParsing.transformation_type_str_lookup[ t_type ], t_type )
                
            
            # one control per possible data shape; only the relevant ones get enabled
            self._data_text = wx.TextCtrl( self )
            self._data_number = wx.SpinCtrl( self, min = 0, max = 65535 )
            self._data_encoding = ClientGUICommon.BetterChoice( self )
            self._data_regex_pattern = wx.TextCtrl( self )
            self._data_regex_repl = wx.TextCtrl( self )
            self._data_date_link = wx.adv.HyperlinkCtrl( self, label = 'link to date info', url = 'https://docs.python.org/2/library/datetime.html#strftime-strptime-behavior' )
            self._data_timezone = ClientGUICommon.BetterChoice( self )
            self._data_timezone_offset = wx.SpinCtrl( self, min = -86400, max = 86400 )
            
            for e in ( 'hex', 'base64' ):
                
                self._data_encoding.Append( e, e )
                
            
            self._data_timezone.Append( 'GMT', HC.TIMEZONE_GMT )
            self._data_timezone.Append( 'Local', HC.TIMEZONE_LOCAL )
            self._data_timezone.Append( 'Offset', HC.TIMEZONE_OFFSET )
            
            #
            
            self._transformation_type.SelectClientData( transformation_type )
            
            self._UpdateDataControls()
            
            # seed the data controls from the incoming ( type, data ) pair
            
            if transformation_type in ( ClientParsing.STRING_TRANSFORMATION_DECODE, ClientParsing.STRING_TRANSFORMATION_ENCODE ):
                
                self._data_encoding.SelectClientData( data )
                
            elif transformation_type == ClientParsing.STRING_TRANSFORMATION_REGEX_SUB:
                
                ( pattern, repl ) = data
                
                self._data_regex_pattern.SetValue( pattern )
                self._data_regex_repl.SetValue( repl )
                
            elif transformation_type == ClientParsing.STRING_TRANSFORMATION_DATE_DECODE:
                
                ( phrase, timezone_type, timezone_offset ) = data
                
                self._data_text.SetValue( phrase )
                self._data_timezone.SelectClientData( timezone_type )
                self._data_timezone_offset.SetValue( timezone_offset )
                
            elif data is not None:
                
                # simple transformations carry either an int or a string
                if isinstance( data, int ):
                    
                    self._data_number.SetValue( data )
                    
                else:
                    
                    self._data_text.SetValue( data )
                    
                
            
            #
            
            rows = []
            
            rows.append( ( 'string data: ', self._data_text ) )
            rows.append( ( 'number data: ', self._data_number ) )
            rows.append( ( 'encoding data: ', self._data_encoding ) )
            rows.append( ( 'regex pattern: ', self._data_regex_pattern ) )
            rows.append( ( 'regex replacement: ', self._data_regex_repl ) )
            rows.append( ( 'date info: ', self._data_date_link ) )
            rows.append( ( 'date timezone: ', self._data_timezone ) )
            rows.append( ( 'timezone offset: ', self._data_timezone_offset ) )
            
            gridbox = ClientGUICommon.WrapInGrid( self, rows )
            
            vbox = wx.BoxSizer( wx.VERTICAL )
            
            vbox.Add( self._transformation_type, CC.FLAGS_EXPAND_PERPENDICULAR )
            vbox.Add( gridbox, CC.FLAGS_EXPAND_SIZER_PERPENDICULAR )
            
            self.SetSizer( vbox )
            
            #
            
            # re-evaluate which data controls apply when the type or timezone changes
            self._transformation_type.Bind( wx.EVT_CHOICE, self.EventChoice )
            self._data_timezone.Bind( wx.EVT_CHOICE, self.EventChoice )
            
        
        def _UpdateDataControls( self ):
            """Enable only the data controls relevant to the selected transformation type."""
            
            self._data_text.Disable()
            self._data_number.Disable()
            self._data_encoding.Disable()
            self._data_regex_pattern.Disable()
            self._data_regex_repl.Disable()
            self._data_timezone.Disable()
            self._data_timezone_offset.Disable()
            
            transformation_type = self._transformation_type.GetChoice()
            
            if transformation_type in ( ClientParsing.STRING_TRANSFORMATION_ENCODE, ClientParsing.STRING_TRANSFORMATION_DECODE ):
                
                self._data_encoding.Enable()
                
            elif transformation_type in ( ClientParsing.STRING_TRANSFORMATION_PREPEND_TEXT, ClientParsing.STRING_TRANSFORMATION_APPEND_TEXT, ClientParsing.STRING_TRANSFORMATION_DATE_DECODE ):
                
                self._data_text.Enable()
                
                if transformation_type == ClientParsing.STRING_TRANSFORMATION_DATE_DECODE:
                    
                    self._data_timezone.Enable()
                    
                    # the explicit offset spinner only applies to the 'Offset' timezone choice
                    if self._data_timezone.GetChoice() == HC.TIMEZONE_OFFSET:
                        
                        self._data_timezone_offset.Enable()
                        
                    
                
            elif transformation_type in ( ClientParsing.STRING_TRANSFORMATION_REMOVE_TEXT_FROM_BEGINNING, ClientParsing.STRING_TRANSFORMATION_REMOVE_TEXT_FROM_END, ClientParsing.STRING_TRANSFORMATION_CLIP_TEXT_FROM_BEGINNING, ClientParsing.STRING_TRANSFORMATION_CLIP_TEXT_FROM_END, ClientParsing.STRING_TRANSFORMATION_INTEGER_ADDITION ):
                
                self._data_number.Enable()
                
                # only integer addition may go negative
                if transformation_type == ClientParsing.STRING_TRANSFORMATION_INTEGER_ADDITION:
                    
                    self._data_number.SetMin( -65535 )
                    
                else:
                    
                    self._data_number.SetMin( 0 )
                    
                
            elif transformation_type == ClientParsing.STRING_TRANSFORMATION_REGEX_SUB:
                
                self._data_regex_pattern.Enable()
                self._data_regex_repl.Enable()
                
            
        
        def EventChoice( self, event ):
            
            self._UpdateDataControls()
            
        
        def GetValue( self ):
            """Return the edited ( transformation_type, data ) pair; data shape depends on the type."""
            
            transformation_type = self._transformation_type.GetChoice()
            
            if transformation_type in ( ClientParsing.STRING_TRANSFORMATION_ENCODE, ClientParsing.STRING_TRANSFORMATION_DECODE ):
                
                data = self._data_encoding.GetChoice()
                
            elif transformation_type in ( ClientParsing.STRING_TRANSFORMATION_PREPEND_TEXT, ClientParsing.STRING_TRANSFORMATION_APPEND_TEXT ):
                
                data = self._data_text.GetValue()
                
            elif transformation_type in ( ClientParsing.STRING_TRANSFORMATION_REMOVE_TEXT_FROM_BEGINNING, ClientParsing.STRING_TRANSFORMATION_REMOVE_TEXT_FROM_END, ClientParsing.STRING_TRANSFORMATION_CLIP_TEXT_FROM_BEGINNING, ClientParsing.STRING_TRANSFORMATION_CLIP_TEXT_FROM_END, ClientParsing.STRING_TRANSFORMATION_INTEGER_ADDITION ):
                
                data = self._data_number.GetValue()
                
            elif transformation_type == ClientParsing.STRING_TRANSFORMATION_REGEX_SUB:
                
                pattern = self._data_regex_pattern.GetValue()
                repl = self._data_regex_repl.GetValue()
                
                data = ( pattern, repl )
                
            elif transformation_type == ClientParsing.STRING_TRANSFORMATION_DATE_DECODE:
                
                phrase = self._data_text.GetValue()
                timezone_time = self._data_timezone.GetChoice()
                timezone_offset = self._data_timezone_offset.GetValue()
                
                data = ( phrase, timezone_time, timezone_offset )
                
            else:
                
                # e.g. REVERSE needs no data
                data = None
                
            
            return ( transformation_type, data )
class EditStringMatchPanel( ClientGUIScrolledPanels.EditPanel ):
    """Edit panel for a ClientParsing.StringMatch.
    
    Supports four match types--any characters, fixed characters, a flexible
    character set, and regex--plus optional min/max length bounds, with a
    live-tested example string showing whether the current rules match.
    """
    
    def __init__( self, parent, string_match = None ):
        # string_match: the StringMatch to edit; a default one is created if None
        
        ClientGUIScrolledPanels.EditPanel.__init__( self, parent )
        
        if string_match is None:
            
            string_match = ClientParsing.StringMatch()
            
        
        self._match_type = ClientGUICommon.BetterChoice( self )
        
        self._match_type.Append( 'any characters', ClientParsing.STRING_MATCH_ANY )
        self._match_type.Append( 'fixed characters', ClientParsing.STRING_MATCH_FIXED )
        self._match_type.Append( 'character set', ClientParsing.STRING_MATCH_FLEXIBLE )
        self._match_type.Append( 'regex', ClientParsing.STRING_MATCH_REGEX )
        
        self._match_value_text_input = wx.TextCtrl( self )
        
        self._match_value_flexible_input = ClientGUICommon.BetterChoice( self )
        
        self._match_value_flexible_input.Append( 'alphabetic characters (a-zA-Z)', ClientParsing.ALPHA )
        self._match_value_flexible_input.Append( 'alphanumeric characters (a-zA-Z0-9)', ClientParsing.ALPHANUMERIC )
        self._match_value_flexible_input.Append( 'numeric characters (0-9)', ClientParsing.NUMERIC )
        
        self._min_chars = ClientGUICommon.NoneableSpinCtrl( self, min = 1, max = 65535, unit = 'characters', none_phrase = 'no limit' )
        self._max_chars = ClientGUICommon.NoneableSpinCtrl( self, min = 1, max = 65535, unit = 'characters', none_phrase = 'no limit' )
        
        self._example_string = wx.TextCtrl( self )
        
        self._example_string_matches = ClientGUICommon.BetterStaticText( self )
        
        #
        
        self.SetValue( string_match )
        
        #
        
        rows = []
        
        rows.append( ( 'match type: ', self._match_type ) )
        rows.append( ( 'match text: ', self._match_value_text_input ) )
        rows.append( ( 'match value (character set): ', self._match_value_flexible_input ) )
        # label typo fixed: was 'minumum'
        rows.append( ( 'minimum allowed number of characters: ', self._min_chars ) )
        rows.append( ( 'maximum allowed number of characters: ', self._max_chars ) )
        rows.append( ( 'example string: ', self._example_string ) )
        
        gridbox = ClientGUICommon.WrapInGrid( self, rows )
        
        vbox = wx.BoxSizer( wx.VERTICAL )
        
        vbox.Add( gridbox, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.Add( self._example_string_matches, CC.FLAGS_EXPAND_PERPENDICULAR )
        
        self.SetSizer( vbox )
        
        #
        
        # every edit re-tests the example string and re-enables/disables the inputs
        self._match_type.Bind( wx.EVT_CHOICE, self.EventUpdate )
        self._match_value_text_input.Bind( wx.EVT_TEXT, self.EventUpdate )
        self._match_value_flexible_input.Bind( wx.EVT_CHOICE, self.EventUpdate )
        self._min_chars.Bind( wx.EVT_SPINCTRL, self.EventUpdate )
        self._max_chars.Bind( wx.EVT_SPINCTRL, self.EventUpdate )
        self._example_string.Bind( wx.EVT_TEXT, self.EventUpdate )
        
    
    def _GetValue( self ):
        """Assemble a StringMatch from the current control values (no validation)."""
        
        match_type = self._match_type.GetChoice()
        
        if match_type == ClientParsing.STRING_MATCH_ANY:
            
            match_value = ''
            
        elif match_type == ClientParsing.STRING_MATCH_FLEXIBLE:
            
            match_value = self._match_value_flexible_input.GetChoice()
            
        else:
            
            # fixed and regex types both use the raw text input
            match_value = self._match_value_text_input.GetValue()
            
        
        min_chars = self._min_chars.GetValue()
        max_chars = self._max_chars.GetValue()
        
        example_string = self._example_string.GetValue()
        
        string_match = ClientParsing.StringMatch( match_type = match_type, match_value = match_value, min_chars = min_chars, max_chars = max_chars, example_string = example_string )
        
        return string_match
        
    
    def _UpdateControls( self ):
        """Enable the inputs relevant to the match type and re-test the example string."""
        
        match_type = self._match_type.GetChoice()
        
        if match_type == ClientParsing.STRING_MATCH_ANY:
            
            self._match_value_text_input.Disable()
            self._match_value_flexible_input.Disable()
            
        elif match_type == ClientParsing.STRING_MATCH_FLEXIBLE:
            
            self._match_value_text_input.Disable()
            self._match_value_flexible_input.Enable()
            
        else:
            
            self._match_value_text_input.Enable()
            self._match_value_flexible_input.Disable()
            
        
        if match_type == ClientParsing.STRING_MATCH_FIXED:
            
            # a fixed match trivially matches itself, so length bounds and the
            # example test are meaningless here
            self._min_chars.SetValue( None )
            self._max_chars.SetValue( None )
            
            self._min_chars.Disable()
            self._max_chars.Disable()
            
            self._example_string.SetValue( self._match_value_text_input.GetValue() )
            
            self._example_string_matches.SetLabelText( '' )
            
        else:
            
            self._min_chars.Enable()
            self._max_chars.Enable()
            
            string_match = self._GetValue()
            
            try:
                
                string_match.Test( self._example_string.GetValue() )
                
                self._example_string_matches.SetLabelText( 'Example matches ok!' )
                self._example_string_matches.SetForegroundColour( ( 0, 128, 0 ) )
                
            except HydrusExceptions.StringMatchException as e:
                
                reason = HydrusData.ToUnicode( e )
                
                self._example_string_matches.SetLabelText( 'Example does not match - ' + reason )
                self._example_string_matches.SetForegroundColour( ( 128, 0, 0 ) )
                
            
        
    
    def EventUpdate( self, event ):
        
        self._UpdateControls()
        
        event.Skip()
        
    
    def GetValue( self ):
        """Return the edited StringMatch.
        
        Raises HydrusExceptions.VetoException if the example string fails the rules.
        """
        
        string_match = self._GetValue()
        
        try:
            
            string_match.Test( self._example_string.GetValue() )
            
        except HydrusExceptions.StringMatchException:
            
            raise HydrusExceptions.VetoException( 'Please enter an example text that matches the given rules!' )
            
        
        return string_match
        
    
    def SetValue( self, string_match ):
        """Load *string_match* into the controls and refresh the enable states."""
        
        ( match_type, match_value, min_chars, max_chars, example_string ) = string_match.ToTuple()
        
        self._match_type.SelectClientData( match_type )
        
        if match_type == ClientParsing.STRING_MATCH_FLEXIBLE:
            
            self._match_value_flexible_input.SelectClientData( match_value )
            
        else:
            
            # keep a sensible default selected in the hidden flexible control
            self._match_value_flexible_input.SelectClientData( ClientParsing.ALPHA )
            
            self._match_value_text_input.SetValue( match_value )
            
        
        self._min_chars.SetValue( min_chars )
        self._max_chars.SetValue( max_chars )
        
        self._example_string.SetValue( example_string )
        
        self._UpdateControls()
class ManageParsingScriptsPanel( ClientGUIScrolledPanels.ManagePanel ):
SCRIPT_TYPES = []
SCRIPT_TYPES.append( HydrusSerialisable.SERIALISABLE_TYPE_PARSE_ROOT_FILE_LOOKUP )
def __init__( self, parent ):
    """Build the script management panel: a list of saved scripts loaded from the
    database plus add/export/import/duplicate/edit/delete buttons."""
    
    ClientGUIScrolledPanels.ManagePanel.__init__( self, parent )
    
    self._scripts = ClientGUIListCtrl.SaneListCtrlForSingleObject( self, 200, [ ( 'name', 140 ), ( 'query type', 80 ), ( 'script type', 80 ), ( 'produces', -1 ) ], delete_key_callback = self.Delete, activation_callback = self.Edit )
    
    menu_items = []
    
    menu_items.append( ( 'normal', 'file lookup script', 'A script that fetches content for a known file.', self.AddFileLookupScript ) )
    
    self._add_button = ClientGUICommon.MenuButton( self, 'add', menu_items )
    
    menu_items = []
    
    menu_items.append( ( 'normal', 'to clipboard', 'Serialise the script and put it on your clipboard.', self.ExportToClipboard ) )
    menu_items.append( ( 'normal', 'to png', 'Serialise the script and encode it to an image file you can easily share with other hydrus users.', self.ExportToPng ) )
    
    self._export_button = ClientGUICommon.MenuButton( self, 'export', menu_items )
    
    menu_items = []
    
    menu_items.append( ( 'normal', 'from clipboard', 'Load a script from text in your clipboard.', self.ImportFromClipboard ) )
    menu_items.append( ( 'normal', 'from png', 'Load a script from an encoded png.', self.ImportFromPng ) )
    
    self._import_button = ClientGUICommon.MenuButton( self, 'import', menu_items )
    
    self._duplicate_button = ClientGUICommon.BetterButton( self, 'duplicate', self.Duplicate )
    
    self._edit_button = ClientGUICommon.BetterButton( self, 'edit', self.Edit )
    
    self._delete_button = ClientGUICommon.BetterButton( self, 'delete', self.Delete )
    
    # load every stored script of each supported type into the list
    
    scripts = []
    
    for script_type in self.SCRIPT_TYPES:
        
        scripts.extend( HG.client_controller.Read( 'serialisable_named', script_type ) )
        
    
    for script in scripts:
        
        ( display_tuple, sort_tuple ) = self._ConvertScriptToTuples( script )
        
        self._scripts.Append( display_tuple, sort_tuple, script )
        
    
    # layout
    
    vbox = wx.BoxSizer( wx.VERTICAL )
    
    button_hbox = wx.BoxSizer( wx.HORIZONTAL )
    
    button_hbox.Add( self._add_button, CC.FLAGS_VCENTER )
    button_hbox.Add( self._export_button, CC.FLAGS_VCENTER )
    button_hbox.Add( self._import_button, CC.FLAGS_VCENTER )
    button_hbox.Add( self._duplicate_button, CC.FLAGS_VCENTER )
    button_hbox.Add( self._edit_button, CC.FLAGS_VCENTER )
    button_hbox.Add( self._delete_button, CC.FLAGS_VCENTER )
    
    vbox.Add( self._scripts, CC.FLAGS_EXPAND_BOTH_WAYS )
    vbox.Add( button_hbox, CC.FLAGS_BUTTON_SIZER )
    
    self.SetSizer( vbox )
def _ConvertScriptToTuples( self, script ):
    """Build the list-control row for *script*; display and sort tuples are identical."""
    
    ( name, query_type, script_type, produces ) = script.ToPrettyStrings()
    
    row = ( name, query_type, script_type, produces )
    
    return ( row, row )
def _GetExportObject( self ):
    """Bundle the selected scripts for export.
    
    Returns None when nothing is selected, the bare script when exactly one is,
    and a SerialisableList otherwise.
    """
    
    to_export = HydrusSerialisable.SerialisableList()
    
    to_export.extend( self._scripts.GetObjects( only_selected = True ) )
    
    if len( to_export ) == 0:
        
        return None
        
    elif len( to_export ) == 1:
        
        return to_export[0]
        
    else:
        
        return to_export
def _ImportObject( self, obj ):
    """Recursively import *obj*: unpack serialisable lists, add scripts, and complain about anything else."""
    
    if isinstance( obj, HydrusSerialisable.SerialisableList ):
        
        for sub_obj in obj:
            
            self._ImportObject( sub_obj )
            
        
        return
        
    
    if not isinstance( obj, ClientParsing.ParseRootFileLookup ):
        
        wx.MessageBox( 'That was not a script--it was a: ' + type( obj ).__name__ )
        
        return
        
    
    script = obj
    
    self._scripts.SetNonDupeName( script )
    
    ( display_tuple, sort_tuple ) = self._ConvertScriptToTuples( script )
    
    self._scripts.Append( display_tuple, sort_tuple, script )
def AddFileLookupScript( self ):
    """Create a blank file lookup script and open the edit dialog for it.
    
    The default script converts the file's md5 bytes to hex for a GET
    argument named 'md5'.
    """
    
    name = 'new script'
    url = ''
    query_type = HC.GET
    file_identifier_type = ClientParsing.FILE_IDENTIFIER_TYPE_MD5
    # StringConverter expects a sequence of ( transformation_type, data ) pairs
    # (see EditStringConverterPanel._GetValue). The previous code passed the flat
    # list [ STRING_TRANSFORMATION_ENCODE, 'hex' ], which is not a valid
    # transformations sequence--wrap the pair in a one-element list instead.
    file_identifier_string_converter = ClientParsing.StringConverter( [ ( ClientParsing.STRING_TRANSFORMATION_ENCODE, 'hex' ) ], 'some hash bytes' )
    file_identifier_arg_name = 'md5'
    static_args = {}
    children = []
    
    dlg_title = 'edit file metadata lookup script'
    
    empty_script = ClientParsing.ParseRootFileLookup( name, url = url, query_type = query_type, file_identifier_type = file_identifier_type, file_identifier_string_converter = file_identifier_string_converter, file_identifier_arg_name = file_identifier_arg_name, static_args = static_args, children = children )
    
    panel_class = EditParsingScriptFileLookupPanel
    
    self.AddScript( dlg_title, empty_script, panel_class )
def AddScript(self, dlg_title, empty_script, panel_class):
    """Run the edit dialog for *empty_script*; append the result on OK."""
    with ClientGUITopLevelWindows.DialogEdit(self, dlg_title, frame_key='deeply_nested_dialog') as dlg_edit:
        panel = panel_class(dlg_edit, empty_script)
        dlg_edit.SetPanel(panel)
        if dlg_edit.ShowModal() != wx.ID_OK:
            return
        new_script = panel.GetValue()
        self._scripts.SetNonDupeName(new_script)
        (display_tuple, sort_tuple) = self._ConvertScriptToTuples(new_script)
        self._scripts.Append(display_tuple, sort_tuple, new_script)
def CommitChanges(self):
    """Persist every script in the list back to the database."""
    all_scripts = self._scripts.GetObjects()
    HG.client_controller.Write('serialisables_overwrite', self.SCRIPT_TYPES, all_scripts)
def Delete(self):
    """Ask for confirmation, then remove the selected scripts."""
    with ClientGUIDialogs.DialogYesNo(self, 'Remove all selected?') as dlg:
        if dlg.ShowModal() != wx.ID_YES:
            return
        self._scripts.RemoveAllSelected()
def Duplicate(self):
    """Append a (renamed) copy of each selected script."""
    for original in self._scripts.GetObjects(only_selected=True):
        dupe_script = original.Duplicate()
        # give the copy a unique name before it joins the list
        self._scripts.SetNonDupeName(dupe_script)
        (display_tuple, sort_tuple) = self._ConvertScriptToTuples(dupe_script)
        self._scripts.Append(display_tuple, sort_tuple, dupe_script)
def Edit(self):
    """Open the edit dialog for each selected script, updating its row on OK."""
    for index in self._scripts.GetAllSelected():
        script = self._scripts.GetObject(index)
        if not isinstance(script, ClientParsing.ParseRootFileLookup):
            continue
        panel_class = EditParsingScriptFileLookupPanel
        dlg_title = 'edit file lookup script'
        with ClientGUITopLevelWindows.DialogEdit(self, dlg_title, frame_key='deeply_nested_dialog') as dlg:
            original_name = script.GetName()
            panel = panel_class(dlg, script)
            dlg.SetPanel(panel)
            if dlg.ShowModal() != wx.ID_OK:
                continue
            edited_script = panel.GetValue()
            # only dedupe the name if the user actually changed it
            if edited_script.GetName() != original_name:
                self._scripts.SetNonDupeName(edited_script)
            (display_tuple, sort_tuple) = self._ConvertScriptToTuples(edited_script)
            self._scripts.UpdateRow(index, display_tuple, sort_tuple, edited_script)
def ExportToClipboard(self):
    """Serialise the current selection to JSON and publish it to the clipboard."""
    export_object = self._GetExportObject()
    if export_object is None:
        return
    HG.client_controller.pub('clipboard', 'text', export_object.DumpToString())
def ExportToPng(self):
    """Show the encode-to-png dialog for the current selection."""
    export_object = self._GetExportObject()
    if export_object is None:
        return
    with ClientGUITopLevelWindows.DialogNullipotent(self, 'export to png') as dlg:
        panel = ClientGUISerialisable.PngExportPanel(dlg, export_object)
        dlg.SetPanel(panel)
        dlg.ShowModal()
def ImportFromClipboard(self):
    """Try to deserialise the clipboard text into one or more scripts."""
    raw_text = HG.client_controller.GetClipboardText()
    try:
        # the import itself stays inside the try: a malformed-but-parseable
        # object should produce the same user-facing message
        self._ImportObject(HydrusSerialisable.CreateFromString(raw_text))
    except Exception:
        wx.MessageBox('I could not understand what was in the clipboard')
def ImportFromPng(self):
    """Pick a png, decode its payload, and import the encoded script(s)."""
    wildcard = 'PNG (*.png)|*.png'
    with wx.FileDialog(self, 'select the png with the encoded script', wildcard=wildcard) as dlg:
        if dlg.ShowModal() != wx.ID_OK:
            return
        path = HydrusData.ToUnicode(dlg.GetPath())
        try:
            payload = ClientSerialisable.LoadFromPng(path)
        except Exception as e:
            wx.MessageBox(HydrusData.ToUnicode(e))
            return
        try:
            obj = HydrusSerialisable.CreateFromNetworkString(payload)
            self._ImportObject(obj)
        except:
            wx.MessageBox('I could not understand what was encoded in the png!')
class ScriptManagementControl( wx.Panel ):
    """Status readout for a running parsing-script job.

    Shows a status line, a progress gauge, a button that lists urls the job
    has found, and a cancel button. Driven by a job key handed in via
    SetJobKey and polled from the gui's UI-update timer (TIMERUIUpdate).
    """

    def __init__( self, parent ):

        wx.Panel.__init__( self, parent )

        # the job key currently being displayed; None means idle
        self._job_key = None

        # guards _job_key between the UI timer and the button handlers
        self._lock = threading.Lock()

        self._recent_urls = []

        main_panel = ClientGUICommon.StaticBox( self, 'script control' )

        self._status = wx.StaticText( main_panel )
        self._gauge = ClientGUICommon.Gauge( main_panel )

        self._link_button = wx.BitmapButton( main_panel, bitmap = CC.GlobalBMPs.link )
        self._link_button.Bind( wx.EVT_BUTTON, self.EventLinkButton )
        self._link_button.SetToolTip( 'urls found by the script' )

        self._cancel_button = wx.BitmapButton( main_panel, bitmap = CC.GlobalBMPs.stop )
        self._cancel_button.Bind( wx.EVT_BUTTON, self.EventCancelButton )

        #

        hbox = wx.BoxSizer( wx.HORIZONTAL )

        hbox.Add( self._gauge, CC.FLAGS_EXPAND_BOTH_WAYS )
        hbox.Add( self._link_button, CC.FLAGS_VCENTER )
        hbox.Add( self._cancel_button, CC.FLAGS_VCENTER )

        main_panel.Add( self._status, CC.FLAGS_EXPAND_PERPENDICULAR )
        main_panel.Add( hbox, CC.FLAGS_EXPAND_PERPENDICULAR )

        #

        vbox = wx.BoxSizer( wx.VERTICAL )

        vbox.Add( main_panel, CC.FLAGS_EXPAND_SIZER_BOTH_WAYS )

        self.SetSizer( vbox )

        #

        self._Reset()

    def _Reset( self ):
        """Return the panel to the idle state: empty status, zeroed gauge, buttons off."""

        self._status.SetLabelText( '' )
        self._gauge.SetRange( 1 )
        self._gauge.SetValue( 0 )

        self._link_button.Disable()
        self._cancel_button.Disable()

    def _Update( self ):
        """Refresh the widgets from the job key. Caller must hold self._lock."""

        if self._job_key is None:

            self._Reset()

        else:

            if self._job_key.HasVariable( 'script_status' ):

                status = self._job_key.GetIfHasVariable( 'script_status' )

            else:

                status = ''

            # only touch the label when the text actually changed
            if status != self._status.GetLabelText():

                self._status.SetLabelText( status )

            if self._job_key.HasVariable( 'script_gauge' ):

                # NOTE(review): 'range' shadows the builtin; rename when next touching this code.
                ( value, range ) = self._job_key.GetIfHasVariable( 'script_gauge' )

            else:

                ( value, range ) = ( 0, 1 )

            self._gauge.SetRange( range )
            self._gauge.SetValue( value )

            urls = self._job_key.GetURLs()

            # enable the url menu button only when there is something to show
            if len( urls ) == 0:

                if self._link_button.IsEnabled():

                    self._link_button.Disable()

            else:

                if not self._link_button.IsEnabled():

                    self._link_button.Enable()

            # cancel is only meaningful while the job is still running
            if self._job_key.IsDone():

                if self._cancel_button.IsEnabled():

                    self._cancel_button.Disable()

            else:

                if not self._cancel_button.IsEnabled():

                    self._cancel_button.Enable()

    def TIMERUIUpdate( self ):
        """Timer callback: refresh, and deregister once there is no job to track."""

        with self._lock:

            self._Update()

            if self._job_key is None:

                HG.client_controller.gui.UnregisterUIUpdateWindow( self )

    def EventCancelButton( self, event ):
        """Cancel the tracked job, if any."""

        with self._lock:

            if self._job_key is not None:

                self._job_key.Cancel()

    def EventLinkButton( self, event ):
        """Pop up a menu of the urls the job has found so far."""

        with self._lock:

            if self._job_key is None:

                return

            urls = self._job_key.GetURLs()

        menu = wx.Menu()

        for url in urls:

            ClientGUIMenus.AppendMenuItem( self, menu, url, 'launch this url in your browser', ClientPaths.LaunchURLInWebBrowser, url )

        HG.client_controller.PopupMenu( self, menu )

    def SetJobKey( self, job_key ):
        """Start tracking *job_key* and register for UI-update timer callbacks."""

        with self._lock:

            self._job_key = job_key

        HG.client_controller.gui.RegisterUIUpdateWindow( self )
class TestPanel( wx.Panel ):
    """Scratch panel for testing a parsing object against example data.

    *object_callable* returns the parser under test; *test_context* is an
    optional ( example_parsing_context, example_data ) pair. The user can
    paste/fetch example data, then run 'test parse' to see pretty results.
    """

    def __init__( self, parent, object_callable, test_context = None ):

        wx.Panel.__init__( self, parent )

        if test_context is None:

            test_context = ( {}, '' )

        self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_FRAMEBK ) )

        # factory producing a fresh parser for each test run
        self._object_callable = object_callable

        self._example_parsing_context = ClientGUIControls.StringToStringDictButton( self, 'edit example parsing context' )

        self._example_data_description = ClientGUICommon.BetterStaticText( self )

        self._copy_button = ClientGUICommon.BetterBitmapButton( self, CC.GlobalBMPs.copy, self._Copy )
        self._copy_button.SetToolTip( 'Copy the current example data to the clipboard.' )

        self._fetch_button = ClientGUICommon.BetterBitmapButton( self, CC.GlobalBMPs.link, self._FetchFromURL )
        self._fetch_button.SetToolTip( 'Fetch data from a URL.' )

        self._paste_button = ClientGUICommon.BetterBitmapButton( self, CC.GlobalBMPs.paste, self._Paste )
        self._paste_button.SetToolTip( 'Paste the current clipboard data into here.' )

        self._example_data_preview = ClientGUICommon.SaneMultilineTextCtrl( self, style = wx.TE_READONLY )

        size = ClientGUICommon.ConvertTextToPixels( self._example_data_preview, ( 80, 12 ) )

        self._example_data_preview.SetInitialSize( size )

        self._test_parse = ClientGUICommon.BetterButton( self, 'test parse', self.TestParse )

        self._results = ClientGUICommon.SaneMultilineTextCtrl( self )

        size = ClientGUICommon.ConvertTextToPixels( self._example_data_preview, ( 80, 12 ) )

        self._results.SetInitialSize( size )

        #

        ( example_parsing_context, example_data ) = test_context

        self._example_parsing_context.SetValue( example_parsing_context )

        self._SetExampleData( example_data )

        self._results.SetValue( 'Successfully parsed results will be printed here.' )

        #

        buttons_hbox = wx.BoxSizer( wx.HORIZONTAL )

        buttons_hbox.Add( self._copy_button, CC.FLAGS_VCENTER )
        buttons_hbox.Add( self._fetch_button, CC.FLAGS_VCENTER )
        buttons_hbox.Add( self._paste_button, CC.FLAGS_VCENTER )

        desc_hbox = wx.BoxSizer( wx.HORIZONTAL )

        desc_hbox.Add( self._example_data_description, CC.FLAGS_EXPAND_BOTH_WAYS )
        desc_hbox.Add( buttons_hbox, CC.FLAGS_BUTTON_SIZER )

        vbox = wx.BoxSizer( wx.VERTICAL )

        vbox.Add( self._example_parsing_context, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.Add( desc_hbox, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.Add( self._example_data_preview, CC.FLAGS_EXPAND_BOTH_WAYS )
        vbox.Add( self._test_parse, CC.FLAGS_EXPAND_PERPENDICULAR )
        vbox.Add( self._results, CC.FLAGS_EXPAND_BOTH_WAYS )

        self.SetSizer( vbox )

    def _Copy( self ):
        """Put the full (unpreviewed) example data on the clipboard."""

        HG.client_controller.pub( 'clipboard', 'text', self._example_data )

    def _FetchFromURL( self ):
        """Ask for a url, GET it on a worker thread, and load the body as example data."""

        def wx_code( example_data ):

            # the panel may have been destroyed while the fetch ran
            if not self:

                return

            self._SetExampleData( example_data )

        def do_it( url ):

            network_job = ClientNetworkingJobs.NetworkJob( 'GET', url )

            network_job.OverrideBandwidth()

            HG.client_controller.network_engine.AddJob( network_job )

            try:

                network_job.WaitUntilDone()

                example_data = network_job.GetContent()

            except HydrusExceptions.CancelledException:

                example_data = 'fetch cancelled'

            except Exception as e:

                example_data = 'fetch failed:' + os.linesep * 2 + HydrusData.ToUnicode( e )

                HydrusData.ShowException( e )

            wx.CallAfter( wx_code, example_data )

        message = 'Enter URL to fetch data for.'

        with ClientGUIDialogs.DialogTextEntry( self, message, default = 'enter url', allow_blank = False) as dlg:

            if dlg.ShowModal() == wx.ID_OK:

                url = dlg.GetValue()

                HG.client_controller.CallToThread( do_it, url )

    def _Paste( self ):
        """Replace the example data with whatever text is on the clipboard."""

        raw_text = HG.client_controller.GetClipboardText()

        self._SetExampleData( raw_text )

    def _SetExampleData( self, example_data ):
        """Store *example_data*, sniff its type, and refresh description/preview widgets."""

        self._example_data = example_data

        if len( example_data ) > 0:

            parse_phrase = 'uncertain data type'

            # can't just throw this at bs4 to see if it 'works', as it'll just wrap any unparsable string in some bare <html><body><p> tags
            if '<html' in example_data:

                parse_phrase = 'looks like HTML'

            # put this second, so if the JSON contains some HTML, it'll overwrite here. decent compromise
            try:

                json.loads( example_data )

                parse_phrase = 'looks like JSON'

            except:

                pass

            description = HydrusData.ConvertIntToBytes( len( example_data ) ) + ' total, ' + parse_phrase

            # only show the first KB in the read-only preview widget
            if len( example_data ) > 1024:

                preview = 'PREVIEW:' + os.linesep + HydrusData.ToUnicode( example_data[:1024] )

            else:

                preview = example_data

            self._test_parse.Enable()

        else:

            description = 'no example data set yet'
            preview = ''

            self._test_parse.Disable()

        self._example_data_description.SetLabelText( description )
        self._example_data_preview.SetValue( preview )

    def GetExampleParsingContext( self ):
        """Return the example parsing context dict from the editor button."""

        return self._example_parsing_context.GetValue()

    def GetTestContext( self ):
        """Return ( example_parsing_context, example_data ) for the current state."""

        example_parsing_context = self._example_parsing_context.GetValue()

        return ( example_parsing_context, self._example_data )

    def TestParse( self ):
        """Run the parser over the example data and show pretty results (or the traceback)."""

        obj = self._object_callable()

        ( example_parsing_context, example_data ) = self.GetTestContext()

        try:

            results_text = obj.ParsePretty( example_parsing_context, example_data )

            self._results.SetValue( results_text )

        except Exception as e:

            etype = type( e )

            value = HydrusData.ToUnicode( e )

            ( etype, value, tb ) = sys.exc_info()

            trace = ''.join( traceback.format_exception( etype, value, tb ) )

            message = 'Exception:' + os.linesep + HydrusData.ToUnicode( etype.__name__ ) + ': ' + HydrusData.ToUnicode( value ) + os.linesep + HydrusData.ToUnicode( trace )

            self._results.SetValue( message )

    def SetExampleData( self, example_data ):
        """Public wrapper around _SetExampleData."""

        self._SetExampleData( example_data )
class TestPanelSubsidiary( TestPanel ):
    """TestPanel variant for subsidiary page parsers.

    Adds a formula (from *formula_callable*) that first splits the example
    data into subsidiary posts; each post is then fed to the page parser
    from *object_callable* and the pretty results are concatenated.
    """

    def __init__( self, parent, object_callable, formula_callable, test_context = None ):

        TestPanel.__init__( self, parent, object_callable, test_context = test_context )

        self._formula_callable = formula_callable

        self._formula_description = ClientGUICommon.BetterStaticText( self )

        self._refresh_formula_description_button = ClientGUICommon.BetterBitmapButton( self, CC.GlobalBMPs.refresh, self._UpdateFormulaDescription )

        hbox = wx.BoxSizer( wx.HORIZONTAL )

        hbox.Add( self._formula_description, CC.FLAGS_EXPAND_BOTH_WAYS )
        hbox.Add( self._refresh_formula_description_button, CC.FLAGS_LONE_BUTTON )

        # splice the formula row into the parent's sizer, under the context/description rows
        vbox = self.GetSizer()

        vbox.Insert( 2, hbox, CC.FLAGS_EXPAND_PERPENDICULAR )

        self._UpdateFormulaDescription()

    def _UpdateFormulaDescription( self ):
        """Re-run the splitting formula and report how many subsidiary posts it finds."""

        formula = self._formula_callable()

        if formula is None:

            description = 'No formula set'

        else:

            try:

                example_parsing_context = self._example_parsing_context.GetValue()

                posts = formula.Parse( example_parsing_context, self._example_data )

                description = HydrusData.ConvertIntToPrettyString( len( posts ) ) + ' subsidiary posts parsed'

            except HydrusExceptions.ParseException as e:

                description = HydrusData.ToUnicode( e )

        self._formula_description.SetLabelText( description )

    def TestParse( self ):
        """Split the example data into posts, parse each, and show the joined results."""

        self._UpdateFormulaDescription()

        formula = self._formula_callable()

        page_parser = self._object_callable()

        try:

            example_parsing_context = self._example_parsing_context.GetValue()

            # with no formula, treat the whole example data as a single post
            if formula is None:

                posts = [ self._example_data ]

            else:

                posts = formula.Parse( example_parsing_context, self._example_data )

            pretty_texts = []

            for post in posts:

                pretty_text = page_parser.ParsePretty( example_parsing_context, post )

                pretty_texts.append( pretty_text )

            separator = os.linesep * 2

            end_pretty_text = separator.join( pretty_texts )

            self._results.SetValue( end_pretty_text )

        except Exception as e:

            etype = type( e )

            value = HydrusData.ToUnicode( e )

            ( etype, value, tb ) = sys.exc_info()

            trace = ''.join( traceback.format_exception( etype, value, tb ) )

            message = 'Exception:' + os.linesep + HydrusData.ToUnicode( etype.__name__ ) + ': ' + HydrusData.ToUnicode( value ) + os.linesep + HydrusData.ToUnicode( trace )

            self._results.SetValue( message )
| StarcoderdataPython |
4817586 | import random
import datetime
import time
import fcntl
from ip.IPSocket import *
from tcp.TCPPacket import *
def get_ip(ifname='eth0'):
    """
    Get the ip address bound to interface *ifname* via the SIOCGIFADDR
    ioctl (0x8915). Linux-only.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        data = struct.pack('256s', ifname[:15].encode())
        # bytes 20-24 of the returned ifreq struct hold the packed IPv4 address
        packed_ip = fcntl.ioctl(s.fileno(), 0x8915, data)[20:24]
        return socket.inet_ntoa(packed_ip)
    finally:
        # the original leaked this socket on every call
        s.close()
class TCPSocket:
    """
    This class is an implementation of TCP built on a custom
    implementation of IP (IPSocket/TCPPacket). It functions as any old
    socket would: connect/send/recv/close, with a daemon thread driving
    the send/receive/retransmit loop.
    """

    def __init__(self):
        self.socket = None
        # (source ip, random ephemeral port)
        self.src = (get_ip(), random.randrange(0, 1 << 16))
        self.thread = None
        # bytes queued by the application, waiting to go on the wire
        self.data_send_queue = queue.Queue()
        # in-order payload bytes waiting for the application to recv()
        self.data_recv_queue = queue.Queue()
        self.connected = False

    def connect(self, dest):
        """
        Create a new connection to dest = (host, port): resolve the host,
        perform the three-way handshake, then start the I/O loop thread.
        """
        # Connection State
        self.socket = IPSocket(get_ip())
        self.socket.connect(dest)
        self.dest = (socket.gethostbyname(dest[0]), dest[1])
        # Leftover bytes from a partially consumed recv()
        self.received_packet = None
        # The slow start threshold.
        self.ss_thresh = float("inf")
        # The congestion window, in segments.
        self.congestion_window = 1
        # The advertised window at the destination.
        self.dest_window = float('inf')
        # Round Trip Time in ms
        self.RTT = None
        # The Max Segment Size. The default is 536 = 576 - IP_HEADER - TCP_HEADER
        self.MSS = 536
        # Bookkeeping for the next packet to send / next seq expected.
        self.next_packet = {
            'ack_num': 1,
            'ack': 0,
            'seq': random.randrange(0, 1 << 32)
        }
        # (packet, send time, was_resent) triples currently unacked in the network
        self.packets_in_network = set()
        # Timed-out packets awaiting retransmission, ordered by seq number.
        # Entries are (seq, packet) tuples -- see timeout()/resend_packet().
        self.resend_queue = queue.PriorityQueue()
        # The seq number that needs to be acked to move the window.
        self.seq = 0
        self.out_of_order_packets = queue.PriorityQueue()
        self.seen_seq_nums = set()
        # Perform the handshake, then start the I/O thread.
        self.handshake()
        self.thread = threading.Thread(name="tcp-loop", target=self.loop)
        self.thread.setDaemon(True)
        self.thread.start()

    def send(self, data):
        """
        Send some data over the network. The same as sendall.
        """
        self.data_send_queue.put(data)

    def sendall(self, data):
        """
        Send all the data over the socket.
        """
        self.send(data)

    def recv(self, max_bytes=None):
        """
        Get data from the socket. Drains the receive queue (or returns a
        previously buffered remainder), truncating to *max_bytes* and
        stashing any excess for the next call.

        Raises Exception("Socket closed") if the connection is down.
        """
        packet = b''
        if not self.connected:
            raise Exception("Socket closed")
        if self.received_packet is None:
            while True:
                if not self.connected:
                    raise Exception("Socket closed")
                if not self.data_recv_queue.empty():
                    packet += self.data_recv_queue.get(block=False)
                else:
                    break
            if max_bytes is not None and len(packet) > max_bytes:
                # keep the overflow for the next recv()
                self.received_packet = packet[max_bytes:]
                packet = packet[:max_bytes]
        else:
            packet = self.received_packet
            if max_bytes is None or len(packet) <= max_bytes:
                self.received_packet = None
            else:
                self.received_packet = packet[max_bytes:]
                packet = packet[:max_bytes]
        return packet

    def close(self):
        """
        Send the FIN packet to close the connection.
        """
        self.next_packet['fin'] = 1
        p = TCPPacket(self.src, self.dest, 0, self.seq)
        p.fin = 1
        p.checksum()
        self.socket.send(p.build())

    def loop(self):
        """
        Thread target for running TCP separate from the application:
        send, receive, then check for retransmission timeouts.
        """
        while self.connected:
            self.send_new_packets()
            while True:
                packet = self.socket.recv()
                if packet is not None:
                    self.parse_packet(packet)
                else:
                    break
            if not self.connected:
                self.close()
                break
            if self.RTT is not None:
                self.timeout()
            # Gotta avoid busy wait
            time.sleep(0.050)

    def handshake(self):
        """
        Perform the three-way handshake (SYN, SYN-ACK, ACK) and seed the
        initial RTT, advertised window, and MSS from the SYN-ACK.
        """
        # Choose the starting seq number
        self.seq = random.randint(0, 65535)
        # Send the SYN packet to create the connection
        syn = TCPPacket(self.src, self.dest, 0, self.seq)
        syn.syn = 1
        syn.checksum()
        self.socket.send(syn.build())
        sent_time = datetime.datetime.now()
        # Get packets until we see a SYN_ACK from the destination to us.
        p = None
        while True:
            p = self.socket.recv()
            if p is None:
                continue
            p = TCPPacket.unpack(p, self.dest[0], self.src[0])
            if p.src == self.dest and p.dest == self.src and p.syn and p.ack:
                break
            time.sleep(0.010)
        # Calculate Initial RTT
        arrive_time = datetime.datetime.now()
        self.RTT = (arrive_time - sent_time).total_seconds() * 1000
        # Get Advertised Window Info
        self.dest_window = p.window
        # Pull out MSS Information (option kind 2)
        for o in p.options:
            if o['kind'] == 2 and o['length'] == 4:
                self.MSS = o['value']
                break
        # Calculate next seq numbers to see.
        self.next_packet['next_expected_seq'] = p.seq + len(p.data) + 1
        self.seq = p.ack_num
        # Send the ACK packet to open the connection on both sides.
        ack = TCPPacket(self.src, self.dest, self.seq, self.next_packet['next_expected_seq'])
        ack.ack = 1
        ack.checksum()
        self.socket.send(ack.build())
        self.connected = True

    def parse_packet(self, packet):
        """
        Validate an incoming raw packet, handle its ACK, queue its data
        (reordering out-of-order segments), and ack it back.
        """
        packet = TCPPacket.unpack(packet, self.dest[0], self.src[0])
        packet.checksum()
        # Check validity: checksum of a correct packet recomputes to 0.
        if packet.check == 0 and packet.src == self.dest and packet.dest == self.src:
            # Handle ACK
            if packet.ack and packet.ack_num >= self.seq:
                self.handle_ack(packet)
            # Check if it contains data or FIN or SYN
            if (len(packet.data) > 0) or packet.syn:
                self.next_packet['ack'] = 1
            # Update the next expected seq number
            next_seq = packet.seq + len(packet.data)
            if len(packet.data) > 0 and packet.seq == self.next_packet['next_expected_seq']:
                # This is the packet we need.
                self.next_packet['next_expected_seq'] = next_seq
                self.data_recv_queue.put(packet.data)
                # Drain any buffered out-of-order packets that now fit.
                while not self.out_of_order_packets.empty():
                    p = self.out_of_order_packets.get()
                    if p.seq == next_seq:
                        self.data_recv_queue.put(p.data)
                        next_seq = p.seq + len(p.data)
                    else:
                        self.out_of_order_packets.put(p)
                        break
            elif len(packet.data) > 0 and packet.seq > self.next_packet['next_expected_seq'] \
                    and packet.seq not in self.seen_seq_nums:
                # Packet is out of order
                self.out_of_order_packets.put(packet)
                self.seen_seq_nums.add(packet.seq)
            # Ack the packet if it has data
            if self.next_packet['ack']:
                p = TCPPacket(self.src, self.dest, self.seq, self.next_packet['next_expected_seq'])
                p.ack = 1
                p.checksum()
                self.socket.send(p.build())
            self.dest_window = packet.window
            if packet.fin or packet.rst:
                self.connected = False

    def handle_ack(self, packet):
        """
        Handles the ACK clocking part of TCP: grow the congestion window,
        retire acked packets, and fold their samples into the smoothed RTT.
        """
        # Increase the congestion window: linear in congestion avoidance,
        # exponential (one per ACK) in slow start.
        if self.ss_thresh <= self.congestion_window:
            self.congestion_window += (1 / self.congestion_window)
        else:
            self.congestion_window += 1
        self.seq = packet.ack_num
        # Find acked packets.
        # NOTE(review): this compares against next_packet['seq'], which is set
        # once (randomly) in connect() and never updated; it likely should be
        # packet.ack_num. Left as-is pending confirmation against the
        # sender-side seq bookkeeping.
        acked_packets = set()
        packets_in_network = self.packets_in_network.copy()
        for p in packets_in_network:
            if p[0].seq <= self.next_packet['seq']:
                acked_packets.add(p)
                self.packets_in_network.remove(p)
        # Manage RTT with an exponentially weighted moving average.
        now = datetime.datetime.now()
        ALPHA = 0.875  # NEW_RTT = ALPHA * OLD_RTT + (1 - ALPHA) * PACKET_RTT
        for p in acked_packets:
            if not p[2]:
                # Packet didn't time out so it's valid for RTT calculation
                packet_rtt = now - p[1]
                if self.RTT is not None:
                    self.RTT = ALPHA * self.RTT + (1 - ALPHA) * packet_rtt.total_seconds() * 1000
                else:
                    self.RTT = packet_rtt.total_seconds() * 1000

    def timeout(self):
        """
        Check to see if any previously sent packets have timed out while
        waiting to be ACKed; if so, halve the threshold, reset the window,
        and queue them for retransmission.
        """
        timeout_packets = []
        now = datetime.datetime.now()
        for p in self.packets_in_network:
            dt = (now - p[1]).total_seconds() * 1000
            if dt > 2 * self.RTT:
                timeout_packets.append(p)
        if len(timeout_packets) > 0:
            # multiplicative decrease on loss
            self.ss_thresh = self.congestion_window / 2
            self.congestion_window = 1
        for p in timeout_packets:
            self.packets_in_network.remove(p)
            # store as (seq, packet) so the priority queue orders by seq
            self.resend_queue.put((p[0].seq, p[0]))

    def send_new_packets(self):
        """
        Send new packets containing the data passed into the socket via
        send, and resend timed out packets, until the window is full.
        """
        # room left in the effective window, measured in segments
        space = min(self.congestion_window, self.dest_window) / self.MSS - len(self.packets_in_network)
        # BUGFIX: 'space' was never decremented, so the window was not enforced.
        while not self.resend_queue.empty() and space > 0:
            self.resend_packet()
            space -= 1
        while not self.data_send_queue.empty() and space > 0:
            self.send_new_packet()
            space -= 1

    def resend_packet(self):
        """
        Retransmit the lowest-seq timed-out packet.
        """
        # BUGFIX: timeout() enqueues (seq, packet) tuples, but the original
        # treated the tuple itself as a packet and called a nonexistent
        # to_bytes() on it. Unpack the tuple and use build() like every
        # other send path.
        (_seq, packet) = self.resend_queue.get()
        self.socket.send(packet.build())
        # mark as a retransmission so it is excluded from RTT sampling
        self.packets_in_network.add((packet, datetime.datetime.now(), True))

    def send_new_packet(self):
        """
        If there is any data to send, send a packet containing it.
        """
        # Get data
        if self.connected:
            # Coalesce queued chunks up to one MSS.
            packet_data = b''
            while not self.data_send_queue.empty() and len(packet_data) < self.MSS:
                packet_data += self.data_send_queue.get()
            # Create packet
            packet = TCPPacket(self.src, self.dest, self.seq, self.next_packet['next_expected_seq'], packet_data)
        else:
            return
        packet.ack = 1
        packet.checksum()
        # add the packet in network to track
        self.packets_in_network.add((packet, datetime.datetime.now(), False))
        # Send packet in bytes
        self.socket.send(packet.build())
3251916 | #!/usr/bin/env python -W ignore
from absl import flags
from absl import app
import pandas as pd
import numpy as np
import sys
from sodapy import Socrata
from os.path import abspath
from os.path import exists
from os import mkdir
FLAGS = flags.FLAGS
# declare command-line flags
flags.DEFINE_string("token", None, "SPARCS Socrates API token")
flags.DEFINE_string("output", None, "Output directory to save files")
# required flags
flags.mark_flag_as_required("token")
flags.mark_flag_as_required("output")
# code filters for the Socrata queries; empty lists mean "no filter on this
# dimension" (Python 2 map() returns a list)
apr_drg_codes = map(str, [])
ccs_diag_codes = map(str, [])
ccs_proc_codes = map(str, [])
# final column set, lowercased/underscored to match the API field names
columns_to_keep = map(lambda x: x.lower().replace(' ', '_'), [
    "APR Risk of Mortality",
    "APR Severity of Illness Code",
    "Age Group",
    "CCS Diagnosis Code",
    "Discharge Year",
    "Ethnicity",
    "Gender",
    "Length of Stay",
    "Patient Disposition",
    "Source of Payment 1",
    "Race",
    "Total Costs",
    "Total Costs_inflation_adjusted",
    "Type of Admission",
    'apr_drg_code'
])
# accumulates one dataframe per downloaded year (filled by download())
pd_list = []
# (year, Socrata dataset id) pairs
# for Hospital Inpatient Discharges (SPARCS De-Identified) in SPARCS
dataset_ids = [
    (2016, 'y93g-4rqn'),
    (2015, 'p3tf-wrj8'),
    (2014, 'pzzw-8zdv'),
    (2013, 'tdf6-7fpk'),
    (2012, 'rv8x-4fm3'),
    (2011, 'n5y9-zanf'),
    (2010, 'dpew-wqcg'),
    (2009, 's8d9-z734')
]
def additional_cleaning(df):
    """Placeholder for cohort-restriction filters; returns *df* unchanged.

    Filters used in earlier analyses are deliberately disabled for this
    paper (kept below for reference):
      - restrict age_group to "50 to 69" / "70 or Older"
      - drop 'Newborn' / 'Not Available' admission types
      - drop 'Left Against Medical Advice' / 'Expired' /
        'Another Type Not Listed' dispositions
    """
    return df
'''
Download all the datasets in dataset_ids and return a list of pd dataframes
'''
def download(token, verbose=True):
    """Download every dataset listed in dataset_ids via the Socrata API.

    Builds a per-year filter (the API grammar differs by year), fetches all
    matching rows, appends each year's dataframe to the module-level
    pd_list, and returns pd_list. Python 2 only (print statements,
    basestring).
    """
    if not isinstance(token, basestring):
        raise ValueError("Token must be a string")
    # Setup SPARCS API
    client = Socrata("health.data.ny.gov",
    token)
    # set an arbitrarily high download limit
    # only works for python 2
    if sys.version_info < (3,0):
        lim = sys.maxint
    else:
        # hardcode max int for python 3
        lim = 9223372036854775807
    # NOTE(review): the loop variable 'id' shadows the builtin.
    for id in dataset_ids:
        year = id[0]
        socrata_id = id[1]
        filter_command = ''
        # 2011 uses the combined field name: apr_drg_description_and_code
        if year == 2011:
            filter_command = make_filter_command_by_year(year = 2011,
            ccs_diag = ccs_diag_codes,
            ccs_proc = ccs_proc_codes,
            apr_drg = apr_drg_codes)
        # 2015/2016: codes are unquoted integers in the filter grammar
        elif year == 2015 or year == 2016:
            # years 2015 and 2016 are the same, so it doesn't matter which is passed into make_filter_command_by_year
            filter_command = make_filter_command_by_year(year = 2015,
            ccs_diag = ccs_diag_codes,
            ccs_proc = ccs_proc_codes,
            apr_drg = apr_drg_codes)
        else:
            # year only matters if 2011, 2015, or 2016. Don't pass to force default behavior
            filter_command = make_filter_command_by_year(ccs_diag = ccs_diag_codes,
            ccs_proc = ccs_proc_codes,
            apr_drg = apr_drg_codes)
        print "Filter: %s" % str(filter_command)
        if verbose:
            sys.stdout.write('Downloading id: %s (%d) using filter...' % (socrata_id, year))
            sys.stdout.flush()
        http_get = client.get(socrata_id, limit=lim, where=filter_command)
        results_df = pd.DataFrame.from_records(http_get)
        if verbose:
            print 'Shape = {}'.format(results_df.shape)
        pd_list.append(results_df)
    return pd_list
def make_filter_command_by_year(year = 0, ccs_diag = None, ccs_proc = None, apr_drg = None):
# SPARCS API format call changes by year
command_filter = []
if year == 2011:
# correct format
# """ccs_diagnosis_code='{ccs_diagnosis_code}' AND \
# ccs_procedure_code='{ccs_procedure_code}' AND \
# apr_drg_description_and_code='{apr_drg_code}'"""
if ccs_diag != None and len(ccs_diag) >= 1:
ccs_diag_codes = '('+' OR '.join(["ccs_diagnosis_code='%s'"%x for x in ccs_diag])+')'
command_filter.append(ccs_diag_codes)
if ccs_proc != None and len(ccs_proc) >= 1:
ccs_proc_codes = '('+' OR '.join(["ccs_procedure_code='%s'"%x for x in ccs_proc])+')'
command_filter.append(ccs_proc_codes)
if apr_drg != None and len(apr_drg) >= 1:
apr_drg_codes = '('+' OR '.join(["apr_drg_description_and_code='%s'"%x for x in apr_drg])+')'
command_filter.append(apr_drg_codes)
return ' AND '.join(command_filter)
# ccs_diagnosis_code, ccs_procedure_code, apr_drg_code are integers (not quoted)
elif year == 2015 or year == 2016:
# Correct format
# """ccs_diagnosis_code={ccs_diagnosis_code} AND \
# ccs_procedure_code={ccs_procedure_code} AND \
# apr_drg_code={apr_drg_code}"""
if ccs_diag != None and len(ccs_diag) >= 1:
ccs_diag_codes = '('+' OR '.join(["ccs_diagnosis_code=%s"%x for x in ccs_diag])+')'
command_filter.append(ccs_diag_codes)
if ccs_proc != None and len(ccs_proc) >= 1:
ccs_proc_codes = '('+' OR '.join(["ccs_procedure_code=%s"%x for x in ccs_proc])+')'
command_filter.append(ccs_proc_codes)
if apr_drg != None and len(apr_drg) >= 1:
apr_drg_codes = '('+' OR '.join(["apr_drg_code=%s"%x for x in apr_drg])+')'
command_filter.append(apr_drg_codes)
return ' AND '.join(command_filter)
else:
# Correct format
# """ccs_diagnosis_code='{ccs_diagnosis_code}' AND \
# ccs_procedure_code='{ccs_procedure_code}' AND \
# apr_drg_code='{apr_drg_code}'"""
if ccs_diag != None and len(ccs_diag) >= 1:
ccs_diag_codes = '('+' OR '.join(["ccs_diagnosis_code='%s'"%x for x in ccs_diag])+')'
command_filter.append(ccs_diag_codes)
if ccs_proc != None and len(ccs_proc) >= 1:
ccs_proc_codes = '('+' OR '.join(["ccs_procedure_code='%s'"%x for x in ccs_proc])+')'
command_filter.append(ccs_proc_codes)
if apr_drg != None and len(apr_drg) >= 1:
apr_drg_codes = '('+' OR '.join(["apr_drg_code='%s'"%x for x in apr_drg])+')'
command_filter.append(apr_drg_codes)
return ' AND '.join(command_filter)
'''
Standardize column names across all datasets
'''
def standardizeColumns(list_of_dfs):
    """Standardize column names across all yearly datasets.

    Returns a new list of dataframes with harmonized column names.

    BUGFIX: the original renamed 2011's mislabeled 'payment_topology_2'
    to 'payment_typology_2' but then compared against the stale old name,
    so that column never reached its final 'source_of_payment_2' form.
    A single rename mapping takes each variant straight to its target.
    """
    renames = {
        # 2011 mislabels 'payment_typology_2' as 'payment_topology_2'
        'payment_topology_2': 'source_of_payment_2',
        # payment typology -> source of payment
        'payment_typology_1': 'source_of_payment_1',
        'payment_typology_2': 'source_of_payment_2',
        'payment_typology_3': 'source_of_payment_3',
        # truncated / variant field names
        'apr_severity_of_illness_descript': 'apr_severity_of_illness_description',
        'apr_drg_description_and_code': 'apr_drg_code',
        'age': 'age_group',
        'apr_severity_of_illness': 'apr_severity_of_illness_code',
        'sex': 'gender',
        'operating_provider_license_numbe': 'operating_provider_license_number',
        'attending_provider_license_numbe': 'attending_provider_license_number',
    }
    return [df.rename(columns=renames) for df in list_of_dfs]
'''
Corrects the headers and filter out patients who do not use medicare
NB: This MUST be called before the header spaces are replaced by _
^ Is not an issue if downloading from socrata since cols already have _
'''
def codeMedicare(df):
    """Add a boolean 'medicare' column to *df* and return it.

    A row is flagged True when any of the three source_of_payment columns
    equals 'medicare' (case-insensitive). Mirrors the original's
    try/except behavior: missing columns and non-string values (NaN,
    numbers) simply count as not-medicare. Replaces the per-row iterrows
    loop (with its bare excepts and a local shadowing builtin 'bool')
    with a per-column pass.
    """
    def _is_medicare(value):
        # anything without .lower() (NaN, None, numbers) is not medicare
        return hasattr(value, 'lower') and value.lower() == 'medicare'

    medicare_flags = pd.Series(False, index=df.index)
    for col in ('source_of_payment_1', 'source_of_payment_2', 'source_of_payment_3'):
        if col in df.columns:
            medicare_flags = medicare_flags | df[col].apply(_is_medicare)
    df['medicare'] = medicare_flags
    return df
def subsetMedicare(df):
    """Return only the rows flagged True in the 'medicare' column."""
    is_medicare = df['medicare'] == True
    return df[is_medicare]
def assignNumeric(df):
    """Coerce the outcome columns to numeric dtypes and return the frame.

    When length_of_stay arrived as strings (object dtype), rows holding
    the censored value "120 +" are dropped first, since they cannot be
    converted.
    """
    numeric_columns = ['total_costs', 'total_charges', 'length_of_stay', 'discharge_year']
    # remove non-integer rows from length_of_stay
    if df.dtypes['length_of_stay'] == 'object':
        df = df[df['length_of_stay'] != "120 +"]
    df[numeric_columns] = df[numeric_columns].apply(pd.to_numeric)
    return df
"""
Combines all dataframes into one master
"""
def combine_dataframes(pd_list):
    """Stack the yearly dataframes into one master frame, filling gaps with 0."""
    master = pd.concat(pd_list, ignore_index=True, axis=0, sort=False)
    return master.fillna(0)
def adjustForInflation(df, column_input):
    """Append '<column_input>_inflation_adjusted' in 2009 dollars.

    Multiplies each amount by the year's multiplicative CPI rate per BLS
    series CUUR0000SAM (Medical care, U.S. city average, all urban
    consumers, not seasonally adjusted):
    https://beta.bls.gov/dataViewer/view/timeseries/CUUR0000SAM
    Raises KeyError for a discharge_year outside 2009-2016 (unchanged
    behavior). Iterates the two columns directly instead of iterrows.
    """
    cpi_multiplier = {
        "2016": 0.810,
        "2015": 0.841,
        "2014": 0.863,
        "2013": 0.884,
        "2012": 0.905,
        "2011": 0.938,
        "2010": 0.967,
        "2009": 1.00
    }
    adjusted = [
        float(amount) * cpi_multiplier[str(year)]
        for amount, year in zip(df[column_input], df['discharge_year'])
    ]
    df[column_input + '_inflation_adjusted'] = adjusted
    return df
'''
Remove outliers from dataset by keeping drop_lower %ile to drop_upper%ile
'''
def removeOutliers(df, drop_lower=0.5, drop_upper=99.5):
    """Drop cost outliers, keeping rows between the drop_lower and
    drop_upper percentiles of inflation-adjusted total costs.

    Python 2 only (print statement). The commented LOS bounds are a
    previously used length-of-stay filter, disabled here.
    """
    _TC = 'total_costs_inflation_adjusted'
    # convert all outcome rows to numerical if possible
    df[[_TC]] = df[[_TC]].apply(pd.to_numeric)
    #remove outliers
    # drop rows below 0.5th percentile and above 99.5th percentile
    TC_ulimit = np.percentile([float(x) for x in df[_TC]], drop_upper)
    TC_llimit = np.percentile([float(x) for x in df[_TC]], drop_lower)
    # LOS_ulimit = np.percentile([int(x) for x in df[_LOS]], drop_upper)
    # LOS_llimit = np.percentile([int(x) for x in df[_LOS]], drop_lower)
    print 'Upper limit: %s, lower limit: %s' % (TC_ulimit, TC_llimit)
    # strict inequalities: rows exactly at either percentile are dropped too
    df = df.query('{} < {}'.format(_TC, TC_ulimit))
    df = df.query('{} > {}'.format(_TC, TC_llimit))
    # df = df.query('{} < {}'.format(_LOS, LOS_ulimit))
    # df = df.query('{} > {}'.format(_LOS, LOS_llimit))
    return df
def load_all_patients(output_dir):
    """Read the previously saved master patient CSV from output_dir."""
    csv_path = '{}/{}'.format(output_dir, 'all_patients.csv')
    return pd.read_csv(csv_path)
def main(argv):
    """Pipeline driver.

    Downloads the raw datasets, standardizes/cleans them, and writes four
    CSVs to FLAGS.output: all_patients.csv, all_patients_column_subset.csv,
    medicare.csv and medicare_column_subset.csv. `argv` is supplied by the
    app runner and is unused here.
    """
    output_dir = abspath(FLAGS.output)
    if not exists(output_dir):
        # BUG FIX: original called sys.out.write(), but the sys module has
        # no 'out' attribute -- the intended stream is sys.stdout.
        sys.stdout.write('[INFO] Making directory: %s\n' % output_dir)
        mkdir(output_dir)
    pd_list = download(FLAGS.token)
    pd_list = standardizeColumns(pd_list)
    # persist each raw (pre-cleaning) frame for reproducibility
    for i in range(len(pd_list)):
        print('Saving %s...' % (dataset_ids[i][0]))
        name = 'raw_%s.csv' % dataset_ids[i][0]
        pd_list[i].to_csv('%s/%s' % (output_dir, name), index=False)
    print('Downloaded and saved dataframes: %s. Running combine_dataframes()... ' % sum(x.shape[0] for x in pd_list))
    all_patients = combine_dataframes(pd_list)
    print('Combined dataframes: %s. Running codeMedicare()... ' % sum(x.shape[0] for x in pd_list))
    all_patients = codeMedicare(all_patients)
    print('Coded medicare: %s. Running adjustForInflation()... ' % all_patients.shape[0])
    # inflation adjustment runs before assignNumeric: it converts
    # string-typed cost values itself
    all_patients = adjustForInflation(all_patients, 'total_costs')
    all_patients = adjustForInflation(all_patients, 'total_charges')
    print('Adjusted for inflation: %s. Running assignNumeric()...' % all_patients.shape[0])
    all_patients = assignNumeric(all_patients)
    print('Keeping %s' % (columns_to_keep))
    all_patients_keep = all_patients[columns_to_keep]
    print('Assigned numeric: %s. Running subsetMedicare()...' % all_patients.shape[0])
    medicare = subsetMedicare(all_patients)
    ############# medicare made
    print(('Subsetted medicare: all = %s, medicare only = %s. '
           'Only using medicare now. Running additional_cleaning()... ') % (all_patients.shape[0], medicare.shape[0]))
    medicare = additional_cleaning(medicare)
    print('Additional_cleaning: %s. Running removeOutliers()... ' % medicare.shape[0])
    medicare = removeOutliers(medicare)
    print('removeOutliers - TC/LOS: %s' % medicare.shape[0])
    # subset medicare to the reporting columns
    medicare_keep = medicare[columns_to_keep]
    # write the four output artifacts
    all_out_file = '%s/%s' % (output_dir, 'all_patients.csv')
    all_patients.to_csv(all_out_file, index=False)
    print('Saved %s' % all_out_file)
    all_keep_file = '%s/%s' % (output_dir, 'all_patients_column_subset.csv')
    all_patients_keep.to_csv(all_keep_file, index=False)
    print('Saved %s' % all_keep_file)
    medicare_out_file = '%s/%s' % (output_dir, 'medicare.csv')
    medicare.to_csv(medicare_out_file, index=False)
    print('Saved %s' % medicare_out_file)
    medicare_out_keep_file = '%s/%s' % (output_dir, 'medicare_column_subset.csv')
    medicare_keep.to_csv(medicare_out_keep_file, index=False)
    print('Saved %s' % medicare_out_keep_file)
    print('DONE')
if __name__ == "__main__":
app.run(main)
# --- StarcoderdataPython: end-of-file separator between concatenated sources ---
# Source repo: laurabondeholst/Mapping_high_dimensional_data (GitHub stars: 0)
import pandas as pd
import plotly.graph_objects as go
import numpy as np
# Locations of the per-method result CSVs (paths relative to the repo root;
# NOTE(review): only TRIMAP_FOLDER is read below -- confirm the others are
# intentionally unused).
UMAP_TSNE_FOLDER = "reports_from_tobias/reports/fashion_natural_umap_tsne/"
TSNE_FOLDER = "reports/Noiselevel_experiment_pca_tsne/Fashion/"
TRIMAP_FOLDER = "reports_from_pranjal/aml_results/mnist-strat/"
# One results file per Gaussian-noise level; files[i] pairs with noiselevel[i].
files = ["results_sigma0.csv", "results_sigma02.csv", "results_sigma05.csv", "results_sigma07.csv", "results_sigma1.csv"]
noiselevel = [0, 0.2, 0.5, 0.7, 1]
# files = ["results_sigma0.csv", "results_sigma02.csv", "results_sigma05.csv","results_sigma1.csv"]
# noiselevel = [0, 0.2, 0.5, 1]
# For each noise level, plot accuracy-vs-datapoints curves for PCA/TRIMAP/TSNE/UMAP.
for i, file in enumerate(files):
    # umap_tsne_df = pd.read_csv(UMAP_TSNE_FOLDER + file)
    trimap_df = pd.read_csv(TRIMAP_FOLDER + file)
    fig = go.Figure(layout_xaxis_range=[0, np.max(trimap_df.data_points_number)], layout_yaxis_range=[0, 1])
    # BUG FIX: 'fillcolor' only applies to filled-area traces and is ignored
    # for mode='lines'; use line=dict(color=...) so each curve actually gets
    # its intended colour.
    fig.add_trace(go.Scatter(x=trimap_df.data_points_number.values, y=trimap_df.correct_predicted_percent_pca.values, name="PCA", mode='lines', line=dict(color='green')))
    fig.add_trace(go.Scatter(x=trimap_df.data_points_number.values, y=trimap_df.correct_predicted_percent_trimap.values, name="TRIMAP", mode='lines', line=dict(color='blue')))
    fig.add_trace(go.Scatter(x=trimap_df.data_points_number.values, y=trimap_df.correct_predicted_percent_tsne.values, name="TSNE", mode='lines', line=dict(color='red')))
    fig.add_trace(go.Scatter(x=trimap_df.data_points_number.values, y=trimap_df.correct_predicted_percent_umap.values, name="UMAP", mode='lines', line=dict(color='purple')))
    fig.update_layout(title="MNIST stratified distribution", legend_title_text="Noise level: {}".format(noiselevel[i]))
    fig.update_xaxes(title_text="Datapoints")
    fig.update_yaxes(title_text="Accuracy [%]")
    fig.show()
| StarcoderdataPython |
def tabuada(n):
    """Return the 1-9 multiplication table for n, formatted exactly like the
    original triple-quoted template: a leading newline, one line per product,
    and a trailing newline."""
    lines = '\n'.join(f'{n} * {i} = {n * i}' for i in range(1, 10))
    return '\n' + lines + '\n'


def main():
    """Prompt for an integer and print its multiplication table."""
    n1 = int(input('Digite um número qualquer: '))
    print(f'A tabuada do número {n1}, é: ')
    print(tabuada(n1))


if __name__ == "__main__":
    # Guard so the module can be imported (e.g. for testing) without
    # blocking on input().
    main()
# --- StarcoderdataPython: end-of-file separator between concatenated sources ---
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.