text
string
size
int64
token_count
int64
# Simple threaded TCP chat client: one thread receives from the server,
# one thread reads stdin and sends.
import socket
import threading

helpMessage = '-q -- close connection\n-l -- list of connected devices\n-t -- server time \n-s "arduino/client ""reciever name" "message" -- send message (messages can be max 100 character) \nif reciever is an arduino board it can be controlled by this messsage:\n -s arduino "arduino name" led "0/1/status" \n'

print("connecting...\n for command list write '-h' \n"+helpMessage)

host = '127.0.0.1' # 127.0.0.1 for local
port = 9999 # 9999 for local

# FIX: the original bound the connection to the name `socket`, shadowing the
# stdlib module; renamed to `sock` so the module stays usable afterwards.
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((host, port))


def recvTh():
    """Receiver loop: answer server probes ('t' -> 'c', 'n' -> client name),
    print every other message, and stop on connection errors."""
    while True:
        try:
            message = sock.recv(100).decode('ascii')
            if message == 't':
                sock.send("c".encode('ascii'))
            elif message == 'n':
                name = input("Enter your client name: ")
                sock.send(name.encode('ascii'))
            else:
                print(message+"\n")
        except ConnectionAbortedError:
            break
        except OSError:
            # FIX: was a bare `except:`; only socket-level errors are expected
            # here, and a bare except would also swallow KeyboardInterrupt.
            print("connection error")
            sock.close()
            break


def sendTh():
    """Sender loop: read stdin lines, handle -h/-q locally, forward the rest."""
    while True:
        message = input()
        if (len(message) <= 1024):
            tokens = message.split()
            if not tokens:
                # FIX: a blank/whitespace-only line used to crash on tokens[0].
                continue
            if tokens[0] == '-h':
                print(helpMessage)
            elif tokens[0] == '-q':
                print("quiting")
                sock.send('-q'.encode('ascii'))
                sock.close()
                break
            else:
                sock.send(message.encode('ascii'))
        else:
            print("message must be under 1024 char")


recvThread = threading.Thread(target=recvTh)
sendThread = threading.Thread(target=sendTh)
recvThread.start()
sendThread.start()
1,794
565
from torchvision import datasets, transforms
from base import BaseDataLoader
from torch.utils.data import Dataset, DataLoader
import pandas as pd
import torch
from skimage import io#, transform
import numpy as np


class MnistDataLoader(BaseDataLoader):
    """MNIST data loading demo using BaseDataLoader."""

    def __init__(self, data_dir, batch_size, shuffle=True, validation_split=0.0, num_workers=1, training=True):
        trsfm = transforms.Compose([
            transforms.ToTensor(),
            transforms.Normalize((0.1307,), (0.3081,))
        ])
        self.data_dir = data_dir
        self.dataset = datasets.MNIST(self.data_dir, train=training, download=True, transform=trsfm)
        super().__init__(self.dataset, batch_size, shuffle, validation_split, num_workers)


class LCZDataLoader(BaseDataLoader):
    """Local Climate Zone (LCZ) data loading using BaseDataLoader.

    Each sample is randomly cropped to 64x64 and converted to tensors.
    (Docstring fixed: it previously duplicated the MNIST demo text.)
    """

    def __init__(self, data_dir, batch_size, shuffle=True, validation_split=0.0, num_workers=1, training=True):
        # data_dir is the path to the CSV index file consumed by LCZdataset.
        self.data_dir = data_dir
        self.dataset = LCZdataset(self.data_dir, transform=transforms.Compose([RandomCrop(64), ToTensor()]))
        super().__init__(self.dataset, batch_size, shuffle, validation_split, num_workers)


class LCZdataset(Dataset):
    """LCZ dataset backed by a CSV index of (image path, class label) rows."""

    def __init__(self, csv_file, transform=None):
        """
        Args:
            csv_file (string): Path to the csv file with annotations.
            transform (callable, optional): Optional transform to be applied
                on a sample.
        """
        self.landmarks_frame = pd.read_csv(csv_file)
        self.transform = transform

    def __len__(self):
        return len(self.landmarks_frame)

    def __getitem__(self, idx):
        if torch.is_tensor(idx):
            idx = idx.tolist()
        img_name = self.landmarks_frame.iloc[idx, 0]
        image = io.imread(img_name)
        classLabel = self.landmarks_frame.iloc[idx, 1]
        classLabel = np.array([classLabel])
        # Keep 10 of the spectral bands and rescale raw values by 1e4.
        # NOTE(review): assumes channel-last imagery with at least 13 bands
        # (e.g. Sentinel-2) — TODO confirm against the data source.
        image = image[:, :, [1, 2, 3, 4, 5, 6, 7, 10, 11, 12]] / 10000.0
        sample = {'image': image, 'label': classLabel}
        if self.transform:
            sample = self.transform(sample)
        # Labels in the CSV appear to be 1-based; shift to 0-based here.
        return sample['image'], sample['label'] - 1


class RandomCrop(object):
    """Crop randomly the image in a sample.

    Args:
        output_size (tuple or int): Desired output size. If int, square crop
            is made.
    """

    def __init__(self, output_size):
        assert isinstance(output_size, (int, tuple))
        if isinstance(output_size, int):
            self.output_size = (output_size, output_size)
        else:
            assert len(output_size) == 2
            self.output_size = output_size

    def __call__(self, sample):
        image, label = sample['image'], sample['label']
        h, w = image.shape[:2]
        new_h, new_w = self.output_size
        # FIX: when the image dimension equals the crop size,
        # np.random.randint(0, 0) raises ValueError; use offset 0 instead.
        top = np.random.randint(0, h - new_h) if h > new_h else 0
        left = np.random.randint(0, w - new_w) if w > new_w else 0
        image = image[top: top + new_h, left: left + new_w]
        return {'image': image, 'label': label}


class ToTensor(object):
    """Convert ndarrays in sample to Tensors."""

    def __call__(self, sample):
        image, label = sample['image'], sample['label']
        # swap color axis because
        # numpy image: H x W x C
        # torch image: C x H x W
        image = image.transpose((2, 0, 1))
        return {'image': torch.from_numpy(image.astype("float")).float(),
                'label': torch.squeeze(torch.from_numpy(label))}
3,885
1,252
# Command-line front end for scoring a challenge submission file against
# the ground truth.
import evaluation_script
import argparse

# Declare the expected command-line options.
cli = argparse.ArgumentParser(description='Evaluation script used in the eBay SIGIR 2019 eCommerce Search Challenge.')
cli.add_argument('-g', '--ground-truth-file', required=True, help="Ground truth file")
cli.add_argument('-p', '--prediction-file', required=True, help="Prediction file")
cli.add_argument('-d', '--document-file', required=False, default=None, help="Document file")
parsed = cli.parse_args()

# Run the evaluation and print the result preceded by a blank line.
result = evaluation_script.evaluate_submission(parsed.ground_truth_file, parsed.prediction_file, parsed.document_file)
print()
print(result)
591
186
# Package entry: import the main `starfish` function and invoke it as a
# side effect of importing this module.
from .core.starfish import starfish

starfish()
48
16
# Generated by Django 2.2 on 2020-04-26 08:18 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('coreapp', '0057_projectpricing_custom_supprt'), ] operations = [ migrations.RenameField( model_name='endpointalgorithm', old_name='algorithm', new_name='model_id', ), migrations.RemoveField( model_name='endpointalgorithm', name='prediction_column_name', ), migrations.AddField( model_name='endpointalgorithm', name='accuracy', field=models.IntegerField(blank=True, null=True), ), migrations.AddField( model_name='endpointalgorithm', name='type_of_prediction', field=models.CharField(blank=True, choices=[('Classification', 'Classification'), ('Linear', 'Linear')], max_length=100, null=True), ), ]
968
279
import logging

from server.singleton_meta import SingletonMeta

log = logging.getLogger(__name__)


class PriceCache(metaclass=SingletonMeta):
    """Process-wide singleton cache of high/low/last prices per watched ticker."""

    def __init__(self):
        log.debug("[PriceCache] Init new price cache")
        # Maps watched_ticker_id -> {"high": ..., "low": ..., "price": ...}
        self.price_cache = {}

    def init_cache_for_ticker(self, watched_ticker_id):
        """Create an empty price entry for the ticker unless one already exists."""
        log.info(f"[PriceCache] Init cache for watched ticker {watched_ticker_id}")
        # FIX: use a membership test instead of truthiness so an existing
        # entry can never be clobbered by accident.
        if watched_ticker_id in self.price_cache:
            return
        self.price_cache[watched_ticker_id] = {
            "high": None,
            "low": None,
            "price": None
        }

    def cached_prices_for_ticker(self, watched_ticker_id):
        """Return the full price dict for the ticker, or None if not cached."""
        return self.price_cache.get(watched_ticker_id)

    def cached_price(self, watched_ticker_id, key):
        """Return one cached value for `key`, or None if the ticker is unknown."""
        cache = self.price_cache.get(watched_ticker_id)
        if not cache:
            return None
        log.info(f"[PriceCache] Getting {key} for {watched_ticker_id}: {cache[key]}")
        return cache[key]

    def update_cached_price(self, watched_ticker_id, key, val):
        """Store `val` under `key`; the ticker entry must already be initialised."""
        log.info(f"[PriceCache] Updating {key} price for {watched_ticker_id}: {val}")
        self.price_cache[watched_ticker_id][key] = val

    def delete_watched_ticker(self, watched_ticker_id):
        """Drop the ticker's entry entirely (KeyError if it was never cached)."""
        log.info(f"[PriceCache] Deleting cache for {watched_ticker_id}")
        del self.price_cache[watched_ticker_id]

    def reset_cached_values(self):
        """Clear all cached values while keeping every ticker entry registered."""
        for prices in self.price_cache.values():
            prices["low"] = None
            prices["high"] = None
            prices["price"] = None
1,431
508
# (10,2,20,4,30,6,40,8,50) n=int(input("enter no--")) i=1 c=10 while i<=n: if i%2==0: c+=10 print(i,end=",") i+=1 i+=1 print(c,end=",") # (1+10=11, 11+20=31, 31+30=61, 61+40=101) n=int(input("enter no,-")) i=0 d=1 s=10 while i<n: print(d,end=",") d=d+s s+=10 i+=1 # (1+10=11, 11+20=31, 31+30=61, 61+40=101) n=int(input("enter no.==")) i=1 d=1 while i<=n: print(d,end=" ") d=d+10*i i+=1
499
336
import os
import time
from os import mkdir
from os.path import isdir
from threading import Lock

import cv2
import imutils

from modules.Camera import Detect
from modules.Camera.CameraHandler import CameraHandler
from modules.Config import Config
from modules.Fusion import Fusion
from modules.Logger.Logger import Logger
from modules.Radar.RadarHandler import RadarHandler


class Manager:
    """Coordinates the radar and camera handlers and fuses their detections.

    Three modes (config.mode): 'save' records raw radar/camera data to disk,
    'load' replays a previous recording, 'run' processes live input.
    """

    def __init__(self, config: Config):
        self.radarHandler, self.cameraHandler = self.createHandlers()
        self.logger = Logger(config.debug)
        # Shared with RadarHandler; guarded by lockRadarData.
        self.radarData = []
        self.config = config
        self.lockRadarData = Lock()
        self.lockRadarTimestamp = Lock()
        # Record directory path, filled in by setState() for save/load modes.
        self.temp = None
        self.state = self.setState()
        # Single-element list so RadarHandler can mutate it in place.
        self.radarTimestamp = [0]

    @staticmethod
    def createHandlers():
        # Returns fresh (radar, camera) handler instances.
        return RadarHandler(), CameraHandler()

    def setState(self):
        """Read the mode from config and prepare the record directory.

        'save' creates the next free record-N directory tree; 'load' requires
        an existing record with config.loadId, otherwise the process exits.
        """
        self.state = self.config.mode
        path = './data/records/record-'
        if self.state == 'save':
            # Find the first unused record index.
            ids = 0
            while isdir(path + str(ids)):
                ids += 1
            self.temp = path + str(ids)
            mkdir(self.temp)
            mkdir(self.temp + '/radar')
            mkdir(self.temp + '/camera')
        if self.state == 'load':
            if isdir(path + str(self.config.loadId)):
                self.temp = path + str(self.config.loadId)
            else:
                # Requested recording does not exist.
                exit(-10)
        return self.state

    def configureRadar(self):
        """Wire the radar handler: ports/config (live modes), paths and shared state."""
        self.radarHandler.setLogger(self.logger)
        if self.state != 'load':
            # Live radar: open serial ports and push the configuration.
            # NOTE(review): device paths are hard-coded — confirm deployment.
            self.radarHandler.set_ports('/dev/ttyACM1', '/dev/ttyACM0') \
                .set_config_file(self.config.configRadar) \
                .send_config()
        if self.state != 'run':
            self.radarHandler.dataRadarPath = self.temp + '/radar'
        self.radarHandler.setState(self.state)
        self.radarHandler.setRadarData(self.radarData)
        self.radarHandler.lockRadarData = self.lockRadarData
        self.radarHandler.lockRadarTimestamp = self.lockRadarTimestamp
        self.radarHandler.timestamp = self.radarTimestamp
        self.radarHandler.cameraPos = self.config.CameraPosition

    def configureCamera(self):
        """Propagate mode and logger to the camera handler."""
        self.cameraHandler.setState(self.state)
        self.cameraHandler.setLogger(self.logger)

    def fpsFromCamera(self):
        """Estimate the camera frame rate by timing 120 captured frames."""
        frames = 120
        i = 0
        start = time.time()
        while i < 120:
            ret, frame = self.cameraHandler.captureFrame()
            i += 1
        seconds = time.time() - start
        return frames / seconds

    def runner(self):
        """Main processing loop; behavior depends on the configured mode.

        save: write one frame per second to disk until 'q' or stream end.
        load: replay recorded frames, sync with replayed radar timestamps,
              run pedestrian detection and fusion, display results.
        run:  same fusion pipeline on live frames.
        """
        fusion = Fusion.Fusion(self.config)
        if self.state != 'load':
            self.logger.log('Find out camera fps')
            # Round to nearest integer fps.
            fps = int(self.fpsFromCamera() + 0.5)
        self.radarHandler.start()
        if self.state == 'save':
            c = 0
            while self.cameraHandler.cap.isOpened():
                ret, frame = self.cameraHandler.captureFrame()
                if ret:
                    # Roughly one snapshot per second (every fps-th frame).
                    if c % fps == 0:
                        timestamp = int(time.time())
                        filename = self.temp + '/camera/img-' + str(timestamp) + '.png'
                        cv2.imwrite(filename, frame)
                        c = 0
                    cv2.imshow('frame', frame)
                    c += 1
                    if cv2.waitKey(1) & 0xFF == ord('q'):
                        self.radarHandler.setState('cancel')
                        break
                else:
                    break
            exit(0)
        oldFusion = None
        if self.state == 'load':
            # Replay: iterate recorded snapshots in filename (timestamp) order.
            for file in sorted(os.listdir(self.temp + '/camera/')):
                fusedCount = 0
                pick = [0]
                img = cv2.imread(self.temp + '/camera/' + file)
                frame = imutils.resize(img, width=min(self.config.imageSize, img.shape[1]))
                # Filename encodes seconds; convert to milliseconds.
                timestamp = int(file.split('-')[1].split('.')[0]) * 1000
                while True:
                    self.lockRadarTimestamp.acquire()
                    try:
                        timestampRadar = self.radarTimestamp[0]
                    finally:
                        self.lockRadarTimestamp.release()
                    # Fuse only when camera and radar timestamps are within 50 ms.
                    if (timestampRadar - 50 < timestamp) & (timestamp < timestampRadar + 50):
                        pick = Detect.detectPedestrian(frame, self.config.winStride, self.config.scale)
                        self.lockRadarData.acquire()
                        try:
                            fused = fusion.fuse(pick, [frame.shape[0], frame.shape[1]], self.radarData)
                        finally:
                            self.lockRadarData.release()
                        if fused is not None:
                            oldFusion = fused[0]
                            fusedCount = fused[1]
                        break
                    if timestampRadar + 50 > timestamp:
                        # Radar already past this frame — skip waiting.
                        break
                    time.sleep(0.1)
                self.lockRadarData.acquire()
                try:
                    self.cameraHandler.insertCountDataToImage(frame, [fusedCount, len(pick), len(self.radarData)])
                finally:
                    self.lockRadarData.release()
                if oldFusion is None:
                    cv2.imshow('Frame', frame)
                    if cv2.waitKey(25) == ord('q'):
                        self.radarHandler.setState('cancel')
                        break
                    continue
                # Draw every successfully fused detection onto the frame.
                for o in oldFusion:
                    for oo in o:
                        if oo.fused is not True:
                            continue
                        self.cameraHandler.insertDataToImage(frame, oo)
                cv2.imshow('Frame', frame)
                if cv2.waitKey(25) == ord('q'):
                    self.radarHandler.setState('cancel')
                    break
            # Replay finished: stop the radar thread and shut down.
            self.radarHandler.setState('cancel')
            while self.radarHandler.is_alive():
                time.sleep(0.4)
                print('5')
            try:
                self.cameraHandler.releaseAndClose()
                exit(0)
            except RuntimeError:
                pass
        # Live 'run' mode.
        c = 0
        while self.cameraHandler.cap.isOpened():
            time.sleep(0.001)
            fusedCount = 0
            pick = [0]
            # Capture frame-by-frame
            ret, frame = self.cameraHandler.captureFrame()
            frame = imutils.resize(frame, width=min(600, frame.shape[1]))
            if ret:
                # Periodically forget stale fusion results (if enabled).
                if (c % self.config.oldDetection == 0) & (self.config.oldDetection > 0):
                    oldFusion = None
                # Attempt detection/fusion roughly four times per second.
                if c % int(fps / 4) == 0:
                    timestamp = time.time() * 1000
                    self.lockRadarTimestamp.acquire()
                    try:
                        timestampRadar = self.radarTimestamp[0]
                    finally:
                        self.lockRadarTimestamp.release()
                    if timestampRadar - 50 < timestamp < timestampRadar + 50:
                        pick = Detect.detectPedestrian(frame, self.config.winStride, self.config.scale)
                        self.lockRadarData.acquire()
                        try:
                            fused = fusion.fuse(pick, [self.cameraHandler.cap.get(3), self.cameraHandler.cap.get(4)], self.radarData)
                        finally:
                            self.lockRadarData.release()
                        if fused is not None:
                            oldFusion = fused[0]
                            fusedCount = fused[1]
                    c = 0
                self.lockRadarData.acquire()
                try:
                    self.cameraHandler.insertCountDataToImage(frame, [fusedCount, len(pick), len(self.radarData)])
                finally:
                    self.lockRadarData.release()
                if oldFusion is None:
                    cv2.imshow('Frame', frame)
                    if cv2.waitKey(25) == ord('q'):
                        self.radarHandler.setState('cancel')
                        break
                    c += 1
                    continue
                for o in oldFusion:
                    for oo in o:
                        if oo.fused is not True:
                            continue
                        self.cameraHandler.insertDataToImage(frame, oo)
                # Display the resulting frame
                cv2.imshow('Frame', frame)
                # Press Q on keyboard to exit
                if cv2.waitKey(25) == ord('q'):
                    self.radarHandler.setState('cancel')
                    break
                c += 1
            # Break the loop
            else:
                break
        self.radarHandler.join()
        self.cameraHandler.releaseAndClose()
9,092
2,420
import torch
from torch.autograd import Variable
import time
import os
import sys
import numpy as np

from utils import AverageMeter, calculate_accuracy, save_gif, accuracy
from models.binarized_modules import binarizef


def train_epoch(epoch, data_loader, model, criterion, optimizer, opt, epoch_logger, batch_logger, device):
    """Train `model` for one epoch over `data_loader`.

    Logs per-batch and per-epoch loss/top-1/top-5 metrics, optionally dumps
    the binarized first-parameter mask as a GIF for 'exp' models, and saves a
    checkpoint every `opt.checkpoint` epochs.
    """
    print('train at epoch {}'.format(epoch))

    model.train()

    batch_time = AverageMeter()
    data_time = AverageMeter()
    losses = AverageMeter()
    # accuracies = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()
    # Per-sample input means, accumulated for diagnostics in the log line.
    input_mean = []

    end_time = time.time()
    for i, (inputs, targets) in enumerate(data_loader):
        # NOTE: the comprehension variable shadows the loop index `i`;
        # harmless here because `i` is not used until after this line.
        input_mean.extend([i.mean() for i in inputs.detach().cpu().numpy()])
        data_time.update(time.time() - end_time)

        inputs = inputs.to(device)
        targets = targets.to(device)
        outputs = model(inputs)
        loss = criterion(outputs, targets)
        # acc = calculate_accuracy(outputs, targets)
        losses.update(loss.item(), inputs.size(0))
        # accuracies.update(acc, inputs.size(0))
        prec1, prec5 = accuracy(outputs.data, targets, topk=(1, 5))
        top1.update(prec1, inputs.size(0))
        top5.update(prec5, inputs.size(0))

        optimizer.zero_grad()
        loss.backward()
        # https://github.com/itayhubara/BinaryNet.pytorch/blob/master/main_mnist.py#L113
        # for p in list(model.parameters()):
        #     if hasattr(p, 'org'):
        #         p.data.copy_(p.org)
        optimizer.step()
        # for p in list(model.parameters()):
        #     if hasattr(p, 'org'):
        #         p.org.copy_(p.data.clamp_(-1, 1))

        batch_time.update(time.time() - end_time)
        end_time = time.time()

        batch_logger.log({
            'epoch': epoch,
            'batch': i + 1,
            'iter': (epoch - 1) * len(data_loader) + (i + 1),
            'loss': losses.val,
            'top1': top1.val,
            'top5': top5.val,
            'lr': optimizer.param_groups[0]['lr']
        })
        sys.stdout.flush()
        # Single-line progress display, rewritten in place via '\r'.
        sys.stdout.write('\rEpoch: [{0}][{1}/{2}]\t'
                         'Time {batch_time.sum:.3f} ({batch_time.avg:.3f})\t'
                         'Data {data_time.sum:.3f} ({data_time.avg:.3f})\t'
                         'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
                         'Acc@1 {top1.val:.3f} ({top1.avg:.3f})\t'
                         'Acc@5 {top5.val:.3f} ({top5.avg:.3f})\t\t'
                         'len {len_mean},'
                         'mean {mean:.4f},'
                         'std {std:.4f},'
                         'min {min:.4f},'
                         'max {max:.4f}'
                         '\t\t'.format(
                             epoch,
                             i + 1,
                             len(data_loader),
                             batch_time=batch_time,
                             data_time=data_time,
                             loss=losses,
                             top1=top1,
                             top5=top5,
                             len_mean=len(input_mean),
                             mean=np.mean(input_mean),
                             std=np.std(input_mean),
                             min=np.min(input_mean),
                             max=np.max(input_mean),
                         ))
        sys.stdout.flush()

    # Epoch summary line.
    print('\n[Train] Epoch{0}\t'
          'Time: {batch_time.sum:.3f} ({batch_time.avg:.3f})\t'
          'Data: {data_time.sum:.3f} ({data_time.avg:.3f})\t'
          'Loss: {loss.avg:.4f}\t'
          'Acc@1: {top1.avg:.3f}\t'
          'Acc@5: {top5.avg:.3f}'
          '\tlen {len_mean},'
          'mean {mean:.4f},'
          'std {std:.4f},'
          'min {min:.4f},'
          'max {max:.4f}'
          '\t\t'.format(
              epoch,
              batch_time=batch_time,
              data_time=data_time,
              loss=losses,
              top1=top1,
              top5=top5,
              len_mean=len(input_mean),
              mean=np.mean(input_mean),
              std=np.std(input_mean),
              min=np.min(input_mean),
              max=np.max(input_mean),
          ))
    print()

    epoch_logger.log({
        'epoch': epoch,
        'loss': losses.avg,
        'top1': top1.avg,
        'top5': top5.avg,
        'lr': optimizer.param_groups[0]['lr'],
        'batch_time': batch_time.sum,
        'data_time': data_time.sum,
    })

    # if hasattr(list(model.parameters())[0], 'org'):
    #     mask = binarize(
    #         list(model.parameters())[0].data,
    #         quant_mode='det'
    #     ).add_(1).div_(2).to('cpu').detach().numpy()
    if 'exp' in opt.model and not opt.load_path:
        # Binarize the first parameter tensor and map {-1,1} -> {0,1}
        # so it can be rendered as a mask animation.
        mask = binarizef(
            list(model.parameters())[0]
        ).add_(1).div_(2).to('cpu').detach().numpy()
        print('max', mask.max())
        print('min', mask.min())
        mask = mask.reshape((opt.sample_duration, 8, 8, 1)).astype(np.uint8)
        assert mask.shape == (opt.sample_duration, 8, 8, 1)
        # save_file_path = os.path.join(opt.result_path,
        #                               'mask_{}.npy'.format(epoch))
        # np.save(save_file_path, mask)
        save_file_path = os.path.join(opt.result_path,
                                      'mask_{}.gif'.format(epoch))
        save_gif(mask, save_file_path, vmax=1, vmin=0)

    if epoch % opt.checkpoint == 0:
        save_file_path = os.path.join(opt.result_path,
                                      'save_{}.pth'.format(epoch))
        states = {
            'epoch': epoch + 1,
            'arch': opt.arch,
            'state_dict': model.state_dict(),
            'optimizer': optimizer.state_dict(),
        }
        torch.save(states, save_file_path)
5,809
1,958
from django.contrib.contenttypes import fields
from django.shortcuts import render
from .forms import MessageForm, ContactForm
from django.views.generic import DetailView, ListView, FormView


class MessageAddView(FormView):
    """FormView that saves a submitted contact message and redirects to '/'."""

    form_class = ContactForm
    template_name = 'contact/message_form.html'
    success_url = '/'

    def form_valid(self, form):
        # Persist the message, then let FormView perform the redirect.
        form.save()
        return super().form_valid(form)
477
136
# Modules
import discord
from datetime import date
from discord import Embed
from json import loads, dumps
from assets.prism import Tools
from discord.ext import commands


# Main Command Class
class Settings(commands.Cog):
    """Cog exposing the `settings` command for per-guild configuration.

    Guild settings are persisted as JSON in the file "db/guilds", keyed by
    guild id, with a "prefix" string, a "tags" feature-flag list, and a
    "data" dict (joinleave_channel, autorole, last_updated).
    """

    def __init__(self, bot):
        self.bot = bot
        self.desc = "Changes server settings for Prism"
        self.usage = "settings [key] [value]"

    @commands.command()
    @commands.has_permissions(manage_guild = True)
    async def settings(self, ctx, key: str = None, value = None):
        """Show all settings (no args) or update one of:
        prefix, nsfw, levels, joinleave, autorole."""
        # Load the full guild database and grab this guild's entry.
        db = loads(open("db/guilds", "r").read())
        _db = db[str(ctx.guild.id)]
        if not key:
            # No arguments: render the current settings in an embed.
            prefix = _db["prefix"]
            nsfw = True if "nsfw-enabled" in _db["tags"] else False
            levels = True if "levels-enabled" in _db["tags"] else False
            joinleave = self.bot.get_channel(_db["data"]["joinleave_channel"]).name if _db["data"]["joinleave_channel"] else "Not setup"
            if _db["data"]["autorole"]:
                autorole = discord.utils.get(ctx.guild.roles, id = _db["data"]["autorole"])
                if not autorole:
                    # Stored role no longer exists; clean up the database.
                    autorole = "Not setup"
                    _db["data"]["autorole"] = None
                    open("db/guilds", "w").write(dumps(db, indent = 4))
                else:
                    autorole = "@" + autorole.name
            else:
                autorole = "Not setup"
            embed = Embed(title = "Server Settings", description = f"Last updated: {_db['data']['last_updated']}", color = 0x126bf1)
            embed.add_field(name = "Settings­", value = f"Prefix: {prefix}\nNSFW Enabled: {nsfw}\nLevels Enabled: {levels}\nJoin/Leave Channel: #{joinleave}\nAutorole: {autorole}", inline = False)
            embed.add_field(name = "How to change these", value = f"To change a setting, use ``{prefix}settings [setting] [value]``.\nFor example: ``{prefix}settings nsfw off``.", inline = False)
            embed.set_author(name = " | Settings", icon_url = self.bot.user.avatar_url)
            embed.set_footer(text = f" | Requested by {ctx.author}.", icon_url = ctx.author.avatar_url)
            return await ctx.send(embed = embed)
        elif key and not value:
            return await ctx.send(embed = Tools.error("No value specified."))
        key = key.lower()
        if not key in ["prefix", "nsfw", "levels", "joinleave", "autorole"]:
            return await ctx.send(embed = Tools.error("That isn't a valid setting."))
        elif not isinstance(value, str) and not isinstance(value, bool):
            return await ctx.send(embed = Tools.error("That isn't a valid value."))
        elif value.lower() in ["on", "enable", "true", "yes"]:
            # Normalize common truthy words to a boolean.
            value = True
        elif value.lower() in ["off", "disable", "false", "no"]:
            value = False
        else:
            if key != "prefix" and not isinstance(value, str) and not isinstance(value, bool):
                return await ctx.send(embed = Tools.error("That isn't a valid value."))
        if key == "prefix":
            # Reject characters that would break command parsing/markdown.
            for char in ["`", "\\"]:
                if char in value:
                    return await ctx.send(embed = Tools.error("Prefix contains unsupported characters."))
            if len(value) > 10:
                return await ctx.send(embed = Tools.error("Prefixes cannot be longer than 10 characters."))
            _db["prefix"] = value
            text = f"The prefix in this server has been set to ``{value}``."
        elif key == "nsfw":
            # Feature flags are stored as tag strings in _db["tags"].
            if value:
                if not "nsfw-enabled" in _db["tags"]:
                    _db["tags"].append("nsfw-enabled")
            else:
                if "nsfw-enabled" in _db["tags"]:
                    _db["tags"].remove("nsfw-enabled")
            text = f"NSFW has been set to ``{value}``."
        elif key == "levels":
            if value:
                if not "levels-enabled" in _db["tags"]:
                    _db["tags"].append("levels-enabled")
            else:
                if "levels-enabled" in _db["tags"]:
                    _db["tags"].remove("levels-enabled")
            text = f"Leveling has been set to ``{value}``."
        elif key == "joinleave":
            if not isinstance(value, str):
                return await ctx.send(embed = Tools.error("That isn't a valid value."))
            # Accept either a raw channel ID or a <#id> channel mention.
            try:
                id = int(value)
            except:
                try:
                    id = int(value.split("<#")[1].split(">")[0])
                except:
                    return await ctx.send(embed = Tools.error("That isn't a valid value."))
            channel = self.bot.get_channel(id)
            if not channel:
                return await ctx.send(embed = Tools.error("That isn't a valid channel ID."))
            _db["data"]["joinleave_channel"] = channel.id
            text = f"The join/leave channel has been set to #{channel.name}"
        elif key == "autorole":
            # NOTE(review): if value was normalized to a bool above,
            # value.lower() here would raise AttributeError — confirm intended.
            if value.lower() in ["remove", "reset"]:
                _db["data"]["autorole"] = None
                text = "The autorole for this server has been reset."
            else:
                if value.startswith("<@&") and value.endswith(">"):
                    # Role mention: strip the markup down to the numeric ID.
                    value = value.replace("<", "").replace(">", "").replace("@", "").replace("&", "")
                else:
                    # Plain role name (optionally prefixed with '@').
                    if value.startswith("@"):
                        value = value[1:]
                    role = discord.utils.get(ctx.guild.roles, name = value)
                    if not role:
                        return await ctx.send(embed = Tools.error("Couldn't find that role; check your capitalization. You can't use IDs here."))
                    value = role.id
                role = discord.utils.get(ctx.guild.roles, id = int(value))
                _db["data"]["autorole"] = role.id
                text = "This server's autorole has been set to @" + role.name
        # Persist the change and confirm to the user.
        _db["data"]["last_updated"] = str(date.today())
        open("db/guilds", "w").write(dumps(db, indent = 4))
        embed = Embed(title = text, color = 0x126bf1)
        embed.set_author(name = " | Settings", icon_url = self.bot.user.avatar_url)
        embed.set_footer(text = f" | Set by {ctx.author}.", icon_url = ctx.author.avatar_url)
        return await ctx.send(embed = embed)


# Link to bot
def setup(bot):
    bot.add_cog(Settings(bot))
6,506
1,950
import torch

from .mcts import MCTS, Node
from .utils import select_action


def test(config, model, episodes, device, render):
    """Evaluate `model` for `episodes` episodes and return the mean total reward.

    Each step expands a fresh MCTS root from the current observation and picks
    the action deterministically (temperature=1, deterministic=True).
    """
    model.to(device)
    model.eval()
    test_reward = 0
    env = config.new_game()
    # No gradients are needed during evaluation.
    with torch.no_grad():
        for ep_i in range(episodes):
            done = False
            ep_reward = 0
            obs = env.reset()
            while not done:
                if render:
                    env.render()
                root = Node(0)
                # Add a batch dimension before feeding the model.
                obs = torch.FloatTensor(obs).to(config.device).unsqueeze(0)
                root.expand(env.to_play(), env.legal_actions(), model.initial_inference(obs))
                MCTS(config).run(root, env.action_history(), model)
                action = select_action(root, temperature=1, deterministic=True)
                obs, reward, done, info = env.step(action.index)
                ep_reward += reward
            test_reward += ep_reward
    return test_reward / episodes
970
286
# coding=utf-8
import torch


class RotateRectangleDCNFeatureCalibration(torch.nn.Module):
    """Placeholder nn.Module for rotated-rectangle DCN feature calibration.

    Holds no parameters or sublayers yet; only the base-class state is set up.
    """

    def __init__(self):
        super(RotateRectangleDCNFeatureCalibration, self).__init__()
167
58
# Processing (Python mode) sketch: a chain of balls oscillating vertically.
# NOTE(review): noStroke/fill/ellipse/sin/TWO_PI/random/width/size/etc. are
# Processing built-ins injected by the sketch runtime, not Python stdlib.
import math


class MySuperBall:
    # Class-level defaults; each instance is configured in myInit().
    x = 0
    y = 0
    radius = 0
    speed = 0
    counter = 0          # phase accumulator driving the sine motion
    previousBall = None  # neighbour ball, used to draw the connecting line
    vector = 1           # direction multiplier, flipped after a full period

    def render(self):
        """Draw the ball, its centre dot, and a line to the previous ball."""
        noStroke()
        fill(200, 100)
        ellipse(self.x, self.y, self.radius, self.radius)
        stroke(10)
        strokeWeight(2)
        if self.previousBall != None:
            print(self.previousBall)
            line(self.x, self.y, self.previousBall.x, self.previousBall.y)
        noStroke()
        fill(0)
        ellipse(self.x, self.y, 6, 6)

    def upDate(self):
        """Advance the phase and move the ball along a vertical sine wave."""
        self.counter += self.speed * self.vector / 500
        self.y = 250 + sin(self.counter) * 200
        if (self.counter > TWO_PI):
            # Reverse direction after one full oscillation.
            self.vector = self.vector * (-1)


ballArray_one = []
ballArray_two = []


def setup():
    # Processing entry point: create the canvas and build the balls.
    size(500, 500)
    smooth()
    myInit()


def myInit():
    """(Re)build the chain of balls with randomized size, speed and position."""
    global ballArray_one, ballArray_two
    number = 125
    step = float(width) / float(number)
    ballArray_one = list(range(number))
    for i in range(len(ballArray_one)):
        tmp_obj = MySuperBall()
        variable = random(0, 5)
        tmp_obj.x = variable + step * i
        tmp_obj.y = random(-100, 100) + 250
        tmp_obj.radius = variable * 10 + 5
        tmp_obj.speed = random(0.2, 10)
        if i > 0:
            # Link each ball to its predecessor for the connecting line.
            tmp_obj.previousBall = ballArray_one[i - 1]
        ballArray_one[i] = tmp_obj
    # NOTE(review): this aliases the same list (no copy) — both names refer
    # to identical objects; confirm a copy was not intended.
    ballArray_two = ballArray_one


def draw():
    # Processing frame callback: update and redraw every ball.
    global ballArray_one
    background(50)
    for curentBall in ballArray_one:
        curentBall.upDate()
        curentBall.render()


def keyPressed():
    # 'a' rebuilds the scene, 'q' inflates the first ball, 's' saves a frame.
    if key == 'a':
        myInit()
    if key == 'q':
        ballArray_two[0].radius = 300
    if key == 's':
        saveFrame("myProcessing.png")
1,824
693
import asyncio
import logging

import discord

from carim_discord_bot import managed_service, config
from carim_discord_bot.discord_client import discord_service

log = logging.getLogger(__name__)


class MemberCountService(managed_service.ManagedService):
    """Periodically renames a configured channel to show the guild member count."""

    async def handle_message(self, message: managed_service.Message):
        # This service reacts to no messages.
        pass

    async def service(self):
        # Refresh the displayed count every ten minutes, forever.
        while True:
            await asyncio.sleep(10 * 60)
            await self.update_member_count()

    async def update_member_count(self):
        """Edit the configured channel's name using the member-count format."""
        channel_id = config.get().discord_member_count_channel_id
        if not channel_id:
            return
        client: discord.Client = discord_service.get_service_manager().client
        if not client or not client.is_ready():
            log.warning('client not ready')
            return
        channel: discord.VoiceChannel = client.get_channel(channel_id)
        member_total = channel.guild.member_count
        new_name = config.get().discord_member_count_format.format(count=member_total)
        await channel.edit(name=new_name)
        log.info(f'Update member count: {new_name}')


service = None


def get_service_manager():
    """Return the module-level MemberCountService, creating it on first use."""
    global service
    if not service:
        service = MemberCountService()
    return service
1,340
372
class EarlyStop:
    """Stateful early-stopping monitor.

    Call the instance with each new loss value; it returns True once the
    loss has failed to improve for `patience` consecutive calls.
    """

    def __init__(self, patience=5):
        self._patience = patience
        self._min_loss = None   # best (lowest) loss observed so far
        self._counter = -1      # consecutive non-improving calls

    def __call__(self, loss):
        # First observation just seeds the baseline.
        if self._min_loss is None:
            self._min_loss = loss
            self._counter = 0
            return False
        if loss < self._min_loss:
            # New best: record it and reset the stall counter.
            self._min_loss = loss
            self._counter = 0
        else:
            self._counter += 1
        return self._counter >= self._patience
491
141
#!/usr/bin/env python3 from GAReport import GAReport VIEW_ID = 'PutViewIDHere' DIMENSIONS = ["Page", ] METRICS = ["Pageviews", "Unique Pageviews", "Avg. Time on Page", "Entrances", "Bounce Rate", "% Exit", "Page Value"] # Use these instructions for creating single and multiple filters: https://developers.google.com/analytics/devguides/reporting/core/v3/reference#filters FILTERS= "ga:pagePath=~news" report = GAReport(startdate="yesterday", enddate="yesterday", viewID=VIEW_ID, dimensions=DIMENSIONS, metrics=METRICS, filters=FILTERS) print(report.df.head(3))
567
204
"""
SAR-Net: multi-task accent/speech recognition network (Keras).

Shared CNN(ResNet)+BiGRU encoder with optional branches:
  * ASR branch trained with CTC loss        (ctc_enable)
  * Accent-recognition softmax classifier   (ar_enable)
  * Discriminative metric-learning head     (disc_enable)
  * Low-dim bottleneck head for visualizing (bn_dim, requires disc_enable)
"""
from resnet import resnet18_, resnet34_, resnet50_, resnet101_, resnet152_
from keras.layers import Input, Dense, Lambda, Dropout, Conv2D, Activation, Bidirectional, GlobalAveragePooling1D, \
    BatchNormalization, Reshape
from keras_layer_normalization import LayerNormalization
from keras.layers.cudnn_recurrent import CuDNNGRU, CuDNNLSTM
from keras.models import Model
from keras import backend as K
from keras.regularizers import l2
from keras.constraints import unit_norm
from keras.utils import multi_gpu_model
from keras.optimizers import Adam
import losses as ls
import VLAD as vd

"""
=========================
        Layers
=========================
"""

def SQUEEZE(axis=3, name=None):
    """Lambda layer removing a size-1 axis."""
    return Lambda(lambda x: K.squeeze(x, axis=axis), name=name)

def EXPAND(axis=3, name=None):
    """Lambda layer inserting a size-1 axis."""
    return Lambda(lambda x: K.expand_dims(x, axis=axis), name=name)

def BN(name=None):
    """BatchNormalization shorthand."""
    return BatchNormalization(name=name)

def LN(name=None):
    """LayerNormalization shorthand."""
    return LayerNormalization(name=name)

def DS(hidden, activation, rgr=l2(1e-4), use_bias=True, name=None):
    """L2-regularized Dense layer with he_normal initialization."""
    return Dense(hidden,
                 activation=activation,
                 use_bias=use_bias,
                 kernel_initializer='he_normal',
                 kernel_regularizer=rgr,
                 bias_regularizer=rgr,
                 name=name)

def BIGRU(hidden, seq=True, rgr=l2(1e-4), name=None):
    """Bidirectional CuDNN GRU; forward/backward outputs are concatenated."""
    return Bidirectional(CuDNNGRU(hidden,
                                  return_sequences=seq,
                                  kernel_regularizer=rgr,
                                  bias_regularizer=rgr),
                         merge_mode='concat',
                         name=name)

def DP(rate, name=None):
    """Dropout shorthand."""
    return Dropout(rate, name=name)

"""
=========================
    ctc constructors
=========================
"""

def ctc_lambda_func(args):
    """Wrap K.ctc_batch_cost so it can be used inside a Lambda layer."""
    y_pred, labels, input_length, label_length = args
    return K.ctc_batch_cost(labels, y_pred, input_length, label_length)

def ctc_module(ctc_pred, max_label_len):
    """Attach the CTC loss to a softmax prediction tensor.

    Returns (ctc_loss, ctc_labels, ctc_input_len, ctc_label_len); the three
    Input tensors must be added to the model's input set by the caller.
    """
    ctc_input_len = Input(shape=[1], dtype='int32', name='x_ctc_in_len')
    ctc_label_len = Input(shape=[1], dtype='int32', name='x_ctc_out_len')
    ctc_labels = Input([max_label_len], dtype='float32', name='x_ctc_label')
    ctc_loss = Lambda(ctc_lambda_func, output_shape=(1,), name='y_ctc_loss')\
        ([ctc_pred, ctc_labels, ctc_input_len, ctc_label_len])
    return ctc_loss, ctc_labels, ctc_input_len, ctc_label_len

"""
=========================
        NetVLAD
=========================
"""

def vlad(x, aggregation, vlad_clusters, ghost_clusters):
    """Apply (ghost-)VLAD pooling; `aggregation` is 'vlad' or 'gvlad'."""
    weight_decay = 1e-4
    if aggregation == 'vlad':
        x_k_center = Conv2D(vlad_clusters, (1, 1),
                            strides=(1, 1),
                            kernel_initializer='orthogonal',
                            use_bias=True, trainable=True,
                            kernel_regularizer=l2(weight_decay),
                            bias_regularizer=l2(weight_decay),
                            name='vlad_center_assignment')(x)
        x = vd.VladPooling(k_centers=vlad_clusters, mode='vlad', name='vlad_pool')([x, x_k_center])
    elif aggregation == 'gvlad':
        # Ghost clusters absorb noisy assignments and are dropped from the output.
        x_k_center = Conv2D(vlad_clusters + ghost_clusters, (1, 1),
                            strides=(1, 1),
                            kernel_initializer='orthogonal',
                            use_bias=True, trainable=True,
                            kernel_regularizer=l2(weight_decay),
                            bias_regularizer=l2(weight_decay),
                            name='gvlad_center_assignment')(x)
        x = vd.VladPooling(k_centers=vlad_clusters, g_centers=ghost_clusters, mode='gvlad', name='gvlad_pool')(
            [x, x_k_center])
    return x

"""
=========================
        AR Module
=========================
"""

def integration(x, hidden_dim=256, mto='avg', vlad_clusters=8, ghost_clusters=2):
    """Many-to-one pooling over the time axis: 'avg', 'bigru', 'vlad' or 'gvlad'."""
    if mto == 'avg':
        x = GlobalAveragePooling1D(name="AR_MERGE")(x)
    elif mto == 'bigru':
        x = BIGRU(hidden_dim, seq=False, name="AR_MERGE")(x)
    elif mto in ['vlad', 'gvlad']:
        # VLAD pooling expects a 4-D tensor: add a singleton axis first.
        x = EXPAND(axis=1)(x)
        x = vlad(x, aggregation=mto, vlad_clusters=vlad_clusters, ghost_clusters=ghost_clusters)
    else:
        print("Please specify avg/bigru/vlad/gvlad ..")
        exit(1)
    return x

def disc_loss(x, accent_label, accent_classes, loss, margin, name):
    """Build the discriminative (metric-learning) head.

    Returns the output tensor, or None for an unknown `loss` name.
    """
    if loss == "softmax":
        y = DS(accent_classes, activation='softmax', use_bias=False, name=name)(x)
    elif loss == "sphereface":
        y = ls.SphereFace(n_classes=accent_classes, m=margin, name=name)([x, accent_label])
    elif loss == "cosface":
        y = ls.CosFace(n_classes=accent_classes, m=margin, name=name)([x, accent_label])
    elif loss == "arcface":
        y = ls.ArcFace(n_classes=accent_classes, m=margin, name=name)([x, accent_label])
    elif loss == "circleloss":
        # Circle loss works on L2-normalized embeddings with unit-norm weights.
        y = Lambda(lambda x: K.l2_normalize(x, 1))(x)
        y = Dense(accent_classes, activation=None, use_bias=False, kernel_constraint=unit_norm(), name=name)(y)
    else:
        return
    return y

"""
=========================
        Model
=========================
"""

def build(inputs, outputs, raw=None, name="model"):
    """Assemble a Model, optionally warm-starting weights from file `raw`."""
    model = Model(inputs=inputs, outputs=outputs, name=name)
    model.summary()
    if raw:
        print("===== init weights from:%s =====" % raw)
        model.load_weights(raw, by_name=True, skip_mismatch=True)
    return model

def compile(model, gpus, lr, loss, loss_weights, metrics):
    """Compile the (optionally multi-GPU-wrapped) model with Adam.

    NOTE: shadows the builtin `compile`; name kept for backward compatibility.
    """
    if gpus > 1:
        model_ = multi_gpu_model(model, gpus=gpus)
    else:
        model_ = model
    model_.compile(optimizer=Adam(lr, decay=2e-4),
                   loss=loss,
                   loss_weights=loss_weights,
                   metrics=metrics)
    return model_

def SAR_Net(input_shape,
            ctc_enable = False,
            ar_enable = True,
            disc_enable = False,
            res_type="res18",
            res_filters=64,
            hidden_dim=256,
            bn_dim=0,
            bpe_classes=1000,
            accent_classes=8,
            max_ctc_len=72,
            mto=None,
            vlad_clusters=8,
            ghost_clusters=2,
            metric_loss='cosface',
            margin=0.3,
            raw_model=None,
            lr=0.01,
            gpus = 1,
            mode="train",
            name=None):
    """Build SAR-Net and return (model, compiled_train_model).

    Args mirror the branch switches documented in the module docstring;
    `input_shape` is the spectrogram shape used at training time, while in
    any other `mode` the time axis is left variable.
    """
    # =========================
    #   INPUT (2D Spectrogram)
    # =========================
    if mode == "train":
        inputs = Input(shape=input_shape, name="x_data")
    else:
        # Variable-length time axis at inference time.
        inputs = Input(shape=[None, input_shape[1], input_shape[2]], name="x_data")
    if disc_enable:
        disc_labels = Input(shape=(accent_classes,), name="x_accent")

    # ==============================
    #   SHARED ENCODER (Res + BiGRU)
    # ==============================
    if res_type == "res18":
        cnn = resnet18_(inputs, filters=res_filters)
    elif res_type == "res34":
        cnn = resnet34_(inputs, filters=res_filters)
    elif res_type == "res50":
        cnn = resnet50_(inputs, filters=res_filters)
    elif res_type == "res101":
        cnn = resnet101_(inputs, filters=res_filters)
    elif res_type == "res152":
        cnn = resnet152_(inputs, filters=res_filters)
    else:
        print("======= ERROR: please specify cnn in res-[18,34,50,101,152] ======")
        # FIX: previously fell through to an undefined `cnn` (NameError);
        # abort explicitly, consistent with integration().
        exit(1)
    cnn = Reshape([-1, K.int_shape(cnn)[-1]], name="CNN2SEQ")(cnn)
    cnn = DS(hidden_dim, activation='tanh', name="CNN_LIN")(cnn)
    cnn = LN(name="CNN_LIN_LN")(cnn)
    crnn = BIGRU(hidden_dim, name="CRNN")(cnn)
    crnn = LN(name="CRNN_LN")(crnn)

    # =========================
    #   ASR Branch (CTC)
    # =========================
    if ctc_enable:
        asr = crnn
        asr = BIGRU(hidden_dim, name="CTC_BIGRU")(asr)
        asr = LN(name="CTC_BIGRU_LN")(asr)
        asr = DS(hidden_dim, activation='tanh', name='CTC_DS')(asr)
        asr = LN(name='CTC_DS_LN')(asr)
        ctc_pred = DS(bpe_classes, activation="softmax", name='ctc_pred')(asr)
        ctc_loss, ctc_labels, ctc_input_len, ctc_label_len = ctc_module(ctc_pred, max_ctc_len)

    # =========================
    #   AR Branch
    # =========================
    if ar_enable:
        # ---- Integration (many-to-one pooling) ----
        ar = DS(hidden_dim, activation='tanh', name='AR_DS')(crnn)
        ar = LN(name='AR_DS_LN')(ar)
        ar = integration(ar,
                         hidden_dim=hidden_dim,
                         mto=mto,
                         vlad_clusters=vlad_clusters,
                         ghost_clusters=ghost_clusters)
        ar = BN(name='AR_BN1')(ar)
        # ar = DP(0.5,name="AR_DP")(ar)
        ar = DS(hidden_dim, activation=None, name="AR_EMBEDDING")(ar)  # Global Feature
        ar = BN(name='AR_BN2')(ar)

        # ---- Classification head ----
        ar1 = DS(64, activation='relu', name="AR_CF_DS1")(ar)
        ar1 = DS(64, activation='relu', name="AR_CF_DS2")(ar1)
        ar1 = DS(accent_classes, activation='softmax', name='y_accent')(ar1)

        # ---- Discriminative loss head ----
        if disc_enable:
            ar2 = disc_loss(ar,
                            accent_label=disc_labels,
                            accent_classes=accent_classes,
                            loss=metric_loss,
                            margin=margin,
                            name="y_disc")

        # ---- Visual bottleneck feature (optional) ----
        if disc_enable and bn_dim:
            bn = DS(64, activation='relu', name="AR_BN_DS")(ar)
            bn = BN(name='AR_BN3')(bn)
            bn = DS(bn_dim, activation=None, name="bottleneck")(bn)
            bn = BN(name='AR_BN4')(bn)
            bn = disc_loss(bn,
                           accent_label=disc_labels,
                           accent_classes=accent_classes,
                           loss=metric_loss,
                           margin=margin,
                           name="y_disc_bn")

    # ==============================
    #   Model assembly
    # ==============================
    input_set = [inputs]
    output_set = []
    if ar_enable:
        output_set += [ar1]
    if disc_enable:
        input_set += [disc_labels]
        output_set += [ar2]
    if ctc_enable:
        input_set += [ctc_labels, ctc_input_len, ctc_label_len]
        output_set += [ctc_loss]
    # FIX: guard with disc_enable as well — `bn` only exists when both are set.
    if disc_enable and bn_dim:
        output_set += [bn]
    model = build(inputs=input_set, outputs=output_set, raw=raw_model, name=name)

    # ==============================
    #   Compile
    # ==============================
    loss = {}
    loss_weights = {}
    metrics = {}
    alpha = 0.4   # CTC branch weight when combined with the disc branch
    beta = 0.01   # down-weight for auxiliary heads when disc branch is on
    if ar_enable:
        loss["y_accent"] = 'categorical_crossentropy'
        loss_weights["y_accent"] = beta if disc_enable else 1.0
        metrics["y_accent"] = "accuracy"
    if disc_enable:
        loss["y_disc"] = 'categorical_crossentropy' if metric_loss != 'circleloss' \
            else lambda y, x: ls.circle_loss(y, x, gamma=256, margin=margin)
        loss_weights["y_disc"] = 1 - alpha if ctc_enable else 1.0
        metrics["y_disc"] = "accuracy"
    if ctc_enable:
        # CTC loss is already computed inside the graph; just pass it through.
        loss["y_ctc_loss"] = lambda y_true, y_pred: y_pred
        # FIX: removed a dead assignment that was immediately overwritten.
        loss_weights["y_ctc_loss"] = 1 - alpha if not disc_enable else beta
    if disc_enable and bn_dim:
        # FIX: compare `metric_loss` (was `metrics`, a dict, so the condition
        # was always True and circleloss was never applied here); also guard
        # with disc_enable, matching the branch that creates y_disc_bn.
        loss["y_disc_bn"] = 'categorical_crossentropy' if metric_loss != 'circleloss' \
            else lambda y, x: ls.circle_loss(y, x, gamma=256, margin=margin)
        loss_weights["y_disc_bn"] = 0.1
        metrics['y_disc_bn'] = 'accuracy'
    train_model = compile(model, gpus, lr=lr, loss=loss, loss_weights=loss_weights, metrics=metrics)
    print(loss_weights)
    return model, train_model

"""
======================
        OTHER
======================
"""

def sub_model(model, input_name, output_name):
    """Slice a sub-network between two named layers of `model`."""
    inputs = model.get_layer(name=input_name).input
    outputs = model.get_layer(name=output_name).output
    return Model(inputs=inputs, outputs=outputs)

def ctc_pred(model, x, batch_size, input_len,):
    """Greedy CTC decoding of model predictions for a batch `x`."""
    pred = model.predict(x, batch_size=batch_size)
    input_len = K.constant([input_len] * len(pred), dtype="int32")
    decoded = K.ctc_decode(pred, input_len,
                           greedy=True,
                           beam_width=100,
                           top_paths=1)
    return K.get_value(decoded[0][0])

if __name__ == "__main__":
    # Smoke test: build a full three-branch model and round-trip its weights.
    model, train_model = SAR_Net(input_shape=(1200, 80, 1),
                                 ctc_enable=True,
                                 ar_enable=True,
                                 disc_enable=True,
                                 res_type="res18",
                                 res_filters=32,
                                 hidden_dim=256,
                                 bn_dim=0,
                                 bpe_classes=1000,
                                 accent_classes=8,
                                 max_ctc_len=72,
                                 mto='vlad',
                                 vlad_clusters=8,
                                 ghost_clusters=2,
                                 metric_loss='cosface',
                                 margin=0.3,
                                 raw_model=None,
                                 lr=0.01,
                                 gpus=1,
                                 name=None)
    sub_model(model, 'x_data', 'y_accent')
    model.save_weights('exp/demo.h5')
    model.load_weights('exp/demo.h5')
14,525
4,980
from .literals import booleans from .models import (readable_web_streams, web_streams, writeable_web_streams) from .paths import web_url_strings
187
53
# -*- coding: utf-8 -*-
# Training / prediction driver (Python 2) for a pointer-generator style
# seq2seq summarizer on the CNN/DailyMail ("Deepmind") dataset.
import os
cudaid = 0
os.environ["CUDA_VISIBLE_DEVICES"] = str(cudaid)

import sys
import time
import numpy as np
import cPickle as pickle
import copy
import random
from random import shuffle
import math

import torch
import torch.nn as nn
from torch.autograd import Variable

import data as datar
from model import *
from utils_pg import *
from configs import *

cfg = DeepmindConfigs()
TRAINING_DATASET_CLS = DeepmindTraining
TESTING_DATASET_CLS = DeepmindTesting

def print_basic_info(modules, consts, options):
    """Print the active option switches and every constant in `consts`."""
    if options["is_debugging"]:
        print "\nWARNING: IN DEBUGGING MODE\n"
    if options["has_learnable_w2v"]:
        print "USE LEARNABLE W2V EMBEDDING"
    if options["is_bidirectional"]:
        print "USE BI-DIRECTIONAL RNN"
    if options["has_lvt_trick"]:
        print "USE LVT TRICK"
    if options["omit_eos"]:
        print "<eos> IS OMITTED IN TESTING DATA"
    if options["prediction_bytes_limitation"]:
        print "MAXIMUM BYTES IN PREDICTION IS LIMITED"
    for k in consts:
        print k + ":", consts[k]

def init_modules():
    """Build the (modules, consts, options) triple driving the whole run.

    Loads the vocabulary from dic.pkl and derives sizes/indices from cfg.
    """
    init_seeds()

    options = {}
    options["is_debugging"] = False
    options["is_predicting"] = False
    options["cuda"] = cfg.CUDA and torch.cuda.is_available()
    options["device"] = torch.device("cuda" if options["cuda"] else "cpu")
    options["cell"] = cfg.CELL
    options["copy"] = cfg.COPY
    options["coverage"] = cfg.COVERAGE

    assert TRAINING_DATASET_CLS.IS_UNICODE == TESTING_DATASET_CLS.IS_UNICODE
    options["is_unicode"] = TRAINING_DATASET_CLS.IS_UNICODE
    options["has_y"] = TRAINING_DATASET_CLS.HAS_Y

    options["has_lvt_trick"] = False
    options["has_learnable_w2v"] = True
    options["is_bidirectional"] = True
    options["beam_decoding"] = True # False for greedy decoding
    options["omit_eos"] = False # omit <eos> and continuously decode until length of sentence reaches MAX_LEN_PREDICT (for DUC testing data)
    options["prediction_bytes_limitation"] = False if TESTING_DATASET_CLS.MAX_BYTE_PREDICT == None else True

    assert options["is_unicode"] == False

    consts = {}
    consts["idx_gpu"] = cudaid

    consts["dim_x"] = cfg.DIM_X
    consts["dim_y"] = cfg.DIM_Y
    consts["len_x"] = cfg.MAX_LEN_X + 1 # plus 1 for eos
    consts["len_y"] = cfg.MAX_LEN_Y + 1
    consts["num_x"] = cfg.MAX_NUM_X
    consts["num_y"] = cfg.NUM_Y
    consts["hidden_size"] = cfg.HIDDEN_SIZE

    consts["lvt_dict_size"] = 200 if options["is_debugging"] else cfg.LVT_DICT_SIZE
    consts["batch_size"] = 5 if options["is_debugging"] else TRAINING_DATASET_CLS.BATCH_SIZE
    if options["is_debugging"]:
        consts["testing_batch_size"] = 1 if options["beam_decoding"] else 2
    else:
        #consts["testing_batch_size"] = 1 if options["beam_decoding"] else TESTING_DATASET_CLS.BATCH_SIZE
        consts["testing_batch_size"] = TESTING_DATASET_CLS.BATCH_SIZE

    consts["min_len_predict"] = TESTING_DATASET_CLS.MIN_LEN_PREDICT
    consts["max_len_predict"] = TESTING_DATASET_CLS.MAX_LEN_PREDICT
    consts["max_byte_predict"] = TESTING_DATASET_CLS.MAX_BYTE_PREDICT
    consts["testing_print_size"] = TESTING_DATASET_CLS.PRINT_SIZE

    consts["top_k"] = 1

    consts["lr"] = 0.15
    consts["beam_size"] = 4

    consts["max_epoch"] = 300 if options["is_debugging"] else 30
    consts["num_model"] = 1

    consts["print_time"] = 5
    consts["save_epoch"] = 1

    assert consts["dim_x"] == consts["dim_y"]
    assert consts["top_k"] <= cfg.MIN_NUM_X
    assert consts["beam_size"] >= 1

    if options["has_lvt_trick"]:
        assert consts["lvt_dict_size"] != None
        assert consts["testing_batch_size"] <= consts["batch_size"]
        assert consts["lvt_dict_size"] <= cfg.NUM_FREQUENT_WORDS

    modules = {}

    # dic.pkl holds [_, dictionary, high-freq words, word->id, id->word, w2w].
    [_, dic, hfw, w2i, i2w, w2w] = pickle.load(open(cfg.cc.TRAINING_DATA_PATH + "dic.pkl", "r"))
    consts["dict_size"] = len(dic)
    modules["dic"] = dic
    modules["w2i"] = w2i
    modules["i2w"] = i2w
    if options["has_lvt_trick"]:
        modules["freq_words"] = hfw
    modules["lfw_emb"] = modules["w2i"][cfg.W_UNK]
    modules["eos_emb"] = modules["w2i"][cfg.W_EOS]
    consts["pad_token_idx"] = modules["w2i"][cfg.W_PAD]

    return modules, consts, options

def greedy_decode(flist, batch, model, modules, consts, options):
    """Greedy (argmax) decoding for a whole batch; writes summaries to disk.

    `flist` names the output files; `batch` is
    (word_emb, dec_state, x_mask, y, len_y) from the encoder.
    """
    testing_batch_size = len(flist)

    dec_result = [[] for i in xrange(testing_batch_size)]
    # existence[i] is False once document i has emitted <eos>.
    existence = [True] * testing_batch_size
    num_left = testing_batch_size

    word_emb, dec_state, x_mask, y, len_y = batch

    next_y = torch.LongTensor(np.ones((1, testing_batch_size), dtype="int64")).cuda()

    for step in xrange(consts["max_len_predict"]):
        if num_left == 0:
            break
        y_pred, dec_state = model.decode_once(next_y, word_emb, dec_state, x_mask)
        dict_size = y_pred.shape[-1]
        y_pred = y_pred.view(testing_batch_size, dict_size)
        dec_state = dec_state.view(testing_batch_size, dec_state.shape[-1])

        next_y = torch.argmax(y_pred, 1).view((1, testing_batch_size))
        for idx_doc in xrange(testing_batch_size):
            if existence[idx_doc] == False:
                continue

            idx_max = next_y[0, idx_doc].item()
            if options["has_lvt_trick"]:
                # Map LVT-local index back to the full vocabulary.
                idx_max = lvt_i2i[idx_max]
                next_y[0, idx_doc] = idx_max
            if idx_max == modules["eos_emb"]:
                existence[idx_doc] = False
                num_left -= 1
            else:
                dec_result[idx_doc].append(str(idx_max))

    # Optionally truncate each prediction to the configured byte budget.
    if options["prediction_bytes_limitation"]:
        for i in xrange(len(dec_result)):
            sample = dec_result[i]
            b = 0
            for j in xrange(len(sample)):
                b += len(sample[j])
                if b > consts["max_byte_predict"]:
                    dec_result[i] = dec_result[i][0 : j]
                    break

    for idx_doc in xrange(testing_batch_size):
        fname = str(flist[idx_doc])
        if len(dec_result[idx_doc]) >= consts["min_len_predict"]:
            write_summ("".join((cfg.cc.SUMM_PATH, fname)), dec_result[idx_doc], 1, options)
            write_summ("".join((cfg.cc.BEAM_SUMM_PATH, fname)), dec_result[idx_doc], 1, options, modules["i2w"])
            if options["has_y"]:
                ly = len_y[idx_doc]
                y_true = y[0 : ly, idx_doc].tolist()
                y_true = [str(i) for i in y_true[:-1]] # delete <eos>
                write_summ("".join((cfg.cc.GROUND_TRUTH_PATH, fname)), y_true, 1, options)
                write_summ("".join((cfg.cc.BEAM_GT_PATH, fname)), y_true, 1, options, modules["i2w"])

def beam_decode(fname, batch, model, modules, consts, options):
    """Beam-search decoding for a single document (no copy mechanism).

    Maintains `num_live` open hypotheses and `num_dead` finished ones until
    beam_size hypotheses have finished or max_len_predict is reached.
    """
    fname = str(fname)

    beam_size = consts["beam_size"]
    num_live = 1
    num_dead = 0
    samples = []
    sample_scores = np.zeros(beam_size)

    last_traces = [[]]
    last_scores = torch.FloatTensor(np.zeros(1)).cuda()
    last_states = []

    x, word_emb, dec_state, x_mask, y, len_y, ref_sents, max_ext_len, oovs = batch
    next_y = torch.LongTensor(-np.ones((1, num_live, 1), dtype="int64")).cuda()
    x = x.unsqueeze(1)
    word_emb = word_emb.unsqueeze(1)
    x_mask = x_mask.unsqueeze(1)
    dec_state = dec_state.unsqueeze(0)
    if options["cell"] == "lstm":
        dec_state = (dec_state, dec_state)

    for step in xrange(consts["max_len_predict"]):
        # Tile the (single-document) encoder outputs across live hypotheses.
        tile_word_emb = word_emb.repeat(1, num_live, 1)
        tile_x_mask = x_mask.repeat(1, num_live, 1)
        tile_x = x.repeat(1, num_live)

        y_pred, dec_state = model.decode_once(tile_x, next_y, tile_word_emb, dec_state, tile_x_mask, max_ext_len)
        dict_size = y_pred.shape[-1]
        y_pred = y_pred.view(num_live, dict_size)
        if options["cell"] == "lstm":
            dec_state = (dec_state[0].view(num_live, dec_state[0].shape[-1]), dec_state[1].view(num_live, dec_state[1].shape[-1]))
        else:
            dec_state = dec_state.view(num_live, dec_state.shape[-1])

        cand_scores = last_scores + torch.log(y_pred) # higher joint score is better
        cand_scores = cand_scores.flatten()
        idx_top_joint_scores = torch.topk(cand_scores, beam_size - num_dead)[1]

        # Flat index -> (parent hypothesis, word id).
        idx_last_traces = idx_top_joint_scores / dict_size
        idx_word_now = idx_top_joint_scores % dict_size
        top_joint_scores = cand_scores[idx_top_joint_scores]

        traces_now = []
        scores_now = np.zeros((beam_size - num_dead))
        states_now = []

        for i, [j, k] in enumerate(zip(idx_last_traces, idx_word_now)):
            if options["has_lvt_trick"]:
                traces_now.append(last_traces[j] + [batch.lvt_i2i[k]])
            else:
                traces_now.append(last_traces[j] + [k])
            scores_now[i] = copy.copy(top_joint_scores[i])
            if options["cell"] == "lstm":
                states_now.append((copy.copy(dec_state[0][j, :]), copy.copy(dec_state[1][j, :])))
            else:
                states_now.append(copy.copy(dec_state[j, :]))

        # Split candidates into finished (ending in <eos>) and still-live.
        num_live = 0
        last_traces = []
        last_scores = []
        last_states = []
        for i in xrange(len(traces_now)):
            if traces_now[i][-1] == modules["eos_emb"] and len(traces_now[i]) >= consts["min_len_predict"]:
                samples.append([str(e.item()) for e in traces_now[i][:-1]])
                sample_scores[num_dead] = scores_now[i]
                num_dead += 1
            else:
                last_traces.append(traces_now[i])
                last_scores.append(scores_now[i])
                last_states.append(states_now[i])
                num_live += 1
        if num_live == 0 or num_dead >= beam_size:
            break

        last_scores = torch.FloatTensor(np.array(last_scores).reshape((num_live, 1))).cuda()
        next_y = np.array([e[-1] for e in last_traces], dtype = "int64").reshape((1, num_live))
        next_y = torch.LongTensor(next_y).cuda()
        if options["cell"] == "lstm":
            h_states = []
            c_states = []
            for state in last_states:
                h_states.append(state[0])
                c_states.append(state[1])
            dec_state = (torch.stack(h_states).view((num_live, h_states[0].shape[-1])),\
                         torch.stack(c_states).view((num_live, c_states[0].shape[-1])))
        else:
            dec_state = torch.stack(last_states).view((num_live, dec_state.shape[-1]))
        assert num_live + num_dead == beam_size

    # Any hypothesis still open at the length limit is force-finished.
    if num_live > 0:
        for i in xrange(num_live):
            samples.append([str(e.item()) for e in last_traces[i]])
            sample_scores[num_dead] = last_scores[i]
            num_dead += 1

    #weight by length
    for i in xrange(len(sample_scores)):
        sent_len = float(len(samples[i]))
        sample_scores[i] = sample_scores[i] #* math.exp(-sent_len / 10)

    idx_sorted_scores = np.argsort(sample_scores) # ascending: lowest score first
    if options["has_y"]:
        ly = len_y[0]
        y_true = y[0 : ly].tolist()
        y_true = [str(i) for i in y_true[:-1]] # delete <eos>

    # Keep only samples long enough; fall back to everything if none qualify.
    sorted_samples = []
    sorted_scores = []
    filter_idx = []
    for e in idx_sorted_scores:
        if len(samples[e]) >= consts["min_len_predict"]:
            filter_idx.append(e)
    if len(filter_idx) == 0:
        filter_idx = idx_sorted_scores
    for e in filter_idx:
        sorted_samples.append(samples[e])
        sorted_scores.append(sample_scores[e])

    num_samples = len(sorted_samples)
    if len(sorted_samples) == 1:
        sorted_samples = sorted_samples[0]
        num_samples = 1

    if options["prediction_bytes_limitation"]:
        for i in xrange(len(sorted_samples)):
            sample = sorted_samples[i]
            b = 0
            for j in xrange(len(sample)):
                b += len(sample[j])
                if b > consts["max_byte_predict"]:
                    sorted_samples[i] = sorted_samples[i][0 : j]
                    break

    # Highest-scoring sample (last after ascending sort) goes to ROUGE.
    dec_words = [modules["i2w"][int(e)] for e in sorted_samples[-1]]

    # for rouge
    write_for_rouge(fname, ref_sents, dec_words, cfg)
    # beam search history
    write_summ("".join((cfg.cc.BEAM_SUMM_PATH, fname)), sorted_samples, num_samples, options, modules["i2w"], sorted_scores)
    write_summ("".join((cfg.cc.BEAM_GT_PATH, fname)), y_true, 1, options, modules["i2w"])
    #print "================="

def beam_decode_copy(fname, batch, model, modules, consts, options):
    """Beam-search decoding with the copy mechanism (pointer-generator).

    Same skeleton as beam_decode, but word ids may address the extended
    (per-document OOV) vocabulary: such ids are fed back as <unk> and mapped
    through `oovs` when producing the final words.
    """
    fname = str(fname)

    beam_size = consts["beam_size"]
    num_live = 1
    num_dead = 0
    samples = []
    sample_scores = np.zeros(beam_size)

    last_traces = [[]]
    last_scores = torch.FloatTensor(np.zeros(1)).cuda()
    last_states = []

    x, word_emb, dec_state, x_mask, y, len_y, ref_sents, max_ext_len, oovs = batch
    next_y = torch.LongTensor(-np.ones((1, num_live, 1), dtype="int64")).cuda()
    x = x.unsqueeze(1)
    word_emb = word_emb.unsqueeze(1)
    x_mask = x_mask.unsqueeze(1)
    dec_state = dec_state.unsqueeze(0)
    if options["cell"] == "lstm":
        dec_state = (dec_state, dec_state)

    for step in xrange(consts["max_len_predict"]):
        tile_word_emb = word_emb.repeat(1, num_live, 1)
        tile_x_mask = x_mask.repeat(1, num_live, 1)
        tile_x = x.repeat(1, num_live)

        y_pred, dec_state = model.decode_once(tile_x, next_y, tile_word_emb, dec_state, tile_x_mask, max_ext_len)
        dict_size = y_pred.shape[-1]
        y_pred = y_pred.view(num_live, dict_size)
        if options["cell"] == "lstm":
            dec_state = (dec_state[0].view(num_live, dec_state[0].shape[-1]), dec_state[1].view(num_live, dec_state[1].shape[-1]))
        else:
            dec_state = dec_state.view(num_live, dec_state.shape[-1])

        cand_scores = last_scores + torch.log(y_pred) # higher joint score is better
        cand_scores = cand_scores.flatten()
        idx_top_joint_scores = torch.topk(cand_scores, beam_size - num_dead)[1]

        idx_last_traces = idx_top_joint_scores / dict_size
        idx_word_now = idx_top_joint_scores % dict_size
        top_joint_scores = cand_scores[idx_top_joint_scores]

        traces_now = []
        scores_now = np.zeros((beam_size - num_dead))
        states_now = []

        for i, [j, k] in enumerate(zip(idx_last_traces, idx_word_now)):
            traces_now.append(last_traces[j] + [k])
            scores_now[i] = copy.copy(top_joint_scores[i])
            if options["cell"] == "lstm":
                states_now.append((copy.copy(dec_state[0][j, :]), copy.copy(dec_state[1][j, :])))
            else:
                states_now.append(copy.copy(dec_state[j, :]))

        num_live = 0
        last_traces = []
        last_scores = []
        last_states = []
        for i in xrange(len(traces_now)):
            if traces_now[i][-1] == modules["eos_emb"] and len(traces_now[i]) >= consts["min_len_predict"]:
                samples.append([str(e.item()) for e in traces_now[i][:-1]])
                sample_scores[num_dead] = scores_now[i]
                num_dead += 1
            else:
                last_traces.append(traces_now[i])
                last_scores.append(scores_now[i])
                last_states.append(states_now[i])
                num_live += 1
        if num_live == 0 or num_dead >= beam_size:
            break

        last_scores = torch.FloatTensor(np.array(last_scores).reshape((num_live, 1))).cuda()
        # Extended-vocabulary (copied OOV) ids cannot be embedded: feed <unk>.
        next_y = []
        for e in last_traces:
            eid = e[-1].item()
            if eid in modules["i2w"]:
                next_y.append(eid)
            else:
                next_y.append(modules["lfw_emb"]) # unk
        next_y = np.array(next_y).reshape((1, num_live))
        next_y = torch.LongTensor(next_y).cuda()
        if options["cell"] == "lstm":
            h_states = []
            c_states = []
            for state in last_states:
                h_states.append(state[0])
                c_states.append(state[1])
            dec_state = (torch.stack(h_states).view((num_live, h_states[0].shape[-1])),\
                         torch.stack(c_states).view((num_live, c_states[0].shape[-1])))
        else:
            dec_state = torch.stack(last_states).view((num_live, dec_state.shape[-1]))
        assert num_live + num_dead == beam_size

    if num_live > 0:
        for i in xrange(num_live):
            samples.append([str(e.item()) for e in last_traces[i]])
            sample_scores[num_dead] = last_scores[i]
            num_dead += 1

    #weight by length
    for i in xrange(len(sample_scores)):
        sent_len = float(len(samples[i]))
        sample_scores[i] = sample_scores[i] #* math.exp(-sent_len / 10)

    idx_sorted_scores = np.argsort(sample_scores) # ascending: lowest score first
    if options["has_y"]:
        ly = len_y[0]
        y_true = y[0 : ly].tolist()
        y_true = [str(i) for i in y_true[:-1]] # delete <eos>

    sorted_samples = []
    sorted_scores = []
    filter_idx = []
    for e in idx_sorted_scores:
        if len(samples[e]) >= consts["min_len_predict"]:
            filter_idx.append(e)
    if len(filter_idx) == 0:
        filter_idx = idx_sorted_scores
    for e in filter_idx:
        sorted_samples.append(samples[e])
        sorted_scores.append(sample_scores[e])

    num_samples = len(sorted_samples)
    if len(sorted_samples) == 1:
        sorted_samples = sorted_samples[0]
        num_samples = 1

    if options["prediction_bytes_limitation"]:
        for i in xrange(len(sorted_samples)):
            sample = sorted_samples[i]
            b = 0
            for j in xrange(len(sample)):
                b += len(sample[j])
                if b > consts["max_byte_predict"]:
                    sorted_samples[i] = sorted_samples[i][0 : j]
                    break

    # Map ids back to words; ids beyond the vocabulary index into `oovs`.
    dec_words = []
    for e in sorted_samples[-1]:
        e = int(e)
        if e in modules["i2w"]:
            dec_words.append(modules["i2w"][e])
        else:
            dec_words.append(oovs[e - len(modules["i2w"])])

    # for rouge
    write_for_rouge(fname, ref_sents, dec_words, cfg)
    # beam search history
    write_summ_copy("".join((cfg.cc.BEAM_SUMM_PATH, fname)), sorted_samples, num_samples, options, modules["i2w"], oovs, sorted_scores)
    write_summ_copy("".join((cfg.cc.BEAM_GT_PATH, fname)), y_true, 1, options, modules["i2w"], oovs)
    #print "================="

def predict(model, modules, consts, options):
    """Run decoding over the whole test set and write results to disk."""
    print "start predicting,"
    options["has_y"] = TESTING_DATASET_CLS.HAS_Y
    if options["beam_decoding"]:
        print "using beam search"
    else:
        print "using greedy search"
    rebuild_dir(cfg.cc.BEAM_SUMM_PATH)
    rebuild_dir(cfg.cc.BEAM_GT_PATH)
    rebuild_dir(cfg.cc.GROUND_TRUTH_PATH)
    rebuild_dir(cfg.cc.SUMM_PATH)

    print "loading test set..."
    xy_list = pickle.load(open(cfg.cc.TESTING_DATA_PATH + "ibm.pkl", "r"))
    batch_list, num_files, num_batches = datar.batched(len(xy_list), options, consts)
    print "num_files = ", num_files, ", num_batches = ", num_batches

    running_start = time.time()
    partial_num = 0
    total_num = 0
    si = 0
    for idx_batch in xrange(num_batches):
        test_idx = batch_list[idx_batch]
        batch_raw = [xy_list[xy_idx] for xy_idx in test_idx]
        batch = datar.get_data(batch_raw, modules, consts, options)

        x, len_x, x_mask, y, len_y, y_mask, oy, x_ext, y_ext, oovs = sort_samples(batch.x, batch.len_x, \
                batch.x_mask, batch.y, batch.len_y, batch.y_mask, \
                batch.original_summarys, batch.x_ext, batch.y_ext, batch.x_ext_words)

        word_emb, dec_state = model.encode(torch.LongTensor(x).cuda(), torch.LongTensor(len_x).cuda(), torch.FloatTensor(x_mask).cuda())

        if options["beam_decoding"]:
            # Beam search decodes one document at a time.
            for idx_s in xrange(word_emb.size(1)):
                inputx = (torch.LongTensor(x_ext[:, idx_s]).cuda(), word_emb[:, idx_s, :], dec_state[idx_s, :],\
                          torch.FloatTensor(x_mask[:, idx_s, :]).cuda(), y[:, idx_s], [len_y[idx_s]], oy[idx_s],\
                          batch.max_ext_len, oovs[idx_s])
                beam_decode_copy(si, inputx, model, modules, consts, options)
                si += 1
        else:
            inputx = (word_emb, dec_state, torch.FloatTensor(x_mask).cuda(), y, len_y)
            greedy_decode(test_idx, inputx, model, modules, consts, options)

        testing_batch_size = len(test_idx)
        partial_num += testing_batch_size
        total_num += testing_batch_size
        if partial_num >= consts["testing_print_size"]:
            print total_num, "summs are generated"
            partial_num = 0
    print si, total_num

def run(existing_model_name = None):
    """Top-level entry: train (default) or predict, per options["is_predicting"]."""
    modules, consts, options = init_modules()
    #use_gpu(consts["idx_gpu"])

    if options["is_predicting"]:
        need_load_model = True
        training_model = False
        predict_model = True
    else:
        need_load_model = False
        training_model = True
        predict_model = False

    print_basic_info(modules, consts, options)

    if training_model:
        print "loading train set..."
        if options["is_debugging"]:
            xy_list = pickle.load(open(cfg.cc.TESTING_DATA_PATH + "test.pkl", "r"))
        else:
            xy_list = pickle.load(open(cfg.cc.TRAINING_DATA_PATH + "train.pkl", "r"))
        batch_list, num_files, num_batches = datar.batched(len(xy_list), options, consts)
        print "num_files = ", num_files, ", num_batches = ", num_batches

    running_start = time.time()
    if True: #TODO: refactor
        print "compiling model ..."
        model = Model(modules, consts, options)
        #criterion = nn.NLLLoss(ignore_index=consts["pad_token_idx"])
        if options["cuda"]:
            model.cuda()
            #criterion.cuda()
            #model = nn.DataParallel(model)
        optimizer = torch.optim.Adagrad(model.parameters(), lr=consts["lr"], initial_accumulator_value=0.1)

        model_name = "cnndm.s2s"
        existing_epoch = 0
        if need_load_model:
            if existing_model_name == None:
                existing_model_name = "cnndm.s2s.gpu5.epoch5.5"
            print "loading existed model:", existing_model_name
            model, optimizer = load_model(cfg.cc.MODEL_PATH + existing_model_name, model, optimizer)

        if training_model:
            print "start training model "
            print_size = num_files / consts["print_time"] if num_files >= consts["print_time"] else num_files

            last_total_error = float("inf")
            print "max epoch:", consts["max_epoch"]
            for epoch in xrange(0, consts["max_epoch"]):
                '''
                if not options["is_debugging"] and epoch == 5:
                    consts["lr"] *= 0.1 #adjust
                    for param_group in optimizer.param_groups:
                        param_group['lr'] = consts["lr"]
                '''
                print "epoch: ", epoch + existing_epoch
                num_partial = 1
                total_error = 0.0
                partial_num_files = 0
                epoch_start = time.time()
                partial_start = time.time()
                # shuffle the trainset
                batch_list, num_files, num_batches = datar.batched(len(xy_list), options, consts)
                used_batch = 0.
                for idx_batch in xrange(num_batches):
                    train_idx = batch_list[idx_batch]
                    batch_raw = [xy_list[xy_idx] for xy_idx in train_idx]
                    # Skip ragged trailing batches to keep a fixed batch size.
                    if len(batch_raw) != consts["batch_size"]:
                        continue
                    local_batch_size = len(batch_raw)
                    batch = datar.get_data(batch_raw, modules, consts, options)

                    x, len_x, x_mask, y, len_y, y_mask, oy, x_ext, y_ext, oovs = sort_samples(batch.x, batch.len_x, \
                            batch.x_mask, batch.y, batch.len_y, batch.y_mask, \
                            batch.original_summarys, batch.x_ext, batch.y_ext, batch.x_ext_words)

                    model.zero_grad()
                    y_pred, cost = model(torch.LongTensor(x).cuda(), torch.LongTensor(len_x).cuda(),\
                            torch.LongTensor(y).cuda(), torch.FloatTensor(x_mask).cuda(), \
                            torch.FloatTensor(y_mask).cuda(), torch.LongTensor(x_ext).cuda(), torch.LongTensor(y_ext).cuda(), \
                            batch.max_ext_len, None)
                    cost.backward()
                    torch.nn.utils.clip_grad_norm_(model.parameters(), 5)
                    optimizer.step()

                    cost = cost.item()
                    total_error += cost
                    used_batch += 1
                    partial_num_files += consts["batch_size"]
                    if partial_num_files / print_size == 1 and idx_batch < num_batches:
                        print idx_batch + 1, "/" , num_batches, "batches have been processed,",
                        print "average cost until now:", "cost =", total_error / used_batch, ",",
                        print "time:", time.time() - partial_start
                        partial_num_files = 0
                        if not options["is_debugging"]:
                            print "save model... ",
                            save_model(cfg.cc.MODEL_PATH + model_name + ".gpu" + str(consts["idx_gpu"]) + ".epoch" + str(epoch / consts["save_epoch"] + existing_epoch) + "." + str(num_partial), model, optimizer)
                            print "finished"
                        num_partial += 1
                print "in this epoch, total average cost =", total_error / used_batch, ",",
                print "time:", time.time() - epoch_start

                print_sent_dec(y_pred, y_ext, y_mask, oovs, modules, consts, options, local_batch_size)

                # Checkpoint only while total error keeps dropping.
                if last_total_error > total_error or options["is_debugging"]:
                    last_total_error = total_error
                    if not options["is_debugging"]:
                        print "save model... ",
                        save_model(cfg.cc.MODEL_PATH + model_name + ".gpu" + str(consts["idx_gpu"]) + ".epoch" + str(epoch / consts["save_epoch"] + existing_epoch) + "." + str(num_partial), model, optimizer)
                        print "finished"
                else:
                    print "optimization finished"
                    break

            print "save final model... ",
            save_model(cfg.cc.MODEL_PATH + model_name + "final.gpu" + str(consts["idx_gpu"]) + ".epoch" + str(epoch / consts["save_epoch"] + existing_epoch) + "." + str(num_partial), model, optimizer)
            print "finished"
        else:
            print "skip training model"

        if predict_model:
            predict(model, modules, consts, options)
    print "Finished, time:", time.time() - running_start

if __name__ == "__main__":
    np.set_printoptions(threshold = np.inf)

    existing_model_name = sys.argv[1] if len(sys.argv) > 1 else None
    run(existing_model_name)
27,309
9,577
# Generated by Django 2.1.5 on 2019-02-14 14:25 from django.db import migrations import imagekit.models.fields class Migration(migrations.Migration): dependencies = [ ('openbook_posts', '0015_post_community'), ] operations = [ migrations.AlterField( model_name='postimage', name='image', field=imagekit.models.fields.ProcessedImageField(null=True, upload_to='', verbose_name='image'), ), ]
471
155
# NOTE(review): This module is Python 2 only -- it relies on the builtin
# ``cmp()``, on ``xrange`` (in test_hashproof), and on importing the ABCs
# directly from ``collections`` (moved to ``collections.abc`` in Python 3).
from collections import MutableMapping, MutableSet, namedtuple

from operator import itemgetter


class Node(namedtuple("Node", "value, left, right, red")):
    """
    One immutable node of a left-leaning red-black tree.

    Fields: ``value`` (payload), ``left``/``right`` (children, possibly the
    shared NULL sentinel), ``red`` (color of the link into this node).  All
    "mutation" is done with ``_replace`` copies, so subtrees are shared and
    never modified in place.
    """

    __slots__ = ()

    def size(self):
        """
        Recursively find size of a tree. Slow.
        """

        if self is NULL:
            return 0
        return 1 + self.left.size() + self.right.size()

    def find(self, value, key):
        """
        Find a value in a node, using a key function.
        """

        # Iterative binary-search descent; returns None when not found.
        while self is not NULL:
            direction = cmp(key(value), key(self.value))
            if direction < 0:
                self = self.left
            elif direction > 0:
                self = self.right
            elif direction == 0:
                return self.value

    def find_prekeyed(self, value, key):
        """
        Find a value in a node, using a key function. The value is already a
        key.
        """

        while self is not NULL:
            direction = cmp(value, key(self.value))
            if direction < 0:
                self = self.left
            elif direction > 0:
                self = self.right
            elif direction == 0:
                return self.value

    def rotate_left(self):
        """
        Rotate the node to the left.
        """

        # The old right child becomes the new root of this subtree; it
        # inherits our color, and we become its red left child.
        right = self.right
        new = self._replace(right=self.right.left, red=True)
        top = right._replace(left=new, red=self.red)
        return top

    def rotate_right(self):
        """
        Rotate the node to the right.
        """

        # Mirror image of rotate_left().
        left = self.left
        new = self._replace(left=self.left.right, red=True)
        top = left._replace(right=new, red=self.red)
        return top

    def flip(self):
        """
        Flip colors of a node and its children.
        """

        left = self.left._replace(red=not self.left.red)
        right = self.right._replace(red=not self.right.red)
        top = self._replace(left=left, right=right, red=not self.red)
        return top

    def balance(self):
        """
        Balance a node.

        The balance is inductive and relies on all subtrees being balanced
        recursively or by construction. If the subtrees are not balanced,
        then this will not fix them.
        """

        # Always lean left with red nodes.
        if self.right.red:
            self = self.rotate_left()

        # Never permit red nodes to have red children. Note that if the left-hand
        # node is NULL, it will short-circuit and fail this test, so we don't have
        # to worry about a dereference here.
        if self.left.red and self.left.left.red:
            self = self.rotate_right()

        # Finally, move red children on both sides up to the next level, reducing
        # the total redness.
        if self.left.red and self.right.red:
            self = self.flip()

        return self

    def insert(self, value, key):
        """
        Insert a value into a tree rooted at the given node, and return
        whether this was an insertion or update.

        Balances the tree during insertion.

        An update is performed instead of an insertion if a value in the tree
        compares equal to the new value.
        """

        # Base case: Insertion into the empty tree is just creating a new node
        # with no children.
        if self is NULL:
            return Node(value, NULL, NULL, True), True

        # Recursive case: Insertion into a non-empty tree is insertion into
        # whichever of the two sides is correctly compared.
        direction = cmp(key(value), key(self.value))
        if direction < 0:
            left, insertion = self.left.insert(value, key)
            self = self._replace(left=left)
        elif direction > 0:
            right, insertion = self.right.insert(value, key)
            self = self._replace(right=right)
        elif direction == 0:
            # Exact hit on an existing node (this node, in fact). In this
            # case, perform an update.
            self = self._replace(value=value)
            insertion = False

        # And balance on the way back up.
        return self.balance(), insertion

    def move_red_left(self):
        """
        Shuffle red to the left of a tree.
        """

        self = self.flip()
        if self.right is not NULL and self.right.left.red:
            self = self._replace(right=self.right.rotate_right())
            self = self.rotate_left().flip()
        return self

    def move_red_right(self):
        """
        Shuffle red to the right of a tree.
        """

        self = self.flip()
        if self.left is not NULL and self.left.left.red:
            self = self.rotate_right().flip()
        return self

    def delete_min(self):
        """
        Delete the left-most value from a tree.

        Returns a (new subtree, deleted value) pair.
        """

        # Base case: If there are no nodes lesser than this node, then this is the
        # node to delete.
        if self.left is NULL:
            return NULL, self.value

        # Acquire more reds if necessary to continue the traversal. The
        # double-deep check is fine because NULL is red.
        # NOTE(review): NULL is actually constructed black (red=False); the
        # double-deep check is safe simply because NULL still carries a
        # ``.red`` attribute, so no dereference can fail.
        if not self.left.red and not self.left.left.red:
            self = self.move_red_left()

        # Recursive case: Delete the minimum node of all nodes lesser than this
        # node.
        left, value = self.left.delete_min()
        self = self._replace(left=left)

        return self.balance(), value

    def delete_max(self):
        """
        Delete the right-most value from a tree.

        Returns a (new subtree, deleted value) pair.
        """

        # Attempt to rotate left-leaning reds to the right.
        if self.left.red:
            self = self.rotate_right()

        # Base case: If there are no selfs greater than this self, then this is
        # the self to delete.
        if self.right is NULL:
            return NULL, self.value

        # Acquire more reds if necessary to continue the traversal. NULL is
        # red so this check doesn't need to check for NULL.
        # NOTE(review): as in delete_min(), NULL is actually black; the check
        # is safe because NULL has a ``.red`` attribute.
        if not self.right.red and not self.right.left.red:
            self = self.move_red_right()

        # Recursive case: Delete the maximum self of all selfs greater than this
        # self.
        right, value = self.right.delete_max()
        self = self._replace(right=right)

        return self.balance(), value

    def delete(self, value, key):
        """
        Delete a value from a tree.

        Raises KeyError when the value is not present.
        """

        # Base case: The empty tree cannot possibly have the desired value.
        if self is NULL:
            raise KeyError(value)

        direction = cmp(key(value), key(self.value))

        # Because we lean to the left, the left case stands alone.
        if direction < 0:
            if (not self.left.red and self.left is not NULL and
                not self.left.left.red):
                self = self.move_red_left()
            # Delete towards the left.
            left = self.left.delete(value, key)
            self = self._replace(left=left)
        else:
            # If we currently lean to the left, lean to the right for now.
            if self.left.red:
                self = self.rotate_right()

            # Best case: The node on our right (which we just rotated there) is a
            # red link and also we were just holding the node to delete. In that
            # case, we just rotated NULL into our current node, and the node to
            # the right is the lone matching node to delete.
            if direction == 0 and self.right is NULL:
                return NULL

            # No? Okay. Move more reds to the right so that we can continue to
            # traverse in that direction. At *this* spot, we do have to confirm
            # that node.right is not NULL...
            if (not self.right.red and self.right is not NULL and
                not self.right.left.red):
                self = self.move_red_right()

            if direction > 0:
                # Delete towards the right.
                right = self.right.delete(value, key)
                self = self._replace(right=right)
            else:
                # Annoying case: The current node was the node to delete all
                # along! Use a right-handed minimum deletion. First find the
                # replacement value to rebuild the current node with, then delete
                # the replacement value from the right-side tree. Finally, create
                # the new node with the old value replaced and the replaced value
                # deleted.
                # NOTE(review): the following loop is dead code -- ``rnode``
                # always walks down to NULL and is never read afterwards; the
                # replacement actually comes from delete_min() below.
                rnode = self.right
                while rnode is not NULL:
                    rnode = rnode.left
                right, replacement = self.right.delete_min()
                self = self._replace(value=replacement, right=right)

        return self.balance()


# The shared sentinel standing in for every empty subtree. Its links point
# at None and it is black (red=False).
NULL = Node(None, None, None, False)


class BJ(MutableSet):
    """
    A red-black tree.

    Blackjacks are based on traditional self-balancing tree theory, and have
    logarithmic time and space bounds on all mutations in the worst case,
    and linear bounds on iteration.

    Blackjacks are mutable sets. See ``collections.MutableSet`` for a
    precise definition of what this class is capable of.

    Iteration on blackjacks is always ordered according to the key function
    used to create the blackjack.

    In addition to the standard methods, blackjacks can also pop their
    minimum and maximum values easily, and the ``find()`` method can
    retrieve the stored value for a key value.
    """

    # Class-level defaults; instances rebind these on first mutation.
    root = NULL
    _len = 0

    def __init__(self, iterable=None, key=None):
        # The key function maps stored values to comparison keys; default is
        # identity.
        if key is None:
            self._key = lambda v: v
        else:
            self._key = key

        if iterable is not None:
            for item in iterable:
                self.add(item)

    def __repr__(self):
        return "BJ([%s])" % ", ".join(repr(i) for i in self)

    def __contains__(self, value):
        return self.root.find(value, self._key) is not None

    def __len__(self):
        return self._len

    def __iter__(self):
        # Iterative in-order traversal using an explicit stack; yields values
        # in ascending key order.
        node = self.root
        stack = []

        while stack or node is not NULL:
            if node is not NULL:
                stack.append(node)
                node = node.left
            else:
                node = stack.pop()
                yield node.value
                node = node.right

    def add(self, value):
        # insert() reports whether a new node was created (True) or an
        # existing one was updated (False); bool adds as 1/0.
        self.root, insertion = self.root.insert(value, self._key)
        self._len += insertion

    def discard(self, value):
        # NOTE(review): contrary to the usual MutableSet.discard() contract,
        # this raises KeyError for a missing value (Node.delete raises); the
        # unit tests below assert exactly that behavior, so it is deliberate.
        self.root = self.root.delete(value, self._key)
        self._len -= 1

    def find(self, value):
        """
        Find the actual stored value for a given key value.
        """

        return self.root.find(value, self._key)

    def pop_max(self):
        """
        Remove the maximum value and return it.
        """

        if self.root is NULL:
            raise KeyError("pop from an empty blackjack")

        self.root, value = self.root.delete_max()
        self._len -= 1
        return value

    def pop_min(self):
        """
        Remove the minimum value and return it.
        """

        if self.root is NULL:
            raise KeyError("pop from an empty blackjack")

        self.root, value = self.root.delete_min()
        self._len -= 1
        return value


class Deck(MutableMapping):
    """
    A mutable mapping based on a blackjack.

    Like blackjacks, decks are powered by red-black trees and have the same
    bounds on operations.
    """

    def __init__(self, mapping=None):
        # Entries are stored as (key, value) tuples ordered by key.
        # NOTE(review): passing a plain dict here would iterate its *keys*,
        # not (key, value) pairs; callers appear to pass pair iterables or
        # nothing -- confirm before passing a dict.
        self._bj = BJ(mapping, key=itemgetter(0))

    def __repr__(self):
        return "Deck({%s})" % ", ".join("%r: %r" % i for i in self.iteritems())

    def __len__(self):
        return len(self._bj)

    def __iter__(self):
        return self.iterkeys()

    def __getitem__(self, key):
        # Messy.
        # NOTE(review): a stored value of None is indistinguishable from a
        # missing key here, since find_prekeyed() returns None for both.
        value = self._bj.root.find_prekeyed(key, self._bj._key)
        if value is None:
            raise KeyError(key)
        return value[1]

    def __setitem__(self, key, value):
        self._bj.add((key, value))

    def __delitem__(self, key):
        # Blah. Just do it.
        value = self[key]
        self._bj.discard((key, value))

    def iteritems(self):
        # The underlying blackjack stores (key, value) tuples in key order.
        return iter(self._bj)

    def iterkeys(self):
        for k, v in self.iteritems():
            yield k

    def itervalues(self):
        for k, v in self.iteritems():
            yield v


from unittest import TestCase


class TestTrees(TestCase):
    # Direct structural checks of Node.balance() on tiny hand-built trees.

    def test_balance_right(self):
        # A right-leaning red link must be rotated left.
        node = Node(1, NULL, Node(2, NULL, NULL, True), False)
        balanced = Node(2, Node(1, NULL, NULL, True), NULL, False)
        self.assertEqual(node.balance(), balanced)

    def test_balance_four(self):
        # Two red children flip colors, pushing redness up.
        node = Node(2, Node(1, NULL, NULL, True), Node(3, NULL, NULL, True),
                    False)
        balanced = Node(2, Node(1, NULL, NULL, False),
                        Node(3, NULL, NULL, False), True)
        self.assertEqual(node.balance(), balanced)

    def test_balance_left_four(self):
        # Two consecutive left reds rotate right, then flip.
        node = Node(3, Node(2, Node(1, NULL, NULL, True), NULL, True), NULL,
                    False)
        balanced = Node(2, Node(1, NULL, NULL, False),
                        Node(3, NULL, NULL, False), True)
        self.assertEqual(node.balance(), balanced)


class TestBlackjack(TestCase):

    def test_len_single(self):
        bj = BJ([1])
        self.assertEqual(1, len(bj))

    def test_len_many(self):
        bj = BJ(range(10))
        self.assertEqual(10, len(bj))

    def test_len_many_duplicate(self):
        # Re-adding existing values is an update, not an insertion, so the
        # length must not grow.
        bj = BJ(range(10))
        bj.add(0)
        bj.add(5)
        bj.add(9)
        self.assertEqual(10, len(bj))

    def test_len_after_discard(self):
        bj = BJ(range(10))
        bj.discard(0)
        self.assertEqual(9, len(bj))

    def test_contains_single(self):
        bj = BJ([1])
        self.assertTrue(1 in bj)

    def test_contains_several(self):
        bj = BJ([1, 2, 3])
        self.assertTrue(1 in bj)
        self.assertTrue(2 in bj)
        self.assertTrue(3 in bj)

    def test_iter_single(self):
        l = [1]
        bj = BJ(l)
        self.assertEqual(list(iter(bj)), l)

    def test_iter_several(self):
        # Python 2: range() returns a list, so direct comparison works.
        l = range(10)
        bj = BJ(l)
        self.assertEqual(list(iter(bj)), l)

    def test_discard(self):
        bj = BJ([1])
        bj.discard(1)
        self.assertTrue(1 not in bj)

    def test_discard_missing_empty(self):
        # Documents the non-standard behavior: discard() raises on a miss.
        bj = BJ()
        self.assertRaises(KeyError, bj.discard, 2)

    def test_discard_missing(self):
        bj = BJ([1])
        self.assertRaises(KeyError, bj.discard, 2)

    def test_hashproof(self):
        """
        Generate around 32MiB of numeric data and insert it into a single
        tree.

        This is a time-sensitive test that should complete in a few seconds
        instead of taking hours. See
        http://bugs.python.org/issue13703#msg150620 for context.
        """

        # Values are chosen to collide under hash() -- the tree must not
        # degrade the way a hash table would.
        g = ((x*(2**64 - 1), hash(x*(2**64 - 1))) for x in xrange(1, 10000))
        bj = BJ(g)


class TestDeck(TestCase):

    def test_get_set_single(self):
        d = Deck()
        d["test"] = "value"
        self.assertEqual(d["test"], "value")

    def test_get_set_several(self):
        d = Deck()
        d["first"] = "second"
        d["third"] = "fourth"
        d["fifth"] = "sixth"
        self.assertEqual(d["first"], "second")
        self.assertEqual(d["third"], "fourth")
        self.assertEqual(d["fifth"], "sixth")
15,517
4,550
from ixnetwork_restpy.base import Base from ixnetwork_restpy.files import Files class LinkOAM(Base): __slots__ = () _SDM_NAME = 'linkOAM' _SDM_ATT_MAP = { 'PacketSubtype': 'linkOAM.header.packet.subtype-1', 'PacketFlags': 'linkOAM.header.packet.flags-2', 'InformationOAMPDUCode': 'linkOAM.header.packet.pduType.informationOAMPDU.code-3', 'LocalInfoTLVType': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.localInfoTLV.type-4', 'LocalInfoTLVLength': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.localInfoTLV.length-5', 'LocalInfoTLVVersion': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.localInfoTLV.version-6', 'LocalInfoTLVRevision': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.localInfoTLV.revision-7', 'LocalInfoTLVState': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.localInfoTLV.state-8', 'LocalInfoTLVOamConfig': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.localInfoTLV.oamConfig-9', 'LocalInfoTLVOamPDUConfig': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.localInfoTLV.oamPDUConfig-10', 'LocalInfoTLVOui': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.localInfoTLV.oui-11', 'LocalInfoTLVVendorInfo': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.localInfoTLV.vendorInfo-12', 'RemoteInfoTLVType': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.remoteInfoTLV.type-13', 'RemoteInfoTLVLength': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.remoteInfoTLV.length-14', 'RemoteInfoTLVVersion': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.remoteInfoTLV.version-15', 'RemoteInfoTLVRevision': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.remoteInfoTLV.revision-16', 'RemoteInfoTLVState': 
'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.remoteInfoTLV.state-17', 'RemoteInfoTLVOamConfig': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.remoteInfoTLV.oamConfig-18', 'RemoteInfoTLVOamPDUConfig': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.remoteInfoTLV.oamPDUConfig-19', 'RemoteInfoTLVOui': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.remoteInfoTLV.oui-20', 'RemoteInfoTLVVendorInfo': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.remoteInfoTLV.vendorInfo-21', 'OrganizationSpecificInfoTLVType': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.organizationSpecificInfoTLV.type-22', 'OrganizationSpecificInfoTLVLength': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.organizationSpecificInfoTLV.length-23', 'OrganizationSpecificInfoTLVOui': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.organizationSpecificInfoTLV.oui-24', 'OrganizationSpecificInfoTLVValueLength': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.organizationSpecificInfoTLV.valueLength-25', 'OrganizationSpecificInfoTLVValue': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.organizationSpecificInfoTLV.value-26', 'ReservedTLVType': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.reservedTLV.type-27', 'ReservedTLVLength': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.reservedTLV.length-28', 'ReservedTLVValueLength': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.reservedTLV.valueLength-29', 'ReservedTLVValue': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.reservedTLV.value-30', 'EndTLVType': 'linkOAM.header.packet.pduType.informationOAMPDU.informationTLV.tlvType.endTLV.type-31', 'LoopbackControlOAMPDUCode': 
'linkOAM.header.packet.pduType.loopbackControlOAMPDU.code-32', 'LoopbackControlOAMPDULoopbackCommand': 'linkOAM.header.packet.pduType.loopbackControlOAMPDU.loopbackCommand-33', 'OrganizationSpecificOAMPDUCode': 'linkOAM.header.packet.pduType.organizationSpecificOAMPDU.code-34', 'OrganizationSpecificOAMPDUOui': 'linkOAM.header.packet.pduType.organizationSpecificOAMPDU.oui-35', 'OrganizationSpecificOAMPDUValueLength': 'linkOAM.header.packet.pduType.organizationSpecificOAMPDU.valueLength-36', 'OrganizationSpecificOAMPDUValue': 'linkOAM.header.packet.pduType.organizationSpecificOAMPDU.value-37', 'EventNotificationOAMPDUCode': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.code-38', 'EventNotificationOAMPDUSequenceNumber': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.sequenceNumber-39', 'TlvtypeEndTLVType': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.endTLV.type-40', 'ErroredSymbolPeriodEventTLVType': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredSymbolPeriodEventTLV.type-41', 'ErroredSymbolPeriodEventTLVLength': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredSymbolPeriodEventTLV.length-42', 'ErroredSymbolPeriodEventTLVTimestamp': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredSymbolPeriodEventTLV.timestamp-43', 'ErroredSymbolPeriodEventTLVSymbolWindow': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredSymbolPeriodEventTLV.symbolWindow-44', 'ErroredSymbolPeriodEventTLVSymbolThreshold': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredSymbolPeriodEventTLV.symbolThreshold-45', 'ErroredSymbolPeriodEventTLVSymbols': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredSymbolPeriodEventTLV.symbols-46', 'ErroredSymbolPeriodEventTLVErrorRunningTotal': 
'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredSymbolPeriodEventTLV.errorRunningTotal-47', 'ErroredSymbolPeriodEventTLVEventRunningTotal': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredSymbolPeriodEventTLV.eventRunningTotal-48', 'ErroredFrameEventTLVType': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFrameEventTLV.type-49', 'ErroredFrameEventTLVLength': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFrameEventTLV.length-50', 'ErroredFrameEventTLVTimestamp': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFrameEventTLV.timestamp-51', 'ErroredFrameEventTLVFrameWindow': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFrameEventTLV.frameWindow-52', 'ErroredFrameEventTLVFrameThreshold': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFrameEventTLV.frameThreshold-53', 'ErroredFrameEventTLVFrames': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFrameEventTLV.frames-54', 'ErroredFrameEventTLVErrorRunningTotal': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFrameEventTLV.errorRunningTotal-55', 'ErroredFrameEventTLVEventRunningTotal': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFrameEventTLV.eventRunningTotal-56', 'ErroredFramesPeriodEventTLVType': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesPeriodEventTLV.type-57', 'ErroredFramesPeriodEventTLVLength': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesPeriodEventTLV.length-58', 'ErroredFramesPeriodEventTLVTimestamp': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesPeriodEventTLV.timestamp-59', 'ErroredFramesPeriodEventTLVFrameWindow': 
'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesPeriodEventTLV.frameWindow-60', 'ErroredFramesPeriodEventTLVFrameThreshold': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesPeriodEventTLV.frameThreshold-61', 'ErroredFramesPeriodEventTLVFrames': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesPeriodEventTLV.frames-62', 'ErroredFramesPeriodEventTLVErrorRunningTotal': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesPeriodEventTLV.errorRunningTotal-63', 'ErroredFramesPeriodEventTLVEventRunningTotal': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesPeriodEventTLV.eventRunningTotal-64', 'ErroredFramesSecondsSummaryEventTLVType': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesSecondsSummaryEventTLV.type-65', 'ErroredFramesSecondsSummaryEventTLVLength': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesSecondsSummaryEventTLV.length-66', 'ErroredFramesSecondsSummaryEventTLVTimestamp': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesSecondsSummaryEventTLV.timestamp-67', 'ErroredFramesSecondsSummaryEventTLVFrameSecondsWindow': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesSecondsSummaryEventTLV.frameSecondsWindow-68', 'ErroredFramesSecondsSummaryEventTLVFrameSecondsThreshold': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesSecondsSummaryEventTLV.frameSecondsThreshold-69', 'ErroredFramesSecondsSummaryEventTLVFrameSeconds': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesSecondsSummaryEventTLV.frameSeconds-70', 'ErroredFramesSecondsSummaryEventTLVErrorRunningTotal': 
'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesSecondsSummaryEventTLV.errorRunningTotal-71', 'ErroredFramesSecondsSummaryEventTLVEventRunningTotal': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.erroredFramesSecondsSummaryEventTLV.eventRunningTotal-72', 'OrganizationSpecificEventTLVType': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.organizationSpecificEventTLV.type-73', 'OrganizationSpecificEventTLVLength': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.organizationSpecificEventTLV.length-74', 'OrganizationSpecificEventTLVOui': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.organizationSpecificEventTLV.oui-75', 'OrganizationSpecificEventTLVValueLength': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.organizationSpecificEventTLV.valueLength-76', 'OrganizationSpecificEventTLVValue': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.organizationSpecificEventTLV.value-77', 'TlvtypeReservedTLVType': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.reservedTLV.type-78', 'TlvtypeReservedTLVLength': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.reservedTLV.length-79', 'TlvtypeReservedTLVValueLength': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.reservedTLV.valueLength-80', 'TlvtypeReservedTLVValue': 'linkOAM.header.packet.pduType.eventNotificationOAMPDU.eventTVL.tlvType.reservedTLV.value-81', 'VariableRequestOAMPDUCode': 'linkOAM.header.packet.pduType.variableRequestOAMPDU.code-82', 'VariableDescriptorBranch': 'linkOAM.header.packet.pduType.variableRequestOAMPDU.descriptors.tlvType.variableDescriptor.branch-83', 'VariableDescriptorLeaf': 'linkOAM.header.packet.pduType.variableRequestOAMPDU.descriptors.tlvType.variableDescriptor.leaf-84', 'EndDescriptorEndOfDescriptor': 
'linkOAM.header.packet.pduType.variableRequestOAMPDU.descriptors.tlvType.endDescriptor.endOfDescriptor-85', 'VariableResponseOAMPDUCode': 'linkOAM.header.packet.pduType.variableResponseOAMPDU.code-86', 'VariableContainerBranch': 'linkOAM.header.packet.pduType.variableResponseOAMPDU.containers.tlvType.variableContainer.branch-87', 'VariableContainerLeaf': 'linkOAM.header.packet.pduType.variableResponseOAMPDU.containers.tlvType.variableContainer.leaf-88', 'VariableContainerWidth': 'linkOAM.header.packet.pduType.variableResponseOAMPDU.containers.tlvType.variableContainer.width-89', 'VariableContainerValueLength': 'linkOAM.header.packet.pduType.variableResponseOAMPDU.containers.tlvType.variableContainer.valueLength-90', 'VariableContainerValue': 'linkOAM.header.packet.pduType.variableResponseOAMPDU.containers.tlvType.variableContainer.value-91', 'EndContainerEndOfContainer': 'linkOAM.header.packet.pduType.variableResponseOAMPDU.containers.tlvType.endContainer.endOfContainer-92', 'HeaderFcs': 'linkOAM.header.fcs-93', } def __init__(self, parent, list_op=False): super(LinkOAM, self).__init__(parent, list_op) @property def PacketSubtype(self): """ Display Name: Sub Type Default Value: 0x03 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['PacketSubtype'])) @property def PacketFlags(self): """ Display Name: Flags Default Value: 0x0000 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['PacketFlags'])) @property def InformationOAMPDUCode(self): """ Display Name: Code Default Value: 0x00 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['InformationOAMPDUCode'])) @property def LocalInfoTLVType(self): """ Display Name: Type Default Value: 0x01 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return 
Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LocalInfoTLVType'])) @property def LocalInfoTLVLength(self): """ Display Name: Length Default Value: 0x10 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LocalInfoTLVLength'])) @property def LocalInfoTLVVersion(self): """ Display Name: OAM Version Default Value: 0x01 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LocalInfoTLVVersion'])) @property def LocalInfoTLVRevision(self): """ Display Name: Revision Default Value: 0 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LocalInfoTLVRevision'])) @property def LocalInfoTLVState(self): """ Display Name: State Default Value: 0x01 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LocalInfoTLVState'])) @property def LocalInfoTLVOamConfig(self): """ Display Name: OAM Configuration Default Value: 0x00 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LocalInfoTLVOamConfig'])) @property def LocalInfoTLVOamPDUConfig(self): """ Display Name: OAMPDU Configuration Default Value: 0x5DC Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LocalInfoTLVOamPDUConfig'])) @property def LocalInfoTLVOui(self): """ Display Name: OUI Default Value: 0x000100 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LocalInfoTLVOui'])) @property def LocalInfoTLVVendorInfo(self): """ Display Name: Vendor Specific Information Default Value: 0x00000000 Value Format: hex """ from 
ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LocalInfoTLVVendorInfo'])) @property def RemoteInfoTLVType(self): """ Display Name: Type Default Value: 0x02 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['RemoteInfoTLVType'])) @property def RemoteInfoTLVLength(self): """ Display Name: Length Default Value: 0x10 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['RemoteInfoTLVLength'])) @property def RemoteInfoTLVVersion(self): """ Display Name: OAM Version Default Value: 0x01 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['RemoteInfoTLVVersion'])) @property def RemoteInfoTLVRevision(self): """ Display Name: Revision Default Value: 0 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['RemoteInfoTLVRevision'])) @property def RemoteInfoTLVState(self): """ Display Name: State Default Value: 0x01 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['RemoteInfoTLVState'])) @property def RemoteInfoTLVOamConfig(self): """ Display Name: OAM Configuration Default Value: 0x00 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['RemoteInfoTLVOamConfig'])) @property def RemoteInfoTLVOamPDUConfig(self): """ Display Name: OAMPDU Configuration Default Value: 0x5DC Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['RemoteInfoTLVOamPDUConfig'])) @property def RemoteInfoTLVOui(self): """ Display Name: OUI Default Value: 0x000100 Value 
Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['RemoteInfoTLVOui'])) @property def RemoteInfoTLVVendorInfo(self): """ Display Name: Vendor Specific Information Default Value: 0x00000000 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['RemoteInfoTLVVendorInfo'])) @property def OrganizationSpecificInfoTLVType(self): """ Display Name: Type Default Value: 0xFE Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OrganizationSpecificInfoTLVType'])) @property def OrganizationSpecificInfoTLVLength(self): """ Display Name: Length Default Value: 0 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OrganizationSpecificInfoTLVLength'])) @property def OrganizationSpecificInfoTLVOui(self): """ Display Name: Organizationally Unique Identifier Default Value: 0x000100 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OrganizationSpecificInfoTLVOui'])) @property def OrganizationSpecificInfoTLVValueLength(self): """ Display Name: Organization Specific Value Len Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OrganizationSpecificInfoTLVValueLength'])) @property def OrganizationSpecificInfoTLVValue(self): """ Display Name: Organization Specific Value Default Value: 0x00 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OrganizationSpecificInfoTLVValue'])) @property def ReservedTLVType(self): """ Display Name: Type Default Value: 0xFF Value Format: hex """ from 
ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ReservedTLVType'])) @property def ReservedTLVLength(self): """ Display Name: Length Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ReservedTLVLength'])) @property def ReservedTLVValueLength(self): """ Display Name: Value Len Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ReservedTLVValueLength'])) @property def ReservedTLVValue(self): """ Display Name: Value Default Value: 0x00 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ReservedTLVValue'])) @property def EndTLVType(self): """ Display Name: end_of_tlv Default Value: 0x00 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EndTLVType'])) @property def LoopbackControlOAMPDUCode(self): """ Display Name: Code Default Value: 0x04 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LoopbackControlOAMPDUCode'])) @property def LoopbackControlOAMPDULoopbackCommand(self): """ Display Name: Loopback Command Default Value: 1 Value Format: decimal Available enum values: Enable OAM Remote Loopback, 1, Disable OAM Remote Loopback, 2 """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['LoopbackControlOAMPDULoopbackCommand'])) @property def OrganizationSpecificOAMPDUCode(self): """ Display Name: Code Default Value: 0xFE Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OrganizationSpecificOAMPDUCode'])) 
@property def OrganizationSpecificOAMPDUOui(self): """ Display Name: OUI Default Value: 0x000100 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OrganizationSpecificOAMPDUOui'])) @property def OrganizationSpecificOAMPDUValueLength(self): """ Display Name: Length Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OrganizationSpecificOAMPDUValueLength'])) @property def OrganizationSpecificOAMPDUValue(self): """ Display Name: Value Default Value: 0x00 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OrganizationSpecificOAMPDUValue'])) @property def EventNotificationOAMPDUCode(self): """ Display Name: Code Default Value: 0x01 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EventNotificationOAMPDUCode'])) @property def EventNotificationOAMPDUSequenceNumber(self): """ Display Name: Sequence Number Default Value: 0 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EventNotificationOAMPDUSequenceNumber'])) @property def TlvtypeEndTLVType(self): """ Display Name: end_of_tlv Default Value: 0x00 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['TlvtypeEndTLVType'])) @property def ErroredSymbolPeriodEventTLVType(self): """ Display Name: Type Default Value: 0x01 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredSymbolPeriodEventTLVType'])) @property def ErroredSymbolPeriodEventTLVLength(self): """ Display Name: Length Default Value: 0x28 Value Format: 
hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredSymbolPeriodEventTLVLength'])) @property def ErroredSymbolPeriodEventTLVTimestamp(self): """ Display Name: Time Stamp Default Value: 0x0000 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredSymbolPeriodEventTLVTimestamp'])) @property def ErroredSymbolPeriodEventTLVSymbolWindow(self): """ Display Name: Symbol Window Default Value: 10 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredSymbolPeriodEventTLVSymbolWindow'])) @property def ErroredSymbolPeriodEventTLVSymbolThreshold(self): """ Display Name: Symbol Threshold Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredSymbolPeriodEventTLVSymbolThreshold'])) @property def ErroredSymbolPeriodEventTLVSymbols(self): """ Display Name: Symbols Default Value: 0 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredSymbolPeriodEventTLVSymbols'])) @property def ErroredSymbolPeriodEventTLVErrorRunningTotal(self): """ Display Name: Error Running Total Default Value: 0 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredSymbolPeriodEventTLVErrorRunningTotal'])) @property def ErroredSymbolPeriodEventTLVEventRunningTotal(self): """ Display Name: Event Running Total Default Value: 0 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredSymbolPeriodEventTLVEventRunningTotal'])) @property def ErroredFrameEventTLVType(self): """ Display 
Name: Type Default Value: 0x02 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFrameEventTLVType'])) @property def ErroredFrameEventTLVLength(self): """ Display Name: Length Default Value: 0x1A Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFrameEventTLVLength'])) @property def ErroredFrameEventTLVTimestamp(self): """ Display Name: Time Stamp Default Value: 0x0000 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFrameEventTLVTimestamp'])) @property def ErroredFrameEventTLVFrameWindow(self): """ Display Name: Frame Window Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFrameEventTLVFrameWindow'])) @property def ErroredFrameEventTLVFrameThreshold(self): """ Display Name: Frame Threshold Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFrameEventTLVFrameThreshold'])) @property def ErroredFrameEventTLVFrames(self): """ Display Name: Frames Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFrameEventTLVFrames'])) @property def ErroredFrameEventTLVErrorRunningTotal(self): """ Display Name: Error Running Total Default Value: 0 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFrameEventTLVErrorRunningTotal'])) @property def ErroredFrameEventTLVEventRunningTotal(self): """ Display Name: Event Running Total Default Value: 0 Value Format: decimal """ from 
ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFrameEventTLVEventRunningTotal'])) @property def ErroredFramesPeriodEventTLVType(self): """ Display Name: Type Default Value: 0x03 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFramesPeriodEventTLVType'])) @property def ErroredFramesPeriodEventTLVLength(self): """ Display Name: Length Default Value: 0x1C Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFramesPeriodEventTLVLength'])) @property def ErroredFramesPeriodEventTLVTimestamp(self): """ Display Name: Time Stamp Default Value: 0x0000 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFramesPeriodEventTLVTimestamp'])) @property def ErroredFramesPeriodEventTLVFrameWindow(self): """ Display Name: Frame Window Default Value: 10 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFramesPeriodEventTLVFrameWindow'])) @property def ErroredFramesPeriodEventTLVFrameThreshold(self): """ Display Name: Frame Threshold Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFramesPeriodEventTLVFrameThreshold'])) @property def ErroredFramesPeriodEventTLVFrames(self): """ Display Name: Frames Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFramesPeriodEventTLVFrames'])) @property def ErroredFramesPeriodEventTLVErrorRunningTotal(self): """ Display Name: Error Running Total Default Value: 0 Value Format: decimal """ from 
ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFramesPeriodEventTLVErrorRunningTotal'])) @property def ErroredFramesPeriodEventTLVEventRunningTotal(self): """ Display Name: Event Running Total Default Value: 0 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFramesPeriodEventTLVEventRunningTotal'])) @property def ErroredFramesSecondsSummaryEventTLVType(self): """ Display Name: Type Default Value: 0x04 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFramesSecondsSummaryEventTLVType'])) @property def ErroredFramesSecondsSummaryEventTLVLength(self): """ Display Name: Length Default Value: 0x12 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFramesSecondsSummaryEventTLVLength'])) @property def ErroredFramesSecondsSummaryEventTLVTimestamp(self): """ Display Name: Time Stamp Default Value: 0x0000 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFramesSecondsSummaryEventTLVTimestamp'])) @property def ErroredFramesSecondsSummaryEventTLVFrameSecondsWindow(self): """ Display Name: Frame Seconds Summary Window Default Value: 60 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFramesSecondsSummaryEventTLVFrameSecondsWindow'])) @property def ErroredFramesSecondsSummaryEventTLVFrameSecondsThreshold(self): """ Display Name: Frame Seconds Summary Threshold Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, 
self._get_attribute(self._SDM_ATT_MAP['ErroredFramesSecondsSummaryEventTLVFrameSecondsThreshold'])) @property def ErroredFramesSecondsSummaryEventTLVFrameSeconds(self): """ Display Name: Frame Seconds Summary Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFramesSecondsSummaryEventTLVFrameSeconds'])) @property def ErroredFramesSecondsSummaryEventTLVErrorRunningTotal(self): """ Display Name: Error Running Total Default Value: 0 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFramesSecondsSummaryEventTLVErrorRunningTotal'])) @property def ErroredFramesSecondsSummaryEventTLVEventRunningTotal(self): """ Display Name: Event Running Total Default Value: 0 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['ErroredFramesSecondsSummaryEventTLVEventRunningTotal'])) @property def OrganizationSpecificEventTLVType(self): """ Display Name: Type Default Value: 0xFE Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OrganizationSpecificEventTLVType'])) @property def OrganizationSpecificEventTLVLength(self): """ Display Name: Length Default Value: 0x00 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OrganizationSpecificEventTLVLength'])) @property def OrganizationSpecificEventTLVOui(self): """ Display Name: Organizationally Unique Identifier Default Value: 0x000100 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OrganizationSpecificEventTLVOui'])) @property def OrganizationSpecificEventTLVValueLength(self): """ Display Name: 
Organization Specific Value Len Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OrganizationSpecificEventTLVValueLength'])) @property def OrganizationSpecificEventTLVValue(self): """ Display Name: Organization Specific Value Default Value: 0x00 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['OrganizationSpecificEventTLVValue'])) @property def TlvtypeReservedTLVType(self): """ Display Name: Type Default Value: 0xFF Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['TlvtypeReservedTLVType'])) @property def TlvtypeReservedTLVLength(self): """ Display Name: Length Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['TlvtypeReservedTLVLength'])) @property def TlvtypeReservedTLVValueLength(self): """ Display Name: Value Len Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['TlvtypeReservedTLVValueLength'])) @property def TlvtypeReservedTLVValue(self): """ Display Name: Value Default Value: 0x00 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['TlvtypeReservedTLVValue'])) @property def VariableRequestOAMPDUCode(self): """ Display Name: Code Default Value: 0x02 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['VariableRequestOAMPDUCode'])) @property def VariableDescriptorBranch(self): """ Display Name: Variable Branch Default Value: 0x07 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return 
Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['VariableDescriptorBranch'])) @property def VariableDescriptorLeaf(self): """ Display Name: Variable Leaf Default Value: 0x0002 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['VariableDescriptorLeaf'])) @property def EndDescriptorEndOfDescriptor(self): """ Display Name: end_of_descriptor Default Value: 0x00 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EndDescriptorEndOfDescriptor'])) @property def VariableResponseOAMPDUCode(self): """ Display Name: Code Default Value: 0x03 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['VariableResponseOAMPDUCode'])) @property def VariableContainerBranch(self): """ Display Name: Variable Branch Default Value: 0x07 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['VariableContainerBranch'])) @property def VariableContainerLeaf(self): """ Display Name: Variable Leaf Default Value: 0x0002 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['VariableContainerLeaf'])) @property def VariableContainerWidth(self): """ Display Name: Variable Width Default Value: 0x01 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['VariableContainerWidth'])) @property def VariableContainerValueLength(self): """ Display Name: Variable Value Len Default Value: 1 Value Format: decimal """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['VariableContainerValueLength'])) @property def VariableContainerValue(self): """ Display Name: 
Variable Value Default Value: 0x00 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['VariableContainerValue'])) @property def EndContainerEndOfContainer(self): """ Display Name: end_of_container Default Value: 0x00 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['EndContainerEndOfContainer'])) @property def HeaderFcs(self): """ Display Name: Frame Check Sequence CRC-32 Default Value: 0 Value Format: hex """ from ixnetwork_restpy.multivalue import Multivalue return Multivalue(self, self._get_attribute(self._SDM_ATT_MAP['HeaderFcs'])) def add(self): return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
45,052
13,813
# WTForms/Flask-WTF form classes for user account management:
# registration, login, profile updates, and the password-reset flow.
from flask import current_app
from flask_wtf import FlaskForm
from flask_wtf.file import FileField, FileAllowed
from wtforms import StringField, PasswordField, SubmitField, BooleanField
from wtforms.validators import DataRequired, Length, Email, EqualTo, ValidationError
from flask_login import current_user
from memos.models.User import User
from memos.models.Memo import Memo
from memos.models.MemoFile import MemoFile
from memos.models.MemoSignature import MemoSignature


class RegistrationForm(FlaskForm):
    """Sign-up form for creating a new user account."""
    username = StringField('Username', validators=[DataRequired(), Length(min=2, max=20)])
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    confirm_password = PasswordField('Confirm Password', validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Sign Up')

    def validate_username(self, username):
        """WTForms inline validator: reject a username already in the User table."""
        user = User.query.filter_by(username=username.data).first()
        if user:
            raise ValidationError('That username is taken. Please choose a different one.')

    def validate_email(self, email):
        """WTForms inline validator: reject an email already in the User table."""
        user = User.query.filter_by(email=email.data).first()
        if user:
            raise ValidationError('That email is taken. Please choose a different one.')


class LoginForm(FlaskForm):
    """Email/password login form with a "remember me" option."""
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember = BooleanField('Remember Me')
    submit = SubmitField('Login')


class UpdateAccountForm(FlaskForm):
    """Profile-update form.

    NOTE(review): the empty ``render_kw={}`` dicts appear to be placeholders
    the view populates per-request (e.g. to disable fields) -- confirm against
    the route code.
    """
    username = StringField('Username', render_kw={})
    email = StringField('Email', validators=[DataRequired(), Email()], render_kw={})
    delegates = StringField('Delegates', validators=[], render_kw={})
    admin = BooleanField('Admin', default=False, false_values=('False', 'false', ''), render_kw={})
    readAll = BooleanField('Read All', default=False, false_values=('False', 'false', ''), render_kw={})
    subscriptions = StringField('Subscriptions', render_kw={})
    pagesize = StringField('Page Size', render_kw={})
    picture = FileField('Update Profile Picture', validators=[FileAllowed(['jpg', 'png'])])
    submit = SubmitField('Update')

    def validate_username(self, username):
        """Reject a *changed* username that is already taken; keeping the
        current username is always allowed."""
        if username.data != current_user.username:
            user = User.query.filter_by(username=username.data).first()
            if user:
                raise ValidationError('That username is taken. Please choose a different one.')

    def validate_email(self, email):
        """Reject a *changed* email that is already taken; keeping the
        current email is always allowed."""
        if email.data != current_user.email:
            user = User.query.filter_by(email=email.data).first()
            if user:
                raise ValidationError('That email is taken. Please choose a different one.')

    def validate_subscriptions(self, subscriptions):
        """Reject the subscription list if any named user does not exist."""
        users = User.valid_usernames(subscriptions.data)
        if len(users['invalid_usernames']) > 0:
            raise ValidationError(f'Invalid users {users["invalid_usernames"]}')


class RequestResetForm(FlaskForm):
    """Form that requests a password-reset email for an existing account."""
    email = StringField('Email', validators=[DataRequired(), Email()])
    submit = SubmitField('Request Password Reset')

    def validate_email(self, email):
        """Require that the email belong to a registered account."""
        user = User.query.filter_by(email=email.data).first()
        if user is None:
            raise ValidationError('There is no account with that email. You must register first.')


class ResetPasswordForm(FlaskForm):
    """Form for setting a new password from a reset link."""
    password = PasswordField('Password', validators=[DataRequired()])
    confirm_password = PasswordField('Confirm Password', validators=[DataRequired(), EqualTo('password')])
    submit = SubmitField('Reset Password')
3,967
1,048
import rsa


class RSA:
    """Thin classmethod facade over the third-party ``rsa`` package."""

    @classmethod
    def generate_keys(cls, size: int = 512) -> tuple:
        """Generate and return a new ``(PublicKey, PrivateKey)`` pair of *size* bits."""
        return rsa.newkeys(size)

    @classmethod
    def export_key_pkcs1(cls, public_key: rsa.PublicKey, format: str = "PEM") -> bytes:
        """Serialize *public_key* to PKCS#1 in the given *format* ("PEM" or "DER")."""
        # Bound-method form of rsa.PublicKey.save_pkcs1(public_key, format=format).
        return public_key.save_pkcs1(format=format)

    @classmethod
    def load_key_pkcs1(cls, public_key_pem: bytes) -> rsa.PublicKey:
        """Deserialize a PKCS#1-encoded public key."""
        return rsa.PublicKey.load_pkcs1(public_key_pem)

    @classmethod
    def sign_message(cls, message: bytes, private_key: rsa.PrivateKey, algorithm: str = "SHA-1") -> bytes:
        """Sign *message* with *private_key* using hash *algorithm*.

        NOTE(review): the "SHA-1" default is cryptographically weak; callers
        should pass a stronger hash. Default kept for backward compatibility.
        """
        signature = rsa.sign(message, private_key, algorithm)
        return signature
626
226
import paramiko,time,sys,json,os,pandas

########################################################################################################################
################################################### parms #############################################################
proxy = None
Port = 22
Username = open('').read()  # put username in txt file
Pwd = open('').read()  # put password in txt file
Host = ''
keys = ''  # file with ssh keys
sudo_user = ''  # optional parameter fill in if using sudo option in function must be passed as full command ie: sudo su - user
path = ''
download_from = ""
download_to = ""
## put commands one line at a time ##
listofcommands = f'''
'''
########################################################################################################################


def _open_shell(waittime, sudo=None):
    """Connect to ``Host`` and return an ``(client, shell)`` pair.

    If *sudo* is given (a full command such as ``sudo su - user``) it is sent
    to the shell first; the initial banner/prompt output is drained after
    *waittime* seconds either way. Caller must ``client.close()``.
    """
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    # instantiate connection
    client.connect(hostname=Host, username=Username, password=Pwd, port=Port, key_filename=keys)
    shell = client.invoke_shell()
    if sudo is not None:
        shell.send(sudo)
    time.sleep(waittime)
    shell.recv(1024)  # drain banner / sudo prompt output
    return client, shell


def exec_remote_cmds(commands, waittime, sudo=None):
    """Run *commands* in a remote interactive shell, printing each output page.

    Reading stops once the last received line ends with ``'$'`` -- assumed to
    be the remote shell prompt (needed because paramiko otherwise hangs the
    session). TODO confirm this matches the actual remote prompt.
    """
    client, shell = _open_shell(waittime, sudo)
    try:
        shell.send(commands)
        page = 0
        return_cursor_item = None
        time.sleep(1)
        while return_cursor_item != '$':
            time.sleep(1)
            output = shell.recv(1024).decode("utf-8")
            print(output)
            # Last character of the last received (stripped) line.
            return_cursor = output.splitlines()[-1].strip()
            return_cursor_item = return_cursor[-1]
            page += 1
        print("Pages Read:", page)
    finally:
        # Fix: the SSH connection was previously never closed (resource leak).
        client.close()


def download_remote_file(remotepath: str, localpath: str, waittime: int, sudo: str = None):
    """Copy *remotepath* from the remote host to local *localpath* over SFTP."""
    client, shell = _open_shell(waittime, sudo)
    try:
        sftp = client.open_sftp()
        try:
            # sftp.get() is synchronous and raises on failure, so no
            # existence-polling loop is needed after it returns.
            sftp.get(remotepath, localpath)
        finally:
            sftp.close()
    finally:
        client.close()


def write_file_to_remote(remotepath: str, localpath: str, waittime: int, sudo: str = None):
    """Upload local *localpath* to *remotepath* on the remote host over SFTP."""
    client, shell = _open_shell(waittime, sudo)
    try:
        sftp = client.open_sftp()
        try:
            # Fix: the old code polled os.path.exists(remotepath) on the
            # *local* filesystem, which could spin forever; sftp.put() is
            # synchronous and raises on failure, so no wait is required.
            sftp.put(localpath, remotepath)
        finally:
            sftp.close()
    finally:
        client.close()


exec_remote_cmds(listofcommands, 1, sudo_user)  # sudo user is option must by passed as full sudo su - if used
#write_file_to_remote(download_from,download_to,1,sudo_user)
#download_remote_file(download_to,download_from,1)
3,566
1,080
import importlib
import logging

from volttron.platform.agent import utils

_log = logging.getLogger(__name__)
utils.setup_logging()
__version__ = "0.1"

__all__ = ['Model']


class Model(object):
    """Wrapper that dynamically loads the model named by ``config["model_type"]``.

    ``model_type`` must be a dotted ``"<module>.<ClassName>"`` string; the
    module is imported from the ``volttron.pnnl.models`` package and the class
    is instantiated with ``(config, self)``.
    """

    def __init__(self, config, **kwargs):
        base_module = "volttron.pnnl.models."
        try:
            model_type = config["model_type"]
        except KeyError as e:
            _log.exception("Missing Model Type key: {}".format(e))
            # Fix: bare raise preserves the original traceback (was `raise e`).
            raise
        _file, model_type = model_type.split(".")
        module = importlib.import_module(base_module + _file)
        model_class = getattr(module, model_type)
        # The concrete model receives this wrapper so it can call back into it.
        self.model = model_class(config, self)

    def get_q(self, _set, sched_index, market_index, occupied):
        """Return the wrapped model's prediction for the given schedule/market slot."""
        return self.model.predict(_set, sched_index, market_index, occupied)
825
256
import draft
import os


def run():
    """Run an interactive two-player draft at the terminal.

    Each player resolves their four pairs in turn: the option they choose is
    appended to their own pick list and the rejected option is given to the
    opponent. Finally each player's picks are revealed.
    """
    # picks[0] / picks[1] accumulate the final selections for player 1 / player 2.
    picks = [[], []]
    pairs = draft.pairs()  # assumes one list of four 2-option pairs per player -- TODO confirm
    for player in range(1, 3):
        os.system("clear")  # hide the other player's choices between turns
        pre = "[Player %d]: " % player
        input(pre + "(Enter when ready) ")
        pair = 0
        while pair < 4:
            pick = input(pre + "%s (1), %s (2) " % (draft.get_name(pairs[player - 1][pair][0]), draft.get_name(pairs[player - 1][pair][1])))
            if pick in ("1", "2"):
                npick = int(pick)
                # Chosen option (index npick - 1) goes to the current player;
                # the other option (index 2 - npick) goes to the opponent
                # (picks index 2 - player mirrors 0 <-> 1).
                picks[player - 1].append(pairs[player - 1][pair][npick - 1])
                picks[2 - player].append(pairs[player - 1][pair][2 - npick])
                # NOTE(review): advance only on valid input so invalid entries
                # re-prompt (indentation reconstructed from collapsed source).
                pair += 1
    for player in range(1, 3):
        os.system("clear")
        pre = "[Player %d]: " % player
        input(pre + "(Enter to show) ")
        # input() doubles as a "press Enter to continue" pause; the list repr
        # of the picked names is displayed as the prompt.
        input(list(map(draft.get_name, picks[player - 1])))
    os.system("clear")


run()
826
328
# https://open.kattis.com/problems/honey
# Precomputed answers for inputs 0..14 (one table lookup per query).
ANSWERS = [1, 0, 6, 12, 90, 360, 2040, 10080, 54810, 290640, 1588356,
           8676360, 47977776, 266378112, 1488801600]
# First input line is the query count; each following line is an index.
results = [ANSWERS[int(input())] for _ in range(int(input()))]
print(*results, sep="\n")
222
150
# -*- coding: utf-8 -*-
"""File containing a Windows Registry plugin to parse the USBStor key."""

import logging

from plaso.events import windows_events
from plaso.lib import eventdata
from plaso.parsers import winreg
from plaso.parsers.winreg_plugins import interface


__author__ = 'David Nides (david.nides@gmail.com)'


class USBStorPlugin(interface.KeyPlugin):
  """USBStor key plugin."""

  NAME = u'windows_usbstor_devices'
  DESCRIPTION = u'Parser for USB Plug And Play Manager USBStor Registry Key.'

  REG_KEYS = [u'\\{current_control_set}\\Enum\\USBSTOR']
  REG_TYPE = u'SYSTEM'

  URLS = [u'http://www.forensicswiki.org/wiki/USB_History_Viewing']

  def GetEntries(
      self, parser_mediator, key=None, registry_file_type=None,
      codepage=u'cp1252', **kwargs):
    """Collect Values under USBStor and return an event object for each one.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      key: Optional Registry key (instance of winreg.WinRegKey).
           The default is None.
      registry_file_type: Optional string containing the Windows Registry file
                          type, e.g. NTUSER, SOFTWARE. The default is None.
      codepage: Optional extended ASCII string codepage. The default is
                cp1252.
    """
    for subkey in key.GetSubkeys():
      # text_dict is shared by every event of this subkey and is mutated as
      # more fields are discovered below.
      text_dict = {}
      text_dict[u'subkey_name'] = subkey.name

      # Time last USB device of this class was first inserted.
      event_object = windows_events.WindowsRegistryEvent(
          subkey.last_written_timestamp, key.path, text_dict,
          usage=eventdata.EventTimestamp.FIRST_CONNECTED, offset=key.offset,
          registry_file_type=registry_file_type,
          source_append=u': USBStor Entries')
      parser_mediator.ProduceEvent(event_object)

      # Subkey names are '&'-separated: device_type&vendor&product&revision.
      name_values = subkey.name.split(u'&')
      number_of_name_values = len(name_values)

      # Normally we expect 4 fields here however that is not always the case.
      if number_of_name_values != 4:
        logging.warning(
            u'Expected 4 &-separated values in: {0:s}'.format(subkey.name))

      if number_of_name_values >= 1:
        text_dict[u'device_type'] = name_values[0]
      if number_of_name_values >= 2:
        text_dict[u'vendor'] = name_values[1]
      if number_of_name_values >= 3:
        text_dict[u'product'] = name_values[2]
      if number_of_name_values >= 4:
        text_dict[u'revision'] = name_values[3]

      for devicekey in subkey.GetSubkeys():
        text_dict[u'serial'] = devicekey.name

        friendly_name_value = devicekey.GetValue(u'FriendlyName')
        if friendly_name_value:
          text_dict[u'friendly_name'] = friendly_name_value.data
        else:
          # Remove a stale value carried over from the previous device key.
          text_dict.pop(u'friendly_name', None)

        # ParentIdPrefix applies to Windows XP Only.
        parent_id_prefix_value = devicekey.GetValue(u'ParentIdPrefix')
        if parent_id_prefix_value:
          text_dict[u'parent_id_prefix'] = parent_id_prefix_value.data
        else:
          # Remove a stale value carried over from the previous device key.
          text_dict.pop(u'parent_id_prefix', None)

        # Win7 - Last Connection.
        # Vista/XP - Time of an insert.
        event_object = windows_events.WindowsRegistryEvent(
            devicekey.last_written_timestamp, key.path, text_dict,
            usage=eventdata.EventTimestamp.LAST_CONNECTED, offset=key.offset,
            registry_file_type=registry_file_type,
            source_append=u': USBStor Entries')
        parser_mediator.ProduceEvent(event_object)

        # Build list of first Insertion times, de-duplicating identical
        # timestamps across the three candidate subkeys.
        first_insert = []
        device_parameter_key = devicekey.GetSubkey(u'Device Parameters')
        if device_parameter_key:
          first_insert.append(device_parameter_key.last_written_timestamp)

        log_configuration_key = devicekey.GetSubkey(u'LogConf')
        if (log_configuration_key and
            log_configuration_key.last_written_timestamp not in first_insert):
          first_insert.append(log_configuration_key.last_written_timestamp)

        properties_key = devicekey.GetSubkey(u'Properties')
        if (properties_key and
            properties_key.last_written_timestamp not in first_insert):
          first_insert.append(properties_key.last_written_timestamp)

        # Add first Insertion times.
        # NOTE(review): these first-insertion timestamps are emitted with
        # usage=LAST_CONNECTED -- confirm FIRST_CONNECTED was not intended.
        for timestamp in first_insert:
          event_object = windows_events.WindowsRegistryEvent(
              timestamp, key.path, text_dict,
              usage=eventdata.EventTimestamp.LAST_CONNECTED, offset=key.offset,
              registry_file_type=registry_file_type,
              source_append=u': USBStor Entries')
          parser_mediator.ProduceEvent(event_object)


winreg.WinRegistryParser.RegisterPlugin(USBStorPlugin)
4,712
1,451
from collections import defaultdict

from discord.ext.commands import Cog, command
from discord.utils import get

from ...utils import check_restricted
from ... import exceptions
from ... import messagemanager


class Help(Cog):
    # Cog providing the bot's `help` command plus helpers that filter the
    # command list by the invoking user's permissions.

    async def get_cmd(self, name, bot, user, list_all_cmds=False):
        # Look up a command by name and return it only when `user` is allowed
        # to run it; `list_all_cmds=True` bypasses the permission filter.
        # Returns None (implicitly) when the command is restricted.
        cmd = bot.get_command(name)
        user_permissions = bot.permissions.for_user(user)
        if not check_restricted(cmd, user_permissions) or list_all_cmds:
            return cmd

    async def _gen_cog_cmd_dict(self, bot, user, list_all_cmds=False):
        # Build a mapping {cog_name: {command_qualified_name: command}} of the
        # commands visible to `user`, skipping developer-only commands
        # (those whose callback carries a `dev_cmd` attribute).
        user_permissions = bot.permissions.for_user(user)
        ret = defaultdict(dict)
        cmds = bot.commands if list_all_cmds else check_restricted(bot.commands, user_permissions)
        for cmd in cmds:
            # This will always return at least cmd_help, since they needed perms to run this command
            if not hasattr(cmd.callback, 'dev_cmd'):
                # Commands defined outside any cog are grouped under 'unknown'.
                cog_name = cmd.cog.qualified_name if cmd.cog else 'unknown'
                ret[cog_name][cmd.qualified_name] = cmd
        return ret

    @command()
    async def help(self, ctx, *options):
        """
        Usage:
            {command_prefix}help [options...] [name]

        Options:
            (none) prints a help message for the command with that name.
            cog prints a help message for the command in the cog with that name. name argument is required.
            all list all commands available. name argument will be discarded if not used with cog option.

        Prints a help message.
        Supplying multiple names can leads to unexpected behavior.
        """
        # NOTE: this docstring is shown to users at runtime (cmd.help), so it
        # doubles as the command's own help text.
        prefix = ctx.bot.config.command_prefix
        options = list(options)
        # Consume the 'all' / 'cog' flags; whatever remains is the name.
        list_all = True if 'all' in options else False
        options.remove('all') if list_all else None
        list_cog = True if 'cog' in options else False
        options.remove('cog') if list_cog else None
        name = '' if not options else ' '.join(options)
        cogs = await self._gen_cog_cmd_dict(ctx.bot, ctx.author, list_all_cmds=list_all)
        desc = ''

        if list_cog:
            cogdesc = ''
            try:
                # NOTE(review): `name` is a string here, so `name[0]` is its
                # FIRST CHARACTER, not the first word -- this looks like it
                # predates the ' '.join above and may be a latent bug; confirm
                # against callers before changing.
                cogs = {name[0]: cogs[name[0]]}
                cogdesc = ctx.bot.cogs[name[0]].description
            except KeyError:
                raise exceptions.CommandError(ctx.bot.str.get('help?cmd?help?fail@cog', "No such cog"), expire_in=10)
            desc = '\N{WHITE SMALL SQUARE} {}:\n{}\n\n'.format(name[0], cogdesc) if cogdesc else '\N{WHITE SMALL SQUARE} {}:\n'.format(name[0])
        else:
            if name:
                # Single-command help: always look the command up (perm filter
                # bypassed), but restricted/dev commands still 404 below.
                cmd = await self.get_cmd(name, ctx.bot, ctx.author, list_all_cmds=True)
                if not cmd:
                    raise exceptions.CommandError(ctx.bot.str.get('cmd-help-invalid', "No such command"), expire_in=10)
                if not hasattr(cmd.callback, 'dev_cmd'):
                    usage = cmd.help
                    if ctx.bot.config.help_display_sig and hasattr(cmd, 'commands'):
                        usage = '{}\n\nSignature: {} {}'.format(usage, cmd.qualified_name, cmd.signature)
                    await messagemanager.safe_send_normal(
                        ctx, ctx,
                        "```\n{}\n\n{}Aliases (for this name): {}```".format(
                            usage,
                            '' if not hasattr(cmd, 'commands') else 'This is a command group with following subcommands:\n{}\n\n'.format(', '.join(c.name for c in cmd.commands) if cmd.commands else None),
                            ' '.join(cmd.aliases)
                        ).format(command_prefix=ctx.bot.config.command_prefix),
                        expire_in=60
                    )
                    return
            elif ctx.author.id in ctx.bot.config.owner_id:
                # Owners with no name argument see every command regardless of
                # the 'all' flag.
                cogs = await self._gen_cog_cmd_dict(ctx.bot, ctx.author, list_all_cmds=True)

        # Render the per-cog command listing.
        cmdlisto = ''
        for cog, cmdlist in cogs.items():
            if len(cmdlist) > 0:
                # The cog header line is omitted in cog mode (single cog).
                cmdlisto += ('\N{WHITE SMALL SQUARE} '+ cog + ' [' + str(len(cmdlist)) + ']:\n') if not list_cog else ''
                cmdlisto += '```' + ', '.join([cmd for cmd in cmdlist.keys()]) + '```\n'

        desc += cmdlisto + ctx.bot.str.get(
            'cmd-help-response',
            'For information about a particular command, run `{}help [command]`\n'
            'For further help, see https://just-some-bots.github.io/MusicBot/'
        ).format(prefix)
        if not list_all:
            desc += ctx.bot.str.get('cmd-help-all', '\nOnly showing commands you can use, for a list of all commands, run `{}help all`').format(prefix)
        await messagemanager.safe_send_normal(ctx, ctx, desc, reply=True, expire_in=60)


cogs = [Help]
4,877
1,446
"Update a Flatpak repository for new versions of components" import argparse import asyncio import datetime from functools import total_ordering import hashlib from itertools import zip_longest import json from pathlib import Path import re import httpx import jinja2 import yaml GITHUB_DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ" @total_ordering class Version: """Class embodying a software version Assumes that version is purely composed of integers and .'s -- no alpha, beta, prelease, etc. or other strings allowed. """ def __init__(self, version): if isinstance(version, str): version_tuple = tuple(version.split(".")) elif isinstance(version, tuple): version_tuple = version else: raise ValueError(f"Invalid version: {version}") self.version_tuple = tuple(int(x) for x in version_tuple) self.date = None def __iter__(self): for part in self.version_tuple: yield part def __str__(self): return ".".join(str(p) for p in self) def __repr__(self): return f"{type(self).__name__}({str(self)})" def __len__(self): return len(self.version_tuple) def __hash__(self): return hash(tuple(self)) def __getitem__(self, key): if len(self) > key: return self.version_tuple[key] return 0 def __eq__(self, other): return str(self) == str(other) def __gt__(self, other): for self_i, other_i in zip_longest(self, other, fillvalue=0): if self_i == other_i: continue return self_i > other_i return False async def get_version_scrape(spec): "Scrape raw HTML for version string regex to find latest version" async with httpx.AsyncClient() as client: response = await client.get(spec["url"]) matches = re.findall(spec["regex"], response.text) version = max(Version(m) for m in matches) return version async def get_version_github_branches(spec): "Get latest version for project that uses separate git tag for each version" base_url = f"https://api.github.com/repos/{spec['project']}" headers = {"Accept": "application/vnd.github.v3+json"} async with httpx.AsyncClient() as client: response = await 
client.get(f"{base_url}/branches", headers=headers) data = response.json() versions = [ Version(m.group(1)) for b in data if (m := re.match(spec["regex"], b["name"])) ] version = max(versions) return version async def get_version_github_releases(spec): "Find the latest version on GitHub releases / tags page" base_url = f"https://api.github.com/repos/{spec['project']}" headers = {"Accept": "application/vnd.github.v3+json"} if spec.get("tags"): endpt = "tags" else: endpt = "releases" async with httpx.AsyncClient() as client: response = await client.get(f"{base_url}/{endpt}", headers=headers) data = response.json() versions = [] for item in data: version_str = item["name"] for sub in spec.get("substitutions", []): version_str = version_str.replace(sub[0], sub[1]) try: version = Version(version_str) except ValueError: continue versions.append((version, item)) version, metadata = max(versions) if spec.get("set_date"): if endpt == "releases": version_dt = datetime.datetime.strptime( metadata["published_at"], GITHUB_DATE_FORMAT ) elif endpt == "tags": async with httpx.AsyncClient() as client: response = await client.get( metadata["commit"]["url"], headers=headers ) data = response.json() version_dt = datetime.datetime.strptime( data["commit"]["committer"]["date"], GITHUB_DATE_FORMAT ) version.date = version_dt.strftime("%Y-%m-%d") return version async def get_latest_version(spec): "Get latest version of a single component" if spec["type"] == "scrape": version = await get_version_scrape(spec) elif spec["type"] == "github_branches": version = await get_version_github_branches(spec) elif spec["type"] == "github_releases": version = await get_version_github_releases(spec) else: raise ValueError(f"Bad spec type: {spec['type']}") return version async def get_latest_versions(specs): "Look up latest versions of components in config file" versions = await asyncio.gather( *(get_latest_version(s["get_version"]) for s in specs) ) return {s["name"]: v for s, v in zip(specs, versions)} 
def load_manifest(path):
    """Load json or yaml file into a dict, chosen by file suffix."""
    with path.open("r") as file_:
        if path.suffix == ".json":
            manifest = json.load(file_)
        else:
            manifest = yaml.load(file_, Loader=yaml.SafeLoader)
    return manifest


def get_current_versions(manifest):
    """Parse versions from current Flatpak manifest.

    Returns {"runtime": Version, <module name>: Version, ...}; module
    versions are pulled out of each module's first source tarball URL.
    """
    versions = {"runtime": Version(manifest["runtime-version"])}
    for module in manifest["modules"]:
        match = re.search(r"-([0-9\.]+)\.tar\.gz$", module["sources"][0]["url"])
        versions[module["name"]] = Version(match.group(1))
    return versions


async def get_sha256(download_dir: Path, url):
    """Get sha256 sum for url, downloading into download_dir unless cached."""
    output_path = download_dir / Path(url).name
    if not output_path.exists():
        # BUG FIX: the AsyncClient was created but never closed, leaking its
        # connection pool; use it as an async context manager so it is
        # released even if the download fails.
        async with httpx.AsyncClient() as client:
            with output_path.open("wb") as file_:
                async with client.stream("GET", url) as response:
                    async for chunk in response.aiter_raw():
                        file_.write(chunk)
    with output_path.open("rb") as file_:
        sha256 = hashlib.sha256()
        # Hash in 64 KiB chunks to keep memory flat on large tarballs.
        while data := file_.read(2 ** 16):
            sha256.update(data)
    return sha256.hexdigest()


async def get_sha256_set(named_urls):
    """Get sha256 sums for name:url pairs

    Returns dictionary with {name}_sha256 keys for easy merging into
    j2 variables dict
    """
    cache_dir = Path.cwd() / ".cache"
    cache_dir.mkdir(exist_ok=True)
    sums = await asyncio.gather(
        *(get_sha256(cache_dir, v) for v in named_urls.values())
    )
    sha256_vars = [f"{n}_sha256" for n in named_urls]
    return dict(zip(sha256_vars, sums))


def get_template_vars(config, current_versions, new_versions, manifest):
    """Build up variables for jinja2 templates from version data.

    For modules with a new version the sha256 is computed from the remote
    tarball; unchanged modules reuse the sha256 already in the manifest.
    """
    env = {}
    remote_sha256 = {}
    env["runtime_version"] = new_versions["runtime"]
    for spec in config["modules"]:
        name = spec["name"]
        env[f"{name}_version"] = new_versions[name]
        env[f"{name}_source_url"] = spec["source_url"].format(
            version=new_versions[name]
        )
        env[f"{name}_version_date"] = new_versions[name].date
        if new_versions[name] > current_versions[name]:
            # New upstream release: download and hash it below.
            remote_sha256[name] = env[f"{name}_source_url"]
        else:
            # Unchanged: copy the checksum recorded in the current manifest.
            for mod in manifest["modules"]:
                if mod["name"] == name:
                    env[f"{name}_sha256"] = mod["sources"][0]["sha256"]
    new_sha256 = asyncio.run(get_sha256_set(remote_sha256))
    env.update(**new_sha256)
    return env


def render_templates(template_dir, env):
    """Render all .j2 templates using collected version information.

    Each template is written next to itself with the .j2 suffix dropped.
    """
    for path in template_dir.glob("*.j2"):
        with path.open("r") as file_:
            template = jinja2.Template(file_.read())
        with path.with_name(path.stem).open("w") as file_:
            file_.write(template.render(**env))


def parse_args():
    """Parse command line arguments."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--config", "-c", required=True, help="Configuration file")
    parser.add_argument(
        "--manifest", "-m", required=True, help="Current flatpak manifest"
    )
    parser.add_argument(
        "--template-dir", "-t", help="Directory with .j2 files to render"
    )
    return parser.parse_args()


def main():
    """Main logic: look up versions, diff against manifest, render templates."""
    args = parse_args()
    with open(args.config) as file_:
        config = yaml.load(file_, Loader=yaml.SafeLoader)
    new_versions = asyncio.run(
        get_latest_versions([config["runtime"]] + config["modules"])
    )
    manifest = load_manifest(Path(args.manifest))
    current_versions = get_current_versions(manifest)
    env = get_template_vars(config, current_versions, new_versions, manifest)
    render_templates(Path(args.template_dir), env)


if __name__ == "__main__":
    main()
8,710
2,715
from unittest import TestCase import base.env.pre_process as pre_process import pandas as pd from sklearn.preprocessing import MinMaxScaler from helper.util import get_attribute from base.env.pre_process_conf import active_stragery, get_strategy_analyze import base.env.pre_process class TestProcessStrategy(TestCase): def test_get_active_strategy(self): self.action_fetch, self.action_pre_analyze, self.indicators, self.action_post_analyze, self._label = \ pre_process.get_active_strategy(strategy=None) self.assertIsNotNone(self.action_fetch) self.assertIsNotNone(self.action_pre_analyze) self.assertIsNotNone(self.indicators) self.assertIsNotNone(self.action_post_analyze) def test_process(self): self.test_get_active_strategy() dates, pre_frames, origin_frames, post_frames = pre_process.ProcessStrategy( # self.action_fetch, self.action_pre_analyze,self.indicators, self.action_post_analyze, ['SH_index'], "2008-01-01", "2019-02-01", MinMaxScaler(), active_stragery).process() result = post_frames['nasdaq'].dropna() self.assertIsInstance(result, pd.DataFrame) self.assertNotEqual(result.values.size, 0) def test_palyaround(self): # action_post_analyze = get_attribute('.'.join([active_stragery.get('module'), 'PreAnalyzeDefault'])) # action_post_analyze.fire(None, None) pre_process.PreAnalyzeDefault.fire(None, None) # print(class_post_analyze.fire(None, None)) def test_get_strategy_analyze(self): self.assertIsNotNone(get_strategy_analyze(get_attribute(active_stragery)))
1,746
551
#!/usr/bin/env python2.5
#encoding:utf-8
#author:dbr/Ben
#project:themoviedb
#forked by ccjensen/Chris
#http://github.com/ccjensen/themoviedb

"""An interface to the themoviedb.org API
"""
# NOTE: Python 2-only module (urllib.urlopen, `except E, name` syntax,
# print statements). Do not import under Python 3.

__author__ = "dbr/Ben"
__version__ = "0.2b"

# Module-level configuration: the API key is baked into the URL templates
# below; each template keeps a %%s placeholder for the query/id.
config = {}
config['apikey'] = "a8b9f96dde091408a03cb4c78477bd14"

config['urls'] = {}
config['urls']['movie.search'] = "http://api.themoviedb.org/2.1/Movie.search/en/xml/%(apikey)s/%%s" % (config)
config['urls']['movie.getInfo'] = "http://api.themoviedb.org/2.1/Movie.getInfo/en/xml/%(apikey)s/%%s" % (config)

import urllib

# Prefer the C-accelerated ElementTree; fall back to the standalone
# elementtree package on very old Pythons.
try:
    import xml.etree.cElementTree as ElementTree
except ImportError:
    import elementtree.ElementTree as ElementTree

# collections.defaultdict
# originally contributed by Yoav Goldberg <yoav.goldberg@gmail.com>
# new version by Jason Kirtland from Python cookbook.
# <http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/523034>
# Backport for Python < 2.5, where collections.defaultdict does not exist.
try:
    from collections import defaultdict
except ImportError:
    class defaultdict(dict):
        def __init__(self, default_factory=None, *a, **kw):
            if (default_factory is not None and
                not hasattr(default_factory, '__call__')):
                raise TypeError('first argument must be callable')
            dict.__init__(self, *a, **kw)
            self.default_factory = default_factory
        def __getitem__(self, key):
            try:
                return dict.__getitem__(self, key)
            except KeyError:
                return self.__missing__(key)
        def __missing__(self, key):
            if self.default_factory is None:
                raise KeyError(key)
            # Materialize the default so subsequent lookups hit the dict.
            self[key] = value = self.default_factory()
            return value
        def __reduce__(self):
            if self.default_factory is None:
                args = tuple()
            else:
                args = self.default_factory,
            return type(self), args, None, None, self.iteritems()
        def copy(self):
            return self.__copy__()
        def __copy__(self):
            return type(self)(self.default_factory, self)
        def __deepcopy__(self, memo):
            import copy
            return type(self)(self.default_factory,
                              copy.deepcopy(self.items()))
        def __repr__(self):
            return 'defaultdict(%s, %s)' % (self.default_factory,
                                            dict.__repr__(self))
    # [XX] to make pickle happy in python 2.4:
    import collections
    collections.defaultdict = defaultdict

class TmdBaseError(Exception):
    # Root of this module's exception hierarchy.
    pass

class TmdHttpError(TmdBaseError):
    # Raised when fetching a URL from the API fails.
    pass

class TmdXmlError(TmdBaseError):
    # Raised when the API response is not parseable XML.
    pass

class XmlHandler:
    """Deals with retrieval of XML files from API
    """
    def __init__(self, url):
        self.url = url

    def _grabUrl(self, url):
        # Fetch the raw response body; IOError becomes TmdHttpError.
        try:
            urlhandle = urllib.urlopen(url)
        except IOError, errormsg:
            raise TmdHttpError(errormsg)
        return urlhandle.read()

    def getEt(self):
        # Fetch self.url and parse it into an ElementTree Element.
        # cElementTree raises SyntaxError (ExpatError subclass) on bad XML.
        xml = self._grabUrl(self.url)
        try:
            et = ElementTree.fromstring(xml)
        except SyntaxError, errormsg:
            raise TmdXmlError(errormsg)
        return et

class recursivedefaultdict(defaultdict):
    # defaultdict whose missing values are again recursivedefaultdicts,
    # allowing d[a][b][c] = v without intermediate setup.
    # NOTE(review): deliberately skips defaultdict.__init__.
    def __init__(self):
        self.default_factory = type(self)

class SearchResults(list):
    """Stores a list of Movie's that matched the search
    """
    def __repr__(self):
        return "<Search results: %s>" % (list.__repr__(self))

class MovieResult(dict):
    """A dict containing the information about a specific search result
    """
    def __repr__(self):
        return "<MovieResult: %s (%s)>" % (self.get("name"), self.get("released"))

class Movie(dict):
    """A dict containing the information about the film
    """
    # NOTE(review): repr says "MovieResult" -- looks like a copy-paste of the
    # class above; confirm before changing (it is user-visible output).
    def __repr__(self):
        return "<MovieResult: %s (%s)>" % (self.get("name"), self.get("released"))

class Categories(recursivedefaultdict):
    """Stores category information
    """
    def set(self, category_et):
        """Takes an elementtree Element ('category') and stores the url,
        using the type and name as the dict key.
        For example:
        <category type="genre" url="http://themoviedb.org/encyclopedia/category/80" name="Crime"/>
        ..becomes:
        categories['genre']['Crime'] = 'http://themoviedb.org/encyclopedia/category/80'
        """
        _type = category_et.get("type")
        name = category_et.get("name")
        url = category_et.get("url")
        self[_type][name] = url

class Studios(recursivedefaultdict):
    """Stores category information
    """
    def set(self, studio_et):
        """Takes an elementtree Element ('studio') and stores the url,
        using the name as the dict key.
        For example:
        <studio url="http://www.themoviedb.org/encyclopedia/company/20" name="Miramax Films"/>
        ..becomes:
        studios['name'] = 'http://www.themoviedb.org/encyclopedia/company/20'
        """
        name = studio_et.get("name")
        url = studio_et.get("url")
        self[name] = url

class Countries(recursivedefaultdict):
    """Stores country information
    """
    def set(self, country_et):
        """Takes an elementtree Element ('country') and stores the url,
        using the name and code as the dict key.
        For example:
        <country url="http://www.themoviedb.org/encyclopedia/country/223" name="United States of America" code="US"/>
        ..becomes:
        countries['code']['name'] = 'http://www.themoviedb.org/encyclopedia/country/223'
        """
        code = country_et.get("code")
        name = country_et.get("name")
        url = country_et.get("url")
        self[code][name] = url

class Images(recursivedefaultdict):
    """Stores image information
    """
    def set(self, image_et):
        """Takes an elementtree Element ('image') and stores the url,
        using the type, id and size as the dict key.
        For example:
        <image type="poster" size="original" url="http://images.themoviedb.org/posters/4181/67926_sin-city-02-color_122_207lo.jpg" id="4181"/>
        ..becomes:
        images['poster']['4181']['original'] = 'http://images.themoviedb.org/posters/4181/67926_sin-city-02-color_122_207lo.jpg'
        """
        _type = image_et.get("type")
        _id = image_et.get("id")
        size = image_et.get("size")
        url = image_et.get("url")
        self[_type][_id][size] = url

    def __repr__(self):
        return "<%s with %s posters and %s backdrops>" % (
            self.__class__.__name__,
            len(self['poster'].keys()),
            len(self['backdrop'].keys())
        )

    def largest(self, _type, _id):
        """Attempts to return largest image of a specific type and id
        """
        if(isinstance(_id, int)):
            _id = str(_id)
        # Sizes are tried from largest to smallest; first hit wins.
        for cur_size in ["original", "mid", "cover", "thumb"]:
            for size in self[_type][_id]:
                if cur_size in size:
                    return self[_type][_id][cur_size]

class CrewRoleList(dict):
    """Stores a list of roles, such as director, actor etc

    >>> import tmdb
    >>> tmdb.getMovieInfo(550)['cast'].keys()[:5]
    ['casting', 'producer', 'author', 'sound editor', 'actor']
    """
    pass

class CrewList(list):
    """Stores list of crew in specific role

    >>> import tmdb
    >>> tmdb.getMovieInfo(550)['cast']['author']
    [<author (id 7468): Chuck Palahniuk>, <author (id 7469): Jim Uhls>]
    """
    pass

class Person(dict):
    """Stores information about a specific member of cast
    """
    def __init__(self, job, _id, name, character, url):
        self['job'] = job
        self['id'] = _id
        self['name'] = name
        self['character'] = character
        self['url'] = url

    def __repr__(self):
        if self['character'] is None or self['character'] == "":
            return "<%(job)s (id %(id)s): %(name)s>" % self
        else:
            return "<%(job)s (id %(id)s): %(name)s (as %(character)s)>" % self

class MovieDb:
    """Main interface to www.themoviedb.com

    The search() method searches for the film by title.
    The getMovieInfo() method retrieves information about a specific movie
    using themoviedb id.
    """
    def _parseSearchResults(self, movie_element):
        # Flatten a <movie> element into a MovieResult; <images> children
        # are collected separately into an Images instance.
        cur_movie = MovieResult()
        cur_images = Images()
        for item in movie_element.getchildren():
            if item.tag.lower() == "images":
                for subitem in item.getchildren():
                    cur_images.set(subitem)
            else:
                cur_movie[item.tag] = item.text
        cur_movie['images'] = cur_images
        return cur_movie

    def _parseMovie(self, movie_element):
        # Build a full Movie from a <movie> element, dispatching each known
        # container tag to its dedicated collection class; any other tag
        # becomes a plain key/text pair.
        cur_movie = Movie()
        cur_categories = Categories()
        cur_studios = Studios()
        cur_countries = Countries()
        cur_images = Images()
        cur_cast = CrewRoleList()
        for item in movie_element.getchildren():
            if item.tag.lower() == "categories":
                for subitem in item.getchildren():
                    cur_categories.set(subitem)
            elif item.tag.lower() == "studios":
                for subitem in item.getchildren():
                    cur_studios.set(subitem)
            elif item.tag.lower() == "countries":
                for subitem in item.getchildren():
                    cur_countries.set(subitem)
            elif item.tag.lower() == "images":
                for subitem in item.getchildren():
                    cur_images.set(subitem)
            elif item.tag.lower() == "cast":
                for subitem in item.getchildren():
                    job = subitem.get("job").lower()
                    p = Person(
                        job = job,
                        _id = subitem.get("id"),
                        name = subitem.get("name"),
                        character = subitem.get("character"),
                        url = subitem.get("url")
                    )
                    # Group crew members by (lower-cased) job name.
                    cur_cast.setdefault(job, CrewList()).append(p)
            else:
                cur_movie[item.tag] = item.text
        cur_movie['categories'] = cur_categories
        cur_movie['studios'] = cur_studios
        cur_movie['countries'] = cur_countries
        cur_movie['images'] = cur_images
        cur_movie['cast'] = cur_cast
        return cur_movie

    def search(self, title):
        """Searches for a film by its title.
        Returns SearchResults (a list) containing all matches (Movie instances)
        """
        title = urllib.quote(title.encode("utf-8"))
        url = config['urls']['movie.search'] % (title)
        etree = XmlHandler(url).getEt()
        search_results = SearchResults()
        for cur_result in etree.find("movies").findall("movie"):
            cur_movie = self._parseSearchResults(cur_result)
            search_results.append(cur_movie)
        return search_results

    def getMovieInfo(self, id):
        """Returns movie info by from its tmdb id.
        Returns a Movie instance
        """
        url = config['urls']['movie.getInfo'] % (id)
        etree = XmlHandler(url).getEt()
        return self._parseMovie(etree.find("movies").findall("movie")[0])

def search(name = None):
    """Convenience wrapper for MovieDb.search - so you can do..

    >>> import tmdb
    >>> tmdb.search("Fight Club")
    <Search results: [<MovieResult: Fight Club (1999-09-16)>]>
    """
    mdb = MovieDb()
    return mdb.search(name)

def getMovieInfo(id = None):
    """Convenience wrapper for MovieDb.search - so you can do..

    >>> import tmdb
    >>> tmdb.getMovieInfo(187)
    <MovieResult: Sin City (2005-04-01)>
    """
    mdb = MovieDb()
    return mdb.getMovieInfo(id)

def main():
    # Smoke-test/demo: search, fetch full info, and print a few fields.
    results = search("Fight Club")
    searchResult = results[0]
    movie = getMovieInfo(searchResult['id'])
    print movie['name']

    print "Producers:"
    for prodr in movie['cast']['Producer']:
        print " " * 4, prodr['name']
    print movie['images']
    for genreName in movie['categories']['genre']:
        print "%s (%s)" % (genreName, movie['categories']['genre'][genreName])

if __name__ == '__main__':
    main()
12,194
3,775
import numpy import numpy.linalg def distance_sum(inputs, references): """Sum of all distances between inputs and references Each element should be in a row! """ norms = numpy.zeros(inputs.shape[0]) for i in xrange(references.shape[0]): norms += numpy.apply_along_axis(numpy.linalg.norm, 1, inputs-references[i,:]) return norms def distance_min(inputs, references): """Minimum distances between inputs and any reference Each element should be in a row! """ norms = numpy.ones(inputs.shape[0])*99999999 for i in xrange(references.shape[0]): norms = numpy.minimum(norms, numpy.apply_along_axis(numpy.linalg.norm, 1, inputs-references[i,:])) return norms def distance_matrix(inputs): """Returns a distance matrix """ D = numpy.ones( (inputs.shape[0], inputs.shape[0]) )*99999999 for i in xrange(inputs.shape[0]): for j in xrange(i): D[i,j] = numpy.linalg.norm(inputs[i,:]-inputs[j,:]) D[j,i] = numpy.linalg.norm(inputs[i,:]-inputs[j,:]) return D def distance_mutual_min(inputs, references): """Distance using a mutual distance reference Inspired in: USING MUTUAL PROXIMITY TO IMPROVE CONTENT-BASED AUDIO SIMILARITY Dominik Schnitzer, Arthur Flexer, Markus Sched, Gerhard Widmer """ d = distance_matrix(inputs) a = distance_min(inputs, references) for i in xrange(len(a)): a[i] = a[i] - numpy.min(d[:,i]) return a def range_distance(inputs, references): """Minimum distance from boundaries of a rang """ mi = numpy.amin(references, 0) ma = numpy.amax(references, 0) norms = numpy.zeros(inputs.shape[0]) for i in xrange(inputs.shape[0]): for j in xrange(inputs.shape[1]): if (inputs[i,j] < mi[j]) or \ (inputs[i,j] > ma[j]): norms[i] += numpy.min([abs(inputs[i,j]-mi[j]),\ abs(inputs[i,j]-ma[j])])**2 norms[i] = norms[i]**(0.5) return norms def mutual_range_distance(inputs, references): """Minimum distance from boundaries of a range """ mi = numpy.amin(references, 0) ma = numpy.amax(references, 0) norms = numpy.zeros(inputs.shape[0]) d = distance_matrix(inputs) for i in xrange(inputs.shape[0]): for 
j in xrange(inputs.shape[1]): if (inputs[i,j] < mi[j]) or \ (inputs[i,j] > ma[j]): norms[i] += numpy.min([abs(inputs[i,j]-mi[j]),\ abs(inputs[i,j]-ma[j])])**2 norms[i] = norms[i]**(0.5) norms[i] = norms[i] - numpy.min(d[:,i]) return norms #a = numpy.array([[2, 4, 6], [4, 3, 2], [5, -2, -1], [10, 11, 12], [15, 20, 31]]) #b = numpy.array([[10, 11, 12], [-1, -2, -3]]) #print distance_sum(a, b) #print a #print b #print distance_min(a, b) #print distance_mutual_min(a, b)
3,033
1,152
# # PySNMP MIB module CABH-QOS2-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CABH-QOS2-MIB # Produced by pysmi-0.3.4 at Mon Apr 29 17:26:31 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "SingleValueConstraint") clabProjCableHome, = mibBuilder.importSymbols("CLAB-DEF-MIB", "clabProjCableHome") ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex") InetAddress, InetAddressType, InetPortNumber = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType", "InetPortNumber") SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString") ObjectGroup, ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "ModuleCompliance", "NotificationGroup") MibIdentifier, iso, Bits, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter64, NotificationType, Integer32, Counter32, Gauge32, Unsigned32, ObjectIdentity, ModuleIdentity, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "MibIdentifier", "iso", "Bits", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter64", "NotificationType", "Integer32", "Counter32", "Gauge32", "Unsigned32", "ObjectIdentity", "ModuleIdentity", "IpAddress") RowStatus, TextualConvention, DisplayString, TimeStamp, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "TextualConvention", "DisplayString", "TimeStamp", "TruthValue") cabhQos2Mib = 
ModuleIdentity((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8)) cabhQos2Mib.setRevisions(('2005-04-08 00:00',)) if mibBuilder.loadTexts: cabhQos2Mib.setLastUpdated('200504080000Z') if mibBuilder.loadTexts: cabhQos2Mib.setOrganization('CableLabs Broadband Access Department') cabhQos2Mib2Notifications = MibIdentifier((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 0)) cabhQos2MibObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1)) cabhQos2Base = MibIdentifier((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 1)) cabhQos2PsIfAttributes = MibIdentifier((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 2)) cabhQos2PolicyHolderObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3)) cabhQos2DeviceObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 4)) cabhQos2SetToFactory = MibScalar((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 1, 1), TruthValue()).setMaxAccess("readwrite") if mibBuilder.loadTexts: cabhQos2SetToFactory.setStatus('current') cabhQos2LastSetToFactory = MibScalar((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 1, 2), TimeStamp()).setMaxAccess("readonly") if mibBuilder.loadTexts: cabhQos2LastSetToFactory.setStatus('current') cabhQos2PsIfAttribTable = MibTable((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 2, 1), ) if mibBuilder.loadTexts: cabhQos2PsIfAttribTable.setStatus('current') cabhQos2PsIfAttribEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 2, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex")) if mibBuilder.loadTexts: cabhQos2PsIfAttribEntry.setStatus('current') cabhQos2PsIfAttribNumPriorities = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 2, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly") if mibBuilder.loadTexts: cabhQos2PsIfAttribNumPriorities.setStatus('current') cabhQos2PsIfAttribNumQueues = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 2, 1, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly") if mibBuilder.loadTexts: cabhQos2PsIfAttribNumQueues.setStatus('current') cabhQos2PolicyHolderEnabled = 
MibScalar((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 1), TruthValue().clone('true')).setMaxAccess("readwrite") if mibBuilder.loadTexts: cabhQos2PolicyHolderEnabled.setStatus('current') cabhQos2PolicyAdmissionControl = MibScalar((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2))).clone('disabled')).setMaxAccess("readwrite") if mibBuilder.loadTexts: cabhQos2PolicyAdmissionControl.setStatus('current') cabhQos2NumActivePolicyHolder = MibScalar((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly") if mibBuilder.loadTexts: cabhQos2NumActivePolicyHolder.setStatus('current') cabhQos2PolicyTable = MibTable((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4), ) if mibBuilder.loadTexts: cabhQos2PolicyTable.setStatus('current') cabhQos2PolicyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1), ).setIndexNames((0, "CABH-QOS2-MIB", "cabhQos2PolicyOwner"), (0, "CABH-QOS2-MIB", "cabhQos2PolicyOwnerRuleId")) if mibBuilder.loadTexts: cabhQos2PolicyEntry.setStatus('current') cabhQos2PolicyOwner = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("operatorOnly", 1), ("homeUser", 2), ("operatorForHomeUser", 3), ("upnp", 4)))) if mibBuilder.loadTexts: cabhQos2PolicyOwner.setStatus('current') cabhQos2PolicyOwnerRuleId = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))) if mibBuilder.loadTexts: cabhQos2PolicyOwnerRuleId.setStatus('current') cabhQos2PolicyRuleOrder = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readcreate") if mibBuilder.loadTexts: 
cabhQos2PolicyRuleOrder.setStatus('current') cabhQos2PolicyAppDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 4), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2PolicyAppDomain.setStatus('current') cabhQos2PolicyAppName = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 5), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2PolicyAppName.setStatus('current') cabhQos2PolicyServiceProvDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 6), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2PolicyServiceProvDomain.setStatus('current') cabhQos2PolicyServiceName = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 7), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2PolicyServiceName.setStatus('current') cabhQos2PolicyPortDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 8), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2PolicyPortDomain.setStatus('current') cabhQos2PolicyPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 9), InetPortNumber()).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2PolicyPortNumber.setStatus('current') cabhQos2PolicyIpType = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 10), InetAddressType().clone('ipv4')).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2PolicyIpType.setStatus('current') cabhQos2PolicyIpProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 11), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate") if mibBuilder.loadTexts: 
cabhQos2PolicyIpProtocol.setStatus('current') cabhQos2PolicySrcIp = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 12), InetAddress().clone(hexValue="00000000")).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2PolicySrcIp.setStatus('current') cabhQos2PolicyDestIp = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 13), InetAddress().clone(hexValue="00000000")).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2PolicyDestIp.setStatus('current') cabhQos2PolicySrcPort = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 14), InetPortNumber()).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2PolicySrcPort.setStatus('current') cabhQos2PolicyDestPort = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 15), InetPortNumber()).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2PolicyDestPort.setStatus('current') cabhQos2PolicyTraffImpNum = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 16), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2PolicyTraffImpNum.setStatus('current') cabhQos2PolicyUserImportance = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 17), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2PolicyUserImportance.setStatus('current') cabhQos2PolicyRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 3, 4, 1, 18), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2PolicyRowStatus.setStatus('current') cabhQos2TrafficClassTable = MibTable((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 4, 1), ) if mibBuilder.loadTexts: cabhQos2TrafficClassTable.setStatus('current') cabhQos2TrafficClassEntry = MibTableRow((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 4, 1, 1), ).setIndexNames((0, "CABH-QOS2-MIB", "cabhQos2TrafficClassMethod"), (0, "CABH-QOS2-MIB", "cabhQos2TrafficClassIdx")) if 
mibBuilder.loadTexts: cabhQos2TrafficClassEntry.setStatus('current') cabhQos2TrafficClassMethod = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("static", 1), ("upnp", 2)))) if mibBuilder.loadTexts: cabhQos2TrafficClassMethod.setStatus('current') cabhQos2TrafficClassIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 4, 1, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))) if mibBuilder.loadTexts: cabhQos2TrafficClassIdx.setStatus('current') cabhQos2TrafficClassProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 4, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 256))).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2TrafficClassProtocol.setStatus('current') cabhQos2TrafficClassIpType = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 4, 1, 1, 4), InetAddressType().clone('ipv4')).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2TrafficClassIpType.setStatus('current') cabhQos2TrafficClassSrcIp = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 4, 1, 1, 5), InetAddress().clone(hexValue="00000000")).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2TrafficClassSrcIp.setStatus('current') cabhQos2TrafficClassDestIp = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 4, 1, 1, 6), InetAddress().clone(hexValue="00000000")).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2TrafficClassDestIp.setStatus('current') cabhQos2TrafficClassSrcPort = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 4, 1, 1, 7), InetPortNumber()).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2TrafficClassSrcPort.setStatus('current') cabhQos2TrafficClassDestPort = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 4, 1, 1, 8), InetPortNumber()).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2TrafficClassDestPort.setStatus('current') 
cabhQos2TrafficClassImpNum = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 4, 1, 1, 9), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2TrafficClassImpNum.setStatus('current') cabhQos2TrafficClassRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 1, 4, 1, 1, 10), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: cabhQos2TrafficClassRowStatus.setStatus('current') cabhQos2Conformance = MibIdentifier((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 2)) cabhQos2Compliances = MibIdentifier((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 2, 1)) cabhQos2Groups = MibIdentifier((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 2, 2)) cabhQos2Compliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 2, 1, 1)).setObjects(("CABH-QOS2-MIB", "cabhQos2Group"), ("CABH-QOS2-MIB", "cabhQos2ClassifierGroup")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cabhQos2Compliance = cabhQos2Compliance.setStatus('current') cabhQos2Group = ObjectGroup((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 2, 2, 1)).setObjects(("CABH-QOS2-MIB", "cabhQos2SetToFactory"), ("CABH-QOS2-MIB", "cabhQos2LastSetToFactory"), ("CABH-QOS2-MIB", "cabhQos2PsIfAttribNumPriorities"), ("CABH-QOS2-MIB", "cabhQos2PsIfAttribNumQueues"), ("CABH-QOS2-MIB", "cabhQos2PolicyHolderEnabled"), ("CABH-QOS2-MIB", "cabhQos2PolicyAdmissionControl"), ("CABH-QOS2-MIB", "cabhQos2NumActivePolicyHolder"), ("CABH-QOS2-MIB", "cabhQos2PolicyRuleOrder"), ("CABH-QOS2-MIB", "cabhQos2PolicyAppDomain"), ("CABH-QOS2-MIB", "cabhQos2PolicyAppName"), ("CABH-QOS2-MIB", "cabhQos2PolicyServiceProvDomain"), ("CABH-QOS2-MIB", "cabhQos2PolicyServiceName"), ("CABH-QOS2-MIB", "cabhQos2PolicyPortDomain"), ("CABH-QOS2-MIB", "cabhQos2PolicyPortNumber"), ("CABH-QOS2-MIB", "cabhQos2PolicyIpProtocol"), ("CABH-QOS2-MIB", "cabhQos2PolicyIpType"), ("CABH-QOS2-MIB", "cabhQos2PolicySrcIp"), ("CABH-QOS2-MIB", "cabhQos2PolicyDestIp"), ("CABH-QOS2-MIB", "cabhQos2PolicySrcPort"), ("CABH-QOS2-MIB", 
"cabhQos2PolicyDestPort"), ("CABH-QOS2-MIB", "cabhQos2PolicyTraffImpNum"), ("CABH-QOS2-MIB", "cabhQos2PolicyUserImportance"), ("CABH-QOS2-MIB", "cabhQos2PolicyRowStatus"), ("CABH-QOS2-MIB", "cabhQos2TrafficClassProtocol"), ("CABH-QOS2-MIB", "cabhQos2TrafficClassIpType"), ("CABH-QOS2-MIB", "cabhQos2PolicySrcIp"), ("CABH-QOS2-MIB", "cabhQos2PolicyDestIp"), ("CABH-QOS2-MIB", "cabhQos2PolicySrcPort"), ("CABH-QOS2-MIB", "cabhQos2PolicyDestPort"), ("CABH-QOS2-MIB", "cabhQos2PolicyTraffImpNum"), ("CABH-QOS2-MIB", "cabhQos2PolicyUserImportance"), ("CABH-QOS2-MIB", "cabhQos2PolicyRowStatus")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cabhQos2Group = cabhQos2Group.setStatus('current') cabhQos2ClassifierGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 4491, 2, 4, 8, 2, 2, 2)).setObjects(("CABH-QOS2-MIB", "cabhQos2TrafficClassProtocol"), ("CABH-QOS2-MIB", "cabhQos2TrafficClassIpType"), ("CABH-QOS2-MIB", "cabhQos2TrafficClassSrcIp"), ("CABH-QOS2-MIB", "cabhQos2TrafficClassDestIp"), ("CABH-QOS2-MIB", "cabhQos2TrafficClassSrcPort"), ("CABH-QOS2-MIB", "cabhQos2TrafficClassDestPort"), ("CABH-QOS2-MIB", "cabhQos2TrafficClassImpNum"), ("CABH-QOS2-MIB", "cabhQos2TrafficClassRowStatus")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cabhQos2ClassifierGroup = cabhQos2ClassifierGroup.setStatus('current') mibBuilder.exportSymbols("CABH-QOS2-MIB", cabhQos2PolicyAppName=cabhQos2PolicyAppName, cabhQos2PolicyEntry=cabhQos2PolicyEntry, cabhQos2TrafficClassProtocol=cabhQos2TrafficClassProtocol, cabhQos2TrafficClassRowStatus=cabhQos2TrafficClassRowStatus, cabhQos2SetToFactory=cabhQos2SetToFactory, cabhQos2Mib2Notifications=cabhQos2Mib2Notifications, cabhQos2PsIfAttribEntry=cabhQos2PsIfAttribEntry, cabhQos2PsIfAttribNumQueues=cabhQos2PsIfAttribNumQueues, cabhQos2PolicyHolderEnabled=cabhQos2PolicyHolderEnabled, cabhQos2PolicyIpProtocol=cabhQos2PolicyIpProtocol, PYSNMP_MODULE_ID=cabhQos2Mib, cabhQos2DeviceObjects=cabhQos2DeviceObjects, 
cabhQos2PolicyPortNumber=cabhQos2PolicyPortNumber, cabhQos2PolicyOwnerRuleId=cabhQos2PolicyOwnerRuleId, cabhQos2PsIfAttribTable=cabhQos2PsIfAttribTable, cabhQos2LastSetToFactory=cabhQos2LastSetToFactory, cabhQos2MibObjects=cabhQos2MibObjects, cabhQos2PsIfAttributes=cabhQos2PsIfAttributes, cabhQos2PolicyDestIp=cabhQos2PolicyDestIp, cabhQos2PolicyDestPort=cabhQos2PolicyDestPort, cabhQos2Compliances=cabhQos2Compliances, cabhQos2TrafficClassSrcPort=cabhQos2TrafficClassSrcPort, cabhQos2PolicyTraffImpNum=cabhQos2PolicyTraffImpNum, cabhQos2Conformance=cabhQos2Conformance, cabhQos2ClassifierGroup=cabhQos2ClassifierGroup, cabhQos2TrafficClassDestIp=cabhQos2TrafficClassDestIp, cabhQos2TrafficClassDestPort=cabhQos2TrafficClassDestPort, cabhQos2PolicyRowStatus=cabhQos2PolicyRowStatus, cabhQos2PolicySrcIp=cabhQos2PolicySrcIp, cabhQos2TrafficClassSrcIp=cabhQos2TrafficClassSrcIp, cabhQos2TrafficClassMethod=cabhQos2TrafficClassMethod, cabhQos2PolicySrcPort=cabhQos2PolicySrcPort, cabhQos2PolicyServiceName=cabhQos2PolicyServiceName, cabhQos2NumActivePolicyHolder=cabhQos2NumActivePolicyHolder, cabhQos2PolicyUserImportance=cabhQos2PolicyUserImportance, cabhQos2Compliance=cabhQos2Compliance, cabhQos2PsIfAttribNumPriorities=cabhQos2PsIfAttribNumPriorities, cabhQos2TrafficClassImpNum=cabhQos2TrafficClassImpNum, cabhQos2PolicyAdmissionControl=cabhQos2PolicyAdmissionControl, cabhQos2PolicyRuleOrder=cabhQos2PolicyRuleOrder, cabhQos2PolicyServiceProvDomain=cabhQos2PolicyServiceProvDomain, cabhQos2PolicyOwner=cabhQos2PolicyOwner, cabhQos2Groups=cabhQos2Groups, cabhQos2PolicyTable=cabhQos2PolicyTable, cabhQos2PolicyAppDomain=cabhQos2PolicyAppDomain, cabhQos2PolicyIpType=cabhQos2PolicyIpType, cabhQos2TrafficClassTable=cabhQos2TrafficClassTable, cabhQos2PolicyPortDomain=cabhQos2PolicyPortDomain, cabhQos2Mib=cabhQos2Mib, cabhQos2TrafficClassEntry=cabhQos2TrafficClassEntry, cabhQos2Group=cabhQos2Group, cabhQos2PolicyHolderObjects=cabhQos2PolicyHolderObjects, 
cabhQos2TrafficClassIpType=cabhQos2TrafficClassIpType, cabhQos2Base=cabhQos2Base, cabhQos2TrafficClassIdx=cabhQos2TrafficClassIdx)
17,984
8,532
from django.shortcuts import redirect, render
from .models import *
from datetime import date
from brain import ipfunc


def home(request):
    """Render the Astronomy Picture of the Day page.

    A POST with a 'data' field re-queries APOD for that specific date.
    """
    apod_records = Apod.getObjectOrRequest()
    requested_date = request.POST.get('data')
    if request.method == 'POST':
        apod_records = Apod.getObjectOrRequest(str(requested_date))
    return render(request, 'views/apod.html', {'dados': apod_records[0]})


def marsWeather(request):
    """Static Mars weather page."""
    return render(request, 'views/mars-weather.html')


def mrp(request):
    """Mars Rover Photos landing page."""
    return render(request, 'views/mrp.html')


def _rover_context(request, rover_name, mission_days):
    """Build the template context shared by the three rover views.

    Defaults to sol-1 photos; a POST with a non-empty 'sol' field queries
    that sol instead.  'dados'/'dias' keys are what mrpview.html expects.
    """
    photos = MRP.objects.filter(sol=1, rover_name__icontains=rover_name).order_by('id')
    requested_sol = request.POST.get('sol')
    if request.method == 'POST' and requested_sol:
        photos = MRP.getObjectOrRequest(requested_sol, rover_name)
    return {'dados': photos, 'dias': mission_days}


def mrpspirit(request):
    """Photos from the Spirit rover (mission lasted 609 sols)."""
    return render(request, 'views/mrpview.html', _rover_context(request, "spirit", 609))


def mrpopportunity(request):
    """Photos from the Opportunity rover (mission lasted ~5100 sols)."""
    return render(request, 'views/mrpview.html', _rover_context(request, "opportunity", 5100))


def mrpcuriosity(request):
    """Photos from Curiosity; mission length is computed from its 2012-08-06 landing."""
    mission_clock = date.today() - date(2012, 8, 6)
    return render(request, 'views/mrpview.html',
                  _rover_context(request, "curiosity", mission_clock.days - 90))


def trekImagery(request):
    """Static Trek imagery page."""
    return render(request, 'views/trek-imagery.html')


def nasaSearch(request):
    """NASA media search; GET renders results, POST drills into one item.

    NOTE(review): a POST without a 'q' query string leaves the result set as
    None, which getContextNasaSearch then receives — confirm that is handled.
    """
    query = request.GET.get('q')
    results = getRequestNasaSearch(query) if query else None
    if request.method == 'POST':
        selected = int(request.POST.get('index'))
        return render(request, 'views/search-content.html',
                      getContextNasaSearch(results, selected))
    return render(request, 'views/search.html', {'pesquisa': query, 'dados': results})


def crewMembers(request):
    """Current crew members page."""
    return render(request, 'views/crewmembers.html', getCrew())


def handlerErrorPage(request, exception):
    """Custom 404 handler."""
    return render(request, 'views/404.html')
2,440
851
from django.db import models from django.utils import timezone from sizefield.models import FileSizeField # Create your models here. class Video(models.Model): sha224 = models.CharField(max_length=56, unique=True) filename = models.CharField(max_length=200) dropbox_directory = models.CharField(max_length=200) mime_type = models.CharField(max_length=200) date_added = models.DateTimeField(default=timezone.now, editable=False) size = FileSizeField() class Meta: abstract = True def __str__(self): return self.filename or self.sha224_id class SourceVideo(Video): vimeo_uri = models.IntegerField() youtube_id = models.CharField(max_length=30, blank=True) def __str__(self): return self.filename+" ("+str(self.vimeo_uri)+")"
805
261
import os.path import json from astral import Astral appdata_folder = os.path.join(os.environ["LOCALAPPDATA"], "Nightshift") def set_location(latitude, longitude): print "Setting location to {0}, {1}".format(latitude, longitude) try: if not os.path.exists(appdata_folder): os.mkdir(appdata_folder) file_obj = open(os.path.join(appdata_folder, "location.json"), "w") json.dump({"longitude": longitude, "latitude": latitude}, file_obj) file_obj.close() return True except: print "Could not save the location and sunrise/sunset." raise def set_location_city(city): print "Trying to set location to", city astral_obj = Astral() try: city_data = astral_obj[city] except KeyError: print "Sorry, but this city does not exist in the city database." print "City names are capitalized and in English (e.g. Rome)" return False set_location(city_data.latitude, city_data.longitude) return True def get_location(): print "Getting saved location." try: file_obj = open(os.path.join(appdata_folder, "location.json"), "r") result = json.load(file_obj) file_obj.close() return {"longitude": result["longitude"], "latitude": result["latitude"]} except IOError: print "Could not read from location file." print "Try setting your location with" print "Nightshift.exe -s latitude longitude" print "or" print "Nightshift.exe -s city" raise except: print "Could not get saved location." raise
1,681
498
# BSD 3-Clause License; see https://github.com/jpivarski/awkward-1.0/blob/master/LICENSE

# Reducer operations for awkward1 arrays.  Each reducer has two paths:
# axis=None flattens the array completely and folds per-buffer numpy results
# (the recursive `reduce` folds right-to-left; depth equals the number of
# flattened buffers), while an integer axis delegates to the layout's own
# C++-backed reducer and re-wraps the result with the array's behavior.

from __future__ import absolute_import

import numpy

import awkward1._util
import awkward1._connect._numpy
import awkward1.layout
import awkward1.operations.convert


def count(array, axis=None, keepdims=False, maskidentity=False):
    """Count elements; axis=None counts everything in the flattened array."""
    layout = awkward1.operations.convert.tolayout(array, allowrecord=False, allowother=False)
    if axis is None:
        def reduce(xs):
            if len(xs) == 1:
                return xs[0]
            else:
                return xs[0] + reduce(xs[1:])
        return reduce([numpy.size(x) for x in awkward1._util.completely_flatten(layout)])
    else:
        behavior = awkward1._util.behaviorof(array)
        return awkward1._util.wrap(layout.count(axis=axis, mask=maskidentity, keepdims=keepdims), behavior)


@awkward1._connect._numpy.implements(numpy.count_nonzero)
def count_nonzero(array, axis=None, keepdims=False, maskidentity=False):
    """Count non-zero elements (also dispatched from numpy.count_nonzero)."""
    layout = awkward1.operations.convert.tolayout(array, allowrecord=False, allowother=False)
    if axis is None:
        def reduce(xs):
            if len(xs) == 1:
                return xs[0]
            else:
                return xs[0] + reduce(xs[1:])
        return reduce([numpy.count_nonzero(x) for x in awkward1._util.completely_flatten(layout)])
    else:
        behavior = awkward1._util.behaviorof(array)
        return awkward1._util.wrap(layout.count_nonzero(axis=axis, mask=maskidentity, keepdims=keepdims), behavior)


@awkward1._connect._numpy.implements(numpy.sum)
def sum(array, axis=None, keepdims=False, maskidentity=False):
    """Sum elements (also dispatched from numpy.sum).  Shadows builtin sum."""
    layout = awkward1.operations.convert.tolayout(array, allowrecord=False, allowother=False)
    if axis is None:
        def reduce(xs):
            if len(xs) == 1:
                return xs[0]
            else:
                return xs[0] + reduce(xs[1:])
        return reduce([numpy.sum(x) for x in awkward1._util.completely_flatten(layout)])
    else:
        behavior = awkward1._util.behaviorof(array)
        return awkward1._util.wrap(layout.sum(axis=axis, mask=maskidentity, keepdims=keepdims), behavior)


@awkward1._connect._numpy.implements(numpy.prod)
def prod(array, axis=None, keepdims=False, maskidentity=False):
    """Multiply elements (also dispatched from numpy.prod)."""
    layout = awkward1.operations.convert.tolayout(array, allowrecord=False, allowother=False)
    if axis is None:
        def reduce(xs):
            if len(xs) == 1:
                return xs[0]
            else:
                return xs[0] * reduce(xs[1:])
        return reduce([numpy.prod(x) for x in awkward1._util.completely_flatten(layout)])
    else:
        behavior = awkward1._util.behaviorof(array)
        return awkward1._util.wrap(layout.prod(axis=axis, mask=maskidentity, keepdims=keepdims), behavior)


@awkward1._connect._numpy.implements(numpy.any)
def any(array, axis=None, keepdims=False, maskidentity=False):
    """Logical OR over elements (also dispatched from numpy.any)."""
    layout = awkward1.operations.convert.tolayout(array, allowrecord=False, allowother=False)
    if axis is None:
        def reduce(xs):
            if len(xs) == 1:
                return xs[0]
            else:
                # Python `or` here: short-circuits on the first truthy buffer result.
                return xs[0] or reduce(xs[1:])
        return reduce([numpy.any(x) for x in awkward1._util.completely_flatten(layout)])
    else:
        behavior = awkward1._util.behaviorof(array)
        return awkward1._util.wrap(layout.any(axis=axis, mask=maskidentity, keepdims=keepdims), behavior)


@awkward1._connect._numpy.implements(numpy.all)
def all(array, axis=None, keepdims=False, maskidentity=False):
    """Logical AND over elements (also dispatched from numpy.all)."""
    layout = awkward1.operations.convert.tolayout(array, allowrecord=False, allowother=False)
    if axis is None:
        def reduce(xs):
            if len(xs) == 1:
                return xs[0]
            else:
                return xs[0] and reduce(xs[1:])
        return reduce([numpy.all(x) for x in awkward1._util.completely_flatten(layout)])
    else:
        behavior = awkward1._util.behaviorof(array)
        return awkward1._util.wrap(layout.all(axis=axis, mask=maskidentity, keepdims=keepdims), behavior)


@awkward1._connect._numpy.implements(numpy.min)
def min(array, axis=None, keepdims=False, maskidentity=True):
    """Minimum element; note maskidentity defaults to True (no identity for min)."""
    layout = awkward1.operations.convert.tolayout(array, allowrecord=False, allowother=False)
    if axis is None:
        def reduce(xs):
            if len(xs) == 0:
                # Entirely empty input: there is no minimum, so return None.
                return None
            elif len(xs) == 1:
                return xs[0]
            else:
                x, y = xs[0], reduce(xs[1:])
                return x if x < y else y
        tmp = awkward1._util.completely_flatten(layout)
        # Empty buffers are skipped so numpy.min never sees a zero-length array.
        return reduce([numpy.min(x) for x in tmp if len(x) > 0])
    else:
        behavior = awkward1._util.behaviorof(array)
        return awkward1._util.wrap(layout.min(axis=axis, mask=maskidentity, keepdims=keepdims), behavior)


@awkward1._connect._numpy.implements(numpy.max)
def max(array, axis=None, keepdims=False, maskidentity=True):
    """Maximum element; mirror image of `min` above."""
    layout = awkward1.operations.convert.tolayout(array, allowrecord=False, allowother=False)
    if axis is None:
        def reduce(xs):
            if len(xs) == 0:
                return None
            elif len(xs) == 1:
                return xs[0]
            else:
                x, y = xs[0], reduce(xs[1:])
                return x if x > y else y
        tmp = awkward1._util.completely_flatten(layout)
        return reduce([numpy.max(x) for x in tmp if len(x) > 0])
    else:
        behavior = awkward1._util.behaviorof(array)
        return awkward1._util.wrap(layout.max(axis=axis, mask=maskidentity, keepdims=keepdims), behavior)


### The following are not strictly reducers, but are defined in terms of reducers and ufuncs.
# They all suppress invalid-value warnings so that 0/0 yields NaN silently.

def moment(x, n, weight=None, axis=None, keepdims=False):
    """n-th (optionally weighted) raw moment: sum(w * x**n) / sum(w)."""
    with numpy.errstate(invalid="ignore"):
        if weight is None:
            sumw = count(x, axis=axis, keepdims=keepdims)
            sumwxn = sum(x**n, axis=axis, keepdims=keepdims)
        else:
            # x*0 + weight broadcasts the weights to x's structure.
            sumw = sum(x*0 + weight, axis=axis, keepdims=keepdims)
            # NOTE(review): weighted moment uses (x*weight)**n, i.e. the weight
            # is raised to the n-th power too -- confirm this is intended.
            sumwxn = sum((x*weight)**n, axis=axis, keepdims=keepdims)
        return numpy.true_divide(sumwxn, sumw)


@awkward1._connect._numpy.implements(numpy.mean)
def mean(x, weight=None, axis=None, keepdims=False):
    """(Weighted) arithmetic mean; dispatched from numpy.mean."""
    with numpy.errstate(invalid="ignore"):
        if weight is None:
            sumw = count(x, axis=axis, keepdims=keepdims)
            sumwx = sum(x, axis=axis, keepdims=keepdims)
        else:
            sumw = sum(x*0 + weight, axis=axis, keepdims=keepdims)
            sumwx = sum(x*weight, axis=axis, keepdims=keepdims)
        return numpy.true_divide(sumwx, sumw)


@awkward1._connect._numpy.implements(numpy.var)
def var(x, weight=None, ddof=0, axis=None, keepdims=False):
    """(Weighted) variance with optional delta-degrees-of-freedom correction."""
    with numpy.errstate(invalid="ignore"):
        xmean = mean(x, weight=weight, axis=axis, keepdims=keepdims)
        if weight is None:
            sumw = count(x, axis=axis, keepdims=keepdims)
            sumwxx = sum((x - xmean)**2, axis=axis, keepdims=keepdims)
        else:
            sumw = sum(x*0 + weight, axis=axis, keepdims=keepdims)
            sumwxx = sum((x - xmean)**2 * weight, axis=axis, keepdims=keepdims)
        if ddof != 0:
            # Bessel-style correction: multiply by sumw / (sumw - ddof).
            return numpy.true_divide(sumwxx, sumw) * numpy.true_divide(sumw, sumw - ddof)
        else:
            return numpy.true_divide(sumwxx, sumw)


@awkward1._connect._numpy.implements(numpy.std)
def std(x, weight=None, ddof=0, axis=None, keepdims=False):
    """(Weighted) standard deviation: sqrt of `var`."""
    with numpy.errstate(invalid="ignore"):
        return numpy.sqrt(var(x, weight=weight, ddof=ddof, axis=axis, keepdims=keepdims))


def covar(x, y, weight=None, axis=None, keepdims=False):
    """(Weighted) covariance of x and y."""
    with numpy.errstate(invalid="ignore"):
        xmean = mean(x, weight=weight, axis=axis, keepdims=keepdims)
        ymean = mean(y, weight=weight, axis=axis, keepdims=keepdims)
        if weight is None:
            sumw = count(x, axis=axis, keepdims=keepdims)
            sumwxy = sum((x - xmean)*(y - ymean), axis=axis, keepdims=keepdims)
        else:
            sumw = sum(x*0 + weight, axis=axis, keepdims=keepdims)
            sumwxy = sum((x - xmean)*(y - ymean)*weight, axis=axis, keepdims=keepdims)
        return numpy.true_divide(sumwxy, sumw)


def corr(x, y, weight=None, axis=None, keepdims=False):
    """(Weighted) Pearson correlation coefficient of x and y."""
    with numpy.errstate(invalid="ignore"):
        xmean = mean(x, weight=weight, axis=axis, keepdims=keepdims)
        ymean = mean(y, weight=weight, axis=axis, keepdims=keepdims)
        xdiff = x - xmean
        ydiff = y - ymean
        if weight is None:
            sumwxx = sum(xdiff**2, axis=axis, keepdims=keepdims)
            sumwyy = sum(ydiff**2, axis=axis, keepdims=keepdims)
            sumwxy = sum(xdiff*ydiff, axis=axis, keepdims=keepdims)
        else:
            sumwxx = sum((xdiff**2)*weight, axis=axis, keepdims=keepdims)
            sumwyy = sum((ydiff**2)*weight, axis=axis, keepdims=keepdims)
            sumwxy = sum((xdiff*ydiff)*weight, axis=axis, keepdims=keepdims)
        return numpy.true_divide(sumwxy, numpy.sqrt(sumwxx * sumwyy))


def linearfit(x, y, weight=None, axis=None, keepdims=False):
    """(Weighted) least-squares line fit of y against x.

    Returns a record (or record array) with fields "intercept", "slope",
    "intercept_error", and "slope_error", tagged "__record__" = "LinearFit".
    """
    with numpy.errstate(invalid="ignore"):
        # Accumulate the standard normal-equation sums.
        if weight is None:
            sumw = count(x, axis=axis, keepdims=keepdims)
            sumwx = sum(x, axis=axis, keepdims=keepdims)
            sumwy = sum(y, axis=axis, keepdims=keepdims)
            sumwxx = sum(x**2, axis=axis, keepdims=keepdims)
            sumwxy = sum(x*y, axis=axis, keepdims=keepdims)
        else:
            sumw = sum(x*0 + weight, axis=axis, keepdims=keepdims)
            sumwx = sum(x*weight, axis=axis, keepdims=keepdims)
            sumwy = sum(y*weight, axis=axis, keepdims=keepdims)
            sumwxx = sum((x**2)*weight, axis=axis, keepdims=keepdims)
            sumwxy = sum(x*y*weight, axis=axis, keepdims=keepdims)
        delta = (sumw*sumwxx) - (sumwx*sumwx)
        intercept = numpy.true_divide(((sumwxx*sumwy) - (sumwx*sumwxy)), delta)
        slope = numpy.true_divide(((sumw*sumwxy) - (sumwx*sumwy)), delta)
        intercept_error = numpy.sqrt(numpy.true_divide(sumwxx, delta))
        slope_error = numpy.sqrt(numpy.true_divide(sumw, delta))
        # Normalize each result to a layout so they can be zipped into a RecordArray.
        intercept = awkward1.operations.convert.tolayout(intercept, allowrecord=True, allowother=True)
        slope = awkward1.operations.convert.tolayout(slope, allowrecord=True, allowother=True)
        intercept_error = awkward1.operations.convert.tolayout(intercept_error, allowrecord=True, allowother=True)
        slope_error = awkward1.operations.convert.tolayout(slope_error, allowrecord=True, allowother=True)
        # All-scalar inputs produce a single Record instead of a RecordArray.
        scalar = not isinstance(intercept, awkward1.layout.Content) and not isinstance(slope, awkward1.layout.Content) and not isinstance(intercept_error, awkward1.layout.Content) and not isinstance(slope_error, awkward1.layout.Content)
        if not isinstance(intercept, (awkward1.layout.Content, awkward1.layout.Record)):
            intercept = awkward1.layout.NumpyArray(numpy.array([intercept]))
        if not isinstance(slope, (awkward1.layout.Content, awkward1.layout.Record)):
            slope = awkward1.layout.NumpyArray(numpy.array([slope]))
        if not isinstance(intercept_error, (awkward1.layout.Content, awkward1.layout.Record)):
            intercept_error = awkward1.layout.NumpyArray(numpy.array([intercept_error]))
        if not isinstance(slope_error, (awkward1.layout.Content, awkward1.layout.Record)):
            slope_error = awkward1.layout.NumpyArray(numpy.array([slope_error]))
        out = awkward1.layout.RecordArray([intercept, slope, intercept_error, slope_error], ["intercept", "slope", "intercept_error", "slope_error"])
        out.setparameter("__record__", "LinearFit")
        if scalar:
            out = out[0]
        return awkward1._util.wrap(out, awkward1._util.behaviorof(x, y))


def softmax(x, axis=None, keepdims=False):
    """Softmax: exp(x) normalized by the sum of exp(x) along the axis.

    NOTE(review): exp is not shifted by max(x), so large inputs can overflow.
    """
    with numpy.errstate(invalid="ignore"):
        expx = numpy.exp(x)
        denom = sum(expx, axis=axis, keepdims=keepdims)
        return numpy.true_divide(expx, denom)


# Export every public name defined here, excluding imported modules.
__all__ = [x for x in list(globals()) if not x.startswith("_") and x not in ("collections", "numpy", "awkward1")]
12,140
4,064
import os
import re
import sys
import warnings

__version__ = '1.4.3'

# Matches one "KEY=value" (or "KEY: value") line; value may be single-quoted,
# double-quoted, or unquoted, optionally preceded by `export` and followed by
# a `#` comment.  (Multi-line layout is required: re.VERBOSE comments run to
# end of line.)
line_re = re.compile(r"""
    ^
    (?:export\s+)?      # optional export
    ([\w\.]+)           # key
    (?:\s*=\s*|:\s+?)   # separator
    (                   # optional value begin
        '(?:\'|[^'])*'  # single quoted value
        |               # or
        "(?:\"|[^"])*"  # double quoted value
        |               # or
        [^#\n]+         # unquoted value
    )?                  # value end
    (?:\s*\#.*)?        # optional comment
    $
""", re.VERBOSE)

# Matches $VAR / ${VAR} references, capturing a possible escaping backslash.
variable_re = re.compile(r"""
    (\\)?               # is it escaped with a backslash?
    (\$)                # literal $
    (                   # collect braces with var for sub
        \{?             # allow brace wrapping
        ([A-Z0-9_]+)    # match the variable
        \}?             # closing brace
    )                   # braces end
""", re.IGNORECASE | re.VERBOSE)

# direnv-style directives whose values are lists of further files to source.
overrides = ('source_env', 'source_up')


def read_dotenv(dotenv=None, override=False):
    """
    Read a .env file into os.environ.

    If not given a path to a dotenv path, does filthy magic stack backtracking
    to find manage.py and then find the dotenv.

    If tests rely on .env files, setting the overwrite flag to True is a safe way
    to ensure tests run consistently across all environments.

    :param override: True if values in .env should override system variables.
    """
    if dotenv is None:
        # Default: look for .env next to the *caller's* file.
        frame_filename = sys._getframe().f_back.f_code.co_filename
        dotenv = os.path.join(os.path.dirname(frame_filename), '.env')
    if os.path.isdir(dotenv) and os.path.isfile(os.path.join(dotenv, '.env')):
        # A directory was given: use the .env file inside it.
        dotenv = os.path.join(dotenv, '.env')
    if os.path.exists(dotenv):
        with open(dotenv) as f:
            env = parse_dotenv(f.read())
        # First pass: apply plain variables (override controls clobbering).
        for k, v in env.items():
            if k in overrides:
                continue
            if override:
                os.environ[k] = v
            else:
                os.environ.setdefault(k, v)
        # Second pass: recursively source any referenced env files.
        for k, v in env.items():
            if k not in overrides:
                continue
            for fname in v:
                read_dotenv(fname, override)
    else:
        warnings.warn("Not reading {0} - it doesn't exist.".format(dotenv), stacklevel=2)


def parse_dotenv(content):
    """Parse dotenv-formatted text into a dict (no os.environ mutation here)."""
    env = {}

    def replace(variable):
        """Substitute variables in a value either from `os.environ` or from
        previously declared variable that is still in our `env`"""
        for parts in variable_re.findall(variable):
            # parts = (backslash, '$', '{VAR}'-style group, 'VAR')
            if parts[0] == '\\':
                # Variable is escaped, don't replace it
                replaced = ''.join(parts[1:-1])
            else:
                # Replace it with the value from the environment
                replacement = os.environ.get(parts[-1])
                if not replacement:
                    replacement = env.get(parts[-1], '')
                # NOTE(review): values declared earlier in this file win over
                # os.environ here -- confirm that precedence is intended.
                replaced = env.get(parts[-1], replacement)
            variable = variable.replace(''.join(parts[0:-1]), replaced)
        return variable

    for line in content.splitlines():
        m1 = line_re.search(line)
        if m1:
            key, value = m1.groups()
            if value is None:
                value = ''
            # Remove leading/trailing whitespace
            value = value.strip()
            # Remove surrounding quotes
            m2 = re.match(r'^([\'"])(.*)\1$', value)
            if m2:
                quotemark, value = m2.groups()
            else:
                quotemark = None
            # Unescape all chars except $ so variables can be escaped properly
            if quotemark == '"':
                value = re.sub(r'\\([^$])', r'\1', value)
            # Single-quoted values are literal: no variable substitution.
            if quotemark != "'":
                value = replace(value)
            env[key] = value
        elif not re.search(r'^\s*(?:#.*)?$', line):  # not comment or blank
            # Non-KEY=value line: may be a source_env / source_up directive.
            fname = None
            for prefix in overrides:
                if prefix not in line:
                    continue
                fname = line.split(prefix)[-1].strip()
                fname = replace(fname)
                if fname.startswith('~'):
                    fname = os.path.expanduser(fname)
                # Directive values accumulate as a list of file names.
                exists = env.get(prefix)
                if not exists:
                    exists = [fname, ]
                else:
                    exists.append(fname)
                env[prefix] = exists
                break
            if not fname:
                warnings.warn(
                    "Line {0} doesn't match format".format(repr(line)),
                    SyntaxWarning
                )
    return env
4,758
1,328
#!/usr/bin/env python # -*- coding: utf-8 -*- """ This experiment was created using PsychoPy3 Experiment Builder (v2020.2.4post1), on October 27, 2020, at 14:06 If you publish work using this script the most relevant publication is: Peirce J, Gray JR, Simpson S, MacAskill M, Höchenberger R, Sogo H, Kastman E, Lindeløv JK. (2019) PsychoPy2: Experiments in behavior made easy Behav Res 51: 195. https://doi.org/10.3758/s13428-018-01193-y """ from __future__ import absolute_import, division from psychopy import locale_setup from psychopy import prefs from psychopy import sound, gui, visual, core, data, event, logging, clock from psychopy.constants import (NOT_STARTED, STARTED, PLAYING, PAUSED, STOPPED, FINISHED, PRESSED, RELEASED, FOREVER) import numpy as np # whole numpy lib is available, prepend 'np.' from numpy import (sin, cos, tan, log, log10, pi, average, sqrt, std, deg2rad, rad2deg, linspace, asarray) from numpy.random import random, randint, normal, shuffle import os # handy system and path functions import sys # to get file system encoding from psychopy.hardware import keyboard # Ensure that relative paths start from the same directory as this script _thisDir = os.path.dirname(os.path.abspath(__file__)) os.chdir(_thisDir) # Store info about the experiment session psychopyVersion = '2020.2.4post1' expName = 'ContrastDetection' # from the Builder filename that created this script expInfo = {'participant': 's_001', 'ori': '10'} dlg = gui.DlgFromDict(dictionary=expInfo, sort_keys=False, title=expName) if dlg.OK == False: core.quit() # user pressed cancel expInfo['date'] = data.getDateStr() # add a simple timestamp expInfo['expName'] = expName expInfo['psychopyVersion'] = psychopyVersion # Data file name stem = absolute path + name; later add .psyexp, .csv, .log, etc filename = _thisDir + os.sep + 'data' + os.sep + '%s_%s' % (expInfo['participant'], expInfo['date']) # An ExperimentHandler isn't essential but helps with data saving thisExp = 
data.ExperimentHandler(name=expName, version='', extraInfo=expInfo, runtimeInfo=None, originPath='C:\\Users\\Ryan Hunsaker\\psychopy\\PsychoPy3 Experiments\\ContrastDetection.py', savePickle=True, saveWideText=True, dataFileName=filename) # save a log file for detail verbose info logFile = logging.LogFile(filename+'.log', level=logging.EXP) logging.console.setLevel(logging.WARNING) # this outputs to the screen, not a file endExpNow = False # flag for 'escape' or other condition => quit the exp frameTolerance = 0.001 # how close to onset before 'same' frame # Start Code - component code to be run before the window creation # Setup the Window win = visual.Window( size=[2496, 1664], fullscr=True, screen=0, winType='pyglet', allowGUI=False, allowStencil=False, monitor='testMonitor', color=[0,0,0], colorSpace='rgb', blendMode='avg', useFBO=True) # store frame rate of monitor if we can measure it expInfo['frameRate'] = win.getActualFrameRate() if expInfo['frameRate'] != None: frameDur = 1.0 / round(expInfo['frameRate']) else: frameDur = 1.0 / 60.0 # could not measure, so guess # create a default keyboard (e.g. 
to check for escape) defaultKeyboard = keyboard.Keyboard() # Initialize components for Routine "instr" instrClock = core.Clock() instructions = visual.TextStim(win=win, name='instructions', text="Press 'up' if you see the stimulus, 'down' if you didn't.\n\nAny key to start", font='Atkinson Hyperlegible', pos=[0, 0], height=0.1, wrapWidth=None, ori=0, color=[1, 1, 1], colorSpace='rgb', opacity=1, languageStyle='LTR', depth=0.0); ready = keyboard.Keyboard() # Initialize components for Routine "trial" trialClock = core.Clock() fixation = visual.GratingStim( win=win, name='fixation',units='pix', tex=None, mask=None, ori=0, pos=[0, 0], size=[25, 25], sf=1, phase=0.0, color=[1, 1, 1], colorSpace='rgb', opacity=1,blendmode='avg', texRes=512, interpolate=True, depth=0.0) gabor = visual.GratingStim( win=win, name='gabor',units='pix', tex='sin', mask='gauss', ori=expInfo['ori'], pos=[0, 0], size=[1024,1024], sf=0.025, phase=1.0, color='white', colorSpace='rgb', opacity=1,blendmode='avg', texRes=512, interpolate=True, depth=-1.0) resp = keyboard.Keyboard() # Create some handy timers globalClock = core.Clock() # to track the time since experiment started routineTimer = core.CountdownTimer() # to track time remaining of each (non-slip) routine # ------Prepare to start Routine "instr"------- continueRoutine = True # update component parameters for each repeat ready.keys = [] ready.rt = [] _ready_allKeys = [] # keep track of which components have finished instrComponents = [instructions, ready] for thisComponent in instrComponents: thisComponent.tStart = None thisComponent.tStop = None thisComponent.tStartRefresh = None thisComponent.tStopRefresh = None if hasattr(thisComponent, 'status'): thisComponent.status = NOT_STARTED # reset timers t = 0 _timeToFirstFrame = win.getFutureFlipTime(clock="now") instrClock.reset(-_timeToFirstFrame) # t0 is time of first possible flip frameN = -1 # -------Run Routine "instr"------- while continueRoutine: # get current time t = 
instrClock.getTime() tThisFlip = win.getFutureFlipTime(clock=instrClock) tThisFlipGlobal = win.getFutureFlipTime(clock=None) frameN = frameN + 1 # number of completed frames (so 0 is the first frame) # update/draw components on each frame # *instructions* updates if instructions.status == NOT_STARTED and tThisFlip >= 0-frameTolerance: # keep track of start time/frame for later instructions.frameNStart = frameN # exact frame index instructions.tStart = t # local t and not account for scr refresh instructions.tStartRefresh = tThisFlipGlobal # on global time win.timeOnFlip(instructions, 'tStartRefresh') # time at next scr refresh instructions.setAutoDraw(True) # *ready* updates waitOnFlip = False if ready.status == NOT_STARTED and tThisFlip >= 0-frameTolerance: # keep track of start time/frame for later ready.frameNStart = frameN # exact frame index ready.tStart = t # local t and not account for scr refresh ready.tStartRefresh = tThisFlipGlobal # on global time win.timeOnFlip(ready, 'tStartRefresh') # time at next scr refresh ready.status = STARTED # keyboard checking is just starting waitOnFlip = True win.callOnFlip(ready.clock.reset) # t=0 on next screen flip win.callOnFlip(ready.clearEvents, eventType='keyboard') # clear events on next screen flip if ready.status == STARTED and not waitOnFlip: theseKeys = ready.getKeys(keyList=None, waitRelease=False) _ready_allKeys.extend(theseKeys) if len(_ready_allKeys): ready.keys = _ready_allKeys[-1].name # just the last key pressed ready.rt = _ready_allKeys[-1].rt # a response ends the routine continueRoutine = False # check for quit (typically the Esc key) if endExpNow or defaultKeyboard.getKeys(keyList=["escape"]): core.quit() # check if all components have finished if not continueRoutine: # a component has requested a forced-end of Routine break continueRoutine = False # will revert to True if at least one component still running for thisComponent in instrComponents: if hasattr(thisComponent, "status") and 
thisComponent.status != FINISHED: continueRoutine = True break # at least one component has not yet finished # refresh the screen if continueRoutine: # don't flip if this routine is over or we'll get a blank screen win.flip() # -------Ending Routine "instr"------- for thisComponent in instrComponents: if hasattr(thisComponent, "setAutoDraw"): thisComponent.setAutoDraw(False) thisExp.addData('instructions.started', instructions.tStartRefresh) thisExp.addData('instructions.stopped', instructions.tStopRefresh) # check responses if ready.keys in ['', [], None]: # No response was made ready.keys = None thisExp.addData('ready.keys',ready.keys) if ready.keys != None: # we had a response thisExp.addData('ready.rt', ready.rt) thisExp.addData('ready.started', ready.tStartRefresh) thisExp.addData('ready.stopped', ready.tStopRefresh) thisExp.nextEntry() # the Routine "instr" was not non-slip safe, so reset the non-slip timer routineTimer.reset() # --------Prepare to start Staircase "trials" -------- # set up handler to look after next chosen value etc trials = data.StairHandler(startVal=0.9, extraInfo=expInfo, stepSizes=asarray([1,1,0.9,0.9,0.8,0.8,0.6, 0.6, 0.4,0.4,0.2]), stepType='log', nReversals=1, nTrials=30.0, nUp=1, nDown=3, minVal=0.0, maxVal=1.0, originPath=-1, name='trials') thisExp.addLoop(trials) # add the loop to the experiment level = thisTrial = 0.9 # initialise some vals for thisTrial in trials: currentLoop = trials level = thisTrial # ------Prepare to start Routine "trial"------- continueRoutine = True routineTimer.add(2.500000) # update component parameters for each repeat gabor.setColor([level, level, level], colorSpace='rgb') resp.keys = [] resp.rt = [] _resp_allKeys = [] # keep track of which components have finished trialComponents = [fixation, gabor, resp] for thisComponent in trialComponents: thisComponent.tStart = None thisComponent.tStop = None thisComponent.tStartRefresh = None thisComponent.tStopRefresh = None if hasattr(thisComponent, 'status'): 
thisComponent.status = NOT_STARTED # reset timers t = 0 _timeToFirstFrame = win.getFutureFlipTime(clock="now") trialClock.reset(-_timeToFirstFrame) # t0 is time of first possible flip frameN = -1 # -------Run Routine "trial"------- while continueRoutine and routineTimer.getTime() > 0: # get current time t = trialClock.getTime() tThisFlip = win.getFutureFlipTime(clock=trialClock) tThisFlipGlobal = win.getFutureFlipTime(clock=None) frameN = frameN + 1 # number of completed frames (so 0 is the first frame) # update/draw components on each frame # *fixation* updates if fixation.status == NOT_STARTED and tThisFlip >= 0.0-frameTolerance: # keep track of start time/frame for later fixation.frameNStart = frameN # exact frame index fixation.tStart = t # local t and not account for scr refresh fixation.tStartRefresh = tThisFlipGlobal # on global time win.timeOnFlip(fixation, 'tStartRefresh') # time at next scr refresh fixation.setAutoDraw(True) if fixation.status == STARTED: # is it time to stop? (based on global clock, using actual start) if tThisFlipGlobal > fixation.tStartRefresh + 0.5-frameTolerance: # keep track of stop time/frame for later fixation.tStop = t # not accounting for scr refresh fixation.frameNStop = frameN # exact frame index win.timeOnFlip(fixation, 'tStopRefresh') # time at next scr refresh fixation.setAutoDraw(False) # *gabor* updates if gabor.status == NOT_STARTED and tThisFlip >= 0.5-frameTolerance: # keep track of start time/frame for later gabor.frameNStart = frameN # exact frame index gabor.tStart = t # local t and not account for scr refresh gabor.tStartRefresh = tThisFlipGlobal # on global time win.timeOnFlip(gabor, 'tStartRefresh') # time at next scr refresh gabor.setAutoDraw(True) if gabor.status == STARTED: # is it time to stop? 
(based on global clock, using actual start) if tThisFlipGlobal > gabor.tStartRefresh + 0.5-frameTolerance: # keep track of stop time/frame for later gabor.tStop = t # not accounting for scr refresh gabor.frameNStop = frameN # exact frame index win.timeOnFlip(gabor, 'tStopRefresh') # time at next scr refresh gabor.setAutoDraw(False) if gabor.status == STARTED: # only update if drawing gabor.setPhase(trialClock.getTime()*2, log=False) # *resp* updates waitOnFlip = False if resp.status == NOT_STARTED and tThisFlip >= 0.5-frameTolerance: # keep track of start time/frame for later resp.frameNStart = frameN # exact frame index resp.tStart = t # local t and not account for scr refresh resp.tStartRefresh = tThisFlipGlobal # on global time win.timeOnFlip(resp, 'tStartRefresh') # time at next scr refresh resp.status = STARTED # keyboard checking is just starting waitOnFlip = True win.callOnFlip(resp.clock.reset) # t=0 on next screen flip win.callOnFlip(resp.clearEvents, eventType='keyboard') # clear events on next screen flip if resp.status == STARTED: # is it time to stop? (based on global clock, using actual start) if tThisFlipGlobal > resp.tStartRefresh + 2.0-frameTolerance: # keep track of stop time/frame for later resp.tStop = t # not accounting for scr refresh resp.frameNStop = frameN # exact frame index win.timeOnFlip(resp, 'tStopRefresh') # time at next scr refresh resp.status = FINISHED if resp.status == STARTED and not waitOnFlip: theseKeys = resp.getKeys(keyList=['up', 'down'], waitRelease=False) _resp_allKeys.extend(theseKeys) if len(_resp_allKeys): resp.keys = _resp_allKeys[-1].name # just the last key pressed resp.rt = _resp_allKeys[-1].rt # was this correct? 
if (resp.keys == str('up')) or (resp.keys == 'up'): resp.corr = 1 else: resp.corr = 0 # check for quit (typically the Esc key) if endExpNow or defaultKeyboard.getKeys(keyList=["escape"]): core.quit() # check if all components have finished if not continueRoutine: # a component has requested a forced-end of Routine break continueRoutine = False # will revert to True if at least one component still running for thisComponent in trialComponents: if hasattr(thisComponent, "status") and thisComponent.status != FINISHED: continueRoutine = True break # at least one component has not yet finished # refresh the screen if continueRoutine: # don't flip if this routine is over or we'll get a blank screen win.flip() # -------Ending Routine "trial"------- for thisComponent in trialComponents: if hasattr(thisComponent, "setAutoDraw"): thisComponent.setAutoDraw(False) trials.addOtherData('fixation.started', fixation.tStartRefresh) trials.addOtherData('fixation.stopped', fixation.tStopRefresh) trials.addOtherData('gabor.started', gabor.tStartRefresh) trials.addOtherData('gabor.stopped', gabor.tStopRefresh) # check responses if resp.keys in ['', [], None]: # No response was made resp.keys = None # was no response the correct answer?! 
if str('up').lower() == 'none': resp.corr = 1; # correct non-response else: resp.corr = 0; # failed to respond (incorrectly) # store data for trials (StairHandler) trials.addResponse(resp.corr) trials.addOtherData('resp.rt', resp.rt) trials.addOtherData('resp.started', resp.tStartRefresh) trials.addOtherData('resp.stopped', resp.tStopRefresh) thisExp.nextEntry() # staircase completed trials.saveAsExcel(filename + '.xlsx', sheetName='trials') trials.saveAsText(filename + 'trials.csv', delim=',') # Flip one final time so any remaining win.callOnFlip() # and win.timeOnFlip() tasks get executed before quitting win.flip() # these shouldn't be strictly necessary (should auto-save) thisExp.saveAsWideText(filename+'.csv', delim='comma') thisExp.saveAsPickle(filename) logging.flush() # make sure everything is closed down thisExp.abort() # or data files will save again on exit win.close() core.quit()
16,755
5,209
grammar = r""" start: (item ";")+ item: import | coords | playlist | step import : "import" string coords : "coords" coords_body coords_body : "{" coord_def ("," coord_def)* "}" coord_def: string ":" "{" coord_body ("," coord_body)* "}" coord_body: string ":" "[" int "," int "]" playlist : "playlist" string playlist_body playlist_body : "{" (step ";")* "}" step : screen | repeat | play | active | focus | delay | sleep | shell | coord_off | coord | mouse | drag | click | btnclick | btndown | btnup | scroll | hscroll | keypress | keydown | keyup | hotkeys | write | copy | paste | save_clipboard | load_clipboard | copy_clipboard | paste_clipboard screen: "screen" string repeat: "play" string+ int | "play" string+ number play: "play" string+ active: "active" string focus: "focus" string delay: "delay" number sleep: "sleep" number shell: ("shell"|"sh") string+ coord_off: ("coord"|"mc") string number number number coord: ("coord"|"mc") string number mouse: ("mouse"|"mv"|"mm") number number number drag: ("drag"|"md") string number number number click: "click" btnclick: ("btnclick"|"bc") string btndown: ("btndown"|"bd") string btnup: ("btnup"|"bu") string scroll: "scroll" number hscroll: "hscroll" number keypress: ("keypress"|"kp") string keydown: ("keydown"|"kd") string keyup: ("keyup"|"ku") string hotkeys: ("hotkeys"|"hk") string+ write: ("write"|"w"|"type"|"t") string number? copy: "copy" paste: "paste" save_clipboard: ("save_clipboard"|"scb") string load_clipboard: ("load_clipboard"|"lcb") string copy_clipboard: ("copy_clipboard"|"ccb") string paste_clipboard: ("paste_clipboard"|"pcb") string int: INT number: SIGNED_NUMBER string: ESCAPED_STRING COMMENT: /#[^\n]*/ IDENT: (LETTER|"_") (LETTER|INT|"-"|"_")* NAME: LETTER (LETTER|INT|"-"|"_")* WORD: LETTER+ %import common.LETTER %import common.ESCAPED_STRING %import common.INT %import common.SIGNED_NUMBER %import common.WS %ignore COMMENT %ignore WS """
1,945
760
import configparser
import os
import shutil
import traceback


class Conf:
    """Bot configuration loaded from an INI file.

    Parameters
    ----------
    conf : str
        Path to ``config.ini``. If it cannot be read, a copy of
        ``example_config.ini`` expected in the same directory is put in its
        place; if even that is missing the process exits immediately.
    """

    def __init__(self, conf):
        self.options = conf
        # interpolation=None so '%' characters in values (e.g. tokens) stay literal.
        config = configparser.ConfigParser(interpolation=None)
        if not config.read(conf, encoding='utf-8'):
            print("I had to remake the config file from default. Please check the config and restart once the proper settings have been changed.")
            print("The config should exist here: " + self.options)
            try:
                # os.path.join fixes the original dirname + "/example_config.ini"
                # concatenation, which produced the absolute path
                # "/example_config.ini" when conf had no directory component.
                shutil.copy(os.path.join(os.path.dirname(self.options), "example_config.ini"),
                            self.options)
            except OSError:
                traceback.print_exc()
                print("Well... Somehow the example I was copying from is also gone. You're in a bad spot.")
                os._exit(1)
            config.read(conf, encoding='utf-8')

        # Every lookup falls back to Fallbacks so a present-but-mangled config
        # never raises here (it just misbehaves loudly later).
        self.THE_TOKEN = config.get("Login", "Token", fallback=Fallbacks.token)
        self.owner_id = int(config.get("Permissions", "OwnerID", fallback=Fallbacks.ownerID))
        self.download_path = config.get("Music", "Path", fallback=Fallbacks.download_path)
        self.stopwords_path = config.get("BarTalk", "Stop_words_Path", fallback=Fallbacks.stopword_path)

        # Optional stop-word list: one word per line. Unreadable files are
        # still ignored (best effort, as before), but the handle is now
        # guaranteed to be closed and only file errors are swallowed.
        self.stopwords = set()
        if self.stopwords_path != "":
            try:
                with open(self.stopwords_path, "r", encoding="utf-8") as f:
                    for line in f:
                        self.stopwords.add(line.strip())
            except OSError:
                pass


class Fallbacks:
    # These will only get used if the user leaves config.ini existent but
    # really messes something up; everything breaks if they get used.
    token = "0"
    ownerID = 0
    download_path = ""
    stopword_path = ""
1,765
493
#!/usr/bin/env python
# __BEGIN_LICENSE__
# Copyright (c) 2015, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All rights reserved.
# __END_LICENSE__

"""
Takes as input a CSV file in the format:

37 46 29.2080,-122 25 08.1336,San Francisco
37 27 13.8132,-122 10 55.7184,Menlo Park

And outputs CSV in the format:

10S EG 51172 80985,San Francisco
10S EG 72335 45533,Menlo Park

Optionally outputs a KML file of placemarks as well, where the placemark
descriptions include USNG coordinates.
"""

import csv

from geocamUtil.usng import usng
from geocamUtil import KmlUtil


def parseDegMinSec(val):
    """Parse a space-separated 'deg min sec' string into signed decimal degrees."""
    valDeg, valMin, valSec = val.split(' ')
    # The sign lives on the degrees field only; apply it after summing magnitudes.
    sgn = -1 if float(valDeg) < 0 else 1
    return sgn * (abs(float(valDeg)) + float(valMin) / 60.0 + float(valSec) / 3600.0)


def convertUsngCsv(opts, inPath):
    """Convert the lat/lon CSV at *inPath* to USNG, printing one line per row.

    Applies opts.eastOffset / opts.northOffset as datum corrections and, when
    opts.kml is set, writes a KML placemark file as well.
    Python 3 fix: the original used the removed ``file()`` builtin and the
    Python 2 print statement, and never closed either file handle.
    """
    coords = []
    with open(inPath, 'r') as inFile:
        for latDms, lonDms, name in csv.reader(inFile):
            lat = parseDegMinSec(latDms)
            lon = parseDegMinSec(lonDms)
            easting, northing, zoneNumber, zoneLetter = usng.LLtoUTM(lat, lon)
            # Command-line datum-correction offsets (meters).
            easting += opts.eastOffset
            northing += opts.northOffset
            usngCoords = usng.UTMtoUSNG(easting, northing, zoneNumber, zoneLetter,
                                        precision=5)
            # Same separators as the Python 2 `print a, ' ', b` statement.
            print(usngCoords, ' ', name)
            # Re-derive lat/lon from the corrected UTM position for the KML output.
            clat, clon = usng.UTMtoLL(easting, northing, zoneNumber, zoneLetter)
            coords.append((clat, clon, name, usngCoords))

    if opts.kml:
        kbits = []
        kbits.append('<Folder>\n')
        for lat, lon, name, usngCoords in coords:
            kbits.append("""
<Placemark>
  <name>%(name)s</name>
  <description>%(usngCoords)s</description>
  <Point>
    <coordinates>%(lon)s,%(lat)s</coordinates>
  </Point>
</Placemark>
""" % dict(lat=lat, lon=lon, name=name, usngCoords=usngCoords))
        kbits.append('</Folder>')
        text = ''.join(kbits)
        with open(opts.kml, 'w') as outFile:
            outFile.write(KmlUtil.wrapKml(text))


def main():
    """Command-line entry point: parse options and run the conversion."""
    import optparse
    parser = optparse.OptionParser('usage: %prog <in.csv>')
    parser.add_option('--eastOffset', type='float', default=0,
                      help='Offset to add to easting values for datum correction (meters)')
    parser.add_option('--northOffset', type='float', default=0,
                      help='Offset to add to northing values for datum correction (meters)')
    parser.add_option('--kml',
                      help='Filename for KML output')
    opts, args = parser.parse_args()
    if len(args) != 1:
        parser.error('expected exactly 1 arg')
    inPath = args[0]
    convertUsngCsv(opts, inPath)


if __name__ == '__main__':
    main()
2,805
1,013
from torchvision.datasets import CIFAR10
from torchvision.datasets import CIFAR100
import os
from PIL import Image

# Root folder holding the raw CIFAR-10 archive; extracted images are written
# to <root>/CIFAR10_image/<label>/<index>.png
root = '/database/cifar10/'

# Without a transform, CIFAR10 yields (PIL.Image, int label) pairs.
dataset_train = CIFAR10(root)

for k, (img, label) in enumerate(dataset_train):
    print('processing ' + str(k))
    # makedirs(exist_ok=True) replaces the original os.mkdir: it also creates
    # the missing 'CIFAR10_image' parent on first use instead of raising
    # FileNotFoundError, and is race-free when the directory already exists.
    out_dir = os.path.join(root, 'CIFAR10_image', str(label))
    os.makedirs(out_dir, exist_ok=True)
    img.save(os.path.join(out_dir, str(k) + '.png'))
478
195
from toapi import Item, XPath


class User(Item):
    """Hacker News user link scraped from one listing row ('athing' table row)."""

    url = XPath('//a[@class="hnuser"][1]/@href')
    name = XPath('//a[@class="hnuser"][1]/text()')

    class Meta:
        # One item per story row on a listing page.
        source = XPath('//tr[@class="athing"]')
        # Raw string fixes the invalid '\?' / '\d' escape sequences in the
        # original plain literal (SyntaxWarning now, an error in future
        # Python); the regex value is byte-identical.
        route = r'/news\?p=\d+'
246
94
'''
Failure tests
'''
import os
from typing import Any

import pytest

from helpers.github import API

# Module-level client shared by every test below.
api = API()

# NOTE(review): this binds the `typing.Any` *object*, not a token string; the
# assignment inside test_fail_org only rebinds a local of the same name, so
# this global is never updated. Presumably the valid token is meant to come
# solely from the `token` fixture — confirm, and consider removing this line.
pass_token = Any
# Deliberately invalid credentials/targets, pulled from the environment.
fail_token = os.getenv('FAIL_TOKEN')
fail_org = os.getenv('FAIL_ORG')
fail_repo = os.getenv('FAIL_REPO')


def test_fail_auth():
    ''' Fail 'auth' to Github '''
    # authenticate() is expected to terminate via sys.exit on a bad token.
    with pytest.raises(SystemExit):
        api.authenticate(fail_token)


def test_fail_org(token):
    ''' Fail 'get organization' '''
    # `token` is a pytest fixture assumed to hold valid credentials —
    # TODO confirm where it is defined (conftest.py not visible here).
    pass_token = token
    with pytest.raises(SystemExit):
        api.authenticate(pass_token)
        # NOTE(review): if authenticate() raises, get_organization() never
        # runs; only one of the two calls can be the asserted failure.
        api.get_organization(fail_org)


def test_fail_repo():
    ''' Fail 'get repo' '''
    with pytest.raises(SystemExit):
        api.get_repo("user", fail_repo)
722
259
# -*- coding: utf-8 -*-
"""
.. module:: skimpy
   :platform: Unix, Windows
   :synopsis: Simple Kinetic Models in Python

.. moduleauthor:: SKiMPy team

[---------]

Copyright 2017 Laboratory of Computational Systems Biotechnology (LCSB),
Ecole Polytechnique Federale de Lausanne (EPFL), Switzerland

Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy
of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.

"""

import numpy as np

# Test models
from skimpy.core import *
from skimpy.mechanisms import *

# --- Reaction 'pfk': convenience kinetics with one inhibitor -----------------
name = 'pfk'

# Build a mechanism class for stoichiometry [-2, -1, 3] plus one inhibitor.
# NOTE(review): the exact meaning of each stoichiometric entry is defined by
# make_convenience_with_inhibition — confirm against skimpy.mechanisms.
SpecificConvenience = make_convenience_with_inhibition([-2, -1, 3], [1])

metabolites = SpecificConvenience.Reactants(substrate1 = 'A',
                                            substrate2 = 'B',
                                            product1 = 'C'
                                            )
inhibitors = SpecificConvenience.Inhibitors(inhibitor1 = 'I')

# thermo_data = {'S': 1e-2,
#                'P': 1e-2,
#                'sig_S': 0.1,
#                'sig_P': 0.1,
#                'gamma': 0.1,
#                'flux': 1.0,
#                'E_tot': 1e-5}

## QSSA Method
parameters = SpecificConvenience.Parameters(
    vmax_forward = 1.0,
    k_equilibrium=2.0,
    km_substrate1 = 10.0,
    km_substrate2 = 10.0,
    km_product1 = 10.0,
    ki_inhibitor1 = 1.0)

pfk = Reaction(name=name,
               mechanism=SpecificConvenience,
               reactants=metabolites,
               inhibitors=inhibitors,
               )

# --- Reaction 'inhib': reversible Michaelis-Menten converting C into I ------
name = 'inhib'

metabolites = ReversibleMichaelisMenten.Reactants(substrate = 'C',
                                                  product = 'I')

## QSSA Method
parameters_inh = ReversibleMichaelisMenten.Parameters(
    vmax_forward = 1.0,
    k_equilibrium=2.0,
    km_substrate = 10.0,
    km_product = 10.0,
    total_enzyme_concentration = 1.0,
)

inh = Reaction(name=name,
               mechanism=ReversibleMichaelisMenten,
               reactants=metabolites,
               )

# Assemble the two-reaction model and attach the parameter set per reaction.
this_model = KineticModel()
this_model.add_reaction(pfk)
this_model.add_reaction(inh)
this_model.parametrize_by_reaction({inh.name: parameters_inh,
                                    pfk.name: parameters})

# Generate the ODE right-hand side under the quasi-steady-state assumption.
this_model.compile_ode(sim_type = QSSA)

# Initial concentrations for all four species.
this_model.initial_conditions['A'] = 10.0
this_model.initial_conditions['B'] = 10.0
this_model.initial_conditions['C'] = 10.0
this_model.initial_conditions['I'] = 0.0

# Integrate over 0..50 time units with CVODE and plot the trajectories.
this_sol_qssa = this_model.solve_ode(np.linspace(0.0, 50.0, 500), solver_type = 'cvode')

this_sol_qssa.plot('output/base_out_qssa.html')
3,042
1,035
from enum import Enum, unique, auto


@unique
class Fruit(Enum):
    """Closed set of fruit constants; @unique forbids duplicate *values*."""
    APPLE = 1
    BANANA = 2
    ORANGE = 3
    # auto() takes the previous value + 1, so PEAR ends up as 4
    # (and would be 1 if auto() were used on the first member).
    PEAR = auto()


def main():
    # A member, its type, its repr, and the name/value pair.
    apple = Fruit.APPLE
    print(apple)            # Fruit.APPLE
    print(type(apple))      # <enum 'Fruit'>
    print(repr(apple))      # <Fruit.APPLE: 1>
    print(apple.name)       # APPLE
    print(apple.value)      # 1

    # Member names are always unique; values are only unique because of
    # the @unique decorator on the class.
    print(Fruit.PEAR.name)   # PEAR
    print(Fruit.PEAR.value)  # 4

    # Enum members are hashable, so they make fine dict keys.
    basket = {}
    basket[Fruit.BANANA] = 'BANANA'
    print(basket[Fruit.BANANA])


if __name__ == '__main__':
    main()
825
322
# Import packages
import os
import cv2
import numpy as np
import tensorflow as tf
import sys
from imutils.object_detection import non_max_suppression

# This is needed since the notebook is stored in the object_detection folder.
sys.path.append("..")

# Import utilites
from utils import label_map_util


class detector:
    """Object detector backed by a frozen TensorFlow 1.x inference graph.

    __init__ loads the label map and graph once and keeps a live tf.Session;
    detect_plate() then runs single-image inference against it.
    Paths to the frozen graph and label map are hard-coded below —
    NOTE(review): consider making them constructor parameters.
    """

    def __init__(self):
        # NOTE(review): assigned but never read anywhere in this class —
        # confirm it can be removed.
        folder_detector = 'inference_graph'

        # Path to frozen detection graph .pb file, which contains the model
        # that is used for object detection.
        PATH_TO_CKPT = 'frozen_model/frozen_inference_graph.pb'

        # Path to label map file
        PATH_TO_LABELS = 'classes.pbtxt'

        # Number of classes the object detector can identify
        NUM_CLASSES = 37

        # Load the label map: maps the integer class ids predicted by the
        # network to human-readable category names (e.g. 5 -> 'king').
        label_map = label_map_util.load_labelmap(PATH_TO_LABELS)
        categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True)
        self.category_index = label_map_util.create_category_index(categories)

        # Load the serialized Tensorflow graph into memory and open a
        # session bound to it (TF1-style GraphDef workflow).
        self.detection_graph = tf.Graph()
        with self.detection_graph.as_default():
            od_graph_def = tf.GraphDef()
            with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:
                serialized_graph = fid.read()
                od_graph_def.ParseFromString(serialized_graph)
                tf.import_graph_def(od_graph_def, name='')

            self.sess = tf.Session(graph=self.detection_graph)

        # Define input and output tensors (i.e. data) for the object
        # detection classifier.
        # Input tensor is the image.
        self.image_tensor = self.detection_graph.get_tensor_by_name('image_tensor:0')
        # Each box represents a part of the image where a particular object
        # was detected.
        self.detection_boxes = self.detection_graph.get_tensor_by_name('detection_boxes:0')
        # Each score is the confidence for the corresponding detection; the
        # class tensor holds the predicted label ids.
        self.detection_scores = self.detection_graph.get_tensor_by_name('detection_scores:0')
        self.detection_classes = self.detection_graph.get_tensor_by_name('detection_classes:0')
        # Number of objects detected.
        self.num_detections = self.detection_graph.get_tensor_by_name('num_detections:0')

    def detect_plate(self, frame):
        """Run one inference pass on *frame*.

        Returns (boxes, scores, classes, num, category_index), where the
        first four are the raw batched outputs of the detection graph.
        """
        # The model expects a batch dimension: shape (1, H, W, channels).
        image_expanded = np.expand_dims(frame, axis=0)

        # Perform the actual detection by running the model with the image
        # as input.
        (boxes, scores, classes, num) = self.sess.run(
            [self.detection_boxes, self.detection_scores, self.detection_classes, self.num_detections],
            feed_dict={self.image_tensor: image_expanded})

        return (boxes, scores, classes, num, self.category_index)
3,313
972
# Handle client-side action for an IBM Watson Assistant chatbot
#
# The code requires my Watson Conversation Tool. For details see
# https://github.com/data-henrik/watson-conversation-tool
#
# Setup: Configure your credentials
# - for Watson Assistant see instructions for the tool
# - for Discovery change username / password below
#
# Written by Henrik Loeser

import json
from watson_developer_cloud import DiscoveryV1


def handleClientActions(context, actions, watsonResponse):
    """Resolve client-side actions by querying Watson Discovery for news.

    Looks up articles matching context['topic'] in the IBM-managed English
    news collection, stores the result under context['myNews'], and returns
    the updated context.
    """
    print(">>> processing client actions...\n")

    # Discovery client — replace username/password with real service
    # credentials; the service URL defaults to the US-South endpoint
    # (pass url=... for other regions).
    discovery_client = DiscoveryV1(
        version='2018-08-01',
        username='your-username',
        password='your-password')

    # The pre-enriched news collections live in the shared "system"
    # environment; swap the collection id for other languages
    # (news-de, news-es, ...).
    environment_id = 'system'
    collection_id = 'news-en'

    # Natural-language query, deduplicated, best score / newest first,
    # trimmed down to only the fields the dialog needs.
    news_result = discovery_client.query(
        environment_id,
        collection_id,
        natural_language_query=context['topic'],
        deduplicate="true",
        sort="-score,-publication_date",
        return_fields='title,url,publication_date').get_result()

    # Hand the result back to the dialog through the context.
    context.update({'myNews': news_result})
    return context
1,770
486
import numpy as np
import matplotlib.pyplot as plt
from cycler import cycler
from source.solving_strategies.strategies.residual_based_newton_raphson_solver import ResidualBasedNewtonRaphsonSolver
from source.solving_strategies.strategies.residual_based_picard_solver import ResidualBasedPicardSolver
from source.model.structure_model import StraightBeam

np.set_printoptions(suppress=False, precision=2, linewidth=140)

# Model definition: a single nonlinear CRBeam element, fixed at one end,
# shared by both solvers under test.
params = {
    "name": "CaarcBeamPrototypeOptimizable",
    "domain_size": "3D",
    "system_parameters": {
        "element_params": {
            "type": "CRBeam",
            "is_nonlinear": True
        },
        "material": {
            "density": 7850.0,
            "youngs_modulus": 2069000000,
            "poisson_ratio": 0.29,
            "damping_ratio": 0.1
        },
        "geometry": {
            "length_x": 1.2,
            "number_of_elements": 1,
            "defined_on_intervals": [{
                "interval_bounds": [0.0, "End"],
                "length_y": [1.0],
                "length_z": [1.0],
                "area": [0.0001],
                "shear_area_y": [0.0],
                "shear_area_z": [0.0],
                "moment_of_inertia_y": [0.0001],
                "moment_of_inertia_z": [0.0001],
                "torsional_moment_of_inertia": [0.0001],
                "outrigger_mass": [0.0],
                "outrigger_stiffness": [0.0]}]
        }
    },
    "boundary_conditions": "fixed-free"
}

# Time discretisation for the solvers under test.
dt = 0.1
tend = 10.
steps = int(tend / dt)
array_time = np.linspace(0.0, tend, steps + 1)
# Time axis of the Kratos reference data.
# NOTE(review): 101 samples from 0.1 to 10 — one sample offset relative to
# array_time (which starts at 0.0); confirm this matches the reference file.
array_time_kratos = np.linspace(0.1, 10, 101)


def test_residual_based_solvers():
    """Compare Newton-Raphson and Picard displacement histories against a
    Kratos reference solution (visual comparison via matplotlib)."""
    # External force history: a harmonic load on DOF index 8 (z-translation
    # of the free node in the full 12-DOF element vector).
    # NOTE(review): the amplitude is 100*sin(sin(t)) because the loop runs
    # over np.sin(array_time) — confirm this double-sine is intended.
    f_ext = np.array([np.array([0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
                                0.0, 0.0, 100.0 * np.sin(t), 0.0, 0.0, 0.0])
                      for t in np.sin(array_time)])

    # Zero initial displacement, velocity and acceleration (6 free DOFs).
    u0 = np.zeros(6)
    v0 = np.zeros(6)
    a0 = np.zeros(6)
    scheme = "BackwardEuler1"
    beam = StraightBeam(params)

    # Reduce the force history to the free DOFs of the fixed-free beam.
    f_ext = beam.apply_bc_by_reduction(f_ext, 'column').T

    newton_solver = ResidualBasedNewtonRaphsonSolver(array_time, scheme, dt,
                                                     [beam.comp_m, beam.comp_b, beam.comp_k],
                                                     [u0, v0, a0],
                                                     f_ext, beam)
    picard_solver = ResidualBasedPicardSolver(array_time, scheme, dt,
                                              [beam.comp_m, beam.comp_b, beam.comp_k],
                                              [u0, v0, a0],
                                              f_ext, beam)
    newton_solver.solve()
    picard_solver.solve()

    # Reference z-displacement from Kratos (second column of the file).
    reference_file = "kratos_reference_results/dynamic_displacement_z.txt"
    disp_z_soln = np.loadtxt(reference_file)[:, 1]

    plt.plot(array_time, newton_solver.displacement[2, :], c='b', label='Newton Raphson')
    plt.plot(array_time, picard_solver.displacement[2, :], c='g', label='Picard')
    plt.plot(array_time_kratos, disp_z_soln, c='k', label='Kratos reference')
    plt.grid()
    plt.legend()
    plt.show()
2,977
1,144
from typing import Any, Dict

from objects.base import BaseModel


class RegexDeleter(BaseModel):
    """A per-chat rule that deletes messages matching a regular expression."""

    name: str      # human-readable rule name
    regex: str     # pattern whose matching messages get deleted
    chat_id: int   # chat the rule is bound to
    for_all: bool  # whether the rule applies to every user in the chat

    def save(self) -> Dict[str, Any]:
        """Serialise the rule to a plain dict for persistence.

        Return annotation fixed: the values are str/int/bool,
        not int as the original ``Dict[str, int]`` claimed.
        """
        return {
            'name': self.name,
            'regex': self.regex,
            'chat_id': self.chat_id,
            'for_all': self.for_all
        }
378
131
from copy import deepcopy

from django.contrib import admin
from cartridge.shop.admin import OrderAdmin
from cartridge.shop.models import Order

# Start from the fieldsets of whatever OrderAdmin is currently registered for
# Order; deepcopy so the original class attribute is not mutated in place.
order_fieldsets = deepcopy(admin.site._registry[Order].fieldsets)
# Fieldset entries are (title, {"fields": [...]}) pairs; convert the tuple of
# field names to a list so 'payment_done' can be prepended.
# NOTE(review): relies on fieldsets[2] being the section that should show the
# payment flag — confirm against cartridge's OrderAdmin layout.
order_fieldsets[2][1]["fields"] = list(order_fieldsets[2][1]["fields"])
order_fieldsets[2][1]["fields"].insert(0, 'payment_done')


class ExternalPaymentOrderAdmin(OrderAdmin):
    """Order admin extended with external-payment bookkeeping columns."""
    fieldsets = order_fieldsets
    list_display = ("id", "billing_name", "total", "time", "payment_done",
                    "status", "transaction_id", "invoice")


# Swap the stock Order admin for the extended one.
admin.site.unregister(Order)
admin.site.register(Order, ExternalPaymentOrderAdmin)
639
198
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import copy


class AttrDict(dict):
    """A dict whose keys are also readable/writable as attributes.

    Missing keys — via attribute or item access — fall back to the value of
    the special "__default" key when present, otherwise to None.
    """

    def __init__(self, seq=None, **kwargs):
        dict.__init__(self, seq or {}, **kwargs)

    def __getattr__(self, name):
        # Only invoked when normal attribute lookup fails, so real dict
        # methods and attributes keep working.
        return self.get(name, self.get("__default", None))

    def __setattr__(self, name, value):
        self[name] = value

    def __getitem__(self, name):
        return self.get(name, self.get("__default", None))

    def __deepcopy__(self, memo):
        # Bug fix: the original called self.iteritems(), which was removed in
        # Python 3 and made every deepcopy raise AttributeError; items() is
        # the equivalent. The copy is a plain dict, as before (not AttrDict).
        y = {}
        memo[id(self)] = y
        for key, value in self.items():
            y[copy.deepcopy(key, memo)] = copy.deepcopy(value, memo)
        return y


def test_attr_dict_exist_attr():
    a = AttrDict()
    a.id = 1
    assert a.id == 1, 'a.id != 1'
    assert a.get('id') == 1, 'a.get("id") != 1'
    assert a['id'] == 1, 'a["id"] != 1'

    b = AttrDict(id=2)
    assert b.id == 2, 'b.id != 2'

    c = AttrDict({"id": 3})
    assert c.id == 3, 'c.id != 3'

    d = AttrDict(None)
    d.id = 4
    assert d.id == 4, 'd.id != 4'


def test_attr_dict_not_exist_attr():
    a = AttrDict()
    assert a.notexist is None, 'a.notexist is None'
    assert a['notexist'] is None, 'a["notexist"] not __default '


def test_attr_dict_not_exist_attr_default():
    a = AttrDict()
    a.__default = ''
    assert a.notexist == '', 'a.notexist not __default '
    assert a['notexist'] == '', 'a["notexist"] not __default '

    b = AttrDict(__default=0)
    assert b.notexist == 0, 'b.notexist not __default '
1,483
574
""" Author: Amanda Ortega de Castro Ayres Created in: September 19, 2019 Python version: 3.6 """ from Least_SRMTL import Least_SRMTL import libmr from matplotlib import pyplot, cm from matplotlib.patches import Circle from mpl_toolkits.mplot3d import Axes3D, art3d import numpy as np import numpy.matlib import sklearn.metrics class EVeP(object): """ evolving Extreme Value Machine Ruled-based predictor with EVM at the definition of the antecedent of the rules. 1. Create a new instance and provide the model parameters; 2. Call the predict(x) method to make predictions based on the given input; 3. Call the train(x, y) method to evolve the model based on the new input-output pair. """ # Model initialization def __init__(self, sigma=0.5, delta=50, N=np.Inf, rho=None, columns_ts=None): # Setting EVM algorithm parameters self.sigma = sigma self.tau = 99999 self.delta = delta self.N = N self.rho = rho self.columns_ts = columns_ts if self.rho is not None: self.init_theta = 2 self.srmtl = Least_SRMTL(rho) self.R = None self.mr_x = list() self.mr_y = list() self.x0 = list() self.y0 = list() self.X = list() self.y = list() self.step = list() self.last_update = list() self.theta = list() self.c = 0 # Initialization of a new instance of EV. def add_EV(self, x0, y0, step): self.mr_x.append(libmr.MR()) self.mr_y.append(libmr.MR()) self.x0.append(x0) self.y0.append(y0) self.X.append(x0) self.y.append(y0) self.step.append(step) self.last_update.append(np.max(step)) self.theta.append(np.zeros_like(x0)) self.c = self.c + 1 if self.rho is None: # coefficients of the consequent part self.theta[-1] = np.insert(self.theta[-1], 0, y0, axis=1).T else: self.init_theta = 2 # coefficients of the consequent part self.theta[-1] = np.insert(self.theta[-1], 0, y0, axis=1) # Add the sample(s) (X, y) as covered by the extreme vector. Remove repeated points. 
def add_sample_to_EV(self, index, X, y, step): self.X[index] = np.concatenate((self.X[index], X)) self.y[index] = np.concatenate((self.y[index], y)) self.step[index] = np.concatenate((self.step[index], step)) if self.X[index].shape[0] > self.N: indexes = np.argsort(-self.step[index].reshape(-1)) self.X[index] = self.X[index][indexes[: self.N], :] self.y[index] = self.y[index][indexes[: self.N]] self.step[index] = self.step[index][indexes[: self.N]] self.x0[index] = np.average(self.X[index], axis=0).reshape(1, -1) self.y0[index] = np.average(self.y[index], axis=0).reshape(1, -1) self.last_update[index] = np.max(self.step[index]) if self.rho is None: self.theta[index] = np.linalg.lstsq(np.insert(self.X[index], 0, 1, axis=1), self.y[index], rcond=None)[0] def delete_from_list(self, list_, indexes): for i in sorted(indexes, reverse=True): del list_[i] return list_ # Calculate the firing degree of the sample to the psi curve def firing_degree(self, index, x=None, y=None): if y is None: return self.mr_x[index].w_score_vector(sklearn.metrics.pairwise.pairwise_distances(self.x0[index], x).reshape(-1)) elif x is None: return self.mr_y[index].w_score_vector(sklearn.metrics.pairwise.pairwise_distances(self.y0[index], y).reshape(-1)) else: return np.minimum(self.mr_x[index].w_score_vector(sklearn.metrics.pairwise.pairwise_distances(self.x0[index], x).reshape(-1)), self.mr_y[index].w_score_vector(sklearn.metrics.pairwise.pairwise_distances(self.y0[index], y).reshape(-1))) # Fit the psi curve of the EVs according to the external samples def fit(self, index, X_ext, y_ext): self.fit_x(index, sklearn.metrics.pairwise.pairwise_distances(self.x0[index], X_ext)[0]) self.fit_y(index, sklearn.metrics.pairwise.pairwise_distances(self.y0[index], y_ext)[0]) # Fit the psi curve to the extreme values with distance D to the center of the EV def fit_x(self, index, D): self.mr_x[index].fit_low(1/2 * D, min(D.shape[0], self.tau)) # Fit the psi curve to the extreme values with distance D to the 
center of the EV def fit_y(self, index, D): self.mr_y[index].fit_low(1/2 * D, min(D.shape[0], self.tau)) # Get the distance from the origin of the input EV which has the given probability to belong to the curve def get_distance_input(self, percentage, index=None): if index is None: return [self.mr_x[i].inv(percentage) for i in range(self.c)] else: return self.mr_x[index].inv(percentage) # Get the distance from the origin of the output EV which has the given probability to belong to the curve def get_distance_output(self, percentage, index=None): if index is None: return [self.mr_y[i].inv(percentage) for i in range(self.c)] else: return self.mr_y[index].inv(percentage) # Obtain the samples that do not belong to the given EV def get_external_samples(self, index=None): if index is None: X = np.concatenate(self.X) y = np.concatenate(self.y) else: if self.c > 1: X = np.concatenate(self.X[:index] + self.X[index + 1 :]) y = np.concatenate(self.y[:index] + self.y[index + 1 :]) else: X = np.array([]) y = np.array([]) return (X, y) # Merge two EVs of different clusters whenever the origin of one is inside the sigma probability of inclusion of the psi curve of the other def merge(self): self.sort_EVs() index = 0 while index < self.c: if index + 1 < self.c: x0 = np.concatenate(self.x0[index + 1 : ]) y0 = np.concatenate(self.y0[index + 1 : ]) S_index = self.firing_degree(index, x0, y0) index_to_merge = np.where(S_index > self.sigma)[0] + index + 1 if index_to_merge.size > 0: self.init_theta = 2 for i in reversed(range(len(index_to_merge))): self.add_sample_to_EV(index, self.X[index_to_merge[i]], self.y[index_to_merge[i]], self.step[index_to_merge[i]]) self.remove_EV([index_to_merge[i]]) index = index + 1 # Plot the granules that form the antecedent part of the rules def plot(self, name_figure_input, name_figure_output, step): # Input fuzzy granules plot fig = pyplot.figure() ax = fig.add_subplot(111, projection='3d') ax.axes.set_xlim3d(left=-2, right=2) 
ax.axes.set_ylim3d(bottom=-2, top=2) z_bottom = -0.3 ax.set_zticklabels("") colors = cm.get_cmap('Dark2', self.c) for i in range(self.c): self.plot_EV_input(i, ax, '.', colors(i), z_bottom) legend.append('$\lambda$ = ' + str(round(self.mr_x[new_order[i]].get_params()[0], 1)) + ' $\kappa$ = ' + str(round(self.mr_x[new_order[i]].get_params()[1], 1))) # Plot axis' labels ax.set_xlabel('u(t)', fontsize=15) ax.set_ylabel('y(t)', fontsize=15) ax.set_zlabel('$\mu_x$', fontsize=15) ax.legend(legend, fontsize=10, loc=2) # Save figure fig.savefig(name_figure_input) # Close plot pyplot.close(fig) # Output fuzzy granules plot fig = pyplot.figure() ax = fig.add_subplot(111) ax.axes.set_xlim(left=-2, right=2) for i in range(self.c): self.plot_EV_output(i, ax, '.', colors(i), z_bottom) # Plot axis' labels ax.set_xlabel('y(t + 1)', fontsize=15) ax.set_ylabel('$\mu_y$', fontsize=15) ax.legend(legend, fontsize=10, loc=2) # Save figure fig.savefig(name_figure_output) # Close plot pyplot.close(fig) # Plot the probability of sample inclusion (psi-model) together with the samples associated with the EV for the input fuzzy granules def plot_EV_input(self, index, ax, marker, color, z_bottom): # Plot the input samples in the XY plan ax.scatter(self.X[index][:, 0], self.X[index][:, 1], z_bottom * np.ones((self.X[index].shape[0], 1)), marker=marker, color=color) # Plot the radius for which there is a probability sigma to belong to the EV radius = self.get_distance_input(self.sigma, index) p = Circle((self.x0[index][0, 0], self.x0[index][0, 1]), radius, fill=False, color=color) ax.add_patch(p) art3d.pathpatch_2d_to_3d(p, z=z_bottom, zdir="z") # Plot the psi curve of the EV r = np.linspace(0, self.get_distance_input(0.05, index), 100) theta = np.linspace(0, 2 * np.pi, 145) radius_matrix, theta_matrix = np.meshgrid(r,theta) X = self.x0[index][0, 0] + radius_matrix * np.cos(theta_matrix) Y = self.x0[index][0, 1] + radius_matrix * np.sin(theta_matrix) points = np.array([np.array([X, Y])[0, :, 
:].reshape(-1), np.array([X, Y])[1, :, :].reshape(-1)]).T Z = self.firing_degree(index, points) ax.plot_surface(X, Y, Z.reshape((X.shape[0], X.shape[1])), antialiased=False, cmap=cm.coolwarm, alpha=0.1) # Plot the probability of sample inclusion (psi-model) together with the samples associated with the EV for the output fuzzy granules def plot_EV_output(self, index, ax, marker, color, z_bottom): # Plot the output data points in the X axis ax.scatter(self.y[index], np.zeros_like(self.y[index]), marker=marker, color=color) # Plot the psi curve of the EV r = np.linspace(0, self.get_distance_output(0.01, index), 100) points = np.concatenate((np.flip((self.y0[index] - r).T, axis=0), (self.y0[index] + r).T), axis=0) Z = self.firing_degree(index, y=points) #ax.plot(points, Z, antialiased=False, cmap=cm.coolwarm, alpha=0.1) ax.plot(points, Z, color=color) # Predict the output given the input sample x def predict(self, x): num = 0 den = 0 for i in range(self.c): p = self.predict_EV(i, x) num = num + self.firing_degree(i, x, p) * p den = den + self.firing_degree(i, x, p) if den == 0: if self.columns_ts is None: return np.mean(x) return np.mean(x[:, self.columns_ts]) return num / den # Predict the local output of x based on the linear regression of the samples stored at the EV def predict_EV(self, index, x): if self.rho is None: return np.insert(x, 0, 1).reshape(1, -1) @ self.theta[index] return np.insert(x, 0, 1).reshape(1, -1) @ self.theta[index].T # Calculate the degree of relationship of all the rules to the rule of index informed as parameter def relationship_rules(self, index): distance_x = sklearn.metrics.pairwise.pairwise_distances(self.x0[index], np.concatenate(self.x0)).reshape(-1) distance_y = sklearn.metrics.pairwise.pairwise_distances(self.y0[index], np.concatenate(self.y0)).reshape(-1) relationship_x_center = self.mr_x[index].w_score_vector(distance_x) relationship_y_center = self.mr_y[index].w_score_vector(distance_y) relationship_x_radius = 
self.mr_x[index].w_score_vector(distance_x - self.get_distance_input(self.sigma)) relationship_y_radius = self.mr_y[index].w_score_vector(distance_y - self.get_distance_output(self.sigma)) return np.maximum(np.maximum(relationship_x_center, relationship_x_radius), np.maximum(relationship_y_center, relationship_y_radius)) # Remove the EV whose index was informed by parameter def remove_EV(self, index): self.mr_x = self.delete_from_list(self.mr_x, index) self.mr_y = self.delete_from_list(self.mr_y, index) self.x0 = self.delete_from_list(self.x0, index) self.y0 = self.delete_from_list(self.y0, index) self.X = self.delete_from_list(self.X, index) self.y = self.delete_from_list(self.y, index) self.step = self.delete_from_list(self.step, index) self.last_update = self.delete_from_list(self.last_update, index) self.theta = self.delete_from_list(self.theta, index) self.c = len(self.mr_x) # Remove the EVs that didn't have any update in the last threshold steps def remove_outdated_EVs(self, threshold): indexes_to_remove = list() for index in range(self.c): if self.last_update[index] <= threshold: indexes_to_remove.append(index) if len(indexes_to_remove) > 0: self.remove_EV(indexes_to_remove) if self.rho is not None: self.update_R() self.init_theta = 2 # Sort the EVs according to the last update def sort_EVs(self): new_order = (-np.array(self.last_update)).argsort() self.mr_x = list(np.array(self.mr_x)[new_order]) self.mr_y = list(np.array(self.mr_y)[new_order]) self.x0 = list(np.array(self.x0)[new_order]) self.y0 = list(np.array(self.y0)[new_order]) self.X = list(np.array(self.X)[new_order]) self.y = list(np.array(self.y)[new_order]) self.step = list(np.array(self.step)[new_order]) self.last_update = list(np.array(self.last_update)[new_order]) # Evolves the model (main method) def train(self, x, y, step): best_EV = None best_EV_value = 0 # check if it is possible to insert the sample in an existing model for index in range(self.c): tau = self.firing_degree(index, x, y) if 
tau > best_EV_value and tau > self.sigma: best_EV = index best_EV_value = tau update = False # Add the sample to an existing EV if best_EV is not None: self.add_sample_to_EV(best_EV, x, y, step) # Create a new EV else: self.add_EV(x, y, step) update = True self.update_EVs() if step != 0 and (step % self.delta) == 0: self.remove_outdated_EVs(step[0, 0] - self.delta) self.merge() update = True if self.rho is not None: if update: self.update_R() self.theta = self.srmtl.train(self.X, self.y, self.init_theta) self.init_theta = 1 # Update the psi curve of the EVs def update_EVs(self): for i in range(self.c): (X_ext, y_ext) = self.get_external_samples(i) if X_ext.shape[0] > 0: self.fit(i, X_ext, y_ext) def update_R(self): S = np.zeros((self.c, self.c)) for i in range(self.c): S[i, :] = self.relationship_rules(i) self.R = None for i in range(self.c): for j in range(i + 1, self.c): if S[i, j] > 0 or S[j, i] > 0: edge = np.zeros((self.c, 1)) edge[i] = max(S[i, j], S[j, i]) edge[j] = - max(S[i, j], S[j, i]) if self.R is None: self.R = edge else: self.R = np.concatenate((self.R, edge), axis=1) self.srmtl.set_RRt(self.R)
16,034
5,565
# -*- coding: utf-8 -*- #!/usr/bin/env python # # Copyright 2013-2016 BigML # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """A BasicModel resource. This module defines a BasicModel to hold the main information of the model resource in BigML. It becomes the starting point for the Model class, that is used for local predictions. """ import logging LOGGER = logging.getLogger('BigML') from bigml.util import invert_dictionary, DEFAULT_LOCALE from bigml.fields import DEFAULT_MISSING_TOKENS def check_model_structure(model): """Checks the model structure to see if it contains all the needed keys """ return (isinstance(model, dict) and 'resource' in model and model['resource'] is not None and ('object' in model and 'model' in model['object'] or 'model' in model)) class ModelFields(object): """ A lightweight wrapper of the field information in the model, cluster or anomaly objects """ def __init__(self, fields, objective_id=None, data_locale=None, missing_tokens=None): if isinstance(fields, dict): try: self.objective_id = objective_id self.uniquify_varnames(fields) self.inverted_fields = invert_dictionary(fields) self.fields = {} self.fields.update(fields) self.data_locale = data_locale self.missing_tokens = missing_tokens if self.data_locale is None: self.data_locale = DEFAULT_LOCALE if self.missing_tokens is None: self.missing_tokens = DEFAULT_MISSING_TOKENS except KeyError: raise Exception("Wrong field structure.") def uniquify_varnames(self, fields): """Tests if the fields names are 
unique. If they aren't, a transformation is applied to ensure unicity. """ unique_names = set([fields[key]['name'] for key in fields]) if len(unique_names) < len(fields): self.transform_repeated_names(fields) def transform_repeated_names(self, fields): """If a field name is repeated, it will be transformed adding its column number. If that combination is also a field name, the field id will be added. """ # The objective field treated first to avoid changing it. if self.objective_id: unique_names = [fields[self.objective_id]['name']] else: unique_names = [] field_ids = sorted([field_id for field_id in fields if field_id != self.objective_id]) for field_id in field_ids: new_name = fields[field_id]['name'] if new_name in unique_names: new_name = "{0}{1}".format(fields[field_id]['name'], fields[field_id]['column_number']) if new_name in unique_names: new_name = "{0}_{1}".format(new_name, field_id) fields[field_id]['name'] = new_name unique_names.append(new_name) def normalize(self, value): """Transforms to unicode and cleans missing tokens """ if isinstance(value, basestring) and not isinstance(value, unicode): value = unicode(value, "utf-8") return None if value in self.missing_tokens else value def filter_input_data(self, input_data, by_name=True): """Filters the keys given in input_data checking against model fields """ if isinstance(input_data, dict): # remove all missing values for key, value in input_data.items(): value = self.normalize(value) if value is None: del input_data[key] if by_name: # We no longer check that the input data keys match some of # the dataset fields. 
We only remove the keys that are not # used as predictors in the model input_data = dict( [[self.inverted_fields[key], value] for key, value in input_data.items() if key in self.inverted_fields and (self.objective_id is None or self.inverted_fields[key] != self.objective_id)]) else: input_data = dict( [[key, value] for key, value in input_data.items() if key in self.fields and (self.objective_id is None or key != self.objective_id)]) return input_data else: LOGGER.error("Failed to read input data in the expected" " {field:value} format.") return {}
5,375
1,416
import os,time import torch from larcv import larcv import numpy as np import ROOT as rt from array import array class IntersectUB( torch.autograd.Function ): larcv_version = None dataloaded = False imgdimset = False @classmethod def load_intersection_data(cls,intersectiondatafile=None,larcv_version=None,nsource_wires=3456,ntarget_wires=2400): if intersectiondatafile is None: # set default if os.environ["LARCV_VERSION"].strip()=="1": intersectiondatafile = "../gen3dconsistdata/consistency3d_data_larcv1.root" cls.larcv_version = 1 elif os.environ["LARCV_VERSION"].strip()=="2": intersectiondatafile = "../gen3dconsistdata/consistency3d_data_larcv2.root" cls.larcv_version = 2 else: raise RuntimeError("Invalid LARCV_VERSION: {}".format(LARCV_VERSION)) else: if larcv_version is None: raise ValueError("When specifiying data, need to specify larcv version") cls.larcv_version = larcv_version if not os.path.exists(intersectiondatafile): raise RuntimeError("could not find intersection data file: {}".format(intersectiondatafile)) cls.nsource_wires = nsource_wires cls.ntarget_wires = ntarget_wires # intersection location (y,z) for (source,target) intersections cls.intersections_t = torch.zeros( (2, 2, cls.nsource_wires, cls.ntarget_wires ) ).float() # fill intersection matrix (should make image2d instead of this loop fill if os.environ["LARCV_VERSION"]=="1": io = larcv.IOManager(larcv.IOManager.kREAD,"inersect3d", larcv.IOManager.kTickBackward) io.add_in_file(intersectiondata) io.initialize() ev_y2u = io.get_data(larcv.kProductImage2D,"y2u_intersect") if ev_y2u.Image2DArray().size()!=2: raise RuntimeError("Y2U intersection image2d vector should be len 2 (for detector y,z)") cls.intersections_t[0,0,:,:] = torch.from_numpy( larcv.as_ndarray( ev_y2u.Image2DArray()[0] ).reshape(cls.ntarget_wires,cls.nsource_wires).transpose((1,0)) ) cls.intersections_t[0,1,:,:] = torch.from_numpy( larcv.as_ndarray( ev_y2u.Image2DArray()[1] 
).reshape(cls.ntarget_wires,cls.nsource_wires).transpose((1,0)) ) ev_y2v = io.get_data(larcv.kProductImage2D,"y2v_intersect") if ev_y2v.Image2DArray().size()!=2: raise RuntimeError("Y2V intersection image2d vector should be len 2 (for detector y,z)") cls.intersections_t[1,0,:,:] = torch.from_numpy( larcv.as_ndarray( ev_y2v.Image2DArray()[0] ).reshape(cls.ntarget_wires,cls.nsource_wires).transpose((1,0)) ) cls.intersections_t[1,1,:,:] = torch.from_numpy( larcv.as_ndarray( ev_y2v.Image2DArray()[1] ).reshape(cls.ntarget_wires,cls.nsource_wires).transpose((1,0)) ) elif os.environ["LARCV_VERSION"]=="2": io = larcv.IOManager() io.add_in_file(intersectiondatafile) io.initialize() ev_y2u = io.get_data("image2d","y2u_intersect") ev_y2v = io.get_data("image2d","y2v_intersect") cls.intersections_t[0,0,:,:] = torch.from_numpy( larcv.as_ndarray( ev_y2u.as_vector()[0] ).transpose((1,0)) ) cls.intersections_t[0,1,:,:] = torch.from_numpy( larcv.as_ndarray( ev_y2u.as_vector()[1] ).transpose((1,0)) ) cls.intersections_t[1,0,:,:] = torch.from_numpy( larcv.as_ndarray( ev_y2v.as_vector()[0] ).transpose((1,0)) ) cls.intersections_t[1,1,:,:] = torch.from_numpy( larcv.as_ndarray( ev_y2v.as_vector()[1] ).transpose((1,0)) ) cls.dataloaded = True @classmethod def set_img_dims(cls,nrows,ncols): cls.nrows = nrows cls.ncols = ncols # index of source matrix: each column gets value same as index src_index_np = np.tile( np.linspace( 0, float(ncols)-1, ncols ), nrows ) src_index_np = src_index_np.reshape( (nrows, ncols) ).transpose( (1,0) ) cls.src_index_t = torch.from_numpy( src_index_np ).float() #print "src_index_np: ",self.src_index_np.shape#, self.src_index_np[3,:] cls.imgdimset = True @classmethod def print_intersect_grad(cls): print "Y2U: dy/du -------------- " w = 500 for u in xrange(300,310): print " (w=500,u={}) ".format(u),cls.intersections_t[0,0,500,u+1]-cls.intersections_t[0,0,500,u] print "Y2U: dz/du -------------- " for u in xrange(300,310): print " (w=500,u={}) 
".format(u),cls.intersections_t[0,1,500,u+1]-cls.intersections_t[0,1,500,u] print "Y2V: dy/dv -------------- " for v in xrange(300,310): print " (w=500,v={}) ".format(v),cls.intersections_t[1,0,500,v+1]-cls.intersections_t[1,0,500,v] print "Y2V: dz/dv -------------- " for v in xrange(300,310): print " (w=500,v={}) ".format(v),cls.intersections_t[1,1,500,v+1]-cls.intersections_t[1,1,500,v] @staticmethod def forward(ctx,pred_flowy2u, pred_flowy2v, source_originx, targetu_originx, targetv_originx ): assert(IntersectUB.dataloaded and IntersectUB.imgdimset and IntersectUB.larcv_version is not None) ## our device dev = pred_flowy2u.device ## switch tensors to device IntersectUB.src_index_t = IntersectUB.src_index_t.to(device=dev) IntersectUB.intersections_t = IntersectUB.intersections_t.to(device=dev) #print pred_flowy2u.is_cuda #print IntersectUB.src_index_t.is_cuda #print IntersectUB.intersections_t.is_cuda ## img dims ncols = IntersectUB.ncols nrows = IntersectUB.nrows ntarget_wires = IntersectUB.ntarget_wires batchsize = pred_flowy2u.size()[0] if type(source_originx) is float: source_originx_t = torch.ones( (batchsize), dtype=torch.float ).to(device=dev)*source_originx else: source_originx_t = source_originx if type(targetu_originx) is float: targetu_originx_t = torch.ones( (batchsize), dtype=torch.float ).to(device=dev)*targetu_originx else: targetu_originx_t = targetu_originx if type(targetv_originx) is float: targetv_originx_t = torch.ones( (batchsize), dtype=torch.float ).to(device=dev)*targetv_originx else: targetv_originx_t = targetv_originx #print "source origin: ",source_originx_t #print "targetu origin: ",targetu_originx_t #print "targetv origin: ",targetv_originx_t ## wire position calcs source_fwire_t = torch.zeros( (batchsize,1,ncols,nrows), dtype=torch.float ).to( device=dev ) pred_target1_fwire_t = torch.zeros( (batchsize,1,ncols,nrows), dtype=torch.float ).to( device=dev ) pred_target2_fwire_t = torch.zeros( (batchsize,1,ncols,nrows), dtype=torch.float 
).to( device=dev ) for b in xrange(batchsize): ## we need to get the source wire, add origin wire + relative position source_fwire_t[b,:] = IntersectUB.src_index_t.add( source_originx_t[b] ) ## calcualte the wires in the target planes pred_target1_fwire_t[b,:] = (IntersectUB.src_index_t+pred_flowy2u[b,:]).add( targetu_originx_t[b] ) pred_target2_fwire_t[b,:] = (IntersectUB.src_index_t+pred_flowy2v[b,:]).add( targetv_originx_t[b] ) ## clamp for those out of flow and round pred_target1_fwire_t.clamp(0,ntarget_wires).round() pred_target2_fwire_t.clamp(0,ntarget_wires).round() #print "source fwire: ",source_fwire_t #print "target1 fwire: ",pred_target1_fwire_t #print "target2 fwire: ",pred_target2_fwire_t ## calculate the index for the lookup table pred_target1_index_t = (source_fwire_t*ntarget_wires + pred_target1_fwire_t).long() pred_target2_index_t = (source_fwire_t*ntarget_wires + pred_target2_fwire_t).long() ## get the (y,z) of the intersection we've flowed to posyz_target1_t = torch.zeros( (batchsize,2,ncols,nrows) ).to( device=dev ) posyz_target2_t = torch.zeros( (batchsize,2,ncols,nrows) ).to( device=dev ) for b in xrange(batchsize): posyz_target1_t[b,0,:,:] = torch.take( IntersectUB.intersections_t[0,0,:,:], pred_target1_index_t[b,0,:,:].reshape( ncols*nrows ) ).reshape( (ncols,nrows) ) # det-y posyz_target1_t[b,1,:,:] = torch.take( IntersectUB.intersections_t[0,1,:,:], pred_target1_index_t[b,0,:,:].reshape( ncols*nrows ) ).reshape( (ncols,nrows) ) # det-y posyz_target2_t[b,0,:,:] = torch.take( IntersectUB.intersections_t[1,0,:,:], pred_target2_index_t[b,0,:,:].reshape( ncols*nrows ) ).reshape( (ncols,nrows) ) # det-y posyz_target2_t[b,1,:,:] = torch.take( IntersectUB.intersections_t[1,1,:,:], pred_target2_index_t[b,0,:,:].reshape( ncols*nrows ) ).reshape( (ncols,nrows) ) # det-y #ctx.save_for_backward(posyz_target1_t,posyz_target2_t) #print "posyz_target1: ",posyz_target1_t #print "posyz_target2: ",posyz_target2_t return (posyz_target1_t,posyz_target2_t) 
@staticmethod def backward(ctx,grad_output1,grad_output2): #posyz_target1_t, posyz_target2_t, = ctx.saved_tensors #diffy = posyz_target1_t[0,:] - posyz_target2_t[0,:] # ydiff #diffz = posyz_target1_t[1,:] - posyz_target2_t[1,:] # zdiff batchsize = grad_output1.size()[0] grad_input_u = (-0.3464*grad_output1[:,0,:,:]).reshape( (batchsize,1,IntersectUB.ncols,IntersectUB.nrows) ) # only y-pos changes with respect to the intersection of Y-U wires grad_input_v = ( 0.3464*grad_output2[:,0,:,:]).reshape( (batchsize,1,IntersectUB.ncols,IntersectUB.nrows) ) # only y-pos changes with respect to the intersection of Y-V wires return grad_input_u,grad_input_v, None, None, None if __name__=="__main__": device = torch.device("cuda:0") #device = torch.device("cpu") IntersectUB.load_intersection_data() IntersectUB.set_img_dims(512,832) IntersectUB.print_intersect_grad() # save a histogram rout = rt.TFile("testout_func_intersect_ub.root","recreate") ttest = rt.TTree("test","Consistency 3D Loss test data") dloss = array('d',[0]) dtime = array('d',[0]) ttest.Branch("loss",dloss,"loss/D") ttest.Branch("dtime",dtime,"dtime/D") # as test, we process some pre-cropped small samples io = larcv.IOManager() io.add_in_file( "../testdata/smallsample/larcv_dlcosmictag_5482426_95_smallsample082918.root" ) # create a unit test file (csv) io.initialize() nentries = io.get_n_entries() print "Number of Entries: ",nentries start = time.time() istart=0 iend=nentries #istart=155 #iend=156 for ientry in xrange(istart,iend): tentry = time.time() io.read_entry( ientry ) if os.environ["LARCV_VERSION"]=="1": ev_adc_test = io.get_data(larcv.kProductImage2D,"adc") ev_flowy2u_test = io.get_data(larcv.kProductImage2D,"larflow_y2u") ev_flowy2v_test = io.get_data(larcv.kProductImage2D,"larflow_y2v") ev_trueflow_test = io.get_data(larcv.kProductImage2D,"pixflow") ev_truevisi_test = io.get_data(larcv.kProductImage2D,"pixvisi") flowy2u = ev_flowy2u_test.Image2DArray()[0] flowy2v = ev_flowy2v_test.Image2DArray()[0] 
truey2u = ev_trueflow_test.Image2DArray()[0] truey2v = ev_trueflow_test.Image2DArray()[1] visiy2u = ev_truevisi_test.Image2DArray()[0] visiy2v = ev_truevisi_test.Image2DArray()[1] source_meta = ev_adc_test.Image2DArray()[2].meta() targetu_meta = ev_adc_test.Image2DArray()[0].meta() targetv_meta = ev_adc_test.Image2DArray()[1].meta() elif os.environ["LARCV_VERSION"]=="2": ev_adc_test = io.get_data("image2d","adc") ev_flowy2u_test = io.get_data("image2d","larflow_y2u") ev_flowy2v_test = io.get_data("image2d","larflow_y2v") ev_trueflow_test = io.get_data("image2d","pixflow") ev_truevisi_test = io.get_data("image2d","pixvisi") flowy2u = ev_flowy2u_test.as_vector()[0] flowy2v = ev_flowy2v_test.as_vector()[0] truey2u = ev_trueflow_test.as_vector()[0] truey2v = ev_trueflow_test.as_vector()[1] visiy2u = ev_truevisi_test.as_vector()[0] visiy2v = ev_truevisi_test.as_vector()[1] source_meta = ev_adc_test.as_vector()[2].meta() targetu_meta = ev_adc_test.as_vector()[0].meta() targetv_meta = ev_adc_test.as_vector()[1].meta() # numpy arrays index = (0,1) if os.environ["LARCV_VERSION"]=="2": index = (1,0) np_flowy2u = larcv.as_ndarray(flowy2u).transpose(index).reshape((1,1,source_meta.cols(),source_meta.rows())) np_flowy2v = larcv.as_ndarray(flowy2v).transpose(index).reshape((1,1,source_meta.cols(),source_meta.rows())) np_visiy2u = larcv.as_ndarray(visiy2u).transpose(index).reshape((1,1,source_meta.cols(),source_meta.rows())) np_visiy2v = larcv.as_ndarray(visiy2v).transpose(index).reshape((1,1,source_meta.cols(),source_meta.rows())) np_trueflowy2u = larcv.as_ndarray(truey2u).transpose(index).reshape((1,1,source_meta.cols(),source_meta.rows())) np_trueflowy2v = larcv.as_ndarray(truey2v).transpose(index).reshape((1,1,source_meta.cols(),source_meta.rows())) #print "NAN indices (flow-Y2U): ",np.argwhere( np.isnan(np_flowy2u) ) #print "NAN indices (flow-Y2V): ",np.argwhere( np.isnan(np_flowy2v) ) #print "NAN indices (visi-Y2U): ",np.argwhere( np.isnan(np_visiy2u) ) #print "NAN indices 
(visi-Y2V): ",np.argwhere( np.isnan(np_visiy2v) ) # tensor conversion predflow_y2u_t = torch.from_numpy( np_flowy2u ).to(device=device).requires_grad_() predflow_y2v_t = torch.from_numpy( np_flowy2v ).to(device=device).requires_grad_() trueflow_y2u_t = torch.from_numpy( np_trueflowy2u ).to(device=device).requires_grad_() trueflow_y2v_t = torch.from_numpy( np_trueflowy2v ).to(device=device).requires_grad_() truevisi_y2u_t = torch.from_numpy( np_visiy2u ).to(device=device) truevisi_y2v_t = torch.from_numpy( np_visiy2v ).to(device=device) #print "requires grad: ",predflow_y2u_t.requires_grad,predflow_y2v_t.requires_grad #y2u_t = predflow_y2u_t #y2v_t = predflow_y2v_t y2u_t = trueflow_y2u_t y2v_t = trueflow_y2v_t source_origin = torch.zeros( (1) ).to(device=device) targetu_origin = torch.zeros( (1) ).to(device=device) targetv_origin = torch.zeros( (1) ).to(device=device) for b in xrange(1): source_origin[0] = source_meta.min_x() targetu_origin[0] = targetu_meta.min_x() targetv_origin[0] = targetv_meta.min_x() posyz_fromy2u,posyz_fromy2v = IntersectUB.apply( y2u_t, y2v_t, source_origin, targetu_origin, targetv_origin ) mask = truevisi_y2u_t*truevisi_y2v_t diff = (posyz_fromy2u-posyz_fromy2v) #print "diff.shape=",diff.shape #print "mask.shape=",mask.shape diff[:,0,:,:] *= mask[:,0,:,:] diff[:,1,:,:] *= mask[:,0,:,:] l2 = diff[:,0,:,:]*diff[:,0,:,:] + diff[:,1,:,:]*diff[:,1,:,:] #print "l2 shape: ",l2.shape if mask.sum()>0: lossval = l2.sum()/mask.sum() else: lossval = l2.sum() # backward test tback = time.time() lossval.backward() print " runbackward: ",time.time()-tback," secs" print "Loss (iter {}): {}".format(ientry,lossval.item())," iscuda",lossval.is_cuda dloss[0] = lossval.item() dtime[0] = time.time()-tentry ttest.Fill() end = time.time() tloss = end-start print "Time: ",tloss," secs / ",tloss/nentries," secs per event" rout.cd() ttest.Write() rout.Close()
16,229
6,265
from inspect import isclass def build_permissions_fn(permissions): def fn(**kwargs): for perm in permissions: if not perm().has_permission(**kwargs): raise PermissionError(perm().error_message(**kwargs)) return fn class BasePermission: def __init__(self, **kwargs): self.kwargs = kwargs def permission_statement(self, **kwargs): raise NotImplementedError def has_permission(self, **kwargs): # to achieve hierarchical checking for permissions (a subclass calls the permission statement of the superclass # and only if it passes, executes its own), we need to traverse over the whole linearization order of the # permission class; however, classes like object, which are naturally also a part of the chain, do not # contain the `permission_statement` method and therefore should be just skipped; the same is true for abstract # permission classes which contain the method, but is not implemented - like this one for example: to achieve # this, we try calling the method and if it turns out to not be implemented, we skip it as well for cls in reversed(self.__class__.__mro__): if not hasattr(cls, "permission_statement"): continue try: if not cls.permission_statement(self, **kwargs): return False except NotImplementedError: continue return True def error_message(self, **kwargs): return "You do not have permission to access this." class LogicalConnector: def __init__(self, *permissions): self.permissions = permissions def __call__(self, **kwargs): for perm in self.permissions: assert isclass(perm) or isinstance(perm, LogicalConnector), \ "Permissions in logical connectors must be classes." 
return LogicalResolver(self.permissions, self.resolve_fn) def resolve_fn(self, permissions, **kwargs): raise NotImplementedError class LogicalResolver: def __init__(self, permissions, resolve_fn): self.permissions = permissions self.resolve_fn = resolve_fn def has_permission(self, **kwargs): return self.resolve_fn(self.permissions, **kwargs) def error_message(self, **kwargs): return "You do not have permission to access this." class Or(LogicalConnector): def resolve_fn(self, permissions, **kwargs): for perm in permissions: if perm().has_permission(**kwargs): return True return False class And(LogicalConnector): def resolve_fn(self, permissions, **kwargs): for perm in permissions: if not perm().has_permission(**kwargs): return False return True class Not(LogicalConnector): def resolve_fn(self, permissions, **kwargs): assert len(permissions) == 1, "`Not` accepts only one permission class as parameter." return not permissions[0]().has_permission(**kwargs) class AllowAll(BasePermission): def permission_statement(self, **kwargs): return True class AllowNone(BasePermission): def permission_statement(self, **kwargs): return False
3,270
819
# Faça um algoritmo que leia o preço de um produto e mostre o novo preço com um desconto. preco = float(input('Digite o preço atual do produto: R$ ')) desconto = float(input('Digite o valor do desconto (0.X): ')) novopreco = preco * desconto print('O novo preço é R$ {}.'.format(novopreco))
290
113
from draw_control import DrawControl

if __name__ == '__main__':
    # Manual hardware test harness: repeatedly prompt for a sub-system command
    # and drive the plotter accordingly; an empty command ends the session.
    plotter = DrawControl()
    while True:
        command = input("track, rail, pen, hor, ver, diag: ")
        if not command:
            break
        if command == "track":
            parts = input('dir step: ').split(" ")
            plotter.track.spin_fixed_step(int(parts[0]), int(parts[1]))
        elif command == "rail":
            parts = input('dir step: ').split(" ")
            plotter.rail.spin_fixed_step(int(parts[0]), int(parts[1]))
        elif command == "pen":
            plotter.pen_holder.turn_angle(float(input("angle: ")))
        elif command == "hor":
            plotter.draw_hor_line(0, int(input('steps: ')))
        elif command == "ver":
            plotter.draw_ver_line(0, int(input('steps: ')))
        elif command == "diag":
            parts = input('dir1 dir2 steps1 steps2: ').split(" ")
            plotter.draw_diagonal(int(parts[0]), int(parts[1]), int(parts[2]), int(parts[3]))
    plotter.close_board()
1,188
402
import pathlib
import logging
from torch import nn
import numpy as np
import torch
import torch.functional as F
import torchvision.transforms as T
from torch.utils.data import Dataset
from torchvision.datasets.cityscapes import Cityscapes
import cv2
from torchvision.transforms import ToPILImage
from torch.utils.data import DataLoader
from torchvision.transforms import (
    RandomResizedCrop,
    RandomHorizontalFlip,
    Normalize,
    RandomErasing,
    Resize,
    ToTensor,
    RandomAffine,
    Compose,
    ColorJitter,
)

logger = logging.getLogger(__name__)

from enum import Enum


class Split(Enum):
    """Which portion of the Cityscapes dataset to load."""

    TEST = 1
    TRAIN = 2
    VALIDATE = 3


class CityscapesData(Dataset):
    """Cityscapes wrapper yielding (image, class_labels, box_labels) tuples.

    Boxes are axis-aligned rectangles around each 'car' polygon annotation;
    class labels are all 1 (single foreground class).
    """

    def __init__(self, split: Split, cityscapes_dir: pathlib.Path, image_transforms=None):
        """
        :param split: which Split member to load.
        :param cityscapes_dir: root directory of the Cityscapes download.
        :param image_transforms: optional list of extra transforms applied
            before the final ToTensor.
        """
        v = _get_split(split)
        logger.info(f"Loading Cityscapes '{v}' dataset from '{cityscapes_dir}'")
        t = image_transforms if image_transforms is not None else []
        self.dataset = Cityscapes(
            # TODO(Ross): make this an argument
            cityscapes_dir,
            split=v,
            mode="fine",
            target_type=["polygon"],
            transform=Compose([*t, ToTensor()]),
        )

    def __len__(self) -> int:
        # return min(len(self.dataset), 10)
        return len(self.dataset)

    def __getitem__(self, idx):
        img, poly = self.dataset[idx]
        class_labels, box_labels = _poly_to_labels(img, poly)
        return img, class_labels, box_labels


def collate_fn(batch):
    """Stack images into one batch tensor; keep per-image label lists ragged."""
    return (
        torch.stack([b[0] for b in batch], dim=0),
        [b[1] for b in batch],
        [b[2] for b in batch],
    )


def tensor_to_image(t) -> np.ndarray:
    """
    Return a PIL image (RGB)
    """
    img = Compose([ToPILImage(),])(t)
    return np.array(img)


def _poly_to_labels(image_tensor, poly):
    """Convert a Cityscapes polygon dict into (class_labels, box_labels).

    Bounding boxes are computed for 'car' objects only and scaled from the
    annotation resolution to the image tensor's resolution.
    """
    _, img_height, img_width = image_tensor.shape
    # TODO(Ross): fix this.
    h = poly["imgHeight"]
    w = poly["imgWidth"]
    # Uniform scale factor; assumes the aspect ratio is preserved — TODO confirm.
    scaling = img_height / h
    box_labels = []
    class_labels = []
    for obj in poly["objects"]:
        if obj["label"] == "car":
            polygon = obj["polygon"]
            min_x = min(x for x, _ in polygon) * scaling
            max_x = max(x for x, _ in polygon) * scaling
            max_y = max(y for _, y in polygon) * scaling
            min_y = min(y for _, y in polygon) * scaling
            box_labels.append(torch.FloatTensor([min_x, min_y, max_x, max_y]))
            class_labels.append(torch.IntTensor([1]))
    if len(class_labels) == 0:
        # NOTE(review): the empty case returns float zeros while the non-empty
        # case returns IntTensor class labels — confirm consumers accept both.
        return torch.zeros((0, 1)), torch.zeros(0, 4)
    return torch.stack(class_labels), torch.stack(box_labels)


def _get_split(split_name: Split) -> str:
    """Map a Split member to the split string torchvision's Cityscapes expects.

    BUG FIX: the annotations were inverted (previously declared as
    ``(split_name: str) -> Split``); the function takes a Split member and
    returns the split-name string.
    """
    if split_name is Split.TEST:
        return "test"
    elif split_name is Split.VALIDATE:
        return "val"
    elif split_name is Split.TRAIN:
        return "train"
    else:
        raise ValueError(f"unknown split kind {split_name}")
2,961
992
class EventHandler:
    """Drives long-running maze events (maze generation, BFS, bidirectional
    BFS, A*) as step-wise generators so the caller's render loop can advance
    one visual step per frame via ``next()``.

    Only one event may be active at a time; while active, the maze handler is
    locked and a statistics counter in the text table is updated per step.
    """

    def __init__(self, maze, maze_handler, maze_builder, bfs, a_star, indexes, text_table, screen):
        """
        Initialize a new EventHandler instance.

        :param maze: _maze list
        :param maze_handler: MazeHandler instance
        :param maze_builder: MazeBuilder instance
        :param bfs: BFS instance
        :param a_star: AStar instance
        :param indexes: dictionary of algorithms and their respective text_table indexes
        :param text_table: TextTable instance
        :param screen: pygame screen instance
        """
        self._maze = maze
        self._maze_handler = maze_handler
        self._maze_builder = maze_builder
        self._bfs = bfs
        self._a_star = a_star
        self.__indexes = indexes
        self.__text_table = text_table
        self.__screen = screen
        self.__current_table_index = 0
        self.__active = False
        # No-op callable until an event schedules its step function here.
        self._event_queue = lambda: None
        self._generator = None

    def is_active(self):
        """
        Check whether there is an active event in the event queue.

        :return: True if currently processing an event, False otherwise.
        """
        return self.__active

    def __reset(self):
        """
        Called after an event has terminated: reset the event handler, set
        active to False, empty the event queue, and unlock the maze handler.

        :return: None
        """
        self.__active = False
        self._generator = None
        self._event_queue = lambda: None
        self._maze_handler.unlock()

    def next(self):
        """
        Advance the currently active event by one step (no-op when idle).

        :return: None
        """
        self._event_queue()

    def __next_new_maze_event(self):
        """
        This is the generator function for the new_maze_event.
        Update the next tile to color from the maze generation.

        :return: None
        """
        # get the next tile to color, and number of increments;
        # (-1, 0) signals that the generator is exhausted
        next_tile, increments = next(self._generator, (-1, 0))
        if next_tile >= 0:
            # increment the value of the text_table
            self.__text_table.increment_value(self.__current_table_index, increments)
            self.__text_table.draw_table_element(self.__screen, self.__current_table_index)
            # update the maze
            self._maze[next_tile][2] = 0
            self._maze_handler.draw_box_by_idx(next_tile)
        else:
            # reset event handler
            self._maze_handler.remove_grey_tiles()
            self.__reset()

    def __next_bfs_or_a_star_event(self):
        """
        This is the generator function for the new_bfs_event or
        new_a_star_event. Update the next tile to color from the search.

        :return: None
        """
        # get the next tile to color; [-1] signals exhaustion
        next_tile = next(self._generator, [-1])
        if next_tile[0] >= 0:
            # 5 iterations per step to give similar speed to baseline random maze generation
            for i in range(5):
                # increment the value of the text_table
                self.__text_table.increment_value(self.__current_table_index)
                self.__text_table.draw_table_element(self.__screen, self.__current_table_index)
            # update the maze
            self._maze[next_tile[0]][2] = next_tile[1]
            self._maze_handler.draw_box_by_idx(next_tile[0])
        else:
            # reset event handler
            self._maze_handler.remove_grey_tiles()
            self.__reset()

    def __next_bi_bfs_event(self):
        """
        This is the generator function for the bidirectional bfs event.
        Update the next tile to color from the bfs.

        :return: None
        """
        # get the next tile to color; [-1] signals exhaustion
        next_tile = next(self._generator, [-1])
        if next_tile[0] >= 0:
            # 5 iterations per step to give similar speed to baseline random _maze generation
            for i in range(5):
                # increment the value in the text_table
                self.__text_table.increment_value(self.__current_table_index)
                self.__text_table.draw_table_element(self.__screen, self.__current_table_index)
            # update maze
            self._maze[next_tile[0]][2] = next_tile[1]
            self._maze_handler.draw_box_by_idx(next_tile[0])
        else:
            # reset event_handler
            self._maze_handler.remove_grey_tiles()
            self.__reset()

    def new_maze_event(self):
        """
        Create a new event for building a randomized maze.
        Ignored when another event is already active.

        :return: None
        """
        if not self.__active:
            self.__active = True
            self.__current_table_index = self.__indexes['random_maze']
            self.__text_table.reset_value(self.__current_table_index)
            self._generator = self._maze_builder.generate_random_maze()
            self._event_queue = self.__next_new_maze_event
            # Start from a clean maze and lock out concurrent edits.
            self._maze_handler.reset_maze()
            self._maze_handler.lock()
            self._maze = self._maze_handler.maze

    def new_bfs_event(self):
        """
        Create a new event for finding the shortest path with bfs.
        Ignored when another event is already active.

        :return: None
        """
        if not self.__active:
            self.__active = True
            self.__current_table_index = self.__indexes['bfs']
            self.__text_table.reset_value(self.__current_table_index)
            # Clear previous search coloring before starting a new run.
            self._maze_handler.remove_all_colored_tiles()
            self._maze = self._maze_handler.maze
            self._generator = self._bfs.bfs_shortest_path(self._maze)
            self._maze_handler.lock()
            self._event_queue = self.__next_bfs_or_a_star_event

    def new_bidirectional_bfs_event(self):
        """
        Create a new event for finding the shortest path with bidirectional bfs.
        Ignored when another event is already active.

        :return: None
        """
        if not self.__active:
            self.__active = True
            self.__current_table_index = self.__indexes['bi_bfs']
            self.__text_table.reset_value(self.__current_table_index)
            self._maze_handler.remove_all_colored_tiles()
            self._maze = self._maze_handler.maze
            self._generator = self._bfs.bidirectional_bfs(self._maze)
            self._maze_handler.lock()
            self._event_queue = self.__next_bi_bfs_event

    def new_a_star_event(self):
        """
        Create a new event for finding the shortest path with A*.
        Ignored when another event is already active.

        :return: None
        """
        if not self.__active:
            self.__active = True
            self.__current_table_index = self.__indexes['a_star']
            self.__text_table.reset_value(self.__current_table_index)
            self._maze_handler.remove_all_colored_tiles()
            self._maze = self._maze_handler.maze
            self._generator = self._a_star.a_star(self._maze)
            self._maze_handler.lock()
            self._event_queue = self.__next_bfs_or_a_star_event
6,960
2,003
from flask import Flask, render_template, request, jsonify
from flask import request
from flask import Response
from flask import url_for
from flask import jsonify
import GetOldTweets3 as got
import pandas as pd
import datetime
import numpy as np
import requests
import json
from pyquery import PyQuery as pq

app = Flask(__name__)


@app.route("/")
def hello():
    """Serve the landing page with the username input form."""
    return render_template("index.html")


@app.route('/report', methods=['POST'])
def report():
    """Build a year-in-review report from the submitted social-media handles.

    Reads 'name', 'twitter-input', 'facebook-input' and 'instagram-input' from
    the POSTed form, scrapes Twitter (via GetOldTweets3) and Instagram (via a
    profile-page request), aggregates statistics with pandas, and renders
    samplereport.html. Renders error.html with a specific message when input
    is missing or a lookup fails.
    """
    request_info = request.form.get("name")
    data = {}
    data["userInput"] = {}
    data["userInput"]["name"] = request_info
    data["numOfPost"] = {}
    data["numOfPost"]["total"] = 0
    # NOTE(review): instagram count is hard-coded — presumably a placeholder.
    data["numOfPost"]["instagram"] = 18
    data["numOfPost"]["facebook"] = 0
    ### error handling
    errorMessage = {
        "noUserInput": "Oops, you did not enter any username ...",
        "wrongTwitterInput": "Oops, the twitter account you enter either does not exist or has no content in it...",
        "wrongInstagramInput": "Oops, the Instagram username you enter either does not exist or is set to private...",
        "emptyInstagramContent": "Oops, your instagram account currently has no content..",
    }

    # TWITTER
    if request.form.get("twitter-input"):
        twitter_info = request.form.get("twitter-input")
        data["userInput"]["twitterInput"] = twitter_info
        username = twitter_info
        # Collect all 2019 tweets for the account.
        tweetCriteria = got.manager.TweetCriteria().setUsername(username)\
            .setSince("2019-01-01")\
            .setUntil("2019-12-31")\
            .setEmoji("unicode")
        tweet_df = pd.DataFrame({'got_criteria': got.manager.TweetManager.getTweets(tweetCriteria)})
        tweets_df = pd.DataFrame()

        def get_twitter_info():
            # Flatten the tweet objects into plain columns.
            tweets_df["tweet_text"] = tweet_df["got_criteria"].apply(lambda x: x.text)
            tweets_df["date"] = tweet_df["got_criteria"].apply(lambda x: x.date)
            tweets_df["hashtags"] = tweet_df["got_criteria"].apply(lambda x: x.hashtags)
            tweets_df["link"] = tweet_df["got_criteria"].apply(lambda x: x.permalink)
            tweets_df["favorites"] = tweet_df["got_criteria"].apply(lambda x: x.favorites)
            tweets_df["retweets"] = tweet_df["got_criteria"].apply(lambda x: x.retweets)
            tweets_df["mentions"] = tweet_df["got_criteria"].apply(lambda x: x.mentions)

        get_twitter_info()
        tweets_df['mentions'] = tweets_df['mentions'].astype(str)

        # Data Aggregation
        # Number of posts
        num_post = tweets_df.shape[0]
        print(num_post)
        if num_post == 0:
            return render_template("error.html", data=errorMessage["wrongTwitterInput"])
        else:
            data["numOfPost"]["twitter"] = num_post
            data["numOfPost"]["total"] = num_post

        # Month with most posts
        tweets_df['month'] = pd.DatetimeIndex(tweets_df['date']).month
        month_posts = tweets_df.groupby(['month']).size().reset_index(name='counts')
        most_month_val = month_posts[month_posts.counts == month_posts.counts.max()]
        most_month = most_month_val.month.values[0]
        month_posts_count = most_month_val.counts.values[0]
        # Month number -> English month name.
        most_month_verb = datetime.date(1900, most_month, 1).strftime('%B')
        month_posts.index = month_posts.month
        # Build a full 12-month trend, filling months without posts with 0.
        df2 = pd.DataFrame({'month': range(1, 13), 'counts': 0})
        df2.index = df2.month
        df2.counts = month_posts.counts
        df2 = df2.fillna(0)
        # BUG FIX: removed dead `df2.drop('month',1).reset_index()` — its result
        # was discarded, and the positional `axis` argument raises a TypeError
        # on pandas >= 2.0.
        month_trend = df2.counts.tolist()
        data["monthMostPost"] = {
            "month": most_month_verb,
            "total": month_posts_count,
            "facebook": 0,
            "twitter": month_posts_count,
            "monthPost": month_trend
        }

        # Twitter total likes (formatted with thousands separators)
        total_like = tweets_df.favorites.sum()
        total_like = format(total_like, ',')
        data["totalLikesTwitter"] = total_like

        # Twitter most-liked post
        most_favorites_set = tweets_df[tweets_df.favorites == tweets_df.favorites.max()]
        most_fav_text = most_favorites_set.tweet_text.values[0]
        most_fav_date = most_favorites_set.date.values[0]
        most_fav_date = pd.to_datetime(str(most_fav_date))
        most_fav_date = most_fav_date.strftime('%Y.%m.%d')
        data["twitterPostWithMostLikes"] = {
            "content": most_fav_text,
            "date": most_fav_date,
            "twitterAccount": "@" + twitter_info
        }

        # The latest post: the tweet posted closest to 3 o'clock.
        tweets_df['hour'] = tweets_df.date.dt.hour
        pos = tweets_df.hour.sub(3).abs().values.argmin()
        df1 = tweets_df.iloc[[pos]]
        latest_text = df1.tweet_text.values[0]
        latest_date = df1.date.values[0]
        latest_date = pd.to_datetime(str(latest_date))
        latest_date = latest_date.strftime('%Y.%m.%d')
        # Only report it as a "late post" when it falls outside 5:00-20:00.
        if df1.hour.values[0] < 5 or df1.hour.values[0] > 20:
            latest_hour = (str(df1.hour.values[0]), ':00')
            latest_hour = "".join(latest_hour)
            data["twitterLatestPost"] = {
                "latePost": bool(True),
                "content": latest_text,
                "date": latest_date,
                "time": latest_hour,
                "twitterAccount": "@" + twitter_info
            }
        else:
            data["twitterLatestPost"] = {
                "latePost": bool(False)
            }

        # Most-mentioned accounts (top 3)
        tweets_df['mentions'].replace('', np.nan, inplace=True)
        tweets_df.dropna(subset=['mentions'], inplace=True)
        mention_set = tweets_df.groupby(['mentions']).size().reset_index(name='counts')
        mention_set.sort_values(by=['counts'], inplace=True, ascending=False)
        if mention_set.shape[0] > 3:
            mention_set = mention_set.iloc[:3]
        mention_name = mention_set.mentions.tolist()
        mention_counts = mention_set.counts.tolist()
        data["twitterPeopleMentionedMost"] = {
            "names": mention_name
        }
        data["twitterPeopleMentioneTimes"] = {
            "top_times": mention_counts
        }

        # First tweet of the year
        tweet_arrange = tweets_df.sort_values(by=['date'])
        first_tweet = tweet_arrange.iloc[[0]]
        first_text = first_tweet.tweet_text.values[0]
        first_date = first_tweet.date.values[0]
        first_date = pd.to_datetime(str(first_date))
        first_date = first_date.strftime('%Y.%m.%d')
        data["twitterFirstPostYear"] = {
            "content": first_text,
            "date": first_date,
            "twitterAccount": "@" + twitter_info
        }

        # Last tweet of the year
        last_tweet = tweet_arrange.iloc[[-1]]
        last_text = last_tweet.tweet_text.values[0]
        last_date = last_tweet.date.values[0]
        last_date = pd.to_datetime(str(last_date))
        last_date = last_date.strftime('%Y.%m.%d')
        data["twitterLastPostYear"] = {
            "content": last_text,
            "date": last_date,
            "twitterAccount": "@" + twitter_info
        }

        # Hashtags ranked by usage
        tweets_df['hashtags'].replace('', np.nan, inplace=True)
        tweets_df.dropna(subset=['hashtags'], inplace=True)
        hash_set = tweets_df.groupby(['hashtags']).size().reset_index(name='counts')
        hash_set.sort_values(by=['counts'], inplace=True, ascending=False)
        hash_name = hash_set.hashtags.tolist()
        hash_counts = hash_set.counts.tolist()
        if len(hash_name) == 0:
            data["twitterHashtag"] = {
                "hashtag": bool(False)
            }
        else:
            hash_most = hash_name[0]
            data["twitterHashtag"] = {
                "hashtag": bool(True),
                "hashtags": hash_name,
                "hashtagsCount": hash_counts,
                "hashtagMost": hash_most
            }
    else:
        twitter_info = bool(False)
        data["userInput"]["twitterInput"] = bool(False)

    ## FACEBOOK
    if request.form.get("facebook-input"):
        facebook_info = request.form.get("facebook-input")
    else:
        facebook_info = bool(False)
        data["userInput"]["facebookInput"] = bool(False)

    ## INSTAGRAM
    if request.form.get("instagram-input"):
        instagram_info = request.form.get("instagram-input")
        ins_user = instagram_info
        url = ("https://www.instagram.com/", ins_user, '/')
        url = "".join(url)
        headers = {
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36',
            # NOTE(review): a commented-out 'cookie' header containing a real
            # session id was removed here — never commit session credentials.
        }

        def get_urls(url):
            """Fetch the profile page HTML; True on HTTP error, None on exception."""
            try:
                response = requests.get(url, headers=headers)
                if response.status_code == 200:
                    return response.text
                else:
                    print('error code:', response.status_code)
                    return True
            except Exception as e:
                print(e)
                return None

        html = get_urls(url)
        # BUG FIX: the original only checked `html == True`, so a network
        # exception (None) fell through and crashed in pq(None) below.
        if html is True or html is None:
            return render_template("error.html", data=errorMessage["wrongInstagramInput"])
        else:
            urls = []
            doc = pq(html)
            items = doc('script[type="text/javascript"]').items()
            for item in items:
                # The profile data is embedded as `window._sharedData = {...};`.
                if item.text().strip().startswith('window._sharedData'):
                    # BUG FIX: dropped `encoding='utf-8'` — json.loads removed
                    # that parameter in Python 3.9 (TypeError).
                    js_data = json.loads(item.text()[21:-1])
                    edges = js_data["entry_data"]["ProfilePage"][0]["graphql"]["user"]["edge_owner_to_timeline_media"]["edges"]
                    for edge in edges:
                        url = edge['node']['display_url']
                        urls.append(url)
            if urls == []:
                return render_template("error.html", data=errorMessage["emptyInstagramContent"])
            else:
                data["userInput"]["instagramInput"] = instagram_info
                # NOTE(review): placeholder content — presumably to be replaced
                # by real comment scraping.
                data["insPostMostComments"] = {
                    "pictureLink": ["picture here"],
                    "comments": ["comment1", "comment2", "comment3", "comment4"],
                    "totalComments": 12
                }
                data["insNinephotos"] = urls
    else:
        instagram_info = bool(False)
        data["userInput"]["instagramInput"] = bool(False)

    # if no user input for social media
    if (not twitter_info) and (not instagram_info) and (not facebook_info):
        return render_template("error.html", data=errorMessage["noUserInput"])
    else:
        # Incoming data is processed here and converted into following format:
        data["year"] = 2019
        # NOTE(review): facebook values are hard-coded sample data.
        data["facebookNumOfFriends"] = 423
        data["facebookRecentAcademic"] = {
            "schoolName": "Texas Academy of Mathematics and Technology",
            "year": 2019
        }
        return render_template("samplereport.html", data=data)


if __name__ == '__main__':
    app.run(debug=True, threaded=True)
11,838
4,049
# Module authorship metadata (read by tools that inspect __author__).
__author__ = 'Rachum'
22
11
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import unicode_literals from __future__ import absolute_import import unicodedata import re, os import logging from django.utils.translation import ugettext_lazy as _ from django.db import models from django.template.loader import render_to_string from django.conf import settings from django.contrib.auth.models import User logger = logging.getLogger(settings.PROJECT_NAME) REPLACEMENTS = ( # international characters that need more than just stripping accents ('Ä', 'AE'), ('Ö', 'OE'), ('Ü', 'UE'), ('ß', 'SS'), ('Œ', 'OE'), ('Æ', 'AE'), ('Ø', 'OE'), ) reASCIIonly = re.compile(r'[^A-Z]', re.I) reCleanInput = re.compile(r'[^\w_%\?\*]', re.I) def cleanword(word, strict=True): word = word.upper() for k,v in REPLACEMENTS: word = word.replace(k,v) word = unicodedata.normalize('NFD', word).encode('ASCII', errors='ignore').decode('ASCII') # decompose international chars if strict: word = reASCIIonly.sub('', word) else: word = reCleanInput.sub('', word) return word def splitwordline(line): """ a line from a wordlist may contain word, description and priority, separated by tabs if description and priority are missing, default is the word and 0 """ parts = line.replace('\n','').split('\t') if len(parts)==1: parts.extend([parts[0],0]) elif len(parts)==2: parts.append(0) elif len(parts)>3: parts = parts[0:2] if len(parts[1])<2: parts[1] = parts[0] try: parts[2] = int(parts[2]) except ValueError as ex: parts[2] = 0 parts[0] = cleanword(parts[0]) return parts class Dictionary(models.Model): """ A dictionary """ class Meta: verbose_name = _('Dictionary') verbose_name_plural = _('Dictionaries') ordering = ['language','name'] unique_together = (('name','language'),) name = models.CharField(_('Name'), max_length=31, help_text=_('A short descriptive name')) public = models.BooleanField(_('public?'), default=True, help_text=_('May everyone use this dictionary?')) language = models.CharField(_('Language'), max_length=15, 
default=settings.LANGUAGE_CODE, choices=settings.LANGUAGES, help_text=_('Language of (most of) the words in this dictionary')) description = models.CharField(_('Description'), max_length=255, blank=True) owner = models.ForeignKey(User, verbose_name=_('Owner')) def __str__(self): return "%s (%s)" % (self.name, self.language) def get_absolute_url(self): return '/dictionary/%d/' % self.id class Word(models.Model): """ A word with a description, according to a dictionary """ class Meta: verbose_name = _('Word') verbose_name_plural = _('Words') ordering = ['word','priority'] unique_together = (('word','dictionary'),) word = models.CharField(_('Word'), max_length=63, help_text=_('a word fitting a crossword puzzle; will become uppercased; no numbers, hyphens etc.')) dictionary = models.ForeignKey(Dictionary, verbose_name=_('Dictionary')) #, related_name="%(class)s_related") description = models.CharField(_('Description'), max_length=127, help_text=_('Meaning of the word within the context of the selected dictionary')) priority = models.SmallIntegerField(_('Priority'), default=0, help_text=_('0 is neutral, you can increase or decrease the priority')) def __str__(self): return "%s\t%s" % (self.word, self.description) def save(self, *args, **kwargs): self.word = cleanword(self.word) super(Word, self).save(*args, **kwargs) def get_absolute_url(self): return '/dictionary/%d/%s/' % (self.dictionary.id, self.word) class WordlistUpload(models.Model): """ Wordlist importer """ wordlist_file = models.FileField(_('wordlist file (.txt)'), upload_to=os.path.relpath(os.path.join(settings.MEDIA_ROOT, 'temp')), help_text=_('Select a .txt file containing a single word per line to upload as a new dictionary.')) dictionary = models.ForeignKey(Dictionary, null=True, blank=True, help_text=_('Select a dictionary to add these words to. 
leave this empty to create a new dictionary from the supplied name.')) name = models.CharField(_('Name'), max_length=31, blank=True, help_text=_('A short descriptive name')) uniqueonly = models.BooleanField(_('only unique'), default=True, help_text=_('Import only words that are not contained in any other dictionary?')) public = models.BooleanField(_('public?'), default=True, help_text=_('May everyone use this dictionary?')) language = models.CharField(_('Language'), max_length=15, default=settings.LANGUAGE_CODE, choices=settings.LANGUAGES, help_text=_('Language of (most of) the words in this dictionary')) description = models.CharField(_('Description'), blank=True, max_length=255) owner = models.ForeignKey(User, verbose_name=_('Owner')) class Meta: verbose_name = _('wordlist upload') verbose_name_plural = _('wordlist uploads') def __str__(self): return "%s (%s)" % (self.name, self.wordlist_file) def save(self, *args, **kwargs): super(WordlistUpload, self).save(*args, **kwargs) dictionary = self.process_wordlist() super(WordlistUpload, self).delete() return dictionary def process_wordlist(self): if not os.path.isfile(self.wordlist_file.path): # TODO: throw exception? return None wordfile = open(self.wordlist_file.path, 'rU', encoding='utf-8') lines = wordfile.readlines() wordfile.close() if self.dictionary: D = self.dictionary else: if not self.name: # TODO: throw exception? return false D = Dictionary.objects.create( name = self.name, public = self.public, language = self.language, description = self.description, owner = self.owner, ) D.save() for line in lines: (newword, newdesc, newprio) = splitwordline(line) newdesc = newdesc[:127] # max. 
length # TODO: exception if decoding fails if len(newword) < 2: continue try: if self.uniqueonly: W = Word.objects.filter(word=newword, dictionary__language=D.language) W = W[0] else: W = Word.objects.get(word=newword, dictionary=D) except (Word.DoesNotExist, IndexError): W = Word.objects.create(word=newword, dictionary=D) if newdesc: W.description = newdesc if newprio: W.priority = newprio W.save() try: os.remove(self.wordlist_file.path) except Exception as ex: logger.exception(ex) return D PUZZLE_TYPES = ( ('d', _('default crossword puzzle with black squares')), # numbers and black squares in grid. only possible type ATM ('b', _('crossword puzzle with bars (no squares)')), ('s', _('Swedish crossword puzzle (questions in squares)')), # default in most magazines # other... ) class Puzzle(models.Model): """ """ title = models.CharField(verbose_name=_('title'), max_length=255, blank=True, help_text=_('title or short description of this puzzle')) code = models.SlugField(verbose_name=_('code'), max_length=63, editable=False, unique=True, help_text=_('auto-generated URL code of this puzzle')) public = models.BooleanField(verbose_name=_('public'), default=True, help_text=_('Is this puzzle publicly viewable?')) language = models.CharField(verbose_name=_('language'), max_length=7, default=settings.LANGUAGE_CODE, help_text=_('main language of this puzzle'), choices=settings.LANGUAGES) owner = models.ForeignKey(User, verbose_name=_('owner'), help_text=_('owner of the puzzle')) createdby = models.ForeignKey(User, verbose_name=_('created by'), related_name='+', editable=False, help_text=_('user that saved the puzzle for the first time (may be anonymous)')) lastchangedby = models.ForeignKey(User, verbose_name=_('last changed by'), related_name='+', editable=False, help_text=_('user that saved the puzzle the latest time')) createdon = models.DateTimeField(verbose_name=_('created on'), auto_now_add=True, help_text=_('timestamp of creation (first save)')) lastchangedon = 
models.DateTimeField(verbose_name=_('last changed on'), auto_now=True, help_text=_('timestamp of last change')) type = models.CharField(verbose_name=_('type'), max_length=1, default='d', editable=False, help_text=_('type of this puzzle'), choices=PUZZLE_TYPES) width = models.PositiveSmallIntegerField(verbose_name=_('width'), default=settings.CROISEE_GRIDDEF_X, help_text=_('width of the puzzle (number of characters)')) height = models.PositiveSmallIntegerField(verbose_name=_('height'), default=settings.CROISEE_GRIDDEF_Y, help_text=_('height of the puzzle (number of characters)')) text = models.TextField(verbose_name=_('text'), blank=True, help_text=_('characters of the puzzle (solution)')) numbers = models.TextField(verbose_name=_('numbers'), blank=True, help_text=_('list of coordinates of word start numbers')) # x,y,num\n questions = models.TextField(verbose_name=_('questions'), blank=True, help_text=_('list of questions')) # 1::h::Description\n class Meta: verbose_name = _('crossword puzzle') verbose_name_plural = _('crossword puzzles') def __str__(self): return "%s (%s)" % (self.code, self.title) def get_absolute_url(self): return '/puzzle/%s/' % self.code
10,072
3,038
# -*- coding: utf-8 -*-

from unittest import TestCase

from flask_login import login_user
from flask_wtf import FlaskForm
from wtforms import StringField
from wtforms import ValidationError

from app import create_app
from app import db
from app.configuration import TestConfiguration
from app.localization import get_language_names
from app.userprofile import User
from app.views.userprofile.forms import UniqueEmail
from app.views.userprofile.forms import UserSettingsForm


class UniqueEmailTest(TestCase):
    """Tests for the UniqueEmail form validator."""

    def setUp(self):
        """
            Initialize the test cases.
        """
        self.app = create_app(TestConfiguration)
        self.app_context = self.app.app_context()
        self.app_context.push()
        self.request_context = self.app.test_request_context()
        self.request_context.push()
        db.create_all()

    def tearDown(self):
        """
            Reset the test cases.
        """
        db.session.remove()
        db.drop_all()
        self.request_context.pop()
        self.app_context.pop()

    def test_init_default_message(self):
        """
            Test initializing the UniqueEmail validator with the default error message.

            Expected result: The default error message is used.
        """
        validator = UniqueEmail()
        self.assertEqual('The email address already is in use.', validator.message)

    def test_init_custom_message(self):
        """
            Test initializing the UniqueEmail validator with a custom error message.

            Expected result: The custom error message is used.
        """
        message = 'Another user already claims this email address.'
        validator = UniqueEmail(message=message)
        self.assertEqual(message, validator.message)

    def test_call_no_data(self):
        """
            Test the validator on an empty field.

            Expected result: No error is raised.
        """
        class UniqueEmailForm(FlaskForm):
            email = StringField('Email')

        form = UniqueEmailForm()
        validator = UniqueEmail()

        # noinspection PyNoneFunctionAssignment
        validation = validator(form, form.email)
        self.assertIsNone(validation)

    def test_call_unused_email(self):
        """
            Test the validator on a field with an unused email address.

            Expected result: No error is raised.
        """
        class UniqueEmailForm(FlaskForm):
            email = StringField('Email')

        form = UniqueEmailForm()
        form.email.data = 'test@example.com'
        validator = UniqueEmail()

        # noinspection PyNoneFunctionAssignment
        validation = validator(form, form.email)
        self.assertIsNone(validation)

    def test_call_email_of_current_user(self):
        """
            Test the validator on a field with the current user's email address.

            Expected result: No error is raised.
        """
        class UniqueEmailForm(FlaskForm):
            email = StringField('Email')

        # Create a test user.
        name = 'John Doe'
        email = 'test@example.com'
        user = User(email, name)
        db.session.add(user)
        db.session.commit()

        # Log in the test user.
        login_user(user)

        form = UniqueEmailForm()
        form.email.data = email
        validator = UniqueEmail()

        # noinspection PyNoneFunctionAssignment
        validation = validator(form, form.email)
        self.assertIsNone(validation)

    def test_call_email_of_different_user(self):
        """
            Test the validator on a field with a different user's email address.

            Expected result: An error is raised.
        """
        class UniqueEmailForm(FlaskForm):
            email = StringField('Email')

        # Create a test user.
        name = 'John Doe'
        email = 'test@example.com'
        user = User(email, name)
        db.session.add(user)
        db.session.commit()

        message = 'Another user already claims this email address.'
        form = UniqueEmailForm()
        form.email.data = email
        validator = UniqueEmail()

        with self.assertRaises(ValidationError) as thrown_message:
            # noinspection PyNoneFunctionAssignment
            validation = validator(form, form.email)
            self.assertIsNone(validation)

        # BUG FIX: the original compared `message` against the assertRaises
        # context object itself, which can never be equal; compare against
        # the raised exception's message instead.
        self.assertEqual(message, str(thrown_message.exception))


class UserSettingsFormTest(TestCase):
    """Tests for the UserSettingsForm."""

    def setUp(self):
        """
            Initialize the test cases.
        """
        self.app = create_app(TestConfiguration)
        self.app_context = self.app.app_context()
        self.app_context.push()
        self.request_context = self.app.test_request_context()
        self.request_context.push()
        db.create_all()

    def tearDown(self):
        """
            Reset the test cases.
        """
        db.session.remove()
        db.drop_all()
        self.request_context.pop()
        self.app_context.pop()

    def test_init(self):
        """
            Test that the form is correctly initialized.

            Expected result: The language field is initialized with the available languages.
        """
        languages = get_language_names(TestConfiguration.TRANSLATION_DIR)
        form = UserSettingsForm()
        self.assertListEqual(list(languages), form.language.choices)
5,372
1,455
# Generated by Django 3.1.1 on 2020-11-10 03:16 import django.core.validators from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('Post', '0001_initial'), ] operations = [ migrations.AddField( model_name='post', name='image', field=models.ImageField(null=True, upload_to='images/'), ), migrations.AlterField( model_name='post', name='phone_number', field=models.CharField(blank=True, max_length=13, validators=[django.core.validators.RegexValidator(message="Phone number must be 10 digits and entered in the format: '98XXXXXXXX'.", regex='^\\+?1?\\d{10}$')]), ), ]
769
260
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'About.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
# (Auto-generated "About" dialog: author/link labels and an OK button.)

from PyQt5 import QtCore, QtGui, QtWidgets


class Ui_Dialog(object):
    # Standard pyuic5 layout class: call setupUi(dialog) to build the widgets.
    def setupUi(self, Dialog):
        Dialog.setObjectName("Dialog")
        Dialog.resize(400, 241)
        self.verticalLayoutWidget = QtWidgets.QWidget(Dialog)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(10, 20, 381, 71))
        self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.verticalLayoutWidget)
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout.setObjectName("verticalLayout")
        self.label = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.label.setObjectName("label")
        self.verticalLayout.addWidget(self.label)
        self.label_2 = QtWidgets.QLabel(self.verticalLayoutWidget)
        # NOTE(review): this passes an HTML anchor to setStyleSheet, which
        # expects CSS — presumably a leftover from the .ui file; the visible
        # link text is actually set via setText in retranslateUi below.
        self.label_2.setStyleSheet("<a href=\\\"https://github.com/pm-str/CountDown-More\\\">CountDown & More</font> </a>")
        self.label_2.setOpenExternalLinks(True)
        self.label_2.setObjectName("label_2")
        self.verticalLayout.addWidget(self.label_2)
        self.label_3 = QtWidgets.QLabel(self.verticalLayoutWidget)
        self.label_3.setOpenExternalLinks(True)
        self.label_3.setObjectName("label_3")
        self.verticalLayout.addWidget(self.label_3)
        self.label_4 = QtWidgets.QLabel(Dialog)
        self.label_4.setGeometry(QtCore.QRect(10, 130, 361, 51))
        self.label_4.setObjectName("label_4")
        self.pushButton = QtWidgets.QPushButton(Dialog)
        self.pushButton.setGeometry(QtCore.QRect(160, 200, 89, 25))
        self.pushButton.setObjectName("pushButton")

        self.retranslateUi(Dialog)
        # OK button simply closes the dialog.
        self.pushButton.clicked.connect(Dialog.close)
        QtCore.QMetaObject.connectSlotsByName(Dialog)

    def retranslateUi(self, Dialog):
        # Sets all user-visible strings (translatable via Qt's tr mechanism).
        _translate = QtCore.QCoreApplication.translate
        Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
        self.label.setText(_translate("Dialog", "Author: Mikhail Pavlov"))
        self.label_2.setText(_translate("Dialog", "GitHub: <a href=\"https://github.com/pm-str/CountDown-and-More\">CountDown & More</a>"))
        self.label_3.setText(_translate("Dialog", "Contacts: <a href=\"mailto:pavl.mikhail@gmail.com\">pavl.mikhail@gmail.com</a>"))
        self.label_4.setText(_translate("Dialog", "Thanks for using this program! \n"
"You\'re free to ask me any question in any time. \n"
"Recommendations and suggestions are welcome."))
        self.pushButton.setText(_translate("Dialog", "OK"))
2,714
897
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models from django.conf import settings class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='UserProfile', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('about', models.TextField(verbose_name='About')), ('zip', models.CharField(help_text='Your zip code is used to keep search results local', max_length=5, verbose_name='Zip Code', blank=True)), ('photo', models.ImageField(upload_to=b'', verbose_name='Photo', blank=True)), ('user', models.OneToOneField(related_name='profile', to=settings.AUTH_USER_MODEL)), ], ), ]
948
268
import pymysql

# Module-level connection and cursor shared by all helpers below.
conexao = pymysql.connect(
    host='localhost',
    user='root',
    password='admin1234',
    db='ex_01')

# DictCursor so every row comes back as a {column: value} dict.
cursor = conexao.cursor(pymysql.cursors.DictCursor)


def select(fields, tables, where=None):
    """Run SELECT <fields> FROM <tables> [WHERE <where>]; return all rows.

    SECURITY: the query is built by string concatenation — never pass
    untrusted input here; use cursor.execute(sql, params) for user data.
    """
    query = 'SELECT ' + fields + ' FROM ' + tables
    if where:
        query += ' WHERE ' + where
    cursor.execute(query)
    return cursor.fetchall()


def insert(values, table, fields=None):
    """INSERT the given value list into `table`.

    Returns True on success, or the pymysql error object on failure.
    SECURITY: string-built SQL — see select().
    """
    query = 'INSERT INTO ' + table
    if fields:
        query += ' (' + fields + ')'
    query += ' VALUES ' + '(' + values + ')'
    try:
        cursor.execute(query)
        conexao.commit()
        return True
    except pymysql.Error as error:
        # BUG FIX: the original caught ValueError, which pymysql never raises,
        # so real database failures propagated instead of being returned.
        return error


def update(table, field, values, where):
    """UPDATE a single column (`field` = 'values') for rows matching `where`.

    SECURITY: string-built SQL — see select().
    """
    query = ('UPDATE ' + table + ' SET ' + field +
             " = '" + values + "'" + ' WHERE ' + where)
    cursor.execute(query)
    conexao.commit()


def delete(table, where):
    """DELETE rows matching `where`, with FK checks temporarily disabled."""
    query = 'DELETE FROM ' + table + ' WHERE ' + where
    cursor.execute('SET FOREIGN_KEY_CHECKS = 0;')
    try:
        cursor.execute(query)
        conexao.commit()
    finally:
        # BUG FIX: the original left FOREIGN_KEY_CHECKS disabled for the whole
        # session and returned the *un-called* `conexao.commit` method object.
        cursor.execute('SET FOREIGN_KEY_CHECKS = 1;')
    return True
1,244
452
#!/usr/bin/env python3
"""downsample

Author: Christoph Hahn (christoph.hahn@uni-graz.at)
February 2017

Extract a random subsample of ~ x % reads from fastq data. The choice is based on a
random number generator. For each fastq read, a random number between 1-100 will be
generated. If the random number is smaller than the desired proportion in percent,
the read will be kept, otherwise it will be discarded. So to extract ~15 % of the
reads any read that gets a random number of <=15 will be kept, which will result in
roughly 15% of the reads.

Subsamples can be taken from several fastq files at the same time. We allow to input
paired end data in two separate files. If so specified, subsamples will be taken so
that the pairs will remain intact and the output will be given in interleaved format.
Input fastq files can be compressed with gzip. Mixed compressed / non-compressed
input is possible except in the case of paired end data. In this case both read
files need to be either compressed or non-compressed.

Examples:
# sample ~20 % of reads from three files
downsample.py -s 20 -r test.fastq.gz -r test2.fastq -r test3.fastq.gz > test.subsample_20.fastq
# sample ~30 % of reads from two files, and interleave reads from the two files on the fly
downsample.py -s 30 --interleave -r test_R1.fastq.gz -r test_R2.fastq.gz > test.interleaved.subsample_30.fastq
# sample with a fixed seed for the random number generator, to allow replication
downsample.py -s 20 --seed -421039 -r test.fastq.gz -r test2.fastq > test.subsample_20.fastq
# sample ~20 % of reads from two files, compressing results on the fly
downsample.py -s 20 -r test.fastq.gz -r test2.fastq | gzip > test.subsample_20.fastq.gz
"""

import sys
import re
import random
import gzip


def parse_arguments():
    """Parse the command line; print usage and exit(1) when no read files are given."""
    import argparse

    VERSION = "0.1"
    DESCRIPTION = '''
downsample.py - version: v.%s
''' % VERSION

    parser = argparse.ArgumentParser(
        description=DESCRIPTION, prog='downsample.py',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog='''
examples:
# sample ~20 % of reads from three files
downsample.py -s 20 -r test.fastq.gz -r test2.fastq -r test3.fastq.gz

# sample ~30 % of reads from two files, and interleave reads in output
downsample.py -s 30 --interleave -r test_R1.fastq.gz -r test_R2.fastq.gz

# sample ~40 % of reads from three files, defining a seed for the random number generator, to allow replication of the process.
downsample.py -s 20 --seed -421039 -r test.fastq.gz -r test2.fastq -r test3.fastq.gz > test.subsample_40.fastq

# sample ~20 % of reads from two files, compressing results on the fly.
downsample.py -s 20 -r test.fastq.gz -r test2.fastq | gzip > test.subsample_20.fastq.gz

#sample ~5% of reads from a single file that contains interleaved read data
downsample.py -s 5 --interleave -r test.interleaved.fastq.gz | gzip > test.interleaved.subsample_5.fastq.gz
''')
    parser.add_argument("-r", "--reads",
                        help="Readfile (flag can be used repeatadly to process several files",
                        metavar="<FILE>", action="append")
    parser.add_argument("-s", "--sample",
                        help="Desired size of subsample in percent (1-100; default = 50)",
                        type=int, metavar="<INT>", action="store", default=50)
    parser.add_argument("--interleave",
                        help="Optional. In case of two input files, data will be interleaved from these in the output. Otherwise data will be treated as already interleaved.",
                        action="store_true")
    parser.add_argument("--seed", help="Optional. Seed for random number generator",
                        metavar="<INT>", type=int, action="store")
    parser.add_argument("--version", action="version", version=VERSION)

    args = parser.parse_args()
    if not args.reads or len(sys.argv) == 1:
        # BUG FIX: the original (Python 2) did `print parser.print_usage()`,
        # which printed the usage followed by a stray "None" line.
        parser.print_usage()
        sys.exit(1)
    return args


def check_args(args):
    """Reject sample sizes outside 1-100 percent."""
    if args.sample < 1 or args.sample > 100:
        sys.exit("\n only sample size 1-100 is valid\n")


def set_seed(seed):
    """Seed the RNG; draw (and report) a random seed when none was supplied."""
    if not seed:
        seed = random.randint(-100000000, 100000000)
    sys.stderr.write("seed for random number generator is: %i\n" % seed)
    random.seed(seed)


def decide(string, percent):
    """Emit `string` iff a 1-100 roll is <= percent (keeps ~percent %% of records)."""
    if random.randint(1, 100) <= percent:
        print(string, end='')


def sample_interleave(file1, file2, percent):
    """Read one 4-line fastq record from each file and keep/drop the pair together.

    Read headers are rewritten to the old /1 and /2 suffix convention.
    """
    while True:
        out = ''
        line = file1.readline()
        if line.strip() == "":
            break
        out += re.sub(r" 1:n.*", "/1", line)
        for _ in range(3):
            out += re.sub(r" 2:n.*", "/2", file1.readline())
        for _ in range(4):
            out += re.sub(r" 2:n.*", "/2", file2.readline())
        decide(out, percent)


def sample(fi, percent, step):
    """Keep/drop `step`-line records (4 = single end, 8 = interleaved pairs)."""
    while True:
        out = ''
        line = fi.readline()
        if line.strip() == "":
            break
        out += line
        for _ in range(step - 1):
            out += fi.readline()
        decide(out, percent)


def main():
    args = parse_arguments()
    check_args(args)
    sys.stderr.write("\ndownsampling to %i percent\n" % args.sample)
    set_seed(args.seed)

    if args.interleave and len(args.reads) == 2:
        sys.stderr.write("interleaving sample from input files %s and %s\n"
                         % (args.reads[0], args.reads[1]))
        # Paired input must be uniformly compressed or uncompressed (see docstring);
        # 'rt' makes gzip yield text, matching the plain-text branch on Python 3.
        opener = gzip.open if args.reads[0][-2:] == "gz" else open
        with opener(args.reads[0], 'rt') as f1, opener(args.reads[1], 'rt') as f2:
            sample_interleave(f1, f2, args.sample)
    else:  # all other cases
        if args.interleave:
            sys.stderr.write("you indicated interleaved input file(s) -> stepsize = 8 lines\n")
            step = 8
        else:
            sys.stderr.write("you indicated single end data -> stepsize = 4 lines\n")
            step = 4
        for readsfile in args.reads:
            if readsfile[-2:] == "gz":
                f = gzip.open(readsfile, 'rt')
            else:
                f = open(readsfile)
            sample(f, args.sample, step)
            f.close()
    sys.stderr.write("Done!\n\n")


if __name__ == '__main__':
    sys.exit(main())
6,792
2,205
""" Your task is to construct a building which will be a pile of n cubes. The cube at the bottom will have a volume of n^3, the cube above will have volume of (n-1)^3 and so on until the top which will have a volume of 1^3. You are given the total volume m of the building. Being given m can you find the number n of cubes you will have to build? The parameter of the function findNb (find_nb, find-nb, findNb) will be an integer m and you have to return the integer n such as n^3 + (n-1)^3 + ... + 1^3 = m if such a n exists or -1 if there is no such n. """ def find_nb(n): pass
585
186
"""Class to handle iterating through tweets in real time.""" import json import os import pandas as pd # Said this was unused. # from bluebird import BlueBird from bluebird.scraper import BlueBird from sentiment import PoliticalClassification from train import TrainingML col_names32 = "created_at,id,id_str,full_text,truncated,display_text_range,entities,source,in_reply_to_status_id,in_reply_to_status_id_str,in_reply_to_user_id,in_reply_to_user_id_str,in_reply_to_screen_name,user_id,user_id_str,geo,coordinates,place,contributors,is_quote_status,retweet_count,favorite_count,conversation_id,conversation_id_str,favorited,retweeted,possibly_sensitive,possibly_sensitive_editable,lang,supplemental_language,,self_thread" # api = TwitterClient() # trained_model = TrainingML() # sentiment = PoliticalClassification() user_results = "../data/results.csv" def search_term(): """Using a user-specified keyword to find related tweets.""" index = 0 searching = input("Enter a term to search. \n") query = { 'fields': [ {'items': [searching]}, ] } for tweet in BlueBird().search(query): index += 1 with open('../data/temp.json', 'w') as temp: json.dump(tweet, temp) df = pd.read_json('../data/temp.json', lines=True) with open(user_results, 'a') as f: df.to_csv(f, header=None, index=False) if index == 50: dummy_file = user_results + '.bak' with open(user_results, 'r') as read_obj, open(dummy_file, 'w') as write_obj: write_obj.write(col_names32 + '\n') for line in read_obj: write_obj.write(line) os.remove(user_results) os.rename(dummy_file, user_results) break def search_hashtag(): """"Using a user-specified hashtag to find related tweets.""" index = 0 searching = input("Enter a hashtag to search. 
\n") query = { 'fields': [ {'items': [searching], 'target':'hashtag'}, ] } for tweet in BlueBird().search(query): index += 1 with open('data/temp.json', 'w') as temp: json.dump(tweet, temp) df = pd.read_json('data/temp.json', lines=True) with open(user_results, 'a') as f: df.to_csv(f, header=None, index=False) if index == 50: dummy_file = user_results + '.bak' with open(user_results, 'r') as read_obj, open(dummy_file, 'w') as write_obj: write_obj.write(col_names32 + '\n') for line in read_obj: write_obj.write(line) os.remove(user_results) os.rename(dummy_file, user_results) break def search_user(): """Using a user-specified username to find related tweets.""" index = 0 searching = input("Enter a user to search. \n") query = { 'fields': [ {'items': [searching], 'target':'from'}, ] } for tweet in BlueBird().search(query): index += 1 with open('data/temp.json', 'w') as temp: json.dump(tweet, temp) df = pd.read_json('data/temp.json', lines=True) with open(user_results, 'a') as f: df.to_csv(f, header=None, index=False) if index == 50: dummy_file = user_results + '.bak' with open(user_results, 'r') as read_obj, open(dummy_file, 'w') as write_obj: write_obj.write(col_names32 + '\n') for line in read_obj: write_obj.write(line) os.remove(user_results) os.rename(dummy_file, user_results) break def main(): """Main method to give selection options.""" try: os.remove('../results.csv') os.remove('../temp.csv') except: print() print("Welcome to the Fake News Dection Program! \n") print("Would you like to search by:\nkeyword\nhashtag\nuser") done = False while done == False: choice = input("keyword/hashtag/user: ") if choice == "keyword": search_term() done = True elif choice == "hashtag": search_hashtag() done = True elif choice == "user": search_user() done = True else: print("Sorry, Bad Input. Please Enter One of the Options Below") done = False try: os.remove('data/temp.json') except: print() if __name__ == '__main__': # calls main function main()
4,597
1,459
#!/usr/bin/env python3 import aws_cdk.aws_iam as iam import aws_cdk.aws_s3 as s3 from aws_cdk import core class ExportingStack(core.Stack): exported_role_a: iam.Role exported_role_b: iam.Role def __init__(self, scope: core.Construct, id: str, **kwargs) -> None: super().__init__(scope, id, **kwargs) self.exported_role_a = iam.Role( self, "exporting-role-a", assumed_by=iam.ServicePrincipal("ec2.amazonaws.com") ) self.exported_role_b = iam.Role( self, "exporting-role-b", assumed_by=iam.ServicePrincipal("ec2.amazonaws.com") ) # compat_output = core.CfnOutput( # self, # id="will-be-overwritten", # # TODO: Update the value according to your environment # value=f"arn:aws:iam::{core.Aws.ACCOUNT_ID}:role/export-exportingroleb66286D65-CZGEAEVHHA32", # export_name="export:ExportsOutputFnGetAttexportingroleb66286D65ArnE09A9A52" # ) # compat_output.override_logical_id("ExportsOutputFnGetAttexportingroleb66286D65ArnE09A9A52") class ImportingStack(core.Stack): def __init__( self, scope: core.Construct, id: str, role_a: iam.Role, role_b: iam.Role, **kwargs ) -> None: super().__init__(scope, id, **kwargs) test_bucket = s3.Bucket( self, "some-bucket", removal_policy=core.RemovalPolicy.DESTROY ) test_bucket.add_to_resource_policy( iam.PolicyStatement( actions=["s3:GetObject"], principals=[ role_a, # role_b ], resources=[ test_bucket.arn_for_objects("*"), test_bucket.bucket_arn ] ) ) app = core.App() export = ExportingStack(app, "export") ImportingStack( app, "import", role_a=export.exported_role_a, role_b=export.exported_role_b ) app.synth()
2,107
719
# -*- coding: utf-8 -*-
# vim:fenc=utf-8

import pytest

import trustpaylib

# Python 2/3 compatibility shim: on py3 there is no builtin `unicode`,
# so substitute an identity function and remember which runtime we are on.
try:
    unicode
    py3 = False
except NameError:
    py3 = True
    unicode = lambda s: s


class TestTrustPayCore:
    """Unit tests for trustpaylib request building, signing and validation.

    Uses a fixed merchant id (aid) / secret key pair and one shared
    pay_request fixture; expected signatures below are precomputed for
    exactly these values.
    """

    secret_key = "abcd1234"
    aid = "9876543210"
    # Shared well-formed payment request fixture.
    pay_request = trustpaylib.build_pay_request(
        AID=aid,
        AMT="123.45",
        CUR="EUR",
        REF="1234567890",
    )

    def test_validate_request(self):
        # Empty request: required fields missing.
        pr = trustpaylib.build_pay_request()
        with pytest.raises(ValueError):
            trustpaylib.TrustPay.validate_request(pr)
        # The shared fixture must validate as-is.
        trustpaylib.TrustPay.validate_request(self.pay_request)
        # A pre-set SIG on an otherwise empty request is rejected.
        pr = trustpaylib.build_pay_request(SIG="SIG")
        with pytest.raises(ValueError):
            trustpaylib.TrustPay.validate_request(pr)
        # A client-signed request validates.
        env = trustpaylib.build_environment(
            aid=self.aid,
            secret_key=self.secret_key,
        )
        tp_client = trustpaylib.TrustPay(env)
        pr = tp_client.sign_request(self.pay_request)
        trustpaylib.TrustPay.validate_request(pr)
        # Unknown currency code.
        pr = trustpaylib.build_pay_request(
            AID=self.aid,
            AMT="123.45",
            REF="1234567890",
            CUR="GRG",
        )
        with pytest.raises(ValueError):
            trustpaylib.TrustPay.validate_request(pr)
        # Unknown language code.
        pr = trustpaylib.build_pay_request(
            AID=self.aid,
            AMT="123.45",
            REF="1234567890",
            CUR="EUR",
            LNG="prd",
        )
        with pytest.raises(ValueError):
            trustpaylib.TrustPay.validate_request(pr)
        # Unknown country code.
        pr = trustpaylib.build_pay_request(
            AID=self.aid,
            AMT="123.45",
            REF="1234567890",
            CUR="EUR",
            CNT="tra",
        )
        with pytest.raises(ValueError):
            trustpaylib.TrustPay.validate_request(pr)
        # Amount with more than two decimal places.
        pr = trustpaylib.build_pay_request(
            AID=self.aid,
            AMT="123.4566",
            REF="1234567890",
            CUR="EUR",
            CNT="tra",
        )
        with pytest.raises(ValueError):
            trustpaylib.TrustPay.validate_request(pr)

    def test_cls_creation(self):
        # Unknown keyword arguments are rejected.
        with pytest.raises(ValueError):
            trustpaylib.build_environment(lol='olo')
        assert trustpaylib.build_environment(aid=self.aid)
        # Numeric amounts are coerced to text; None URLs stay None.
        pr = trustpaylib.build_pay_request(
            AMT=123.45,
            NURL=None,
        )
        if not py3:
            assert isinstance(pr.AMT, unicode)
        assert pr.NURL is None
        assert pr.RURL is None

    def test_sign_msg(self):
        # Expected HMAC signature for the fixture values above.
        sign = (
            "DF174E635DABBFF7897A82822521DD7"
            "39AE8CC2F83D65F6448DD2FF991481EA3"
        )
        # The signed message is the concatenation AID + AMT + CUR + REF.
        msg = "".join((
            self.aid,
            self.pay_request.AMT,
            self.pay_request.CUR,
            self.pay_request.REF,
        ))
        sign_message = trustpaylib.sign_message
        assert sign_message(self.secret_key, msg) == sign
        env = trustpaylib.build_environment(
            aid=self.aid,
            secret_key=self.secret_key,
        )
        tp_client = trustpaylib.TrustPay(env)
        assert sign_message(
            self.secret_key,
            tp_client.create_signature_msg(self.pay_request),
        ) == sign
        assert tp_client.pay_request_signature(self.pay_request) == sign

    def test_environment(self):
        # Default environment points at the production API url.
        env = trustpaylib.build_environment(
            aid=self.aid,
            secret_key=self.secret_key,
        )
        assert env.redirect_url is None
        assert env.aid and env.secret_key
        assert env.api_url == trustpaylib.API_URL
        # Explicit api_url overrides the default.
        env = trustpaylib.build_environment(
            aid=self.aid,
            secret_key=self.secret_key,
            api_url="grg prd"
        )
        assert env.api_url == "grg prd"
        # Test environment defaults to the sandbox url.
        env = trustpaylib.build_test_environment(
            aid=self.aid,
            secret_key=self.secret_key,
        )
        assert env.api_url == trustpaylib.TEST_API_URL
        env = trustpaylib.build_test_environment(
            aid=self.aid,
            secret_key=self.secret_key,
            api_url="grg prd"
        )
        assert env.api_url == "grg prd"

    def test_filter_nones(self):
        # None-valued keys are dropped; others are kept.
        assert not trustpaylib._filter_dict_nones({'none': None})
        filtered = trustpaylib._filter_dict_nones({
            "none": None,
            "value": "Value",
        })
        assert "none" not in filtered
        assert "value" in filtered

    def test_build_link(self):
        env = trustpaylib.build_environment(
            aid=self.aid,
            secret_key=self.secret_key,
        )
        tp_client = trustpaylib.TrustPay(env)
        assert tp_client.build_link(self.pay_request)
        assert trustpaylib.build_link_for_request(
            env.api_url, self.pay_request)
        # With sign=False the client link equals the module-level builder's.
        client_link = tp_client.build_link(
            self.pay_request,
            sign=False,
        )
        link = trustpaylib.build_link_for_request(
            env.api_url, self.pay_request)
        assert client_link == link

    def test_result_codes(self):
        # Result code 1001 must have a human-readable description.
        redirect = trustpaylib.build_redirect(
            RES=1001,
            REF="12345",
            PID="1234",
        )
        assert trustpaylib.TrustPay.get_result_desc_from_redirect(redirect)
        notification = trustpaylib.build_notification(
            RES=1001,
            REF="12345",
        )
        assert len(trustpaylib.TrustPay.get_result_desc_from_notification(
            notification)) == 2

    def test_check_notif_signature(self):
        # Notification with a signature precomputed for these exact fields.
        notification = trustpaylib.build_notification(
            AID=unicode("1234567890"),
            TYP=unicode("CRDT"),
            AMT=unicode("123.45"),
            CUR=unicode("EUR"),
            REF=unicode("9876543210"),
            RES=unicode("0"),
            TID=unicode("11111"),
            OID=unicode("1122334455"),
            TSS=unicode("Y"),
            SIG=unicode(
                "97C92D7A0C0AD99CE5DE55C3597D5ADA"
                "0D423991E2D01938BC0F684244814A37"
            )
        )
        env = trustpaylib.build_environment(
            aid=unicode("1234567890"),
            secret_key=self.secret_key,
        )
        tp_client = trustpaylib.TrustPay(env)
        assert tp_client.check_notification_signature(notification)
2,189
# Advent of Code 2020, day 1: find entries of the expense report summing to 2020.


# PART 1
def aoc01(numbers, value):
    """Return the product of two entries of `numbers` summing to `value`.

    Returns None when no pair exists.  Membership tests use a set
    (the original scanned the list, O(n) per lookup).
    """
    seen = set(numbers)
    for x in numbers:
        if value - x in seen:
            return x * (value - x)
    return None


# PART 2
def aoc02(numbers, value):
    """Return the product of three entries summing to `value`, or None.

    BUG FIX: the original carried num1/num2 over from earlier iterations
    (stale matches) and crashed with TypeError (None arithmetic) when the
    first outer element had no matching pair.
    """
    seen = set(numbers)
    for i, x in enumerate(numbers):
        for y in numbers[i + 1:]:
            z = value - x - y
            if z in seen:
                return x * y * z
    return None


if __name__ == '__main__':
    # Guarded so importing this module does not require the input file
    # (the original read inputDay01.txt at import time).
    with open('inputDay01.txt', 'r') as inputfile:
        values = [int(i) for i in inputfile.readlines()]
    print("Part1:", aoc01(values, 2020))
    print("Part2:", aoc02(values, 2020))
606
216
import logging

from flask import request
from flask_restplus import Resource, Namespace, fields

from ..managers import auth_manager
from ..managers.auth_manager import token_required
from ..exceptions import HTTP_EXCEPTION

api = Namespace('auth', description='Authentication related operations')

dto = api.model('auth', {
    'username': fields.String(required=True, description='The (Linux) username'),
    'password': fields.String(required=True, description='The user password'),
})


def _call(action):
    """Run a manager action and translate failures into API aborts.

    Returns (payload, 200) on success.  HTTP_EXCEPTION keeps its own code
    and payload; anything else is logged and mapped to a 500.
    (Deduplicates the identical try/except previously copied into each
    endpoint.)
    """
    try:
        return action(), 200
    except HTTP_EXCEPTION as e:
        api.abort(e.code, e.payload)
    except Exception as e:
        logging.exception(e, exc_info=True)
        api.abort(500, str(e))


@api.route('/login/')
class UserLogin(Resource):
    @api.expect(dto, validate=True)
    def post(self):
        """Login and retrieve JWT token"""
        return _call(lambda: auth_manager.login_user(request.json))


@api.route('/refresh/')
class TokenRefresh(Resource):
    def post(self):
        """Use JWT refresh token to retrieve a new JWT access token"""
        return _call(auth_manager.refresh_token)


@api.route('/logout/')
class LogoutAPI(Resource):
    def post(self):
        """Logout and invalidate JWT token"""
        return _call(auth_manager.logout_user)
1,686
515
from abc import ABC, abstractmethod, abstractproperty
from typing import Dict

__all__ = ["AbstractAlgorithm"]


class AbstractAlgorithm(ABC):
    """Interface for text-comparison algorithms.

    Each algorithm preprocesses individual text fragments, compares unique
    fragment pairs, accumulates statistics, and renders HTML descriptions
    of its results.  (Docstrings translated from Russian; the HTML strings
    returned by describe_result are user-facing and kept as-is.)
    """

    @abstractmethod
    def preprocess(self, text: str) -> Dict:
        """Applied to every text fragment.

        :param text: the fragment text
        :type text: str
        :rtype: Dict
        :return: a dict of { parameter: value }
        """
        pass

    @abstractmethod
    def compare(self, res1: Dict, res2: Dict) -> Dict:
        """Compare two dicts returned by preprocess.

        Applied to every unique pair of text fragments.

        :type res1: Dict
        :type res2: Dict
        :rtype: Dict
        :return: a dict of the form {
            intersection: numeric strength of the link, from 0 to 1
            data: other link characteristics
        }
        """
        pass

    # NOTE(review): abstractproperty is deprecated since Python 3.3;
    # @property stacked on @abstractmethod is the modern equivalent.
    @abstractproperty
    def name(self) -> str:
        pass

    @abstractproperty
    def preprocess_keys(self) -> list:
        """Set of dict keys that the preprocess method returns.

        Preprocess results of different algorithms may overlap; identical
        results are stored under identical keys.  If a fragment already has
        all required results, there is no point running preprocess again.

        :rtype: list
        """
        pass

    def analyze(self, res: Dict, acc=None):
        """Accumulate overall results.

        This method must be applied to every fragment.  The resulting
        accumulator object is then passed to
        AbstractAlgorithm.describe_result to obtain the overall summary.

        Unlike the other methods it has a default implementation, but it
        makes sense to override/extend it in subclasses to produce more
        specific results.

        :param res: result of AbstractAlgorithm.preprocess
        :type res: Dict
        :param acc: accumulator holding data from all previous fragments
        :return: the accumulator
        """
        if acc is None:
            # fragments/edges/sum_intersect are what describe_result reads.
            acc = {
                'fragments': 0,
                'edges': 0,
                'sum_intersect': 0
            }
        acc['fragments'] += 1
        return acc

    def analyze_comparison(self, res1: Dict, res2: Dict, comp_res: Dict, acc):
        """Analyze the results of comparing two fragments.

        This method must be applied to every link.

        :param res1: result of AbstractAlgorithm.preprocess
        :type res1: Dict
        :param res2: result of AbstractAlgorithm.preprocess
        :type res2: Dict
        :param comp_res: result of AbstractAlgorithm.compare(res1, res2)
        :type comp_res: Dict
        :param acc: the same accumulator as in AbstractAlgorithm.analyze
        """
        acc['edges'] += 1
        acc['sum_intersect'] += comp_res['intersection']
        return acc

    def describe_result(self, acc) -> str:
        """Describe the algorithm's overall results as an HTML string.

        :param acc: result of applying AbstractAlgorithm.analyze to all fragments
        :rtype: str
        """
        if acc['edges']:
            avg_inter = f"{acc['sum_intersect'] / acc['edges'] * 100:.2f}%"
        else:
            # No links found — avoid division by zero.
            avg_inter = "0%"
        return f"""
            Проанализировано фрагментов: {acc['fragments']} <br>
            Найдено связей: {acc['edges']} <br>
            Среднее пересечение: {avg_inter}
        """

    @abstractmethod
    def describe_comparison(self, comp_dict) -> str:
        """Describe the results of comparing two fragments.

        :param comp_dict: dict from AbstractAlgorithm.compare
        :rtype: str
        """
        pass

    @abstractmethod
    def describe_preprocess(self, prep_dict) -> str:
        """Describe a fragment's preprocessing results as an HTML string.

        :param prep_dict: dict from AbstractAlgorithm.preprocess
        :rtype: str
        """
        pass
1,290
#!/usr/bin/python3
"""Build the `_libsec` CFFI extension wrapping libsecp256k1.

Reads the C declarations from libsec.h and compiles a module linked
against the system secp256k1 library (incl. extrakeys / schnorrsig).
"""

from cffi import FFI

# BUG FIX: the original leaked the file handle (open(...).read() without close).
with open("libsec.h", "r") as header_file:
    source = header_file.read()

header = """
#include <secp256k1.h>
#include <secp256k1_extrakeys.h>
#include <secp256k1_schnorrsig.h>
"""

ffi = FFI()
ffi.cdef(source)
ffi.set_source("_libsec", header, libraries=["secp256k1"])
ffi.compile(verbose=True)
304
146
import json

from kafka import KafkaConsumer, KafkaProducer


class Producer:
    """Thin KafkaProducer wrapper that JSON-encodes every message."""

    producer = None
    producer_topic = None

    def __init__(self, server, topic):
        self.producer = KafkaProducer(
            bootstrap_servers=[server],
            # Serialize every payload as UTF-8 JSON.
            value_serializer=lambda x: json.dumps(x).encode('utf-8')
        )
        self.producer_topic = topic

    def send_message(self, msg):
        """Publish `msg` to the configured topic and flush immediately.

        BUG FIX: the original referenced bare `producer`/`producer_topic`,
        which raised NameError at call time — they are instance attributes.
        """
        self.producer.send(self.producer_topic, value=msg)
        self.producer.flush()


class Consumer:
    """Thin KafkaConsumer wrapper that JSON-decodes every message."""

    __consumer = None
    consumer_topic = None

    def __init__(self, server, topic):
        self.__consumer = KafkaConsumer(
            # BUG FIX: the original ignored the `topic` argument and
            # hard-coded 'kafka-python-topic'.
            topic,
            bootstrap_servers=[server],
            auto_offset_reset='latest',
            enable_auto_commit=True,
            value_deserializer=lambda x: json.loads(x.decode('utf-8'))
        )

    def get_consumer(self):
        """Return the underlying KafkaConsumer instance."""
        return self.__consumer
938
276
import psycopg2
import sys
from nltk.tokenize import sent_tokenize
import re
import csv
import os

# One-off maintenance script: walk the pre-generated per-PMID-range CSV
# chunk files ('pmid {start - end}.csv', 100k PMIDs per chunk) and delete
# any file that contains only its header row (i.e. no data).
# NOTE(review): psycopg2 / sent_tokenize / re are imported but unused here —
# presumably left over from the script that generated the chunks.

# pmid {16300001 - 16400000}
try:
    # starting_pmid = 16300001
    # intermediate_pmid = 16400000
    starting_pmid = 100001
    intermediate_pmid = 200000
    ending_pmid = 32078260  # maximum pmid (see stats at the bottom)
    while 1:
        if intermediate_pmid < ending_pmid:
            # open existing csv files
            with open('pmid {%s - %s}.csv' % (starting_pmid, intermediate_pmid), mode='r') as csv_file:
                reader = csv.reader(csv_file)
                # Only the header present -> file is empty of data.
                if len(list(reader)) == 1:
                    # removing the file if there is only header in the file and there is no data
                    os.remove('pmid {%s - %s}.csv' % (starting_pmid, intermediate_pmid))
                    print("File " + str(starting_pmid) + " - " + str(intermediate_pmid) + " has been removed.")
                else:
                    print("File " + str(starting_pmid) + " - " + str(intermediate_pmid) + " is not empty.")
            # Advance to the next 100k-PMID chunk.
            starting_pmid = intermediate_pmid + 1
            intermediate_pmid = intermediate_pmid + 100000
        else:
            # Final, partial chunk runs up to ending_pmid instead.
            print("Entering base case ...")
            with open('pmid {%s - %s}.csv' % (starting_pmid, ending_pmid), mode='r') as csv_file:
                reader = csv.reader(csv_file)
                if len(list(reader)) == 1:
                    os.remove('pmid {%s - %s}.csv' % (starting_pmid, ending_pmid))
                    print("File " + str(starting_pmid) + " - " + str(ending_pmid) + " has been removed.")
                else:
                    print("File " + str(starting_pmid) + " - " + str(ending_pmid) + " is not empty.")
            break

    # Dataset stats kept for reference:
    # 94357012, total rows
    # 51556076, null affiliation
    # 42800936, not null affiliation
    # 21, minimum pmid
    # 32078260, maximum pmid
    # print(len(temp_row))
    sys.exit('Script completed')
except (Exception, psycopg2.Error) as error:
    # NOTE(review): the caught error is discarded — failures (e.g. a missing
    # chunk file) exit without any detail about what went wrong.
    sys.exit('Script failed')
2,038
694
#! /usr/bin/env python3 # -*- coding: utf-8 -*- import utility ################### ## main ################### if __name__=='__main__': import argparse parser = argparse.ArgumentParser() parser.add_argument("-m", "--model", help="the path to the model file") args = parser.parse_args() print('Loading association matrix') matrix = utility.loadObjFromPklFile(args.model) interactive_solver(matrix)
439
140
import json
from collections.abc import Callable
from typing import Optional

import numpy as np
from PIL import Image

from volcengine_ml_platform.datasets.dataset import _Dataset
from volcengine_ml_platform.io.tos_dataset import TorchTOSDataset


class ImageDataset(_Dataset):
    """Image dataset backed by TOS object storage, managed via ml_engine.

    Args:
        dataset_id(str): dataset id issued by ml_engine at creation time
        annotation_id(str, None): annotation-set id issued by ml_engine
        local_path(str): local directory the data is downloaded into
        tos_source(str, None): TOS url of the dataset manifest file;
            usually left unset
    """

    def download(self, local_path: str = "ImageDataset", limit=-1):
        """Download the dataset from TOS to a local directory.

        BUG FIX: the original carried two stacked docstrings here — the
        second was a dead string statement; they are merged into this one.

        Args:
            local_path(str): target download directory
            limit (int, optional): maximum number of entries to download.
                Defaults to -1 (no limit).
                NOTE(review): `limit` is currently not forwarded to
                _create_manifest_dataset — confirm whether it should be.
        """
        if local_path:
            self.local_path = local_path
        self._create_manifest_dataset(
            manifest_keyword="ImageURL",
        )

    def split(self, training_dir: str, testing_dir: str, ratio=0.8, random_state=0):
        """Split into training/testing ImageDatasets via the base-class helper."""
        return super().split_dataset(
            ImageDataset, training_dir, testing_dir, ratio, random_state
        )

    def load_as_np(self, offset=0, limit=-1):
        """Load images as a numpy array.

        Args:
            offset (int, optional): number of images to skip. Defaults to 0.
            limit (int, optional): number of images to load. Defaults to -1
                (all remaining).

        Returns:
            np array of images
            list of annotations
        """
        images = []
        annotations = []
        with open(self._manifest_path(), encoding="utf-8") as f:
            for i, line in enumerate(f):
                manifest_line = json.loads(line)
                if i < offset:
                    continue
                if limit != -1 and i >= offset + limit:
                    break
                file_path = manifest_line["Data"]["FilePath"]
                image = Image.open(file_path)
                images.append(np.asarray(image))
                annotations.append(manifest_line["Annotation"])
        return np.array(images), annotations

    def parse_image_manifest(self, manifest_file_path):
        """Parse a manifest file into parallel bucket/key/annotation lists."""
        manifest_info = {"buckets": [], "keys": [], "annotations": []}
        with open(manifest_file_path, encoding="utf-8") as f:
            for _, line in enumerate(f):
                manifest_line = json.loads(line)
                # ImageURL has the form scheme://<bucket>/<key...>
                url = manifest_line["Data"]["ImageURL"]
                bucket = url.split("//")[1].split("/")[0]
                key = url.split(f"{bucket}/")[1]
                manifest_info["buckets"].append(bucket)
                manifest_info["keys"].append(key)
                manifest_info["annotations"].append(
                    manifest_line["Annotation"],
                )
        return manifest_info

    def init_torch_dataset(
        self,
        transform: Optional[Callable] = None,
        target_transform: Optional[Callable] = None,
    ):
        """Build a TorchTOSDataset from this dataset's manifest."""
        manifest_info = self.get_manifest_info(self.parse_image_manifest)
        torch_dataset = TorchTOSDataset(
            manifest_info=manifest_info,
            transform=transform,
            target_transform=target_transform,
        )
        return torch_dataset
1,023
# Copyright 2020 TrueMLGPro
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import os
import pyfiglet
import subprocess
import sys

# CLI front-end over curl / wget / httrack.  With no arguments an
# interactive menu is shown instead (see main()).
parser = argparse.ArgumentParser(add_help=False)
group_download = parser.add_argument_group('Download Tools')
group_download.add_argument('URL', metavar='url', help='a url to download', nargs='?')
group_download.add_argument('-c', '--curl', dest='curl', action='store_true', help='Uses curl for download')
group_download.add_argument('-w', '--wget', dest='wget', action='store_true', help='Uses wget for download')
group_download.add_argument('-H', '--httrack', dest='httrack', action='store_true', help='Uses httrack for mirroring')
group_download_args = parser.add_argument_group('Download Arguments')
group_download_args.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='Makes output more detailed')
group_download_args.add_argument('-d', '--depth', dest='depth', help='Defines depth of mirror (httrack only)')
group_download_args.add_argument('-eD', '--ext-depth', dest='ext_depth', help='Defines depth of mirror for external links (httrack only)')
group_download_args.add_argument('-cN', '--conn-num', dest='conn_num', help='Defines a number of active connections during mirroring (httrack only)')
group_files = parser.add_argument_group('Files')
group_files.add_argument('-f', '--filename', dest='filename', help='Sets filename (or path) for file which is being downloaded')
group_misc = parser.add_argument_group('Misc')
group_misc.add_argument('-u', '--update', dest='update', action='store_true', help='Updates MultiDownloader')
group_misc.add_argument('-h', '--help', action='help', help='Shows this help message and exits')
args = parser.parse_args()


def banner():
    # Figlet splash shown when entering interactive mode.
    banner_figlet = pyfiglet.figlet_format("MultiDownloader", font="small")
    print(banner_figlet + "Made by TrueMLGPro | v1.0")


def menu():
    # Interactive menu options; printed after most actions complete.
    print("\n" + "1. Download using curl" + "\n" + "2. Download using wget" + "\n" + "3. Mirror website using httrack" + "\n" + "4. Update Multidownloader" + "\n" + "5. Exit" + "\n" + "6. Get args")


def main():
    # Interactive mode only when no CLI arguments were given; otherwise the
    # flag handlers at the bottom of the file run instead.
    if (len(sys.argv) <= 1):
        banner()
        menu()
        while True:
            choice = input("[>>] ")
            if (choice == "1"):
                print("[i] Using curl to download...")
                curl_download(input("[+] Enter URL: "), input("[+] Enter filename: "), input("[+] Verbose? (y/n): "))
                menu()
            elif (choice == "2"):
                print("[i] Using wget to download...")
                wget_download(input("[+] Enter URL: "), input("[+] Enter filename: "), input("[+] Verbose? (y/n): "))
                menu()
            elif (choice == "3"):
                # NOTE(review): unlike options 1/2/4, the menu is not
                # re-printed after this branch.
                print("[i] Using httrack to mirror...")
                httrack_download(input("[+] Enter URL: "), input("[+] Enter project path for mirror: "), input("[+] Enter depth level: "), input("[+] Enter external links depth level: "), input("[+] Enter number of connections: "), input("[+] Verbose? (y/n): "))
            elif (choice == "4"):
                print("[i] Getting latest updates for MultiDownloader..." + "\n")
                subprocess.call('sh scripts/update.sh', shell=True)
                menu()
            elif (choice == "5"):
                print("[!] Exiting...")
                sys.exit()
            elif (choice == "6"):
                print(args)
            elif type(choice) != int:
                # NOTE(review): input() always returns str, so this condition
                # is always true — ANY unrecognized choice exits the program.
                # It was probably meant to re-show the menu instead.
                print("[!!!] Invalid choice. Exiting...")
                sys.exit()


def curl_download(url, filename, verbose=None):
    # NOTE(review): "(unknown)" below looks like a corrupted placeholder —
    # the `filename` parameter is never used in the command (curl's -O takes
    # no argument; `-o {filename}` was probably intended).  Also, `url` is
    # interpolated into a shell=True string: shell-injection risk for
    # untrusted input — prefer subprocess.call([...], shell=False).
    print("[i] Downloading using curl - " + url + " with filename: " + filename)
    if (verbose == "y"):
        subprocess.call(f"curl -L -O (unknown) -v {url}", shell=True)
    elif (verbose == "n"):
        subprocess.call(f"curl -L -O (unknown) {url}", shell=True)
    else:
        subprocess.call(f"curl -L -O (unknown) {url}", shell=True)


def wget_download(url, filename, verbose=None):
    # NOTE(review): same "(unknown)" placeholder and shell-injection concerns
    # as curl_download; `-O {filename}` was probably intended.
    print("[i] Downloading using wget - " + url + " with filename: " + filename + "\n" + ("Verbose: ") + str(verbose))
    if (verbose == "y"):
        subprocess.call(f"wget -O (unknown) -v {url}", shell=True)
    elif (verbose == "n"):
        subprocess.call(f"wget -O (unknown) {url}", shell=True)
    else:
        subprocess.call(f"wget -O (unknown) {url}", shell=True)


def httrack_download(url, path, mirror_depth, ext_links_depth, conn_num, verbose=None):
    # NOTE(review): `verbose` is accepted but unused here; shell=True string
    # interpolation — same injection caveat as above.
    print("[i] Cloning using httrack - " + url + " on path: " + path)
    subprocess.call(f"httrack {url} -O {path} -r{mirror_depth} -%e{ext_links_depth} -c{conn_num}", shell=True)


def launch_updater():
    # Pulls the latest version via the bundled update script.
    print("[i] Getting latest updates for MultiDownloader..." + "\n")
    subprocess.call('sh scripts/update.sh', shell=True)


# Non-interactive flag handlers.
# NOTE(review): the httrack branch passes args.filename where the project
# path is expected (httrack_download's `path` parameter) — confirm intent.
if (args.curl):
    if (args.verbose):
        curl_download(args.URL, args.filename, args.verbose)
    else:
        curl_download(args.URL, args.filename)
if (args.wget):
    if (args.verbose):
        wget_download(args.URL, args.filename, args.verbose)
    else:
        wget_download(args.URL, args.filename)
if (args.httrack):
    if (args.verbose):
        httrack_download(args.URL, args.filename, args.depth, args.ext_depth, args.conn_num, args.verbose)
    else:
        httrack_download(args.URL, args.filename, args.depth, args.ext_depth, args.conn_num)
if (args.update):
    launch_updater()

try:
    main()
except KeyboardInterrupt:
    print("[!] Exiting...")
    sys.exit()
2,065
import os, glob
from tasks import task, TaskError, get, sh, SHResult


def is_yaml_empty(dir):
    """Return True when every .yaml file in *dir* is blank (or none exist)."""
    for path in glob.glob("%s/*.yaml" % dir):
        with open(path) as handle:
            contents = handle.read()
        if contents.strip():
            return False
    return True


class Kubernetes(object):
    """Thin wrapper around ``kubectl apply`` for a namespace/context pair."""

    def __init__(self, namespace=None, context=None, dry_run=False):
        # Fall back to the K8S_NAMESPACE environment variable when no
        # namespace is given explicitly.
        self.namespace = namespace or os.environ.get("K8S_NAMESPACE", None)
        self.context = context
        self.dry_run = dry_run

    @task()
    def resources(self, yaml_dir):
        """List the resource names a dry-run apply of *yaml_dir* would touch."""
        if is_yaml_empty(yaml_dir):
            return []
        command = ["kubectl", "apply", "--dry-run", "-f", yaml_dir, "-o", "name"]
        if self.namespace:
            command.extend(["--namespace", self.namespace])
        return sh(*command).output.split()

    @task()
    def apply(self, yaml_dir):
        """Run ``kubectl apply`` on *yaml_dir* and return the shell result."""
        if is_yaml_empty(yaml_dir):
            # Nothing to apply: report success without invoking kubectl.
            return SHResult("", 0, "")
        command = ["kubectl", "apply", "-f", yaml_dir]
        if self.namespace:
            command.extend(["--namespace", self.namespace])
        if self.dry_run:
            command.append("--dry-run")
        return sh(*command)
1,129
361
from dataclasses import dataclass
from typing import List, Dict, Any


@dataclass
class WebsitePage:
    """A single rendered page of the website."""
    title: str
    body: str
    tags: List[str]
    created_at: str  # stored as a string, not a datetime — formatting is the caller's concern
    url: str
    slug: str
    meta: Dict[str, Any]  # free-form page metadata; keys presumably template-defined — TODO confirm


@dataclass
class WebsiteTag:
    """A tag and the pages carrying it."""
    name: str
    slug: str
    pages: List[WebsitePage]


@dataclass
class WebsiteCollection:
    """A named group of pages plus the tags occurring within them."""
    name: str
    pages: List[WebsitePage]
    tags: List[WebsiteTag]


@dataclass
class Website:
    """The whole site: collections keyed by name, plus site-level metadata."""
    collections: Dict[str, WebsiteCollection]
    meta: Dict[str, Any]


@dataclass
class Templates:
    """Loaded template objects for each page kind."""
    # TODO: Remove Any
    index_template: Any
    page_template: Any
    tag_template: Any
615
202
# Copyright (c) Facebook, Inc. and its affiliates. # All rights reserved. # # This source code is licensed under the license found in the # LICENSE file in the root directory of this source tree. # Inspired from https://github.com/rwightman/pytorch-image-models from contextlib import contextmanager import torch from .states import swap_state class ModelEMA: """ Perform EMA on a model. You can switch to the EMA weights temporarily with the `swap` method. ema = ModelEMA(model) with ema.swap(): # compute valid metrics with averaged model. """ def __init__(self, model, decay=0.9999, unbias=True, device='cpu'): self.decay = decay self.model = model self.state = {} self.count = 0 self.device = device self.unbias = unbias self._init() def _init(self): for key, val in self.model.state_dict().items(): if val.dtype != torch.float32: continue device = self.device or val.device if key not in self.state: self.state[key] = val.detach().to(device, copy=True) def update(self): if self.unbias: self.count = self.count * self.decay + 1 w = 1 / self.count else: w = 1 - self.decay for key, val in self.model.state_dict().items(): if val.dtype != torch.float32: continue device = self.device or val.device self.state[key].mul_(1 - w) self.state[key].add_(val.detach().to(device), alpha=w) @contextmanager def swap(self): with swap_state(self.model, self.state): yield def state_dict(self): return {'state': self.state, 'count': self.count} def load_state_dict(self, state): self.count = state['count'] for k, v in state['state'].items(): self.state[k].copy_(v)
1,952
591
import os
import fnmatch

import deep_learning

# Discover every test module in the current directory, excluding this runner
# itself. Filtering in the comprehension (instead of tests.remove(...))
# avoids a ValueError when 'test_all.py' is absent, e.g. when the script is
# launched from a different working directory.
tests = [name for name in os.listdir(os.getcwd())
         if fnmatch.fnmatch(name, 'test_*.py') and name != 'test_all.py']

# sorted() makes the run order deterministic; os.listdir order is arbitrary.
for test in sorted(tests):
    print('---------- ' + test + ' ----------')
    execfile(test)
250
96
import warnings
import argparse
import os
import logging

import lib.metadata as metadata
import lib.model as model
import lib.text as text
import lib.website as website

warnings.filterwarnings('ignore')

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################

# Folder layout and file-naming conventions shared by every sub-command.
DATA_FOLDER = os.path.join(os.path.curdir, 'data')
MODELS_FOLDER = os.path.join(os.path.curdir, 'output', 'models')
SCRAPPED_PDF_FOLDER = os.path.join(os.path.curdir, 'data', 'scrap', 'pdf')
FASTTEXT_PATH = os.path.join(os.path.curdir, 'fastText', 'fasttext')
SCRAPPED_TEXT_FOLDER = os.path.join(os.path.curdir, 'data', 'scrap', 'text')
PRODUCED_TEXTS_FOLDER = os.path.join(os.path.curdir, 'output', 'texts')
LIB_FOLDER = os.path.join(os.path.curdir, 'lib')

MODEL_FILE_EXTENSION = '.model'
TEXT_FILE_EXTENSION = '.txt'
PDF_FILE_EXTENSION = '.pdf'
POST_URLS_FILENAME = 'post_urls.pickle'
METADATA_FILENAME = 'raw_metadata.csv'

# Each corpus pairs a folder of parsable text files with its metadata sheet.
CORPORA = [
    {
        'name': 'openbook',
        'textFilesFolder': os.path.join(DATA_FOLDER, 'corpora', 'openbook', 'text', 'parsable'),
        'metadataFilename': os.path.join(DATA_FOLDER, 'corpora', 'openbook', 'metadata.tsv')
    },
    {
        'name': 'project_gutenberg',
        'textFilesFolder': os.path.join(DATA_FOLDER, 'corpora', 'project_gutenberg', 'text', 'parsable'),
        'metadataFilename': os.path.join(DATA_FOLDER, 'corpora', 'project_gutenberg', 'metadata.tsv')
    },
]

COMBINED_TEXTS_FILENAME = 'corpus_combined.txt'
COMBINED_MODEL_FILENAME = os.path.join(MODELS_FOLDER, 'corpus_combined_model.bin')
NEIGHBORS_COUNT = 20

#####################################
# Set up required folders and perform any other preliminary tasks
#####################################
# exist_ok=True replaces the racy exists()-then-makedirs() pattern.
os.makedirs(SCRAPPED_PDF_FOLDER, exist_ok=True)
os.makedirs(SCRAPPED_TEXT_FOLDER, exist_ok=True)

########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################


def websiteParser(args):
    """Dispatch the ``website`` sub-command: fetch links, metadata or files."""
    if args.action == 'fetchLinks':
        logger.info('Selected action: Fetch website links')
        links = website.fetchLinks(args.target)
        print(links)
    elif args.action == 'fetchMetadata':
        logger.info('Selected action: Fetch website metadata')
        # BUG FIX: the local was previously named `metadata`, shadowing the
        # lib.metadata module imported at the top of the file.
        fetchedMetadata = website.fetchMetadata(args.target, PDF_FILE_EXTENSION, METADATA_FILENAME)
        print(fetchedMetadata)
    elif args.action == 'fetchFiles':
        logger.info('Selected action: Fetch website files')
        website.fetchFiles(args.target, PDF_FILE_EXTENSION, METADATA_FILENAME, SCRAPPED_PDF_FOLDER)

########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################


def metadataParser(args):
    """Dispatch the ``metadata`` sub-command: print or export corpus metadata."""
    if args.action == 'printStandard':
        combinedMetadata = metadata.getCombined(CORPORA, args.corpus, False)
        print(combinedMetadata)
    elif args.action in ('printEnhanced', 'exportEnhanced'):
        # Both enhanced actions share the same (expensive) combined fetch.
        combinedMetadata = metadata.getCombined(CORPORA, args.corpus, True)
        if args.action == 'printEnhanced':
            print(combinedMetadata)
        else:
            text.exportMetadata(combinedMetadata)
########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################


def textParser(args):
    """Dispatch the ``text`` sub-command: export combined text or extract from PDFs."""
    combinedMetadata = metadata.getCombined(CORPORA, args.corpus, True)
    if args.action == 'exportByPeriod':
        logger.info('Selected action: Export combined text by period')
        text.exportTextByPeriod(combinedMetadata, args.fromYear, args.toYear, args.splitYearsInterval)
    elif args.action == 'extractFromPDF':
        logger.info('Selected action: Extract text from PDF')
        text.extractTextFromPdf(combinedMetadata, SCRAPPED_PDF_FOLDER, PDF_FILE_EXTENSION,
                                SCRAPPED_TEXT_FOLDER, TEXT_FILE_EXTENSION)

########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################


def modelParser(args):
    """Dispatch the ``model`` sub-command: create models or query them."""
    if args.action == 'create':
        logger.info('Selected action: Create models')
        model.createModelsFromTextFiles(args.textsFolder, TEXT_FILE_EXTENSION,
                                        MODELS_FOLDER, MODEL_FILE_EXTENSION)
    elif args.action == 'getNN':
        logger.info('Selected action: Retrieve Nearest Neighbours')
        modelFilename = args.period + MODEL_FILE_EXTENSION
        nearestNeighbours = model.getNeighboursForWord(text.preProcessText(args.word), modelFilename,
                                                       MODELS_FOLDER, FASTTEXT_PATH, NEIGHBORS_COUNT)
        print(nearestNeighbours)
    elif args.action in ('getCD', 'getCS'):
        # The two branches were identical apart from the log message, so they
        # are merged; exportByDistance already keys off args.action.
        measure = 'distance' if args.action == 'getCD' else 'similarity'
        logger.info('Selected action: Get cosine %s', measure)
        model.exportByDistance(args.action, MODEL_FILE_EXTENSION, MODELS_FOLDER,
                               args.fromYear, args.toYear, NEIGHBORS_COUNT, FASTTEXT_PATH)

########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################

parser = argparse.ArgumentParser()
parser.add_argument('--version', action='version', version='1.0.0')
subparsers = parser.add_subparsers(dest='command')

########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################

parser_website = subparsers.add_parser('website')
parser_website.add_argument('--target', default='openbook', choices=['openbook'],
                            help='Target website to scrap data from')
parser_website.add_argument('--action', default='fetchFiles',
                            choices=['fetchLinks', 'fetchMetadata', 'fetchFiles'],
                            help='The action to execute on the selected website')
parser_website.set_defaults(func=websiteParser)

########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################

parser_metadata = subparsers.add_parser('metadata')
parser_metadata.add_argument('--corpus', default='all', choices=['all', 'openbook', 'project_gutenberg'],
                             help='The name of the target corpus to work with')
parser_metadata.add_argument('--action', default='printStandard',
                             choices=['printStandard', 'printEnhanced', 'exportEnhanced'],
                             help='Action to perform against the metadata of the selected text corpus')
parser_metadata.add_argument('--fromYear', default=1800, type=int,
                             help='The target starting year to extract data from')
parser_metadata.add_argument('--toYear', default=1900, type=int,
                             help='The target ending year to extract data from')
parser_metadata.add_argument('--splitYearsInterval', default=10, type=int,
                             help='The interval to split the years with and export the extracted data')
parser_metadata.set_defaults(func=metadataParser)

########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################

parser_text = subparsers.add_parser('text')
parser_text.add_argument('--corpus', default='all', choices=['all', 'openbook', 'project_gutenberg'],
                         help='The name of the target corpus to work with')
parser_text.add_argument('--action', default='exportByPeriod', choices=['exportByPeriod', 'extractFromPDF'],
                         help='Action to perform against the selected text corpus')
parser_text.add_argument('--fromYear', default=1800, type=int,
                         help='The target starting year to extract data from')
parser_text.add_argument('--toYear', default=1900, type=int,
                         help='The target ending year to extract data from')
parser_text.add_argument('--splitYearsInterval', default=10, type=int,
                         help='The interval to split the years with and export the extracted data')
parser_text.set_defaults(func=textParser)

########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################

parser_model = subparsers.add_parser('model')
parser_model.add_argument('--action', default='getNN', choices=['create', 'getNN', 'getCS', 'getCD'],
                          help='Action to perform against the selected model')
parser_model.add_argument('--word', help='Target word to get nearest neighbours for')
parser_model.add_argument('--period', help='The target period to load the model from')
parser_model.add_argument('--textsFolder', default='./output/texts',
                          help='The target folder that contains the texts files')
parser_model.add_argument('--fromYear', default='1800',
                          help='the target starting year to create the model for')
parser_model.add_argument('--toYear', default='1900',
                          help='the target ending year to create the model for')
parser_model.set_defaults(func=modelParser)

########################################################################################################################
# ----------------------------------------------------------------------------------------------------------------------
########################################################################################################################

if __name__ == '__main__':
    args = parser.parse_args()
    # BUG FIX: on Python 3 subparsers are optional, so invoking the tool with
    # no sub-command used to crash with AttributeError on args.func.
    if hasattr(args, 'func'):
        args.func(args)
    else:
        parser.print_help()
12,328
2,841
from abc import ABC, abstractmethod
from collections.abc import MutableSequence
from datetime import datetime
from typing import NamedTuple, Any, Optional, Iterator

from .encoders import ENCODERS, string_encode, default_encoder, datetime_encode, blob_encode


class Element(NamedTuple):
    """A single OSC argument: a value paired with its OSC type tag."""

    value: Any
    tag: str

    @classmethod
    def from_pair(cls, value: Any, tag: Optional[str]=None) -> 'Element':
        """Coerce *value* into an Element.

        Accepts an existing Element, a ``(value, tag)`` tuple, or a bare
        value (in which case the tag is inferred via ``default_encoder``).
        A separate *tag* may only be supplied with a bare value.
        """
        if isinstance(value, Element):
            assert tag is None
            return value
        if isinstance(value, tuple):
            assert tag is None
            value, tag = value
        return cls(value, tag or default_encoder(value))

    def encode(self) -> bytes:
        """Serialize the value using the encoder registered for its tag."""
        return ENCODERS[self.tag](self.value)

    def __repr__(self):
        return f'Element({self.value!r}, tag={self.tag!r})'


class AbstractMessage(ABC, MutableSequence):
    """Common list-like behaviour shared by Message and Bundle."""

    address: str
    _items: list

    def clear(self):
        self._items.clear()

    def __len__(self):
        return len(self._items)

    def __getitem__(self, index):
        return self._items[index]

    def __delitem__(self, index):
        del self._items[index]

    @abstractmethod
    def _build_message(self) -> Iterator[bytes]:
        # BUG FIX: this previously read `assert NotImplementedError`, which
        # asserts the truthiness of the exception class itself and therefore
        # never fires; raising is the intended behaviour.
        raise NotImplementedError  # no coverage: abstract method

    def __bytes__(self):
        """Serialize the whole message by joining its wire-format chunks."""
        return b''.join(self._build_message())

    def __eq__(self, other):
        if type(other) is not type(self):
            return False
        return other.address == self.address and other._items == self._items


class Message(AbstractMessage):
    """An OSC message: an address pattern plus a sequence of Elements."""

    def __init__(self, address: str, *args: Any) -> None:
        self.address = address
        self._items = []
        self.extend(args)

    def append(self, value: Any, *, tag: Optional[str]=None) -> None:
        self._items.append(Element.from_pair(value, tag))

    def insert(self, index: int, value: Any, *, tag: Optional[str]=None) -> None:
        self._items.insert(index, Element.from_pair(value, tag))

    def __setitem__(self, index, value):
        self._items[index] = Element.from_pair(value)

    def values(self) -> Iterator[Any]:
        """Yield the raw argument values, without their tags."""
        for item in self._items:
            yield item.value

    def tags(self) -> Iterator[str]:
        """Yield the OSC type tag of each argument."""
        for item in self._items:
            yield item.tag

    def _build_message(self) -> Iterator[bytes]:
        # Wire format: padded address string, then the ','-prefixed type-tag
        # string, then each encoded argument.
        yield string_encode(self.address)
        yield string_encode(',' + ''.join(self.tags()))
        for item in self._items:
            yield item.encode()

    def __repr__(self):
        return f'Message({self.address!r}, {repr(self._items)[1:-1]})'


class Bundle(AbstractMessage):
    """An OSC bundle: a timetag plus nested Messages/Bundles.

    Nested bundles must not be scheduled earlier than their parent.
    """

    timetag: datetime

    def __init__(self, timetag: datetime, *args: Message) -> None:
        self.timetag = timetag
        self._items = []
        self.extend(args)

    @property
    def address(self):
        # Bundles always use the reserved '#bundle' address.
        return '#bundle'

    def valid_to_insert(self, item: AbstractMessage):
        """True when *item* may legally be nested inside this bundle."""
        if not isinstance(item, AbstractMessage):
            return False
        if isinstance(item, Bundle):
            return item.timetag >= self.timetag
        return True

    # NOTE(review): these asserts are input validation and vanish under -O;
    # kept as-is so callers relying on AssertionError are unaffected.
    def append(self, item: AbstractMessage) -> None:
        assert self.valid_to_insert(item)
        self._items.append(item)

    def insert(self, index: int, value: AbstractMessage) -> None:
        assert self.valid_to_insert(value)
        self._items.insert(index, value)

    def __setitem__(self, index, value):
        assert self.valid_to_insert(value)
        self._items[index] = value

    def _build_message(self) -> Iterator[bytes]:
        # Wire format: '#bundle', the NTP timetag, then each item as a blob.
        yield string_encode(self.address)
        yield datetime_encode(self.timetag)
        for item in self._items:
            yield blob_encode(bytes(item))

    def __eq__(self, other):
        return super().__eq__(other) and other.timetag == self.timetag

    def __repr__(self):
        return f'Bundle({self.timetag!r}, {repr(self._items)[1:-1]})'
3,887
1,163
from django.shortcuts import get_object_or_404, render
from django.http import HttpResponse
from django.template import RequestContext, loader
from Organizer.models import Department
from Organizer.models import Advisor
from Organizer.models import Student
from Organizer.models import Course
from Organizer.models import Degree
from Organizer.models import Certificate
from Organizer.models import Degree_Core_Course_Structure
from Organizer.models import Degree_Elective_Course_Structure
from Organizer.models import Certificate_Course_Structure


def _department_and_advisor(department_id, advisor_id):
    """Fetch a Department/Advisor pair by primary key, raising Http404 on a miss.

    Shared by the several views that render both objects together.
    """
    department = get_object_or_404(Department, pk=department_id)
    advisor = get_object_or_404(Advisor, pk=advisor_id)
    return department, advisor


def index(request):
    """List every department."""
    department_list = Department.objects.all()
    # Modernized: render() replaces the loader + RequestContext +
    # template.render(context) pattern, which was deprecated in Django 1.8
    # and removed in 1.10. The rendered output is identical.
    return render(request, 'Organizer/index.html',
                  {'department_list': department_list})


def index2(request, department_id):
    """Landing page for a single department."""
    department = get_object_or_404(Department, pk=department_id)
    return render(request, 'Organizer/index2.html', {'department': department})


def advisorinfo(request, department_id, advisor_id):
    """Show details for one advisor within a department."""
    department, advisor = _department_and_advisor(department_id, advisor_id)
    return render(request, 'Organizer/advisorinfo.html',
                  {'department': department, 'advisor': advisor})


def detail(request, department_id, advisor_id):
    """Detail page for a department/advisor pair."""
    department, advisor = _department_and_advisor(department_id, advisor_id)
    return render(request, 'Organizer/detail.html',
                  {'department': department, 'advisor': advisor})


def advisordegree(request, department_id, advisor_id):
    """Degrees associated with one advisor of a department."""
    department, advisor = _department_and_advisor(department_id, advisor_id)
    return render(request, 'Organizer/advisordegree.html',
                  {'department': department, 'advisor': advisor})


def degree(request, department_id, degree_id):
    """Show one degree offered by a department."""
    department = get_object_or_404(Department, pk=department_id)
    degree = get_object_or_404(Degree, pk=degree_id)
    return render(request, 'Organizer/degree.html',
                  {'department': department, 'degree': degree})


def coursedegree(request, degree_id, degree_core_course_structure_id):
    """Show the core-course structure entry for a degree."""
    core_course_structure = get_object_or_404(
        Degree_Core_Course_Structure, pk=degree_core_course_structure_id)
    return render(request, 'Organizer/coursedegree.html',
                  {'core_course_structure': core_course_structure})


def certificate(request, department_id, certificate_id):
    """Show one certificate offered by a department."""
    department = get_object_or_404(Department, pk=department_id)
    certificate = get_object_or_404(Certificate, pk=certificate_id)
    return render(request, 'Organizer/certificate.html',
                  {'department': department, 'certificate': certificate})

# Create your views here.
2,716
867
import numpy as np

# Compare a computed matrix product against a reference output file and
# report / dump the element-wise difference.
matrixA = np.loadtxt('./mat-A-32.txt')
matrixB = np.loadtxt('./mat-B-32.txt')
checking = np.loadtxt('./out32.txt')

result = np.dot(matrixA, matrixB)
diff = result - checking

print(checking)
print(result)
print(diff)

# BUG FIX: the original called np.absolute(diff) and discarded the result,
# then printed np.max(diff) — the signed maximum. The intent (given the
# discarded abs) is clearly the maximum absolute error.
print(np.max(np.absolute(diff)))

# Write the raw (signed) difference matrix, preserving the original
# "%.6f " per-value format including the trailing space on each row.
rows, cols = diff.shape
with open('./out2048-diff.txt', 'w') as f:
    for i in range(rows):
        f.write(''.join('%.6f ' % diff[i, j] for j in range(cols)))
        f.write('\n')
462
186
import re
from collections import defaultdict
from util import *  # NOTE(review): presumably provides start_profiling/end_profiling -- confirm

# Hex-grid tile flipping puzzle (looks like Advent of Code 2020, day 24).
# input1 is the small worked example, input2 the full puzzle input; each line
# is a run of hex directions (e, w, ne, nw, se, sw) leading to one tile.
input1="""sesenwnenenewseeswwswswwnenewsewsw
neeenesenwnwwswnenewnwwsewnenwseswesw
seswneswswsenwwnwse
nwnwneseeswswnenewneswwnewseswneseene
swweswneswnenwsewnwneneseenw
eesenwseswswnenwswnwnwsewwnwsene
sewnenenenesenwsewnenwwwse
wenwwweseeeweswwwnwwe
wsweesenenewnwwnwsenewsenwwsesesenwne
neeswseenwwswnwswswnw
nenwswwsewswnenenewsenwsenwnesesenew
enewnwewneswsewnwswenweswnenwsenwsw
sweneswneswneneenwnewenewwneswswnese
swwesenesewenwneswnwwneseswwne
enesenwswwswneneswsenwnewswseenwsese
wnwnesenesenenwwnenwsewesewsesesew
nenewswnwewswnenesenwnesewesw
eneswnwswnwsenenwnwnwwseeswneewsenese
neswnwewnwnwseenwseesewsenwsweewe
wseweeenwnesenwwwswnew"""
input2="""nwesesesesewswenwswsesesenesenwnesenwse
nwnenwnwnenenwnenwnenewnwenenwwnenesesenw
neneswnenwenwseeneweswsenesewnenenee
senwewnwnenenwnwnwwesenenwswnenwwnwnw
swseseeseswseseeswseneseswsesesenwsesew
weeneeneswsewnwnesweseneswenwneswne
swseseswswneswswsesewswswseswse
swswseeswswwswnweenewswswesenwswwse
swswswswsweswseeswseseseseeswwsewnw
eneeseenenweeneenenee
eesesenwsesweeseeese
neenenenewnenenenenenwnenenenwnwne
nenenwnwnwnenwnenwnwswnenesenenwnw
neneweweneneenenenenesewneeneenee
nwweswswewneenenwneneneeswneneneswne
eeseeneseesesesewneswseeeseese
swseswsenwswnewswseswswswseswswse
senenenwnwnenwnwnwewnwwnwswnenenwnwnwenw
senwnenenwnwnenwnwwnwswnwnwnenwnwenenwnw
neweseneswswnwswnwswseneseenwseeswee
esesweeneeneswsenwsweeeeseeseee
nenenwewseswseseswsewseneewwwnww
neeswswenwnewnwnwwswwwneswswnwwwnwnw
wwweswwwwwwswwwwww
eeseenweenwseneeeeeeweeenee
eeeeesenenenwesweeeswenwswseswee
neswenenesenenenewnwenesweneneeswne
swswswenwswwswswswswswwwswweswnwsww
seseswseseseeswneeeeesewesesenenw
swwswwwswwwswwswsweneswwwsesww
eneeswenweewenwseeeseeeseswwnw
swnenwswswswseseswswswwseswswswswswswsw
seeseseeseeeesesesenwsenwseweseese
swswswswnwnesweswewseseneswswwnenwsw
eewnenweneswwseeeeneneeeeeene
esenweswwnwnwnwnwnwnwnwnwnwnwnwnwwnw
seeeeeseeneeswweeeeeeneenw
weneswswenenenenwneswneswneneneesene
wnwsesesenwnwnenwnwnesweneenwseswwsw
sewsesesesesesesesewsenesesesesesenesese
swswswwnwswswwweswswswnewwseswsww
nwneneswnwnwnenenenwsenenwnwnenwwnene
neenwenenwsweseeswsesweeseseswneswene
eneeenewewneeneeneweneeesee
nwnwwwnwnewsenwsenw
sesesewswswwneneneeseeewswnwswnwsenw
sewwswwnwwewwwneswswswwwneew
nwsenwwnenenenesenwsenenenenenenenenenwne
sewsewnesenwsenesenwsesweswswsesenenw
eseeeeeeeenweeeeseesee
eseenwseesweswenweseenweeeeswee
neseseseswwneswsesesesewseseseswse
sesweewseseeesenwseeeseeswsweneenw
wnwneseeeseseeseenwwenwseseesese
enwneswnwneneneneneneneenenwnenwwnene
wnwneneneneneneewnwwnenweneesw
nwnenenenwnenenwenenwneneseswnewnenene
nwwsenwnwnenwnenwnenwneneneenwnwsenenww
wwwsewwnwwwnewwneswwewww
swseswwswseswswswswsenwseneeneewsenwsw
nwnesenenwenwnwnwnenwnwnwneswnwnwnenene
seeeweswnenenwsenewenenwewneseee
nwwnwneseswsesweenweswsese
seeseseswsesenesesesenesewseseseeese
swwswneswwnwswneswwewsesewswswsww
seswneswswseswswseseswnwswswswswseswew
wwwwwswwewseswwwwwswwnenesw
nwnwwsenwnenwnwnwnwneenwnwnwnwnenwnww
nwnwneswwswseswswnwnwenwnenesenenenwswenw
neneneneeneeseneneneeneneneswnwnenee
neeeswswnweenwsweseneeseswnwnewe
neswesenwneneneenenweeneene
swseeneewnwseeeenwesenweseseeswnw
eweneeneseeneneneneeeseeeneewene
eeneneewneneeeswneneeneeenwsenenew
nwnwswsweswswnenwswseswswswsweswswnesw
neeeeseeeswewenenwswnene
nwenwnwenwswnwnwwnwswnwnwneswnwneswswese
neswseweeneneeeseenwwnenesenenwnee
wswwseewwwewweewwswnewwwsww
swswswwswswwwswswswswswnweeswswswsw
enenenenenenenenenenwnenenwsenenenewnw
seseswseseseeswseneseseseseseneseesee
neewneeweeeeeneese
enewneseeweneneeneneewenesenene
enwswneeswnwswsewenwwnesewneswseswe
senwswnwnenwnwwnenwnwswnwnwnwnwnwnwnwnwe
sewswneswswswseseseseswswseneswseswswsw
nwnenwwsewneneswnweenwnwnenwnwnwsenene
eswwewswswnwswsw
wwwwwwwwswsenw
nwnwnwnwnwnwnwwwnwnwwnwnwwnwnenwsese
seswnewnenwnweswnwsesenwseeseesesewnw
neneseneeswneneneswwsenwnw
nwnwnwnewnwnesenwnwnenwnwswnwneseenwnw
wwneneeneswneneeewwnesesenenenese
eseeswswsesenwneeewswnenwnwnewnw
nwnesesenewwwswnwewsenwwsewnwwww
eneeenewneneeneneneneswnenwewnesee
neneenewenenenenew
nwsenweewnwwwwenwnwswnwnenwswnwnwse
seseeeeeeswwsenwseeseeseseeese
wwwnwwwwwwwewewwwwwww
swswseseneseswswsewseseseswenwneseseswsw
seswsesesenweseeseswwseseneswsesesesese
swswseswswswswswswswswswnwswswseswsw
nweeneewneeseseesenwsenwseweswnwnw
eeeeeseseeeeewsweenweeeese
nwesesesesenenewwwneeeeweeee
eenenwneneeswewneeeeneenee
seseseswseswseswsenwseeswsesenwseseswne
eseseseswseeneseeseseewnwswsesenese
nwnenwnwseewsenwenewsenwsweswswenenee
wsenwnwwnwwnwneeenwnwne
seswswswswseneneseseswswswswswswswwnesww
wswseswnwsweswseseswesesenwswseseseswsw
sewweseseneseneswsesesenesesesesesese
nwwnwneewwewwwwnwwwwwswwwswsw
nwsweswwneeeeeenwseeenwnwswswesw
wwwwwwsenwwnewwnwwwenwwwew
swneneneswneneswenenwnwnewnwnwsenenenwnene
eswnweseweseeseenwsene
esewewneneneneseneneneneneneneneewwne
eneeeenesenwnenwseneneenenenweesw
nwnwnwseenwnweswnwnwnwnenewnwnwnwswenw
neenenesewsewneeswseseenwweeeesw
eewneeeneeesweeeeeeenenee
nenenenenenwwsenenenenenenenwswneneneene
nwwwwswwwnewwwwwwnwwww
enwswseswenesenwenwseseeswesesenewse
swesweneeenwenenweeneneesweeee
wnwnwnesenwnwwsenewswnwwnwsenwseneswse
neseswseneenwsweneswwnwsenwnesewsenwsw
swswswseseneswweenwswswsesenewseswnesenw
weseseeseseswseseseneeeesesewnese
seeneswnewsesewnwwwwnw
sewseeseeeeesewneeese
seseseenwewsesewneseeeeesweenw
ewswwwswswwswwwswswswsw
nwnwwwsenewswnwwwenwsenwnenwnwnwnw
esenwseseweeneneneswwsewsesewneese
wnwnwswnenwnewwenenesewnenenwnesenesene
wnwnenwwnwnwnwnwnwnwswnwnwnwnwwnenwnwse
eewnesenwsesesweeneeewesweeesee
swenwesweseenwseeseseseenenwesee
nwnwneswnwenwwnwnwnenenwnwnweneswsenw
swwwewwnewseewswwswnewwwww
swwwswewwwsewwsewnwwswwwwnww
wsenwewnwwsewwwnwsewnwnwwsenwnwnw
neswswnenenwneneneenewneneneswnwsw
wwwswwwwnwewwwsewwwwwwwnw
seseseswseswswnweswwswswswnwsesesesesee
nwneneenesenenenwwnwneneswnwnwnenenenwnw
neeenwneeneesweenweeeesw
eweeeeeeeeeeeeneswneeeswe
wseewesesesesesesesesewseenesesesesee
eeeeeseeeeweeee
wsewneseeewseswnewnenenwnenewnesenw
wswenwnwnwnwwnwnwnwwewnwwnwwnww
wwwwwwnewwwsewwwswwneseww
wwwwewwwwwwnw
nwnwnenenwnwnwseenwwsenwenwnwwwnw
seeswswsewnewnwwsweswwswnwswswnwnw
eweseseneeseese
sweeeeeswenenesweseeeeseneee
wnwswewnenewsewwnewwwswwsww
nweenwwwneswnwsenwsewewnwwnwnwww
eswneeneneneneeeneeneeneneeeswne
eeseseneeeeeeeweeneeeswee
enweeseneswnenwnwnwswswswnw
swseseseseseeseeneeese
swswswswswswswswwseswswswswswseeneswnwsesw
senwseenwwsweswseseswse
wnwwnwwnwnwnwwsenwnwnwnewnwnwnwsenwnwse
seseenwesesenwseseseseseseseseseseswswsw
nesewnenenwnwneneswneneswsenwnenwnenw
eswnwweenweseeneeswneeeeeee
seesweneewenenweswseseweseneswsenwse
wsesesesenesesesesesesesesesesese
neneneneenenenenenewneneneneneneswne
eseseeseeseeenweeeswseesenwse
neneseeeeweseewwseseeenweseee
senewnwwwswwewnwwnwwsewenewse
seseseeweseseseneseeeseweseseseee
wswseswseenwwswneswswnwswsww
nwnwnwnwwswnwnwnwnwenenwenwnwnwnwne
sewnwsenweswswswneenwwsenewnwnewnwnw
swseeseseswseneswwsesewwwswnenesesese
eeeneneesweeeeeeneee
nwnweswnwnwenwnwnwnwnwswnwsenwnwnwsenw
swswswswnwenwswswswswweswswesw
nenwnwneswnwneswnenwnenenenwnwnenenenene
wneneneneneneneneseswneneesenewnenwe
enwsenenweneeswswsesesweseseseseswsese
swwseseseeseewnesewnewswseseswseswse
enwneeneneswneneneenenenene
eswweeeeseeneeeeeesesenweee
nenwsenwnwnenwneswnwnwnwnwenwnwnenwnenw
esenwswwnwnwenwsenwnwseseenwswnwwew
nwswnewwwnwswnwwnenwnenwswnwwwwnwnw
eeweseeseseeeeeeesesesewese
nwseeeeenwseeseeeeseeeseeeew
senenwswnweswnwwwwwnwnenwwseswwnwe
ewswnwnewewsenwswseneswswswswseswsw
nwseswnenwwenwwswsesenwnwneewwnwse
seeneseseneweseseseeseseswseseseese
wwsewwswswwswswneswweswswswswwsww
swswswsweswswswswswnwwswswsesesweswsw
seeeeweweeweeeseneenewene
nwseseesewseesewnwneewseesesenenwee
swewwnwnwswswwwweswswswswswneswe
eeneeeneneneneeweneneenesenenenew
swsewwwsewnewwwnwwwwwwnewww
seneswwweswswswwsweswswswswswwswswwnw
seneseseseswsesesewsesesesesw
seswswseswswwewswswswswseswswswswnenw
eseseseesesenenweseesweseeewseseese
swesenenwswnesesenwwwnwse
nenewswnenenenenesenenenenenenenenenesw
senwneseneeneenenw
wseseseeseseeseseseeseseseeenesewe
neeeneweenenee
nwsenenwnenwneeneeneneneneeswnesene
nwswseeneseenwswnweseneswswnweesesese
nwseseseswsesesesewseeesesesese
eswenesewnenwnwwwnwnwnwneswesenwswsene
sewswwswwswswwswwswswwwneneswnwsww
nenwswenenenenesesesenwwneswnenenewew
senenwswseswsewwsewseseseneeswneswswsw
nwwwnwswswseswseswswwnwweswwwew
eswswswswseseseswswseswswnwswsweswwswse
nenesenenewnenenwnenenenesenenenenenenesw
wnwnwnwwwwwewwwswwwnewnwsenwsw
enwnewnwneswewnewwswwneeseswesew
nwnwnenenwsenwewnenwnenenenwnenwnwnwnwsw
nesenewneenwnwnwnwnwneneneswneswnewnee
ewenewswwsewenwwsenenwwswnwsenwnw
nesenwsenwseseeswswnwese
wnwsenwnwsenwnwswwnwwnenwnwseswnwnwne
newnenwneneenwesenesenenwseseweswswe
senwsesesenwsweseswswsenwnesesesww
sweswseswswwseseseswswsesesenwneseseswnw
nwwenwnwnwsenwnweswnwswnwwswnwnwnenw
enesenenwsewesewsweeneeeeweeee
nwnwnwnwwnwnwnwnwnwwwewnwenwnwnwnw
wseswseseswneeseeseenwseenwseswnwse
seesenwnwwwewseswswnwnwnwe
sewwwwwwwweswwswwwwnewww
neneeswnenwneneswsenweneswneseswseeww
nwnwswenwnwnwnwnwneseswnwsweneswenwnwsw
wwwswwneswwwwwnewswww
senwnwnenwenenenenwnwnewswnwnwnwesw
wswwwwnwswwswwewswnweswswwswew
swseseseswseseswswseseswsesesesenesenwse
nwwnenenenwswsweneenenenenesweneeene
wwnwswwswwswewwwsewwwswswswe
ewwnwwnenesweseenwswswseeswwneenww
eseswsesenwwnwseseseseseseseseswesesesw
wwwwwsewwwwnenwsewnwnenwwwww
nenenenwneneeneswnenenenenwwnenwneenw
seeeeeeeswwseeeeeweneeeenw
senweeneeneswwneeneesweeeswenenesw
nwwnenwnwnwenwnenwnwnwswswnwnwnwenwse
sesenwseseseeseseseseseseese
swswneswsewseseswseswseswseswseeswsewse
seseeseseseeeesesesesewee
seneneeswnenenenenenenenwsenwnenenwnenenew
eeeneeneneweeewneeneneneeseene
swneneneneeneneneenenenenwneenewnene
seswnwseswnwnewneswswnesenewswwwswswsw
enweseeweewewesweeenw
wwwswewneswwwsesewwwnwwswswww
nwwwswwneswwsewswse
swwneseswswnewwswnewwwwse
nwwnwwwnwewwnwswwwwnwwnwwewnw
seseseswneseswseeseswswseswseswwesesenwsw
nweseseseseseseswewseseswsesesesesesese
seseswesesenwseenwsenwseseseseseseseswse
swsenwwnwnwnwnwneswewnenwnweweeswne
eeeenweeeweeeeeeeeesee
nwwwwswnewswewwenwnwwwewswwnw
nwnwnwnwwnwswenwnwnwnwnwnwnwnwnwnwnw
nwwsewewnesewswewnwswwnwwneewse
wnwnwwnwwwnwwnwnewwswwswwwne
enenesweeeeeeneenweeneeneeesw
neeeenweneeneneneneeeeeswsweee
sweswwewseswwwseneneswsewnwwsww
neesesenweweseneseeesesewseeseenwe
sweeeneeeswnene
nwwwswwnwseeweswwwnw
ewswswswwwwneswswnwswswwswswswwww
neewseenwneeswseeneweneweenwesw
seneweseeseseseewseseswweeeese
eneswswswnwswwwswswswswswswswswneswsw
sewswswswswnwswseswswswswnewswwwsww
wwsesenenwnewwwsenw
swnewweswwenenwneseenenenenenenewne
nwwnwnwnwswnwnenwwwnwnwnw
sesesesenwseswnwseseseseseeseswswswswse
swnwswenenwswswneweswwsewsw
nwnwnwnwnenenewewnenenenesenwnwswnwnw
seseswsewswswsenwseseesesenenwesenesww
neneeneeseeeewwwneeenweeeswe
enenwewwswswsenewswsenwewseeneenee
nwwenwswwwnwwnwnwwnwwwwewenwww
sweswnwswesenwsweswseswswnwnwswsweswnwsw
seseseneseseweseewseseswsesewsesese
eweneeneeeeeeseeeeeeeeesw
ewwwnwwwwswwwswswwwwwswwnesw
swnwnenwnwnwwnwnewnwswnwenwnwnwsenwnw
swneeswseseneswwnesesenwsesesenwswnww
seswneseseesesewseseene
wnwwsewnenwnwwsesesenwnwsesesewwwne
eswswwwswseswewwswwswnwswswwwnw
enwnwnenenwnwnwswnwnenwnenwnwswnwnw
newsesenwnenenwnwenwnewnwwnwnwswnwnwnwnw
swneneenesenwwsenwnewnesesenenenwnenw
neneenwnwswswswweeeeeeeenenee
swswseswnwswswswenwswsesenwswseswnewswse
newnenwnenewsenewnesenewneesenwnene
neseseseseswsesewseswneseseswsesesesese
ewseeeeseeesesesesenwseeeswse
wseeeseeeseseesewseenwswseneeee
neseneseswswsesenesewswsesenewsesesenwse
swneneneneneenwneeswneneneeneneneswne
eenewnenenesweeenenenenene
nenenesenenenwneeneeneewseeeene
nwsenenwnenewnwnwnwnwnwnenwnenwnwsenenenw
swnwenwnwwnwnwnwswswewnwnwnwnenwnwnwnww
wwswnwwewwwwsewnwwwww
wseseswnenewwwwwswwwsw
swswwswswwnewswwwseeswwwswwswsw
enesesewewsesweeeseeseseseseesese
nwwnewenwnenwnwnweneswnwneswnenwneenw
eweeeeeseeeweweneeeesesese
wswswswswswswnwseswneswswswswwwswsww
swsenwnwwnwseseseswweeneenwnesenwnee
neeswneneneneewneneneneneneneneneene
nwnewnwnwnwwnwnenwwswnwnwnwwsenwsenwnenw
nwnwnwnwnwnwnwnwsenwnwnwnw
nwnewnwswwwswneewsewnewwswwwww
eeswwesesesenwseeeeeeseeenwe
nenwneswswenwsweneeswneneneneneeswnenw
neneseeneneesewneswnenenwnw
nwnwwwenwenwnwnwwnwnwnwnwwnwswnwnwnw
seseseswswseswsenwswnenwseswwweseswnese
wwewwwseswwswsenwwnwweswnwnee
neenwnwseeneewwneneenenenesewseenese
nwwswswenesewwwwswwswenwneswnewse
seswwnenwnwnenwwneeswsewewsewsesw
eseeseseeeesweneee
eenesesweeeeenwswnwneenenwswnenene
seneseseeeseeswswseswsenwsenwnwsesesese
seseseseseeeeseseeenw
wnwwwweewwwwwswnewwswwww
swswnewnwswseswswswswswwwswswswswwsw
nwnwenwnwnwnwewnwnwnwnwnwnwnwswnwnwnw
seswswswseseseswseswswnwneseneswswseswne
swseswseswswswsewseswsesenenwsesenwseseese
swswsweseswneneswwnewswswswswswswnew
nwwnwnwnwswnwnwnwnwsenwnwneeeswnwnwnenw
wneneneneseneneew
weeeseweneenewseesesewesesesese
swnweswswseeseswswswseswswswswnwseswew
eneseenweeeswneeeenweswneeee
neneenewneweneneeneseneneswneenenwnene
nesenewneneneeseswneneeneenenwenewnw
neneneweeneneewneeneeneneneene
senenewnesewwwwswswneswwneswsenwse
eenweneseeswnenweswnwsee
nwnwnwsenwnwnwnenwneneneswnwew
sweswneseenwesweeswnwewseneneneeenw
swnwneswwswseswswswswseswseseswswsesw
nwnwnwnwnewnwnwnwsewewsenw
swseseseseswseswswswsenwse
nenwnwesenwnwsenwwnwsenwneswneeneswnw
ewwwewnwwsewnw
nwwnwwnwnwswwnwnwnwnwwnenw
wneeneneneenwswswwneneeneneenesene
nenwnenwnenwnwnenwnwneesenwnenenenwwnew
eenewnenwswwseeenwsenwweneneswne
nwnwnenenwnwwnwnwnwnwswnwswsenw
eeseneenenenenwwseswneneewneenenenee
nenwnenewnwswnenewnwseswneenwnenesene
wnwswwnwwenwenwnwwnwswnwnwnwnwnwnww
neeneneenesewneseenenewnwenwswenese
nwewneeswnwnwseseneswneneswnenwswnwnw
nwnesesewseswsewsewnenenesesenewsesese
seneseswswswswswsenwseseseswseswswswsee
nwwswnwsewwwnewsewnwewesewnwnwnw
eswwenwnenwnwnenwswnwnwnwnwsenenwwne
senweesenwwsewseeneeeenesewseee
nenwnenwnwnwswnwswnenwnenweeswnenwnenene
wnwnwwewsewnewwswwwnwnwwnwwwww
nwwnwswnwnwwnwnenwnwswseewnwnwnwnwe
nenwnenenenwnwnwnenwnenenweenenenwwnesw
wnwnenweseneswwswnwneeseswnenenwswwe
eeeweeeeeeweeeseeee
wwwswwnwwnweswweneswnenwwwnwww
swswwswswseswswswsweseswswseneswswse
seswnwewswwwwswwsewnwneswswewww
seswwwwnewwnwwewwnewwwwww
seneswnenwsweewnwnwenwswswswnesenew
eswnweeesweeseneeeeeeeeeee
wnwswswswswwswswswswwswwwewewsww
nwnwnenwnwnweeswsenweesewswswnwnwswnw
seswwnwsewwwwswsenenw
wwswwwwswswseswwwwewswwswnww
seswseseswseseswseseseswswseseswswnw
swnesewwnwwneswne
wswswsesweswswswseswwswswsweswswnwnwe
seenwsenweseseseesesewseseseseesese
esenwnwnwneswnwnenwwsenwnenwwsenenww
eeeeeneseswseseeseenwseeesw
swseseeneseneseeswwnwese
eeeenweswseeeesee
seseswweenwswnewwwwnew
wswswswnwswswwswswswseneneswseseseeswse
nwswwnwsewewswwswwwenenwwnwww
seneseweseseeneesesesesesenweseseswse
nwnenwnwnwnwsewwenwnenwsenesenwnwnenwne
senwenewsesesewnwwseeweseswsesesenwe
wenwewnwnwnwwnwewnwwwnwwwwnw
seeeeseseeseseesenwseenweesesese
swswswwnwswwwwswnewswswwwswswwew
nenwnenenwnwnwsenenwneneneswnwnwnwsesene
wnewswsenesewswwwswnwwswswnewwseew
wsesenwenwseswsenwwseeseenesenenwwnw
senewewswwswwewwwwnewswwwswsw
swneenwseweseeenwweseseeesenwnwse"""

# NOTE: the answers below are computed from input1 (the example input);
# switch to input2 for the real puzzle data.
lines=input1.split('\n')
tiles=defaultdict(lambda:False)# false = white

def get_neighbours(x,y):
    # The grid uses "doubled" coordinates: e/w move x by 2, while the four
    # diagonals (ne/nw/se/sw) move x and y by 1 each -- see the direction
    # handling in the main loop below.  A hex therefore has these six
    # neighbours.
    yield x+1,y+1
    yield x+1,y-1
    yield x-1,y+1
    yield x-1,y-1
    yield x+2,y
    yield x-2,y

def day():
    # One simultaneous day-update of the tile automaton: a black tile stays
    # black with 1 or 2 black neighbours, a white tile turns black with
    # exactly 2 black neighbours; everything else becomes/stays white.
    global tiles
    tiles2=defaultdict(lambda:False)
    for x,y in list(tiles.keys()):
        black_tiles=sum(tiles[nx,ny] for nx,ny in get_neighbours(x,y))
        if tiles[x,y]: # black
            if black_tiles==1 or black_tiles==2:
                tiles2[x,y]=True
        else:
            if black_tiles==2:
                tiles2[x,y]=True
        # calculate for neighbour tiles too because all tiles needs to be covered
        # (an untracked white tile can only turn black if it is adjacent to a
        # tracked tile, so iterating the neighbours covers every candidate;
        # note the loop deliberately rebinds x,y to each neighbour)
        for x,y in get_neighbours(x,y):
            black_tiles=sum(tiles[nx,ny] for nx,ny in get_neighbours(x,y))
            if tiles[x,y]: # black
                if black_tiles==1 or black_tiles==2:
                    tiles2[x,y]=True
            else:
                if black_tiles==2:
                    tiles2[x,y]=True
    return tiles2

#main
start_profiling()
for l in lines:
    # find directions in order
    # (the two-letter alternatives come first in the pattern, so 'se' is
    # matched as one step rather than as a stray 'e')
    directions=re.findall('(se|sw|ne|nw|e|w)',l)
    x=y=0
    for d in directions:
        if d=='se':
            x+=1
            y+=1
        elif d=='sw':
            x-=1
            y+=1
        elif d=='ne':
            x+=1
            y-=1
        elif d=='nw':
            x-=1
            y-=1
        elif d=='e':
            x+=2
        elif d=='w':
            x-=2
    tiles[x,y]=not tiles[x,y] # flip the destination tile
# part a: number of black tiles after all flips (True counts as 1)
print('a)',sum(t for t in tiles.values()))
end_profiling()
start_profiling()
# part b: run the automaton for 100 days
for _ in range(100):
    tiles=day()
print('b)', sum(t for t in tiles.values()))
end_profiling()
16,749
7,936
#!/usr/bin/env python3

import argparse
import sys
import re
import signal  # BUG FIX: run_aff3ct() uses signal.SIGINT but 'signal' was never imported
import subprocess
import os
import glob
import copy

import aff3ct_help_parser as ahp


# read all the lines from the given file and set them in a list of string lines
# with striped \n \r
def readFileInTable(filename):
    """Return the non-empty lines of 'filename', stripped of '\\n' and '\\r'."""
    lines = []
    # context manager guarantees the file is closed (the original leaked it on error)
    with open(filename, "r") as aFile:
        for line in aFile:
            line = re.sub('\r', '', line.rstrip('\n'))
            if len(line) > 0:
                lines.append(line)
    return lines


def get_keys(filename):
    """Extract the '.. |key|' substitution names defined in an RST strings file."""
    lines = readFileInTable(filename)
    list_keys = []
    for l in lines:
        if l.startswith(".. |"):
            start_pos = 4  # position right after '.. |'
            end_pos = l.find("|", start_pos)
            list_keys.append(l[start_pos:end_pos])
    return list_keys


def run_aff3ct(args_list):
    """Run the AFF3CT binary with 'args_list'; return (stdout lines, stderr text)."""
    try:
        processAFFECT = subprocess.Popen(args_list, stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE)
        (stdoutAFFECT, stderrAFFECT) = processAFFECT.communicate()
    except KeyboardInterrupt:
        # forward the interrupt to the child process, then drain its output
        os.kill(processAFFECT.pid, signal.SIGINT)
        (stdoutAFFECT, stderrAFFECT) = processAFFECT.communicate()

    err = stderrAFFECT.decode(encoding='UTF-8')
    std = stdoutAFFECT.decode(encoding='UTF-8').split("\n")
    return std, err


def aff3ct_helpmap_to_keys_list(help_map, aff3ct_keys):
    """Append to 'aff3ct_keys' each documentation key found in 'help_map',
    skipping empty keys and duplicates (insertion order preserved)."""
    # ahp.print_help_map(help_map)
    for m in help_map:          # module
        for a in help_map[m]:   # argument
            if type(help_map[m][a]) is dict:
                key = help_map[m][a]["key"]
                if key != "" and key not in aff3ct_keys:
                    aff3ct_keys.append(key)


def get_aff3ct_help_keys(aff3ct_path):
    """Collect every help key exposed by the AFF3CT binary, across all code
    types and simulation types."""
    # get the available codes and simulation types
    args_list = [aff3ct_path, "-h"]
    std, err = run_aff3ct(args_list)

    helpMap = ahp.help_to_map(std)
    # 'limits' look like '<a|b|c>': strip the surrounding brackets, then split
    codesList = helpMap["Simulation"]["--sim-cde-type, -C"]["limits"][1:-1].split("|")
    simList = helpMap["Simulation"]["--sim-type"]["limits"][1:-1].split("|")

    # try to run all codes and simus to get their helps
    aff3ct_keys = []
    for c in codesList:
        for s in simList:
            args_list = [aff3ct_path, "-C", c, "-H", "-k", "--sim-type", s, "-p", "8"]
            std, err = run_aff3ct(args_list)
            helpMap = ahp.help_to_map(std)
            aff3ct_helpmap_to_keys_list(helpMap, aff3ct_keys)

    return aff3ct_keys


def get_doc_keys(doc_path):
    """Collect the '|factory::...|' keys referenced by the .rst files under
    'doc_path' (recursively), without duplicates."""
    doc_keys = []
    # compile once (hoisted out of the per-file loop) and use a raw string
    pattern = re.compile(r"\|(factory::[^ ]*)\|")
    for filename in glob.iglob(doc_path + '**/*.rst', recursive=True):
        # the original iterated open(filename) directly and leaked the handle
        with open(filename) as f:
            for line in f:
                for match in re.finditer(pattern, line):
                    doc_keys.append(match.group(1))
    # remove duplicates
    doc_keys = list(set(doc_keys))
    return doc_keys


def display_keys(keys):
    """Pretty-print a list of keys, one per line."""
    for e in keys:
        print (" - [" + e + "]")
    if len(keys) == 0:
        print (" The keys list is empty.")


def check_keys(keys_file, aff3ct_path, doc_path):
    """Cross-check the strings database, the AFF3CT help and the AFF3CT doc;
    print the differences and return how many mismatching keys were found."""
    list_keys = get_keys(keys_file)                  # keys defined in the strings database
    aff3ct_keys = get_aff3ct_help_keys(aff3ct_path)  # keys used by the binary's help
    doc_keys = get_doc_keys(doc_path)                # keys referenced by the doc

    list_keys.sort()
    aff3ct_keys.sort()
    doc_keys.sort()

    aff3ct_keys_save = copy.deepcopy(aff3ct_keys)

    # consume matching help keys so that the leftovers are the undocumented ones
    not_in_aff3ct_keys = []
    for k in list_keys:
        try:
            idx = aff3ct_keys.index(k)
            del aff3ct_keys[idx]
        except ValueError:  # list.index raises ValueError when absent
            not_in_aff3ct_keys.append(k)

    not_in_doc_keys = []
    for k in aff3ct_keys_save:
        try:
            idx = doc_keys.index(k)
            del doc_keys[idx]
        except ValueError:
            not_in_doc_keys.append(k)

    # manages special key exceptions
    exceptions_not_in_doc_keys = ["factory::Frozenbits_generator::p+pb-path"]
    exceptions_doc_keys = ["factory::BFER::p+mpi-comm-freq",
                           "factory::Launcher::except-a2l"]

    for e in exceptions_not_in_doc_keys:
        if e in not_in_doc_keys:
            not_in_doc_keys.remove(e)

    for e in exceptions_doc_keys:
        if e in doc_keys:
            doc_keys.remove(e)

    print("Keys used in the AFF3CT help but not defined in the strings database (undocumented keys):")
    display_keys(aff3ct_keys)
    print()

    print("Keys used in the AFF3CT doc but not used in the AFF3CT help:")
    display_keys(doc_keys)
    print()

    print("Keys used in the AFF3CT help but not used in the AFF3CT doc:")
    display_keys(not_in_doc_keys)
    print()

    print("Keys defined in the strings database but not used in the AFF3CT help or in the AFF3CT doc:")
    display_keys(not_in_aff3ct_keys)
    print()

    nDiff = len(aff3ct_keys) + len(doc_keys) + len(not_in_doc_keys)
    return nDiff


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--keys', action='store', dest='keys_file', type=str,
                        default='doc/strings.rst')
    parser.add_argument('--aff3ct', action='store', dest='aff3ct_path', type=str,
                        default='build/bin/aff3ct')
    parser.add_argument('--doc', action='store', dest='doc_path', type=str,
                        default='doc/source/user/simulation/parameters/')
    args = parser.parse_args()

    nDiff = check_keys(args.keys_file, args.aff3ct_path, args.doc_path)
    # exit code = number of mismatching keys (0 means everything is consistent)
    sys.exit(nDiff)
4,773
1,971
from todoster.file_operations import load_projects
from todoster.output_formatter import format_string


def list_projects(arguments):
    """Print the known projects, one per line, as '<n> <title> (#<shortcode>)'.

    Inactive projects are hidden unless 'arguments.show_all_projects' is set;
    their titles are rendered dimmed when shown.
    """
    projects = load_projects()
    if not arguments.show_all_projects:
        # keep only the active projects
        projects = [p for p in projects if p["active"]]

    print()
    for number, project in enumerate(projects, start=1):
        index_col = format_string(str(number).rjust(3), dim=True)
        title_col = format_string(project["title"], dim=not project["active"])
        code_col = format_string("#" + project["shortcode"], color=project["color"])
        print(index_col + " " + title_col + " (" + code_col + ")")
    print()
677
191
#!/usr/bin/env python

__author__ = 'Tomas Novacik'

import unittest2

from game import Game
from board import Board, PlayerType, Move


class GameTest(unittest2.TestCase):
    """Tests for the Game facade."""

    def test_winning_move(self):
        """Placing the fifth circle in a row should finish the game."""
        game = Game()
        game.start()
        # set winning status to board: four circles already in row 0
        board = Board()
        # IDIOM FIX: the original used a throwaway list comprehension purely
        # for its side effects; a plain loop states the intent.
        for i in range(4):
            board.place_move(Move(0, i, PlayerType.CIRCLE))
        winning_move = 0, 4  # the fifth circle completing row 0
        game._board = board
        game.move(*winning_move)
        self.assertTrue(game.is_finished)

    def test_clone(self):
        """Cloning a started game should not raise."""
        game = Game()
        game.start()
        game.clone()

# eof
621
207
""" Functions to mask sentences of undesirable words (stopwords, punctuation etc). Used in get_sentence_embeddings.py to process sentences before finding embeddings. """ import re from skills_taxonomy_v2.pipeline.skills_extraction.cleaning_sentences import ( separate_camel_case, ) def is_token_word(token, token_len_threshold, stopwords, custom_stopwords): """ Returns true if the token: - Doesn't contain 'www' - Isn't too long (if it is it is usually garbage) - Isn't a proper noun/number/quite a few other word types - Isn't a word with numbers in (these are always garbage) """ return ( ("www" not in token.text) and (len(token) < token_len_threshold) and ( token.pos_ not in [ "PROPN", "NUM", "SPACE", "X", "PUNCT", "ADP", "AUX", "CONJ", "DET", "PART", "PRON", "SCONJ", ] ) and (not re.search("\d", token.text)) and (not token.text.lower() in stopwords + custom_stopwords) and (not token.lemma_.lower() in stopwords + custom_stopwords) ) def process_sentence_mask( sentence, nlp, bert_vectorizer, token_len_threshold, stopwords, custom_stopwords ): """ Mask sentence of stopwords etc, then get sentence embedding """ sentence = separate_camel_case(sentence) doc = nlp(sentence) masked_sentence = "" for i, token in enumerate(doc): if is_token_word(token, token_len_threshold, stopwords, custom_stopwords): masked_sentence += " " + token.text else: masked_sentence += " [MASK]" return masked_sentence
1,911
557
"""destinations Revision ID: 033809bcaf32 Revises: 4a77b8fb792a Create Date: 2017-08-24 05:56:45.166590 """ from alembic import op import sqlalchemy as sa import geoalchemy2 # revision identifiers, used by Alembic. revision = '033809bcaf32' down_revision = '4a77b8fb792a' branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('destinations', sa.Column('id', sa.Integer(), nullable=False), sa.Column('created_at', sa.DateTime(timezone=True), nullable=True), sa.Column('point', geoalchemy2.types.Geometry(geometry_type='POINT'), nullable=True), sa.Column('name', sa.String(length=80), nullable=True), sa.Column('address', sa.String(length=300), nullable=True), sa.PrimaryKeyConstraint('id') ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_table('destinations') # ### end Alembic commands ###
1,000
388