index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
999,200 | 52e18b327c4158bec5ff002230304ed75230c7e1 | from flask import Flask, redirect, url_for, render_template, request, session, flash, abort
from requests import get
import os,subprocess
from subprocess import Popen, PIPE
import pwd
app = Flask(__name__)
app.secret_key = "A_skdfjPskdfkPdsgllflkdnfkljadklf"
ip = get('https://api.ipify.org').content.decode('utf8')
def makeNewFolder(newpath, user=None):
    """Create directory *newpath* (if missing) and optionally a system account.

    :param newpath: directory path to create
    :param user: optional system user name; created via useradd when absent
    SECURITY: *user* can originate from an HTTP route parameter, so it is
    passed to useradd as a single argv element (no shell) to prevent
    command injection, instead of the original ``os.system("useradd "+user)``.
    """
    if not os.path.exists(newpath):
        os.makedirs(newpath)
    if user is not None:
        try:
            pwd.getpwnam(user)
        except KeyError:
            # Account does not exist yet; create it without invoking a shell.
            subprocess.run(["useradd", user])
def makeNewFile(file, content):
    """Write *content* to *file*, creating or truncating it.

    Uses a context manager so the handle is closed even if write() raises,
    unlike the original explicit open/close sequence.
    """
    with open(file, "w+") as f:
        f.write(content)
def decodeFolderStructure(folder_structure, location):
    """Materialise a nested dict under *location*: dict values become
    folders, str values become files with that content.

    Bug fix: the original did ``location += "/" + key`` inside the loop, so
    every sibling after the first was created inside a path that still
    contained the previous sibling's name.  Each child now gets a fresh
    path.  ``isinstance`` replaces the fragile ``str(type(...))`` compare.
    """
    for key, key_data in folder_structure.items():
        child_path = location + "/" + key  # fresh path per sibling
        if isinstance(key_data, str):
            makeNewFile(child_path, key_data)
        else:
            makeNewFolder(child_path)
            decodeFolderStructure(key_data, child_path)
@app.route("/")
def index():
return ip
@app.route("/send_files_data/<user>", methods=['POST'])
def method_name(user):
defPath = "cgi-bin/"+user
makeNewFolder(defPath,user)
folder_structure = request.json
decodeFolderStructure(folder_structure,defPath)
return defPath
@app.route("/get_files_data/<user>")
def get_fild_data(user):
#Setup Files in file system
defPath = "cgi-bin/"+user
start_file = defPath+"/run.sh"
assert os.path.isfile(start_file)
#Setup Enviornment
pw_record = pwd.getpwnam(user)
my_env = os.environ.copy()
user_name = pw_record.pw_name
user_home_dir = pw_record.pw_dir
env = os.environ.copy()
env[ 'HOME' ] = user_home_dir
env[ 'LOGNAME' ] = user_name
env[ 'USER' ] = user_name
#Run Subprocess
pipe = subprocess.Popen(start_file, stdout=PIPE, timeout=15, preexec_fn=os.setuid(pw_record.pw_uid), env=my_env)
#Return Standard Output of Process
return pipe.stdout
if __name__ == '__main__':
    # Development entry point.  NOTE(review): debug=True exposes the
    # Werkzeug debugger — confirm this never runs in production.
    app.run(port="5001", debug=True)
999,201 | 58c23e231133e702be3172b8125dad89ae373a29 | import pygame, sys, os, random
from classes import *
from pygame.locals import *
blocksFile = "blocks.txt"
thisBlock = ""
allBlocks = []
boardWidth = 15
boardHeight = 20
gameOver = False
# Parse block shapes from "blocks.txt": shapes are separated by "/" lines
# and the list is terminated by an "END" line.
# Fixes: ``while file:`` never terminates on EOF (a file object is always
# truthy), so a file missing the END sentinel spun forever on empty reads;
# the handle was also never closed.  Iterate lines under a context manager.
with open(blocksFile, "r") as file:
    for line in file:
        if line.find("END") >= 0:
            break
        if line.find("/") >= 0:
            # End of one shape: hand the accumulated text to blockStyle.
            allBlocks.append(blockStyle(thisBlock))
            thisBlock = ""
            continue
        thisBlock = thisBlock + line
# Make board
gameBoard = board(boardWidth, boardHeight)
# All pygame init
pygame.init()
gameWindow = pygame.display.set_mode((640, 480))
pygame.display.set_caption('PyTetris')
clock = pygame.time.Clock()
# Player starts with a randomly chosen block style.
playerBlock = block(boardWidth, boardHeight, allBlocks[random.randrange(len(allBlocks))].getStyle(), gameBoard)
pygame.time.Clock()  # NOTE(review): result unused — leftover call?
# USEREVENT+1 fires every 150 ms (input polling); USEREVENT+2 every second (gravity step).
pygame.time.set_timer(pygame.USEREVENT + 1, 150)
pygame.time.set_timer(pygame.USEREVENT + 2, 1000)
#Game loop
while gameOver == False:
    clock.tick(60)  # cap the frame rate at 60 FPS
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            gameOver = True
        elif event.type == KEYDOWN and event.key == K_ESCAPE:
            gameOver = True
        elif event.type == pygame.USEREVENT + 1:
            playerBlock.handlePlayerInput()
        elif event.type == pygame.USEREVENT + 2:
            playerBlock.updatePlayer()
    # Block has landed: recycle the player block with a new random style.
    # NOTE(review): original indentation was lost in this dump; this check is
    # assumed to run once per frame, after event handling — confirm.
    if playerBlock.isDown == True:
        playerBlock.changeStyle(allBlocks[random.randrange(len(allBlocks))].getStyle())
    gameWindow.fill((0,0,0))
    gameBoard.drawBoard()
    gameBoard.update()
    playerBlock.drawBlock()
    pygame.display.flip()
pygame.quit()
|
999,202 | 533cc4fa07ad8698472bda43f710578d2fc1731d | # coding:utf-8
from math import log
class BitMap(object):
    """Bitmap over the integers [0, max_num], stored as 64-bit buckets.

    A number n lives in bucket ``n >> 6`` at bit offset ``n % 64``.
    """

    def __init__(self, max_num=0):
        self.bit_mask = 1
        self.bucket_mask = 0x40                     # bits per bucket (64)
        self.right = int(log(self.bucket_mask, 2))  # shift selecting the bucket
        self.int_numbers = int(max_num / self.bucket_mask) + 1
        self.bit_map = [0 for _ in range(self.int_numbers)]
        self.bit_init()

    def bit_init(self):
        """Clear every bucket (defensive: buckets already start at zero)."""
        for bucket_index in range(self.int_numbers):
            self.bit_map[bucket_index] &= 0x0

    def check_bit(self, number):
        """Return a non-zero int when *number*'s bit is set, else 0."""
        bucket = number >> self.right
        offset = number % self.bucket_mask
        return self.bit_map[bucket] & (self.bit_mask << offset)

    def set_bit(self, number):
        """Mark *number* as present in the bitmap."""
        bucket = number >> self.right
        offset = number % self.bucket_mask
        self.bit_map[bucket] |= self.bit_mask << offset
|
999,203 | 981df90fb317e228f1730619b12d5f58087e8842 | import argparse
import os
import json5
import numpy as np
import torch
from torch.utils.data import DataLoader
from util.utils import initialize_config
def main(config, resume):
    """Build dataloaders, model, optimizer and trainer from *config* and train.

    :param config: parsed json5 configuration dict (see the __main__ guard)
    :param resume: when True the trainer resumes from its latest checkpoint
    """
    torch.manual_seed(config["seed"])  # for both CPU and GPU
    np.random.seed(config["seed"])
    train_dataloader = DataLoader(
        dataset=initialize_config(config["train_dataset"]),
        batch_size=config["train_dataloader"]["batch_size"],
        num_workers=config["train_dataloader"]["num_workers"],
        shuffle=config["train_dataloader"]["shuffle"],
        pin_memory=config["train_dataloader"]["pin_memory"]
    )
    # Validation uses batch_size 1 and a single worker.
    # NOTE(review): presumably because validation samples vary in length — confirm.
    valid_dataloader = DataLoader(
        dataset=initialize_config(config["validation_dataset"]),
        num_workers=1,
        batch_size=1
    )
    model = initialize_config(config["model"])
    optimizer = torch.optim.Adam(
        params=model.parameters(),
        lr=config["optimizer"]["lr"],
        betas=(config["optimizer"]["beta1"], config["optimizer"]["beta2"])
    )
    loss_function = initialize_config(config["loss_function"])
    # pass_args=False yields the trainer callable uninstantiated; it is
    # constructed below with the full set of training collaborators.
    trainer_class = initialize_config(config["trainer"], pass_args=False)
    trainer = trainer_class(
        config=config,
        resume=resume,
        model=model,
        loss_function=loss_function,
        optimizer=optimizer,
        train_dataloader=train_dataloader,
        validation_dataloader=valid_dataloader
    )
    trainer.train()
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Wave-U-Net for Speech Enhancement")
    parser.add_argument("-C", "--configuration", required=True, type=str, help="Configuration (*.json).")
    parser.add_argument("-R", "--resume", action="store_true", help="Resume experiment from latest checkpoint.")
    args = parser.parse_args()
    configuration = json5.load(open(args.configuration))
    # The experiment name is derived from the config file's base name;
    # the config path itself is stashed so the trainer can archive it.
    configuration["experiment_name"], _ = os.path.splitext(os.path.basename(args.configuration))
    configuration["config_path"] = args.configuration
    main(configuration, resume=args.resume)
|
class SeamCarving:
    """Dynamic-programming minimum-energy vertical seam finder."""

    def carve_seam(self, disruption: [[int]]) -> [int]:
        """Return, for each row, the column index of the minimum-energy
        vertical seam through the *disruption* energy grid (top to bottom,
        adjacent rows differ by at most one column).

        Bug fix: the original used ``len(disruption)`` (the row count) as
        the column count throughout, which breaks on non-square grids.
        The caller's grid is also no longer mutated.
        """
        rows = len(disruption)
        cols = len(disruption[0])
        # Accumulate costs on a copy so the input is left untouched.
        cost = [list(row) for row in disruption]
        for i in range(1, rows):
            for j in range(cols):
                # Candidates are the up-to-three neighbours in the row above;
                # the slice clamps naturally at both edges.
                above = cost[i - 1][max(j - 1, 0):j + 2]
                cost[i][j] += min(above)
        result = [0] * rows
        # Seam ends at the cheapest cell of the bottom row (first on ties,
        # matching list.index semantics of the original).
        result[rows - 1] = cost[rows - 1].index(min(cost[rows - 1]))
        # Walk back up, moving to the cheapest reachable neighbour.
        for i in range(rows - 2, -1, -1):
            j = result[i + 1]
            lo = max(j - 1, 0)
            window = cost[i][lo:j + 2]
            result[i] = lo + window.index(min(window))
        return result
# print(SeamCarving().carve_seam([[1, 2, 0, 3], [1, 4, 4, 4], [1, 2, 3, 4], [3, 1, 1, 3]]))
# print(SeamCarving().carve_seam([[3, 2, 2, 3, 1, 2], [2, 1, 3, 2 ,3, 1], [3,4,3,1,3,1], [3,2,1,2,4,3],[1,3,3,2,4,3]]))
|
999,205 | 0a15e1747863ad74b799edfe567cf26e3ba1886f | #!/usr/bin/python
# This code extract gi_accession number, protein sequence and length information from fasta database
#argv[1]: fasta database
#argv[2]: output file
import sys
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.Alphabet import IUPAC
#import argparse
#parser=argparse.ArgumentParser(description='python ***.py input_file database_file output_file')
#args = parser.parse_args()
#shellinput = raw_input('Please type your input file name!\n')
# Compute the average protein sequence length of the FASTA database given
# as argv[1].
shellinput1 = str(sys.argv[1])
temp = []
with open(shellinput1, 'r') as inputfile1:
    for record in SeqIO.parse(inputfile1, "fasta"):
        # Only the length matters for the average; headers are ignored.
        temp.append(len(str(record.seq)))
# Bug fix: the original divided by the last enumerate() index (count - 1),
# overstating the average, crashed with ZeroDivisionError on a one-record
# file, and used integer division.  Empty input now yields 0.0.
if temp:
    ave_len = float(sum(temp)) / len(temp)
else:
    ave_len = 0.0
print(ave_len)
|
999,206 | e2f15466e750fc3559647f23dd5f3e5626220e36 | #!/usr/bin/env python3
# This file is Copyright (c) 2020 Florent Kermarrec <florent@enjoy-digital.fr>
# This file is Copyright (c) 2020 Dolu1990 <charles.papon.90@gmail.com>
# License: BSD
import argparse
from migen import *
from litex.build.generic_platform import *
from litex.build.sim import SimPlatform
from litex.build.sim.config import SimConfig
from litex.soc.integration.soc import SoCRegion
from litex.soc.integration.soc_core import *
from litex.soc.integration.builder import *
from litedram.modules import MT41K128M16
from litedram.phy.model import SDRAMPHYModel
from litedram.core.controller import ControllerSettings
from litex.tools.litex_sim import get_sdram_phy_settings
from vexriscv_smp import VexRiscvSMP
# IOs ----------------------------------------------------------------------------------------------
# Simulation pads: clock/reset plus a stream-style "serial" interface
# (valid/ready/data pairs in each direction) consumed by the sim UART.
_io = [
    ("sys_clk", 0, Pins(1)),
    ("sys_rst", 0, Pins(1)),
    ("serial", 0,
        Subsignal("source_valid", Pins(1)),
        Subsignal("source_ready", Pins(1)),
        Subsignal("source_data", Pins(8)),
        Subsignal("sink_valid", Pins(1)),
        Subsignal("sink_ready", Pins(1)),
        Subsignal("sink_data", Pins(8)),
    ),
]
# Platform -----------------------------------------------------------------------------------------
class Platform(SimPlatform):
    """Minimal Verilator simulation platform exposing only the ``_io`` pads."""
    def __init__(self):
        SimPlatform.__init__(self, "SIM", _io)
# SoCSMP -------------------------------------------------------------------------------------------
class SoCSMP(SoCCore):
    """Simulated SoC: VexRiscv SMP cluster, PLIC/CLINT, and a modelled DDR3.

    :param cpu_count: number of VexRiscv cores (forwarded via config_cpu_count)
    :param init_memories: preload SDRAM with the Linux boot images
    :param with_sdcard: add an emulated SDCard (also forces coherent DMA)
    """
    def __init__(self, cpu_count, init_memories=False, with_sdcard=False):
        # Cluster configs ---------------------------------------------------------------------
        # Class-level knobs consumed by the VexRiscvSMP CPU wrapper.
        VexRiscvSMP.litedram_width = 128
        VexRiscvSMP.ibus_width = 64
        VexRiscvSMP.dbus_width = 64
        VexRiscvSMP.coherent_dma = with_sdcard
        # -------------------------------------------------------------------------------------------
        platform = Platform()
        sys_clk_freq = int(1e6)
        sdram_init = []
        if init_memories:
            # Load addresses of the Linux boot artifacts within main RAM.
            sdram_init = get_mem_data({
                "images/fw_jump.bin": "0x00f00000",
                "images/Image": "0x00000000",
                "images/dtb" : "0x00ef0000",
                "images/rootfs.cpio": "0x01000000",
            }, "little")
        # SoCCore ----------------------------------------------------------------------------------
        SoCCore.__init__(self, platform, clk_freq=sys_clk_freq,
            cpu_type = "vexriscv", cpu_variant="default", cpu_cls=VexRiscvSMP,
            uart_name = "sim",
            integrated_rom_size = 0x10000,
            integrated_main_ram_size = 0x00000000)
        self.platform.name = "sim"
        self.add_constant("SIM")
        self.add_constant("config_cpu_count", cpu_count) # for dts generation
        # PLIC ------------------------------------------------------------------------------------
        self.bus.add_slave("plic", self.cpu.plicbus, region=SoCRegion(origin=0xf0C00000, size=0x400000, cached=False))
        # NOTE(review): interrupt_map is built after SoCCore.__init__ and not
        # obviously consumed afterwards — confirm it is not dead code.
        interrupt_map = {**SoCCore.interrupt_map, **{
            "uart": 1,
        }}
        # CLINT ------------------------------------------------------------------------------------
        self.bus.add_slave("clint", self.cpu.cbus, region=SoCRegion(origin=0xf0010000, size=0x10000, cached=False))
        # CRG --------------------------------------------------------------------------------------
        self.submodules.crg = CRG(platform.request("sys_clk"))
        # SDRAM ------------------------------------------------------------------------------------
        phy_settings = get_sdram_phy_settings(
            memtype = "DDR3",
            data_width = 16,
            clk_freq = 100e6)
        self.submodules.sdrphy = SDRAMPHYModel(
            module = MT41K128M16(100e6, "1:4"),
            settings = phy_settings,
            clk_freq = 100e6,
            init = sdram_init)
        self.add_sdram("sdram",
            phy = self.sdrphy,
            module = MT41K128M16(100e6, "1:4"),
            origin = self.mem_map["main_ram"],
            controller_settings = ControllerSettings(
                cmd_buffer_buffered = False,
                with_auto_precharge = True
            )
        )
        if init_memories:
            self.add_constant("MEMTEST_BUS_SIZE", 0) # Skip test if memory is initialized to avoid
            self.add_constant("MEMTEST_ADDR_SIZE", 0) # corrupting the content.
            self.add_constant("MEMTEST_DATA_SIZE", 0)
            self.add_constant("ROM_BOOT_ADDRESS", 0x40f00000) # Jump to fw_jump.bin
        else:
            self.add_constant("MEMTEST_BUS_SIZE", 4096)
            self.add_constant("MEMTEST_ADDR_SIZE", 4096)
            self.add_constant("MEMTEST_DATA_SIZE", 4096)
        # SDCard -----------------------------------------------------------------------------------
        if with_sdcard:
            self.add_sdcard("sdcard", use_emulator=True)
# Build --------------------------------------------------------------------------------------------
def main():
    """Parse arguments and run the two-pass simulation build.

    Pass 0 elaborates without compiling to export csr.json and generate the
    DTB; pass 1 compiles the gateware and actually runs the simulation.
    """
    # Bug fix: ``os`` is used below (chdir/system) but is never imported at
    # file level (unless a star import happens to re-export it); a local
    # import guarantees it resolves.
    import os
    parser = argparse.ArgumentParser(description="Linux on LiteX-VexRiscv Simulation")
    VexRiscvSMP.args_fill(parser)
    parser.add_argument("--sdram-init", action="store_true", help="Init SDRAM with Linux images")
    parser.add_argument("--with-sdcard", action="store_true", help="Enable SDCard support")
    parser.add_argument("--trace", action="store_true", help="Enable VCD tracing")
    parser.add_argument("--trace-start", default=0, help="Cycle to start VCD tracing")
    parser.add_argument("--trace-end", default=-1, help="Cycle to end VCD tracing")
    parser.add_argument("--opt-level", default="O3", help="Compilation optimization level")
    args = parser.parse_args()
    VexRiscvSMP.args_read(args)
    sim_config = SimConfig(default_clk="sys_clk")
    sim_config.add_module("serial2console", "serial")
    for i in range(2):
        # Only the second pass initialises memories, compiles and runs.
        soc = SoCSMP(args.cpu_count, args.sdram_init and i != 0, args.with_sdcard)
        builder = Builder(soc,
            compile_gateware = i != 0,
            csr_json = "build/sim/csr.json")
        builder.build(sim_config=sim_config,
            run = i != 0,
            opt_level = args.opt_level,
            trace = args.trace,
            trace_start = int(args.trace_start),
            trace_end = int(args.trace_end),
            trace_fst = args.trace)
        os.chdir("../")
        if i == 0:
            # Generate the device tree from the freshly exported CSR map.
            os.system("./json2dts.py build/sim/csr.json > build/sim/dts") # FIXME
            os.system("dtc -O dtb -o images/dtb build/sim/dts") # FIXME
            os.system("cp verilog/*.bin build/sim/gateware/")

if __name__ == "__main__":
    main()
|
# Mad-libs: collect three answers, then print the assembled sentence.
print('MAD LIBS CODE 2')
name = input('What is your name? ')
food = input('What is your favorite food? ')
swim = input('Please name your favorite place to swim. ')
# f-string spacing reproduces print()'s default single-space separators.
print(f"{name} is a small boy. He likes to eat {food} and loves to swim at the {swim} .")
999,208 | e53ca8f48592a78768074e1c9c9e5065b90ce51f | # -*- coding: utf-8 -*-
# Copyright 2015 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Log lines to scribe using the default global logger.
"""
from clog import config
from clog.loggers import FileLogger, monk_dependency_installed,\
ScribeMonkLogger, MonkLogger, ScribeLogger, StdoutLogger
from clog.zipkin_plugin import use_zipkin, ZipkinTracing
# global logger, used by module-level functions
loggers = None
class LoggingNotConfiguredError(Exception):
    """Raised when no logger could be built and logging was never configured."""
    pass
def create_preferred_backend_map():
    """Flatten the configured list of one-item mappings into a single dict.

    PyStaticConfig cannot represent a map directly, so the configuration
    stores a list of single-entry mappings; the first item of each entry
    is taken.
    """
    flattened = {}
    for single_entry in config.preferred_backend_map:
        first_key, first_value = list(single_entry.items())[0]
        flattened[first_key] = first_value
    return flattened
def check_create_default_loggers():
    """Set up the module-level global loggers, if not already built."""
    global loggers
    # important to specifically compare to None, since empty list means something different
    if loggers is None:
        # initialize list of loggers
        loggers = []
        # possibly add logger that writes to local files (for dev)
        if config.clog_enable_file_logging:
            if config.log_dir is None:
                raise ValueError('log_dir not set; set it or disable clog_enable_file_logging')
            loggers.append(FileLogger())
        if not config.scribe_disable:
            scribe_logger = ScribeLogger(
                config.scribe_host,
                config.scribe_port,
                config.scribe_retry_interval
            )
            # Prefer the Monk-backed logger when Monk is enabled and its
            # client dependency is importable; it wraps the plain scribe logger.
            if not config.monk_disable and monk_dependency_installed:
                scribe_monk_logger = ScribeMonkLogger(
                    config,
                    scribe_logger,
                    MonkLogger(config.monk_client_id),
                    preferred_backend_map=create_preferred_backend_map()
                )
                loggers.append(scribe_monk_logger)
            else:
                loggers.append(scribe_logger)
        if config.clog_enable_stdout_logging:
            loggers.append(StdoutLogger())
        if use_zipkin():
            # Wrap every configured logger with zipkin tracing.
            loggers = list(map(ZipkinTracing, loggers))
        if not loggers and not config.is_logging_configured:
            raise LoggingNotConfiguredError
def reset_default_loggers():
    """
    Destroy the global :mod:`clog` loggers. This must be done when forking to
    ensure that children do not share a desynchronized connection to Scribe
    Any writes *after* this call will cause the loggers to be rebuilt, so
    this must be the last thing done before the fork or, better yet, the first
    thing after the fork.
    """
    global loggers
    if loggers:
        for logger in loggers:
            logger.close()
    # None (not []) signals check_create_default_loggers() to rebuild.
    loggers = None
def log_line(stream, line):
    """Log a single line to the global logger(s). If the line contains
    any newline characters each line will be logged as a separate message.
    If this is a problem for your log you should encode your log messages.

    :param stream: name of the scribe stream to send this log
    :param line: contents of the log message
    """
    # Lazily (re)build the global loggers on first use after a reset.
    check_create_default_loggers()
    for logger in loggers:
        logger.log_line(stream, line)
|
999,209 | 753e309d2552e7d44a850cea1bb6dbe52429e977 | from tkinter import *
from tkinter import ttk
from PIL import ImageTk
from PIL import Image
from api_db.op_api import *
from tkinter import messagebox
import datetime
from tkinter.filedialog import asksaveasfile
import csv
import matplotlib.pyplot as drawer
def disable_close():
    """Swallow the window-manager close request so protected pages cannot
    be dismissed via the title-bar X button."""
    return None
# initialize db connection and windows of app
my_db = init_connection()
root_page = Tk()
employee_page = Toplevel()
employees_page = Toplevel()
# hide the secondary pages until login succeeds
employee_page.withdraw()
employees_page.withdraw()
# disable the X (close) button in the top-right corner of both pages
employee_page.protocol("WM_DELETE_WINDOW", disable_close)
employees_page.protocol("WM_DELETE_WINDOW", disable_close)
# initialize logo for root page (keep a reference so Tk doesn't GC it)
logo_root_img = Image.open("logo.png")
logo_root_img = logo_root_img.resize((300, 230))
logo_root_img = ImageTk.PhotoImage(logo_root_img)
# initialize avatar shown for male employees
emp_img_man = Image.open("emp_img.png")
emp_img_man = emp_img_man.resize((250, 200))
emp_img_man = ImageTk.PhotoImage(emp_img_man)
# initialize avatar shown for female employees
emp_emp_female = Image.open("emp_img_f.png")
emp_emp_female = emp_emp_female.resize((250, 200))
emp_emp_female = ImageTk.PhotoImage(emp_emp_female)
# first_last_dict_names maintains the uniqueness of (first_name, last_name) couples
first_last_dict_names = {}
# retains the credentials of the employee that is currently logged in
employee_person_logged_in = []
# detached_rows retains table rows hidden during a search
detached_rows = []
# number of employees per job, loaded from the database; used for statistics
nr_emp_per_job = {}
# retains the first table row selected when the select button is pushed
selected_first_emp = None
def update_emp_page(page):
    """Render the logged-in employee's profile (avatar + labelled fields)
    on *page*, reading from the global ``employee_person_logged_in``."""
    showed_img = None
    # extract credentials for logged in employee
    first_name_emp = employee_person_logged_in[0]
    last_name_emp = employee_person_logged_in[1]
    age_emp = employee_person_logged_in[2]
    job_emp = employee_person_logged_in[3]
    gender_emp = employee_person_logged_in[4]
    salary_emp = employee_person_logged_in[5]
    created_date_emp = employee_person_logged_in[6]
    # pick the avatar matching the employee's gender
    if gender_emp == 'male':
        showed_img = emp_img_man
    elif gender_emp == 'female':
        showed_img = emp_emp_female
    # value labels (employee data)
    img_lbl = Label(page, image=showed_img)
    first_name_lbl = Label(page, text=first_name_emp, font=("Segoe UI Black", 12),bg="#ff6666", fg = "#333300")
    last_name_lbl = Label(page, text=last_name_emp, font=("Segoe UI Black", 12), bg="#ff6666", fg = "#333300")
    age_lbl = Label(page, text=age_emp, font=("Segoe UI Black", 12), bg="#ff6666", fg = "#333300")
    job_lbl = Label(page, text=job_emp, font=("Segoe UI Black", 12), bg="#ff6666",fg = "#333300")
    gender_lbl = Label(page, text=gender_emp, font=("Segoe UI Black", 12), bg="#ff6666", fg = "#333300")
    salary_lbl = Label(page, text=salary_emp, font=("Segoe UI Black", 12), bg="#ff6666", fg = "#333300")
    date_created_lbl = Label(page, text=created_date_emp, bg="#ff6666", fg = "#333300", font=("Segoe UI Black", 12))
    # caption labels (field names)
    first_name_txt = Label(page, text="First name is: ", bg ="#99ffcc", fg="#3366ff",font=("Segoe UI Black", 12))
    last_name_txt = Label(page, text="Last name is: ", bg ="#99ffcc", fg="#3366ff", font=("Segoe UI Black", 12))
    age_txt = Label(page, text="His/Her age is: ", bg ="#99ffcc", fg="#3366ff", font=("Segoe UI Black", 12))
    job_txt = Label(page, text="His/her job is: ", bg ="#99ffcc", fg="#3366ff", font=("Segoe UI Black", 12))
    gender_txt = Label(page, text="His/Her gender is: ", bg ="#99ffcc", fg="#3366ff", font=("Segoe UI Black", 12))
    salary_txt = Label(page, text="His/Her salary is: ", bg ="#99ffcc", fg="#3366ff", font=("Segoe UI Black", 12))
    date_created_txt = Label(page, text="Created/Last updated date: ", bg ="#99ffcc", fg="#3366ff", font=("Segoe UI Black", 12))
    # lay out values in a right column, captions in a left column
    img_lbl.place(relx=0.59, rely=0.15, anchor=CENTER)
    first_name_lbl.place(relx=0.55, rely=0.35, anchor=CENTER)
    last_name_lbl.place(relx=0.55, rely=0.4, anchor=CENTER)
    age_lbl.place(relx=0.55, rely=0.45, anchor=CENTER)
    job_lbl.place(relx=0.55, rely=0.5, anchor=CENTER)
    gender_lbl.place(relx=0.55, rely=0.55, anchor=CENTER)
    salary_lbl.place(relx=0.55, rely=0.6, anchor=CENTER)
    date_created_lbl.place(relx=0.6, rely=0.65, anchor=CENTER)
    first_name_txt.place(relx=0.3, rely=0.35, anchor=CENTER)
    last_name_txt.place(relx=0.3, rely=0.4, anchor=CENTER)
    age_txt.place(relx=0.3, rely=0.45, anchor=CENTER)
    job_txt.place(relx=0.3, rely=0.5, anchor=CENTER)
    gender_txt.place(relx=0.3, rely=0.55, anchor=CENTER)
    salary_txt.place(relx=0.3, rely=0.6, anchor=CENTER)
    date_created_txt.place(relx=0.3, rely=0.65, anchor=CENTER)
    back_btn = Button(page, width=20, bg="#99ccff", fg="#000000", font=("Segoe UI Black", 12),
        command=lambda: back_to_prev_page(page), text="Back!")
    back_btn.place(relx=0.5, rely=0.90, anchor=CENTER)
def go_to_next_page(is_admin=0):
    """Swap the login window for the proper landing page.

    Admins get the employees table; regular employees get their profile
    page, rebuilt from scratch on every login.
    """
    if is_admin:
        employees_page.deiconify()  # show the admin table page
        root_page.withdraw()        # hide the login page
        return
    employee_page.deiconify()       # show the employee profile page
    root_page.withdraw()            # hide the login page
    # wipe any widgets left over from a previous login, then re-render
    for child in employee_page.winfo_children():
        child.destroy()
    update_emp_page(employee_page)
def check_authentication_input_form(user_field, pass_field):
    """Handle the login button: 'Admin'/'admin' opens the admin page,
    otherwise the employees table is scanned for a matching record.

    NOTE(review): credentials are compared in plain text against the first
    two columns of ``employees`` and the admin password is hard-coded —
    confirm whether this is acceptable for this application.
    """
    global employee_person_logged_in
    # extract credentials from fields
    username_emp_in = user_field.get()
    password_emp_in = pass_field.get()
    if username_emp_in == 'Admin' and password_emp_in == 'admin': # if it is admin
        go_to_next_page(1) # go to page for admin
    else: # else search for employee credentials
        is_emp_found = False
        my_cursor = my_db.cursor(buffered=True) # set to extract all the data at once
        my_cursor.execute(f'Select * from employees') # extract needed data
        for emp in my_cursor:
            username_emp = emp[0] # get username == first_name
            password_emp = emp[1] # get password == last_name
            if username_emp_in == username_emp and password_emp_in == password_emp: # if employee is found in database
                is_emp_found = True
                employee_person_logged_in = emp # extract emp credentials
                break
        my_cursor.close()
        if is_emp_found:
            go_to_next_page(0) # go to page for employee
        else:
            messagebox.showwarning("Warning!", "The credentials are not registered!")
    # delete content of fields from index zero to the end
    user_field.delete(0, 'end')
    pass_field.delete(0, 'end')
def init_main_frame():
    """Configure the fixed-size login window and build its widgets."""
    root_page.title("Management of employees")
    root_page.geometry('600x600')
    root_page.iconbitmap("my_icon.ico")
    root_page.configure(bg="#66ff66")
    root_page.resizable(False, False)
    user_lbl = Label(root_page, width=10, text="Username: ", bg ="#99ffcc", fg="#3366ff", font=("Segoe UI Black", 12))
    pass_lbl = Label(root_page, width=10, text="Password: ", bg="#99ffcc", fg="#3366ff", font=("Segoe UI Black", 12))
    img_lbl = Label(image=logo_root_img)
    user_field = Entry(root_page, width=30)
    passwd_field = Entry(root_page, width=30)
    # the login button captures both entry widgets in its callback
    login_btn = Button(root_page, text="Login", width=20, bg="#99ccff", fg="#000000",
        font=("Segoe UI Black", 12),
        command=lambda: check_authentication_input_form(user_field, passwd_field))
    img_lbl.place(relx=0.5, rely=0.25, anchor=CENTER)
    user_lbl.place(relx=0.27, rely=0.5, anchor=CENTER)
    pass_lbl.place(relx=0.27, rely=0.6, anchor=CENTER)
    user_field.place(relx=0.5, rely=0.5, anchor=CENTER)
    passwd_field.place(relx=0.5, rely=0.6, anchor=CENTER)
    login_btn.place(relx=0.5, rely=0.7, anchor=CENTER)
def back_to_prev_page(curr_page):
    """Return to the login window from *curr_page*."""
    curr_page.withdraw() # hide current page
    root_page.deiconify() # show root page
def init_employee_frame():
    """Configure the fixed-size employee profile window (widgets are built
    later, per login, by update_emp_page)."""
    employee_page.title("Employee page")
    employee_page.geometry('700x700')
    employee_page.iconbitmap("my_icon.ico")
    employee_page.configure(bg="#66ff66")
    employee_page.resizable(False, False)
def update_first_last_couple_dict(first_name_emp, last_name_emp):
    """Record a (first, last) name pair in the global uniqueness index.

    Idiom fix: both branches of the original if/else appended, differing
    only in list creation; ``setdefault`` expresses that in one step.
    """
    first_last_dict_names.setdefault(first_name_emp, []).append(last_name_emp)
def update_nr_emp_per_job_dict(job_emp):
    """Increment the per-job employee counter used for the statistics view.

    Idiom fix: the original if/else initialise-or-increment collapses to a
    single ``dict.get`` expression.
    """
    nr_emp_per_job[job_emp] = nr_emp_per_job.get(job_emp, 0) + 1
def create_table_employees(my_cursor):
    """Build the employees Treeview from *my_cursor* rows and place it on
    the admin page; also seeds the uniqueness and per-job statistics dicts.

    Fix: the 'Age' heading was configured twice in the original; headings
    are now derived from the column tuple, removing the duplicate.
    """
    # show just columns with headings, without column #0; the values= list
    # fills the named columns of each inserted row
    my_table = ttk.Treeview(employees_page, show='headings')
    my_table['columns'] = ('First_Name', 'Last_Name', 'Age', 'Job', 'Gender', 'Salary', 'Hire_date')
    # define headings (one per declared column)
    for column_name in my_table['columns']:
        my_table.heading(column_name, text=column_name)
    for emp in my_cursor:
        first_name_emp = emp[0]
        last_name_emp = emp[1]
        job_emp = emp[3]
        update_first_last_couple_dict(first_name_emp, last_name_emp)
        update_nr_emp_per_job_dict(job_emp)
        # insert the employee under the root node '' after all existing rows
        my_table.insert('', END, values=emp)
    # place the table in the page
    my_table.place(relx=0.5, rely=0.5, anchor=CENTER)
    return my_table
def check_values_add_employee_form(age, gender, salary):
    """Return True when *age* and *salary* parse as integers and *gender*
    is 'male' or 'female' (case-insensitive)."""
    for numeric_field in (age, salary):
        try:
            int(numeric_field)
        except ValueError:
            return False
    return gender.lower() in ('male', 'female')
def reset_table(my_table):
    """Undo a search filter: re-attach every hidden row to *my_table*,
    then clear the global cache of detached rows."""
    global detached_rows
    for hidden_row in detached_rows:
        # re-attach under the root node '' at index 0
        my_table.reattach(hidden_row, '', 0)
    detached_rows = []
def add_employee(my_table, first_name_in, last_name_in, age_in, gender_in, salary_in, job_in):
    """Validate the add-employee form, insert the new record into both the
    database and the Treeview, then clear the form fields."""
    # get the content from add emp form
    first_name_emp = first_name_in.get()
    last_name_emp = last_name_in.get()
    age_emp = age_in.get()
    gender_emp = gender_in.get()
    salary_emp = salary_in.get()
    job_emp = job_in.get()
    # check that none of the field contents are empty
    if first_name_emp != '' and last_name_emp != '' and age_emp != '' \
            and gender_emp != '' and salary_emp != '' and job_emp != '':
        # check the values for age, gender and salary are correct
        if check_values_add_employee_form(age_emp, gender_emp, salary_emp) is False:
            messagebox.showwarning("Warning!", "The values for gender age and salary are not all correct!")
            return
        # if the employee is not in the table
        if not(first_name_emp in first_last_dict_names and last_name_emp in first_last_dict_names[first_name_emp]):
            update_first_last_couple_dict(first_name_emp, last_name_emp)
            update_nr_emp_per_job_dict(job_emp)
            # timestamp recorded as the creation date
            time_now = datetime.datetime.now()
            # create a new row for the new emp
            new_emp = (first_name_emp, last_name_emp, int(age_emp), job_emp, gender_emp,
                int(salary_emp), time_now.strftime('%Y-%m-%d %H:%M:%S'))
            # add emp in database
            add_emp_in_db(new_emp, my_db)
            # insert emp in table of employees, with parent ='', at index 0
            my_table.insert('', 0, values=new_emp)
            # clean input fields
            first_name_in.delete(0, END)
            last_name_in.delete(0, END)
            age_in.delete(0, END)
            gender_in.delete(0, END)
            job_in.delete(0, END)
            salary_in.delete(0, END)
        # else if employee is already registered
        else:
            messagebox.showwarning("Warning!", "The Employee "
                "with this first name and this last name is already registered!")
    # else if all the fields are not completed!
    else:
        messagebox.showwarning("Warning!", "Complete all the fields in order to add!")
def delete_employees(my_table):
    """Delete every selected row from the Treeview, the database, and the
    in-memory uniqueness/statistics dictionaries."""
    # get the selected employees
    records = my_table.selection()
    if records != (): # if some employees are selected
        for record in records:
            # get info about emp
            first_name = my_table.item(record)['values'][0]
            last_name = my_table.item(record)['values'][1]
            job = my_table.item(record)['values'][3]
            # decrement the count of employees with that job
            nr_emp_per_job[job] -= 1
            # eliminate the (first, last) name couple from the index
            first_last_dict_names[first_name].remove(last_name)
            # delete from table of the employees page
            my_table.delete(record)
            # delete emp from database
            delete_emp_from_db(first_name, last_name, my_db)
    else:
        messagebox.showwarning("Warning!", "You didn't select any employee!")
def select_line(first_name_in, last_name_in, age_in, job_in, gender_in, salary_in, my_table):
    """Copy the first selected table row into the edit-form fields and
    remember it (globally) as the row being edited."""
    global selected_first_emp
    # clear the form before filling it
    first_name_in.delete(0, END)
    last_name_in.delete(0, END)
    age_in.delete(0, END)
    job_in.delete(0, END)
    gender_in.delete(0, END)
    salary_in.delete(0, END)
    if my_table.selection() != (): # if something is selected
        # get info for the first employee selected
        first_emp_selected = my_table.item(my_table.selection()[0])
        selected_first_emp = my_table.selection()[0]
        first_name_emp = first_emp_selected['values'][0]
        last_name_emp = first_emp_selected['values'][1]
        age_emp = first_emp_selected['values'][2]
        job_emp = first_emp_selected['values'][3]
        gender_emp = first_emp_selected['values'][4]
        salary_emp = first_emp_selected['values'][5]
        # put the info into the input fields, starting at index 0
        first_name_in.insert(0, first_name_emp)
        last_name_in.insert(0, last_name_emp)
        age_in.insert(0, age_emp)
        job_in.insert(0, job_emp)
        gender_in.insert(0, gender_emp)
        salary_in.insert(0, salary_emp)
    # if nothing is selected
    else:
        messagebox.showwarning("Warning!", "You didn't select anything!")
def update_employee(first_name_in, last_name_in, age_in, job_in, gender_in, salary_in, my_table):
    """Overwrite the previously-selected row with the values currently in
    the form fields, keeping the bookkeeping dicts and the database in sync.

    Requires that select_line() ran first (it sets the module-global
    `selected_first_emp`); shows a warning otherwise.  The hire date of the
    selected row is preserved unchanged.
    """
    global selected_first_emp
    if selected_first_emp is not None:
        # get the information from form
        first_name_form = first_name_in.get()
        last_name_form = last_name_in.get()
        age_form = age_in.get()
        job_form = job_in.get()
        gender_form = gender_in.get()
        salary_form = salary_in.get()
        # check if all the fields are completed
        if not(first_name_form != '' and last_name_form != '' and age_form != '' \
               and job_form != '' and gender_form != '' and salary_form != ''):
            messagebox.showwarning("Warning!", "You must have all the fields completed in order to update!!")
            return
        # check if age, gender, and salary have correct values!
        if check_values_add_employee_form(age_form, gender_form, salary_form) is False:
            messagebox.showwarning("Warning!", "The values for gender age and salary are not all correct!")
            return
        # get the selected person
        selected_emp = selected_first_emp
        # get the attributes of selected emp
        selected_emp = my_table.item(selected_emp)
        first_name_emp = selected_emp['values'][0]
        last_name_emp = selected_emp['values'][1]
        job_emp = selected_emp['values'][3]
        data_created_emp = selected_emp['values'][6]
        # decrement the person with that job (the new job is re-counted below)
        nr_emp_per_job[job_emp] -= 1
        # eliminate the old (first, last) names couple before registering the new one
        first_last_dict_names[first_name_emp].remove(last_name_emp)
        update_first_last_couple_dict(first_name_form, last_name_form)
        update_nr_emp_per_job_dict(job_form)
        update_emp_in_bd(my_db, first_name_form, last_name_form, age_form, job_form, gender_form, salary_form,
                         first_name_emp, last_name_emp)
        # update for select emp the data (hire date kept from the old row)
        my_table.item(selected_first_emp, values=(first_name_form, last_name_form, age_form, job_form, gender_form,
                                                  salary_form, data_created_emp))
        # clean the input fields, from index 0 to the end
        first_name_in.delete(0, END)
        last_name_in.delete(0, END)
        age_in.delete(0, END)
        job_in.delete(0, END)
        gender_in.delete(0, END)
        salary_in.delete(0, END)
        # a fresh select is required before the next update
        selected_first_emp = None
    else:
        messagebox.showwarning("Warning!", "You didn't select anything in order to update!")
def search_employees(text_to_search, my_table):
    """Filter the table rows by a free-text query.

    Previously-hidden rows are first re-attached, then every row whose
    first name, last name, and job do not contain the query (and whose
    gender is not exactly the query) is detached and remembered in the
    module-global `detached_rows` so it can be restored later.
    """
    global detached_rows
    for row in detached_rows:
        my_table.reattach(row, '', 0)  # reattach under the root '' at index 0
    detached_rows = []  # clean detached nodes
    # hoisted: the original recomputed text_to_search.get().lower() for
    # every field of every row
    query = text_to_search.get().lower()
    for child in my_table.get_children():
        values = my_table.item(child)['values']
        # TreeView values per node:
        # 0 first name, 1 last name, 2 age, 3 job, 4 gender, 5 salary, 6 hire date
        # NOTE(review): assumes the name/job/gender cells come back as
        # strings — Treeview may return ints for numeric-looking values.
        first_name_emp = values[0].lower()
        last_name_emp = values[1].lower()
        job_emp = values[3].lower()
        gender_emp = values[4].lower()
        # detach the row when the query matches no searchable column;
        # gender is compared with exact equality on purpose ('male' would
        # otherwise also match every 'female' row)
        if query not in first_name_emp \
                and query not in last_name_emp \
                and query not in job_emp \
                and query != gender_emp:
            detached_rows.append(child)
            my_table.detach(child)
def write_to_excel(my_table):
    """Export the table's currently attached rows to a CSV file chosen via
    a save dialog.  Does nothing when the user cancels the dialog."""
    my_file = asksaveasfile(initialfile='Untitled.csv', defaultextension=".csv", filetypes=[("CSV File", "*.csv")])
    if my_file is None:
        return  # user cancelled the save dialog
    # asksaveasfile already returns an open handle; close it and reopen the
    # path with newline="" as the csv module requires.  (The original
    # leaked both handles — neither was ever closed.)
    my_file.close()
    with open(my_file.name, 'w', newline="") as opener_file:
        csv_writer = csv.writer(opener_file, dialect='excel')
        # write the headings
        csv_writer.writerow(('first_Name', 'last_name', 'age', 'job', 'gender', 'salary', 'hire_date'))
        # write the records (detached/filtered rows are not included)
        for child in my_table.get_children():
            csv_writer.writerow(my_table.item(child)['values'])
def draw_statistics():
    """Show a pie chart of how many employees hold each job, skipping jobs
    whose current head count is zero."""
    occupied = [(job, count) for job, count in nr_emp_per_job.items() if count != 0]
    jobs = [job for job, _ in occupied]
    nr_persons = [count for _, count in occupied]
    # display statistics: values, labels, and percentages with 2 decimals
    drawer.pie(nr_persons, labels=jobs, autopct='%.2f')
    drawer.legend(loc=(1.04, 0))  # place the legend just outside the axes
    drawer.show()  # show the pie
def create_employees_page():
    """Build the employees page: the toolbar of action buttons, the
    edit/search input fields, and the employees table loaded from the DB."""
    # buffered cursor: extract all the data at once
    my_cursor = my_db.cursor(buffered=True)
    my_cursor.execute("select * from employees order by hire_date desc;")
    # create input fields
    first_name_in = Entry(employees_page)
    last_name_in = Entry(employees_page)
    age_in = Entry(employees_page)
    job_in = Entry(employees_page)
    gender_in = Entry(employees_page)
    salary_in = Entry(employees_page)
    search_bar_in = Entry(employees_page, width=32)
    # create table
    my_table = create_table_employees(my_cursor)
    # back button
    bck_btn = Button(employees_page, text="Back!", width=20, bg="#99ccff", fg="#000000", font=("Segoe UI Black", 12),
                     command=lambda: back_to_prev_page(employees_page))
    bck_btn.place(relx=0.3, rely=0.05, anchor=CENTER)
    # add employee button
    add_btn = Button(employees_page, text="Insert employee!",
                     width=20, bg="#99ccff", fg="#000000", font=("Segoe UI Black", 12),
                     command=lambda: add_employee(my_table,
                                                  first_name_in, last_name_in, age_in, gender_in, salary_in, job_in))
    add_btn.place(relx=0.5, rely=0.05, anchor=CENTER)
    # delete employees button
    del_btn = Button(employees_page, text="Delete Some Employees!", width=20, bg="#99ccff", fg="#000000",
                     font=("Segoe UI Black", 12),
                     command=lambda: delete_employees(my_table))
    del_btn.place(relx=0.7, rely=0.05, anchor=CENTER)
    # reset table to initial state button
    reset_table_btn = Button(employees_page, text="Reset table!!", width=20, bg="#99ccff",
                             fg="#000000", font=("Segoe UI Black", 12),
                             command=lambda: reset_table(my_table))
    reset_table_btn.place(relx=0.87, rely=0.05, anchor=CENTER)
    # update the employee button (targets the row chosen via the Select! button)
    update_btn = Button(employees_page, text="Update!", width=20, bg="#99ccff", fg="#000000",
                        font=("Segoe UI Black", 12),
                        command=lambda: update_employee(first_name_in, last_name_in, age_in,
                                                        job_in, gender_in, salary_in, my_table))
    update_btn.place(relx=0.3, rely=0.15, anchor=CENTER)
    # select the employee to be update button
    select_btn = Button(employees_page, text="Select!",
                        width=20, bg="#99ccff", fg="#000000", font=("Segoe UI Black", 12),
                        command=lambda: select_line(first_name_in,
                                                    last_name_in, age_in, job_in, gender_in, salary_in, my_table))
    select_btn.place(relx=0.5, rely=0.15, anchor=CENTER)
    # button to activate search process
    search_btn = Button(employees_page, text="Search employees!", width=20, bg="#99ccff", fg="#000000",
                        font=("Segoe UI Black", 12),
                        command=lambda: search_employees(search_bar_in, my_table))
    search_btn.place(relx=0.7, rely=0.15, anchor=CENTER)
    # export the currently visible rows to a CSV file
    imp_btn = Button(employees_page, text="Export to Excel File!", width=20, bg="#99ccff", fg="#000000",
                     font=("Segoe UI Black", 12),
                     command=lambda: write_to_excel(my_table))
    imp_btn.place(relx=0.1, rely=0.05, anchor=CENTER)
    # pie chart of employees per job
    st_btn = Button(employees_page, text="Show statistics!", width=20, bg="#99ccff", fg="#000000",
                    font=("Segoe UI Black", 12), command=lambda: draw_statistics())
    st_btn.place(relx=0.1, rely=0.15, anchor=CENTER)
    # input place on the window
    first_name_in.place(relx=0.10, rely=0.27, anchor=CENTER)
    last_name_in.place(relx=0.23, rely=0.27, anchor=CENTER)
    age_in.place(relx=0.36, rely=0.27, anchor=CENTER)
    job_in.place(relx=0.49, rely=0.27, anchor=CENTER)
    gender_in.place(relx=0.62, rely=0.27, anchor=CENTER)
    salary_in.place(relx=0.75, rely=0.27, anchor=CENTER)
    search_bar_in.place(relx=0.85, rely=0.15, anchor=CENTER)
    my_cursor.close()
def init_employees_frame():
    """Configure the employees window (title, size, icon, colors) and build
    its widgets via create_employees_page()."""
    employees_page.title("Employees page")
    employees_page.geometry('1500x600')
    employees_page.iconbitmap("my_icon.ico")
    employees_page.configure(bg="#66ff66")
    employees_page.resizable(False, False)  # fixed-size window
    create_employees_page()
|
999,210 | b38cd3ae179533c25c6bac25d103823c8291b567 | from __main__ import session, config, paths
from dotabase import *
from utils import *
from valve2json import valve_readfile
def load():
	"""Rebuild the Item table from the game's script files.

	Reads item definitions from the vpk item scripts, then localized
	names/descriptions/lore from dota_english, then attaches icon file
	paths; everything is committed in one transaction at the end.
	"""
	session.query(Item).delete()
	print("items")
	print("- loading items from item scripts")
	# load all of the item scripts data information
	data = valve_readfile(config.vpk_path, paths['item_scripts_file'], "kv")["DOTAAbilities"]
	for itemname in data:
		if itemname == "Version":
			continue  # metadata entry, not an actual item
		item_data = data[itemname]
		item = Item()
		item.name = itemname
		item.id = item_data['ID']
		item.cost = item_data.get('ItemCost')
		# aliases arrive ';'-separated in the script; stored '|'-separated
		item.aliases = "|".join(item_data.get("ItemAliases", "").split(";"))
		item.quality = item_data.get("ItemQuality")
		item.mana_cost = clean_values(item_data.get('AbilityManaCost'))
		item.cooldown = clean_values(item_data.get('AbilityCooldown'))
		item.base_level = item_data.get("ItemBaseLevel")
		item.ability_special = json.dumps(get_ability_special(item_data.get("AbilitySpecial"), itemname), indent=4)
		item.json_data = json.dumps(item_data, indent=4)
		session.add(item)
	print("- loading item data from dota_english")
	# Load additional information from the dota_english.txt file
	data = valve_readfile(config.vpk_path, paths['localization_abilities'], "kv", encoding="UTF-16")["lang"]["Tokens"]
	for item in session.query(Item):
		# tooltip keys appear with both 'Ability' and 'ability' casing, so try both
		item_tooltip = "DOTA_Tooltip_Ability_" + item.name
		item_tooltip2 = "DOTA_Tooltip_ability_" + item.name
		item.localized_name = data.get(item_tooltip, item.name)
		item.description = data.get(item_tooltip + "_Description", data.get(item_tooltip2 + "_Description", ""))
		item.lore = data.get(item_tooltip + "_Lore", data.get(item_tooltip2 + "_Lore", ""))
		ability_special = json.loads(item.ability_special, object_pairs_hook=OrderedDict)
		ability_special = ability_special_add_header(ability_special, data, item.name)
		item.ability_special = json.dumps(ability_special, indent=4)
		item.description = clean_description(item.description, ability_special, base_level=item.base_level)
	print("- adding item icon files")
	# Add img files to item
	for item in session.query(Item):
		if os.path.isfile(config.vpk_path + paths['item_img_path'] + item.name.replace("item_", "") + ".png"):
			item.icon = paths['item_img_path'] + item.name.replace("item_", "") + ".png"
		else:
			if "recipe" in item.name:
				# all recipes share one generic icon
				item.icon = paths['item_img_path'] + "recipe.png"
			else:
				print(f"icon file not found for {item.name}")
	session.commit()
|
999,211 | 8f2d0b7eb61004771c4cde93ac630004ef650cf3 |
# Read integers until the sentinel -1 and report the smallest one entered.
# Fixes two bugs in the original: seeding with 0 meant any all-positive
# input wrongly reported 0, and the minimum was updated BEFORE the
# sentinel check, so -1 itself always became the "smallest" value.
smallno = None  # None until the first real number arrives
while True:
    n = int(input("Enter the number: "))
    if n == -1:
        break  # sentinel: stop reading, and do not count -1 itself
    if smallno is None or n < smallno:
        smallno = n
print("Smallest number is", smallno)
|
999,212 | d68e82b8c3c6fa21bf866f6b1a0d843aeba33b38 | class Solution:
def maxTurbulenceSize(self, A):
"""
:type A: List[int]
:rtype: int
"""
ans=1
cur=[1,1]
for i in range(1,len(A)):
if A[i]==A[i-1]:
cur=[1,1]
elif A[i]>A[i-1]:
cur=[1,cur[0]+1]
else:
cur=[cur[1]+1,1]
ans=max(ans,max(cur))
return ans
|
999,213 | 7184d69379c7031984d37472f7b2d29508c1e118 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""config.py"""
__author__ = 'Gary.Z'
from data_cleansing.logging import *
# Excel-style indexing: rows and columns are 1-based
EXCEL_INDEX_BASE = 1
HEADER_ROW_INDEX = 0 + EXCEL_INDEX_BASE  # header occupies the first worksheet row
BASE_INFO_COLS_MIN = 23  # presumably the minimum count of leading base-info columns — confirm against the cleansing code
# filter lists: values that trigger removal/rinsing of a record or option
MAJOR_FILTER_LIST = ('测试专业',)  # test-only major used for dry runs
NC_OPTION_FILTER_LIST = ('无法评价', '以上均不需要改进')  # "cannot evaluate" style answers
G1_OPTION_FILTER_LIST = ('国际组织', '军队')  # employer types filtered for question G1
# salary sanity window and the fraction of top earners trimmed as outliers
SALARY_FILTER_LOWER_LIMIT = 1000
SALARY_FILTER_HIGHER_LIMIT = 50000
SALARY_FILTER_TOP_RATIO = 0.003
# dictionary keys shared by every rinse-rule entry below
RINSE_RULE_KEY_QUESTION = 'question_id'
RINSE_RULE_KEY_ANSWER = 'answer'
RINSE_RULE_KEY_OPERATOR = 'operator'
RINSE_RULE_KEY_ACTION = 'rinse_ids'
# the two supported operators for matching a rule's answer set
RINSE_RULE_OPERATOR_IN = 'IN'
RINSE_RULE_OPERATOR_NOTIN = 'NOT_IN'
# definition of irrelevant question rinse rule
RINSE_RULE_IRRELEVANT_QUESTIONS = [
# IF A2 not in (在国内工作, 自由职业) then rinse
# B1, B2, B3, B4, B5, B6, B7-1, B7-2, B7-3, B7-4, B8, B9-1, B9-2, B10-1, B10-2, D1, D2
{RINSE_RULE_KEY_QUESTION: 'A2',
RINSE_RULE_KEY_ANSWER: ('在国内工作', '自由职业'),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7-1', 'B7-2', 'B7-3', 'B7-4', 'B8',
'B9-1', 'B9-2', 'B10-1', 'B10-2', 'D1', 'D2']},
# IF A2 = 自由职业 then rinse B1,B2,B3,B4, B10-1
{RINSE_RULE_KEY_QUESTION: 'A2',
RINSE_RULE_KEY_ANSWER: ('自由职业',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_IN,
RINSE_RULE_KEY_ACTION: ['B1', 'B2', 'B3', 'B4', 'B10-1', 'B10-2']},
# IF B9-1 not in (比较不相关, 很不相关) then rinse B9-2
{RINSE_RULE_KEY_QUESTION: 'B9-1',
RINSE_RULE_KEY_ANSWER: ('比较不相关', '很不相关'),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['B9-2']},
# IF B10-1 = 0次 then rinse B10-2
{RINSE_RULE_KEY_QUESTION: 'B10-1',
RINSE_RULE_KEY_ANSWER: ('0次',),
RINSE_RULE_KEY_OPERATOR: 'IN',
RINSE_RULE_KEY_ACTION: ['B10-2']},
# IF A2 != 未就业 then rinse C1, C2
{RINSE_RULE_KEY_QUESTION: 'A2',
RINSE_RULE_KEY_ANSWER: ('未就业',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['C1', 'C2']},
    # IF A2 != 在国内求学 then rinse E1,E2,E3,E4 (comment aligned with the answer string the rule actually matches)
{RINSE_RULE_KEY_QUESTION: 'A2',
RINSE_RULE_KEY_ANSWER: ('在国内求学',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['E1', 'E2', 'E3', 'E4']},
# IF E3 not in (比较不相关, 很不相关) then rinse E4
{RINSE_RULE_KEY_QUESTION: 'E3',
RINSE_RULE_KEY_ANSWER: ('比较不相关', '很不相关'),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['E4']},
# IF A2 != 出国/出境 then rinse F1, F2, F3, F4
{RINSE_RULE_KEY_QUESTION: 'A2',
RINSE_RULE_KEY_ANSWER: ('出国/出境',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['F1', 'F2', 'F3', 'F4']},
# IF F1 != 求学 then rinse F2, F3
{RINSE_RULE_KEY_QUESTION: 'F1',
RINSE_RULE_KEY_ANSWER: ('求学',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['F2', 'F3']},
# IF F3 not in (比较不相关, 很不相关) then rinse F4
{RINSE_RULE_KEY_QUESTION: 'F3',
RINSE_RULE_KEY_ANSWER: ('比较不相关', '很不相关'),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['F4']},
# IF A2 != 自主创业 then rinse G1, G2, G3, G4, G5
{RINSE_RULE_KEY_QUESTION: 'A2',
RINSE_RULE_KEY_ANSWER: ('自主创业',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['G1', 'G2', 'G3', 'G4', 'G5']},
# I2-1 rules, 22 rules in total
{RINSE_RULE_KEY_QUESTION: 'I2-1-A',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-1-1', 'I2-2-1-2']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-B',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-2-3', 'I2-2-2-4']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-C',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-3-5', 'I2-2-3-6']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-D',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-4-7', 'I2-2-4-8']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-E',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-5-9', 'I2-2-5-10', 'I2-2-5-11', 'I2-2-5-12', 'I2-2-5-13']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-F',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-6-14', 'I2-2-6-15', 'I2-2-6-16', 'I2-2-6-17', 'I2-2-6-18']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-G',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-7-19', 'I2-2-7-20']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-H',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-8-21', 'I2-2-8-22']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-I',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-9-23', 'I2-2-9-24', 'I2-2-9-25']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-J',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-10-26', 'I2-2-10-27']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-K',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-11-28', 'I2-2-11-29']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-L',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-12-30', 'I2-2-12-31', 'I2-2-12-32']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-M',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-13-33', 'I2-2-13-34']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-N',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-14-35', 'I2-2-14-36']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-O',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-15-37', 'I2-2-15-38', 'I2-2-15-39']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-P',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-16-40', 'I2-2-16-41', 'I2-2-16-43', 'I2-2-16-43']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-Q',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-17-44', 'I2-2-17-45', 'I2-2-17-46', 'I2-2-17-47']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-R',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-18-48', 'I2-2-18-49', 'I2-2-18-50', 'I2-2-18-51']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-S',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-19-52', 'I2-2-19-53', 'I2-2-19-54', 'I2-2-19-55', 'I2-2-19-56', 'I2-2-19-57']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-T',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-20-58', 'I2-2-20-59', 'I2-2-20-60', 'I2-2-20-61', 'I2-2-20-62', 'I2-2-20-63']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-U',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-21-64', 'I2-2-21-65']},
{RINSE_RULE_KEY_QUESTION: 'I2-1-V',
RINSE_RULE_KEY_ANSWER: ('1',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_NOTIN,
RINSE_RULE_KEY_ACTION: ['I2-2-22-66', 'I2-2-22-67', 'I2-2-22-68']},
]
RINSE_RULE_IRRELEVANT_QUESTIONS_V6_COMPATIBLE = [
{RINSE_RULE_KEY_QUESTION: 'A2',
RINSE_RULE_KEY_ANSWER: ('自由职业',),
RINSE_RULE_KEY_OPERATOR: RINSE_RULE_OPERATOR_IN,
RINSE_RULE_KEY_ACTION: ['B5', 'B6', 'B7-1', 'B7-2', 'B7-3', 'B7-4', 'B8', 'B9-1', 'B9-2']},
]
# definition of question code mapping to excel column index
# QUESTION_TO_EXCEL_COLUMN_MAP = {
# 'A1': ['X', 'Y'],
# 'A2': ['Z'],
# 'B1': ['AB'],
# 'B2': ['AC'],
# 'B3': ['AD', 'AE', 'AF'],
# 'B4': ['AG', 'AH', 'AI'],
# 'B5': ['AJ', 'AK', 'AI'],
# 'B6': ['AL'],
# 'B7-1': ['AM'],
# 'B7-2': ['AN'],
# 'B7-3': ['AO'],
# 'B7-4': ['AP'],
# 'B8': ['AQ'],
# 'B9-1': ['AR'],
# 'B9-2': ['AS'],
# 'B10-1': ['AT'],
# 'B10-2': ['AU'],
# 'C1': ['AV'],
# 'C2': ['AW'],
# 'D1': ['AX'],
# 'D2': ['AY'],
# 'E1': ['AZ'],
# 'E2': ['BA'],
# 'E3': ['BB'],
# 'E4': ['BC'],
# 'F1': ['BD'],
# 'F2': ['BE'],
# 'F3': ['BF'],
# 'F4': ['BG'],
# 'G1': ['BH', 'BI'],
# 'G2': ['BJ'],
# 'G3': ['BK', 'BL', 'BM', 'BN', 'BO', 'BP', 'BQ'],
# 'G4': ['BR', 'BS', 'BT', 'BU', 'BV', 'BW'],
# 'G5': ['BX', 'BY', 'BZ', 'CA', 'CB', 'CC', 'CD', 'CE'],
# 'I2-1-A': ['EL'],
# 'I2-1-B': ['EM'],
# 'I2-1-C': ['EN'],
# 'I2-1-D': ['EO'],
# 'I2-1-E': ['EP'],
# 'I2-1-F': ['EQ'],
# 'I2-1-G': ['ER'],
# 'I2-1-H': ['ES'],
# 'I2-1-I': ['ET'],
# 'I2-1-J': ['EU'],
# 'I2-1-K': ['EV'],
# 'I2-1-L': ['EW'],
# 'I2-1-M': ['EX'],
# 'I2-1-N': ['EY'],
# 'I2-1-O': ['EZ'],
# 'I2-1-P': ['FA'],
# 'I2-1-Q': ['FB'],
# 'I2-1-R': ['FC'],
# 'I2-1-S': ['FD'],
# 'I2-1-T': ['FE'],
# 'I2-1-U': ['FF'],
# 'I2-1-V': ['FG'],
#
# 'I2-1-1': ['FH'],
# 'I2-1-2': ['FI'],
#
# 'I2-3-5': ['FJ'],
# 'I2-3-6': ['FK'],
#
# 'I2-4-7': ['FL'],
# 'I2-4-8': ['FM'],
#
# 'I2-6-14': ['FN'],
# 'I2-6-15': ['FO'],
# 'I2-6-16': ['FP'],
# 'I2-6-17': ['FQ'],
# 'I2-6-18': ['FR'],
#
# 'I2-2-3': ['FS'],
# 'I2-2-4': ['FT'],
#
# 'I2-5-9': ['FU'],
# 'I2-5-10': ['FV'],
# 'I2-5-11': ['FW'],
# 'I2-5-12': ['FX'],
# 'I2-5-13': ['FY'],
#
# 'I2-7-19': ['FZ'],
# 'I2-7-20': ['GA'],
#
# 'I2-8-21': ['GB'],
# 'I2-8-22': ['GC'],
# 'I2-9-23': ['GD'],
# 'I2-9-24': ['GE'],
# 'I2-9-25': ['GF'],
# 'I2-10-26': ['GG'],
# 'I2-10-27': ['GH'],
# 'I2-11-28': ['GI'],
# 'I2-11-29': ['GJ'],
# 'I2-12-30': ['GK'],
# 'I2-12-31': ['GL'],
# 'I2-12-32': ['GM'],
# 'I2-13-33': ['GN'],
# 'I2-13-34': ['GO'],
# 'I2-14-35': ['GP'],
# 'I2-14-36': ['GQ'],
# 'I2-15-37': ['GR'],
# 'I2-15-38': ['GS'],
# 'I2-15-39': ['GT'],
# 'I2-16-40': ['GU'],
# 'I2-16-41': ['GV'],
# 'I2-16-42': ['GW'],
# 'I2-16-43': ['GX'],
# 'I2-17-44': ['GY'],
# 'I2-17-45': ['GZ'],
# 'I2-17-46': ['HA'],
# 'I2-17-47': ['HB'],
# 'I2-18-48': ['HC'],
# 'I2-18-49': ['HD'],
# 'I2-18-50': ['HE'],
# 'I2-18-51': ['HF'],
# 'I2-19-52': ['HG'],
# 'I2-19-53': ['HH'],
# 'I2-19-54': ['HI'],
# 'I2-19-55': ['HJ'],
# 'I2-19-56': ['HK'],
# 'I2-19-57': ['HL'],
# 'I2-20-58': ['HM'],
# 'I2-20-59': ['HN'],
# 'I2-20-60': ['HO'],
# 'I2-20-61': ['HP'],
# 'I2-20-62': ['HQ'],
# 'I2-20-63': ['HR'],
# 'I2-21-64': ['HS'],
# 'I2-21-65': ['HT'],
# 'I2-22-66': ['HU'],
# 'I2-22-67': ['HV'],
# 'I2-22-68': ['HW'],
# }
|
999,214 | ad3dd11ca7efce8c982291ee2877f0922fea1ff3 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import json
import math
import torch
import numpy
import pandas
import logging
from IPython import embed
from common import scikit_wrappers, data_preprocess
from common.utils import print_to_json
from common.dataloader import load_SMAP_MSL_dataset, load_CSV_dataset
from common.sliding import BatchSlidingWindow, WindowIterator
from common.config import parse_arguments, set_logger, initialize_config
# python univariate_smd.py
if __name__ == '__main__':
    args = parse_arguments()

    # load config: default hyper-parameter dir unless reloading a saved run
    config_dir = "./hypers/" if not args["load"] else args["load"]
    params = initialize_config(config_dir, args)

    # load & preprocess data
    data_dict = load_SMAP_MSL_dataset(params["path"], params["dataset"])
    pp = data_preprocess.preprocessor()
    if params["discretized"]:
        data_dict = pp.discretize(data_dict)
    vocab_size = pp.build_vocab(data_dict)
    pp.save(params["save_path"])

    train_windows, test_windows, test_labels = data_preprocess.generate_windows(data_dict, window_size=params["window_size"])
    # NOTE: removed a leftover IPython `embed()` call here — it dropped into
    # an interactive shell and blocked every non-interactive run.
    train_iterator = WindowIterator(train_windows, batch_size=params["batch_size"], shuffle=True)
    test_iterator = WindowIterator(test_windows, batch_size=params["batch_size"], shuffle=False)
    params['in_channels'] = data_dict["dim"]
    logging.info("Proceeding using {}...".format(params["device"]))
    logging.info(print_to_json(params))

    # training: fit a new encoder unless a saved one is being reloaded
    encoder = scikit_wrappers.CausalCNNEncoder(vocab_size=vocab_size, **params)
    if params["load"]:
        encoder.load_encoder()
    else:
        encoder.fit(train_iterator, save_memory=False)
        encoder.save_encoder()

    # inference
    features = encoder.encode(test_iterator.loader)
    logging.info("Final features have shape: {}".format(features.shape))
|
999,215 | 936bc0b22ba9785cf5c586579ef51ca9c38cffc2 | import time
debug = False
def checkreduceboard(board):
    # Repeatedly remove any row or column consisting entirely of the board's
    # current minimum value; return False as soon as a minimum cell is found
    # whose row AND column are both non-uniform, True once the board is
    # fully reduced (empty).
    # NOTE(review): this helper is never called in this script — it looks
    # like leftover from a different problem; confirm before deleting.
    while board:
        if debug:
            print '---'
            for row in board:
                print row
        nextpass = False
        # smallest value currently anywhere on the board
        boardmin = min(min(board[i]) for i in range(len(board)))
        for rc in range(len(board)):
            for cc in range(len(board[0])):
                if board[rc][cc] != boardmin:
                    continue
                else:
                    # found one. check horizontal
                    ok = True
                    for i in range(1,len(board[0])):
                        if board[rc][i] != board[rc][i-1]:
                            ok = False
                            break
                    if ok:
                        # remove current row
                        board = board[:rc] + board[rc+1:]
                    else:
                        for i in range(1,len(board)):
                            if board[i][cc] != board[i-1][cc]:
                                # This was the last chance!
                                return False
                        # it was ok, remove the column
                        for crow in range(len(board)):
                            board[crow] = board[crow][:cc] + board[crow][cc+1:]
                    # must have removed a row or a column to get here
                    nextpass = True
                    break
            if nextpass:
                break
    return True
def checkwin(board,player):
    # Decide whether `player` completes a full row, column, or diagonal.
    # The wildcard cell 'T' (if present) counts for either player: it is
    # temporarily overwritten with `player` and restored only when the
    # player has NOT won — on a win the substitution is left in place,
    # exactly as the original did.
    wildcard = None
    for r in range(len(board)):
        for c in range(len(board[0])):
            if board[r][c] == 'T':
                wildcard = (r, c)
                break
        if wildcard is not None:
            break
    if wildcard is not None:
        board[wildcard[0]][wildcard[1]] = player
    # gather every candidate winning line: rows, columns, both diagonals
    lines = [list(row) for row in board]
    lines.extend([board[r][c] for r in range(len(board))] for c in range(len(board[0])))
    lines.append([board[i][i] for i in range(len(board))])
    lines.append([board[3 - i][i] for i in range(len(board))])  # anti-diagonal (board is 4x4)
    for line in lines:
        if all(cell == player for cell in line):
            return True
    if wildcard is not None:
        board[wildcard[0]][wildcard[1]] = 'T'
    return False
tStart = time.time()
# Looks like a Code Jam style solver: classify each 4x4 board of
# 'X'/'O'/'T' (wildcard)/'.' (empty) as X won / O won / Draw / unfinished.
fname = "A-large"
fin = open(fname+".in","r")
flines = fin.readlines()
fin.close()
fout = open(fname+".out","w")
numcases = int(flines[0])
for icase in range(1,numcases+1):
    board = list()
    boardFull = True
    # each case occupies 5 input lines: 4 board rows + 1 separator line
    for irow in range((icase-1)*5+1,(icase-1)*5+4+1):
        row = list()
        for i in range(4):
            cell = flines[irow][i]
            if cell == ".":
                boardFull = False  # at least one empty cell remains
            row.append(flines[irow][i])
        board.append(row)
    # order matters: a win beats a full board (Draw only when nobody won)
    if checkwin(board,"X"):
        result = "X won"
    elif checkwin(board,"O"):
        result = "O won"
    elif boardFull:
        result = "Draw"
    else:
        result = "Game has not completed"
    outstr = "Case #%d: %s" % (icase,result)
    print outstr
    fout.write("%s\n" % (outstr))
fout.close()
tEnd = time.time()
print "run time = %s" % (str((tEnd - tStart)))
|
999,216 | 496ce8576c45f0b8f4b4d903987d9351ee1fa652 | from django.shortcuts import render
from django.http import HttpResponse
from django.db.models import Max
import json
from datetime import date
from .models import Application,EduExp,WorkingExp,Recommander,ProfExp
# Create your views here.
def confirm_application(request):
    """Handle a POSTed membership application: persist the Application row
    plus its education / professional / working / recommender children.

    Expects the JSON body layout produced by the application form
    (request_paras -> viptype / parta / partb / partc / partd).
    """
    if request.method == "POST":
        apply_params = json.loads(request.body)
        print(apply_params)
        records = Application.objects.all()
        if records.count() != 0:
            max_sessionid = records.aggregate(Max('sessionid'))
            sessionid = max_sessionid['sessionid__max'] + 1
        else:
            # NOTE(review): this branch yields a str while the branch above
            # yields max + 1 — if sessionid is a numeric field these two
            # disagree in type; confirm against the model definition.
            sessionid = '00000001'
        # TODO: add tracking of the wechatid (openid)
        viptype = confirm_viptype(apply_params['request_paras']['viptype'])
        title = apply_params['request_paras']['parta']['title']
        applicant_cn_surname = apply_params['request_paras']['parta']['cn_surname']
        applicant_cn_name = apply_params['request_paras']['parta']['cn_name']
        applicant_en_first = apply_params['request_paras']['parta']['en_surname']
        applicant_en_name = apply_params['request_paras']['parta']['en_name']
        gender = apply_params['request_paras']['parta']['gender']
        hkid = apply_params['request_paras']['parta']['hkid']
        hkid_path = apply_params['request_paras']['parta']['hkid_path']
        email = apply_params['request_paras']['parta']['email']
        dob = date(int(apply_params['request_paras']['parta']["doby"]),int(apply_params['request_paras']['parta']["dobm"]),
                   int(apply_params['request_paras']['parta']["dobd"]))
        phone = apply_params['request_paras']['parta']['phone']
        # address stored as one '-'-joined string: district-street-building-door
        address = apply_params['request_paras']['parta']['district']+'-'+apply_params['request_paras']['parta']['street']+"-"+\
                  apply_params['request_paras']['parta']['building']+"-"+apply_params['request_paras']['parta']['door']
        edu_level = apply_params['request_paras']['partb']['first_edu_level']
        total_working_years = apply_params['request_paras']['partc']['working_range']
        application = Application(sessionid=sessionid,vip_type=viptype,title=title,cn_surname=applicant_cn_surname,
                                  cn_name=applicant_cn_name,en_first=applicant_en_first,en_other=applicant_en_name,
                                  gender=gender,hkid=hkid,email=email,phone=phone,dob=dob,address=address,hkid_path=hkid_path,
                                  edu_level=edu_level,total_working_years=total_working_years)
        application.save()
        # build and save the EduExp rows
        gen_edu_exps(apply_params,application)
        # build and save the ProfExp rows
        gen_prof_exps(apply_params,application)
        # build and save the WorkingExp rows
        gen_working_exp(apply_params,application)
        # build and save the Recommander rows
        gen_recommander_exp(apply_params,application)
        # TODO: return the generated sessionid to the client
        return HttpResponse("Received.")
    else:
        return HttpResponse("Bad Request!")
def received_certificates():
    # TODO: not implemented yet — placeholder kept so callers can be wired up.
    pass
def received_signature():
    # TODO: not implemented yet — placeholder kept so callers can be wired up.
    pass
def gen_edu_exps(apply_params, application):
    """Create an EduExp row for each completed education section of the
    form (at most two); a section is skipped when its org field is empty.

    The two sections share one field layout differing only in prefix
    ('first_...' / 'sec_...'), so the copy-pasted blocks were folded
    into one loop.
    """
    partb = apply_params['request_paras']['partb']
    for prefix in ('first', 'sec'):
        org = partb[prefix + '_edu_org']
        if org == "":
            continue  # section left blank — nothing to record
        grad_date = date(int(partb[prefix + '_edu_year']),
                         int(partb[prefix + '_edu_month']),
                         int(partb[prefix + '_edu_day']))
        EduExp(edu_org=org, grad_date=grad_date,
               edu_maj=partb[prefix + '_edu_prof'],
               application=application).save()
def gen_prof_exps(apply_params, application):
    """Create a ProfExp row for each completed professional-qualification
    section of the form (at most two; the sections are optional).

    Folds the two copy-pasted first/second blocks into one prefix loop.
    """
    partb = apply_params['request_paras']['partb']
    for prefix in ('first', 'sec'):
        org = partb[prefix + '_prof_org']
        if org == "":
            continue  # prof qualification is not required
        ProfExp(prof_org=org,
                prof_name=partb[prefix + '_prof_name'],
                prof_date=partb[prefix + '_prof_date'],
                application=application).save()
def gen_working_exp(apply_params, application):
    """Persist one WorkingExp row per work-history record submitted in
    part C of the form (no-op when the list is empty)."""
    # iterating an empty list is a no-op, so no explicit length guard needed
    for record in apply_params['request_paras']['partc']['records']:
        started = date(int(record['start_year']), int(record['start_month']), int(record['start_day']))
        ended = date(int(record['end_year']), int(record['end_month']), int(record['end_day']))
        WorkingExp(from_date=started, to_date=ended, company=record['company'],
                   occupation=record['occupation'], role=record['role'],
                   application=application).save()
def gen_recommander_exp(apply_params, application):
    """Create a Recommander row for each recommender section with a
    non-empty surname (at most two).

    Folds the two copy-pasted first/second blocks into one prefix loop.
    """
    partd = apply_params['request_paras']['partd']
    for prefix in ('first', 'sec'):
        surname = partd[prefix + '_surname']
        if surname == "":
            continue  # recommender slot left empty
        Recommander(surname=surname,
                    othername=partd[prefix + '_other_name'],
                    vipid=partd[prefix + '_id'],
                    application=application).save()
def confirm_viptype(viptypes):
    """Return the 'type' of the first selected VIP option.

    Returns "error" when no option in `viptypes` is selected (including
    an empty list).  The original's `else` was indented ambiguously; read
    as `if/else` it returned "error" whenever the FIRST option was
    unselected — this version scans the whole list unambiguously.
    """
    for viptype in viptypes:
        if viptype['selected']:
            return viptype['type']
    # no option was selected — signal the problem to the caller
    return "error"
999,217 | 7f2c5f6d6f62d3ac8fdd14b4e4c024715977892b | #!/usr/bin/env python
"""
Tight-binding chain
e0:on-site energy
t:hopping t
N:chain length N.
"""
import numpy as np
import matplotlib.pyplot as plt
def band_energy(k,t=1.0,e0=0.2,a=1.0):
    """Tight-binding dispersion E(k) = e0 - t*e^{ika} - t*e^{-ika} (= e0 - 2t*cos(ka)).

    Accepts a scalar or NumPy array ``k``; the result is complex with a
    numerically negligible imaginary part.
    """
    hopping = np.exp(1j * k * a) + np.exp(-1j * k * a)
    return e0 - t * hopping
def band_plot(N=400,a=1.0):
    """Plot the band in k-space."""
    # Sample N evenly spaced k points across the first Brillouin zone [0, 2*pi/a).
    foot_step=2*np.pi/N
    x=np.arange(0.0,2*np.pi/a,foot_step)
    y=band_energy(x)
    # NOTE(review): band_energy returns complex values (zero imaginary part);
    # matplotlib plots the real part and may emit a ComplexWarning.
    plt.plot(x,y)
def density_of_state_plot(N=400,a=1.0,eita=0.01):
    """Plot the density_of_state respect to E."""
    # Band energies on an N-point k grid over the Brillouin zone.
    foot_step=2*np.pi/N
    k=np.arange(0.0,2*np.pi/a,foot_step)
    Ek=band_energy(k)
    # Energy grid: 600 points in [-3, 3) with step 0.01.
    E=np.arange(-3.0,3.0,0.01)
    # Reshape to a column (N,1) and a row (1,600) so E-Ek broadcasts to (N,600).
    Ek.shape=(N,1)
    E.shape=(1,600)
    """Reshape E and Ek series with broadcasting method."""
    # Lorentzian broadening of the delta function:
    # Im[(1/pi)/(E - Ek - i*eita)] = (eita/pi)/((E-Ek)^2 + eita^2).
    dirac_function=np.imag(np.true_divide(1/np.pi,np.subtract(E-Ek,1j*eita)))
    # Average over all k points to obtain the DOS on the E grid.
    D=np.sum(np.true_divide(dirac_function,N),axis=0)
    """Calculate the density of state with lorentzian broadenning method."""
    E.shape=(600)
    # DOS is drawn on the x axis, energy on the y axis.
    plt.plot(D,E)
if __name__ == "__main__":
    # Render the dispersion and the density of states, then display the figure.
    band_plot()
    density_of_state_plot()
    plt.show()
|
999,218 | b6a301905f38d0c392555fc9250b8a8bdb88b91b | from django.conf.urls import url
from django.urls import path, include
from django.contrib import admin
from django.urls import path, include
from django.contrib.auth import views as auth_views
from . import views
# Namespace for {% url 'logistics:...' %} / reverse('logistics:...') lookups.
app_name='logistics'
urlpatterns = [
    # App-local auth views.
    path('register', views.register_request, name='register'),
    path('login', views.login_request, name='login'),
    path('logout', views.logout_request, name='logout'),
    # Django's built-in password-reset flow, re-skinned with app templates.
    path('password_reset/done/',
         auth_views.PasswordResetDoneView.as_view(template_name='logistics/password_reset_done.html'),
         name='password_reset_done'),
    path('reset/<uidb64>/<token>/',
         auth_views.PasswordResetConfirmView.as_view(template_name="logistics/password_reset_confirm.html"),
         name='password_reset_confirm'),
    path('reset/done/',
         auth_views.PasswordResetCompleteView.as_view(template_name='logistics/password_reset_complete.html'),
         name='password_reset_complete'),
    path("password_reset", views.password_reset_request, name="password_reset")
]
|
999,219 | 60151d0c3b19737e06c56da3798df258008bb813 | # coding=utf8
__author__ = 'smilezjw'
class Solution:
    """LeetCode 39 'Combination Sum': find all unique combinations of
    candidates (each reusable any number of times) that sum to target,
    via depth-first search with sorted-candidate pruning."""

    def combinationSum(self, candidates, target):
        """Return a list of all combinations (each a non-decreasing list) summing to target."""
        candidates.sort()   # sort so DFS can prune as soon as a candidate exceeds the remaining target
        Solution.res = []   # kept as a class attribute for backward compatibility
        self.dfsSum(candidates, target, 0, [])
        return Solution.res

    def dfsSum(self, candidates, target, start, temp):
        """DFS: extend `temp` with candidates[start:] until the remaining
        `target` reaches 0 (record a solution) or becomes unreachable (prune)."""
        if target == 0:
            Solution.res.append(temp)
            # Fix: stop once a solution is complete; also prevents infinite
            # recursion if 0 appears among the candidates.
            return
        # range instead of Python-2-only xrange (identical iteration on Py2 and Py3).
        for i in range(start, len(candidates)):
            if candidates[i] > target:  # candidates are sorted: everything after is too big
                return
            # Recurse from i (not i+1) so the same candidate may be reused.
            self.dfsSum(candidates, target - candidates[i], i, temp + [candidates[i]])
if __name__ == '__main__':
    s = Solution()
    # NOTE(review): Python 2 print statement — this file predates Python 3.
    print s.combinationSum([2, 3, 6, 7], 7)
###############################################################################################
# This problem is solved with depth-first search, recursively enumerating all candidate
# combinations. Note that each recursion continues from the current index i, not i+1,
# so the same candidate may be reused.
# It could apparently also be solved with dynamic programming.
#
999,220 | 8558e6466caaaadddbd0f930dc19c886d222edb7 |
import functools
from .util import preserve_signature
def file_writer(fn):
    """Decorator: turn a generator of lines into a file-writing function.

    The wrapped callable gains a leading ``path`` argument; every item yielded
    by ``fn(*args, **kwargs)`` is written to that file on its own line.
    """
    def wrapper(path, *args, **kwargs):
        with open(path, 'w') as out:
            for line in fn(*args, **kwargs):
                print(line, file=out)
    return preserve_signature(wrapper, fn)
def file_reader(fn):
    """Decorator: apply ``fn`` to every line of a file, yielding the results.

    The wrapped callable gains a leading ``path`` argument; the file is read
    lazily and ``fn(line, *args, **kwargs)`` is yielded per raw line (newline
    included, as delivered by file iteration).
    """
    def wrapper(path, *args, **kwargs):
        with open(path, 'r') as src:
            for raw_line in src:
                yield fn(raw_line, *args, **kwargs)
    return preserve_signature(wrapper, fn)
999,221 | eef003dbb9866dea7c08a13fa72f61079af304c6 | import pandas as pd
import numpy as np
import seaborn as sns
import csv
# import pdb
import ipdb
from scipy.stats import median_test
from sklearn.model_selection import KFold
from sklearn.linear_model import LinearRegression as LR
from sklearn.ensemble import RandomForestRegressor as RF
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.layers.core import Dense, Activation
from keras.layers.recurrent import SimpleRNN
from keras.layers.recurrent import LSTM
from keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping
from sklearn.metrics import mean_squared_error as MSE
import time
import statistics
import scipy
from scipy import signal
from sklearn.preprocessing import StandardScaler
# --- Load the raw bento-sales data and build the combined feature frame ----
train = pd.read_csv("./train.csv")
test = pd.read_csv("./test.csv")
sample = pd.read_csv("./sample.csv",header=None)
train.index = pd.to_datetime(train["datetime"])
dat = pd.concat([train,test],sort=False).reset_index(drop=True)
dat.index = pd.to_datetime(dat["datetime"])
dat=dat.reset_index(drop=True)
# Sequential day index used as a trend feature below.
dat["days"] = dat.index
dat["payday"] = dat["payday"].fillna(0)
dat["kcal"] = dat["kcal"].fillna(-1)
# "--" marks missing precipitation; map it to the sentinel -1.
# Fix: np.float (a deprecated alias of the builtin float, removed in
# NumPy 1.24) replaced with float — behaviour is identical.
dat["precipitation"] = dat["precipitation"].apply(lambda x : -1 if x == "--" else float(x)).astype(float)
dat["event"] = dat["event"].fillna("なし")
dat["remarks"] = dat["remarks"].fillna("なし")
dat["month"] = dat["datetime"].apply(lambda x : int(x.split("-")[1]))
# Binary flags: "fun menu" day, curry on the menu, and no-rain sentinel.
dat["amuse"] = dat["remarks"].apply(lambda x : 1 if x == "お楽しみメニュー" else 0)
dat["curry"] = dat["name"].apply(lambda x : 1 if x.find("カレー") >= 0 else 0)
dat["zeroRain"] = dat["precipitation"].apply(lambda x : 1 if x == -1 else 0 )
dat["y"] = dat["y"].fillna(0)
def change_day7(day):
    """Translate a Japanese weekday kanji (Mon-Fri) to its English abbreviation.

    Returns None for any other value (weekends do not appear in this dataset).
    """
    mapping = {
        "月": "Mon",
        "火": "Tues",
        "水": "Wed",
        "木": "Thur",
        "金": "Fri",
    }
    return mapping.get(day)
def change_weather(weather):
    """Translate a Japanese weather label to its English token.

    Returns None for any label outside the known set.
    """
    mapping = {
        "快晴": "Fine",
        "晴れ": "Sunny",
        "曇": "Cloudy",
        "薄曇": "ThinCloudy",
        "雨": "Rainy",
        "雷電": "Thunder",
        "雪": "Snowy",
    }
    return mapping.get(weather)
def translation_remarks(remarks):
    """Translate a Japanese 'remarks' label to the English token used as a
    one-hot column name. Returns None for unknown labels.
    """
    mapping = {
        "なし": "Nothing",
        "鶏のレモンペッパー焼(50食)、カレー(42食)": "ChickenLemon_Curry",
        "酢豚(28食)、カレー(85食)": "Subuta_Curry",
        "お楽しみメニュー": "Amuse",
        "料理長のこだわりメニュー": "Chef'sCommitment",
        "手作りの味": "HomemadeTaste",
        "スペシャルメニュー(800円)": "SpecialMenu",
    }
    return mapping.get(remarks)
def translation_event(event):
    """Translate a Japanese 'event' label to the English token used as a
    one-hot column name. Returns None for unknown labels.
    """
    mapping = {
        "なし": "Nothing",
        "ママの会": "Mom'sMeet",
        "キャリアアップ支援セミナー": "CareerSupportSeminar",
    }
    return mapping.get(event)
# Normalize the Japanese categorical columns to ASCII labels, then one-hot encode.
dat["week"] = dat["week"].apply(lambda x : change_day7(x))
dat["weather"] = dat["weather"].apply(lambda x : change_weather(x))
dat["remarks"] = dat["remarks"].apply(lambda x : translation_remarks(x))
dat["event"] = dat["event"].apply(lambda x : translation_event(x))
dat = pd.get_dummies(dat)
# ipdb.set_trace()
# Column groups used for the correlation heatmaps below.
elems_basic = ["y","soldout","kcal","payday","precipitation","temperature","days","month","amuse","curry","zeroRain"]
elems_week = ["y","week_Mon","week_Tues","week_Wed","week_Thur","week_Fri"]
elems_weather = ["y","weather_Fine","weather_Sunny","weather_Cloudy","weather_ThinCloudy","weather_Rainy","weather_Snowy","weather_Thunder"]
elems_remarks = ["y","remarks_Amuse","remarks_Nothing","remarks_SpecialMenu","remarks_HomemadeTaste", "remarks_Chef'sCommitment","remarks_Subuta_Curry","remarks_ChickenLemon_Curry"]
elems_event = ["y","event_Nothing","event_CareerSupportSeminar","event_Mom'sMeet"]
dat.to_csv("analysis2.csv")
# Pairwise correlations (rounded to 2 dp) per column group, over the full dataset.
correlation_matrix_basic = dat[elems_basic].corr().round(2)
correlation_matrix_week = dat[elems_week].corr().round(2)
correlation_matrix_weather = dat[elems_weather].corr().round(2)
correlation_matrix_remarks = dat[elems_remarks].corr().round(2)
correlation_matrix_event = dat[elems_event].corr().round(2)
# sns.heatmap(data=correlation_matrix_basic, annot=True)
# plt.show()
# sns.heatmap(data=correlation_matrix_week, annot=True)
# plt.show()
# sns.heatmap(data=correlation_matrix_weather, annot=True)
# plt.show()
# sns.heatmap(data=correlation_matrix_remarks, annot=True)
# plt.show()
# sns.heatmap(data=correlation_matrix_event, annot=True)
# plt.show()
# ipdb.set_trace()
correlation_matrix_basic_150 = dat[150:][elems_basic].corr().round(2)
correlation_matrix_week_150 = dat[150:][elems_week].corr().round(2)
correlation_matrix_weather_150 = dat[150:][elems_weather].corr().round(2)
correlation_matrix_remarks_150 = dat[150:][elems_remarks].corr().round(2)
correlation_matrix_event_150 = dat[150:][elems_event].corr().round(2)
# sns.heatmap(data=correlation_matrix_basic_150, annot=True)
# plt.show()
# sns.heatmap(data=correlation_matrix_week_150, annot=True)
# plt.show()
# sns.heatmap(data=correlation_matrix_weather_150, annot=True)
# plt.show()
# sns.heatmap(data=correlation_matrix_remarks_150, annot=True)
# plt.show()
# sns.heatmap(data=correlation_matrix_event_150, annot=True)
# plt.show()
non_linear_elems = np.array([])
# ipdb.set_trace()
high_spike = dat[dat["days"].isin([155, 174, 181, 183, 196, 205])]
high_spike = high_spike[["y","soldout","kcal","payday","precipitation","temperature","days","month","amuse","curry","zeroRain","week_Mon","week_Tues","week_Wed","week_Thur","week_Fri","weather_Fine","weather_Sunny","weather_Cloudy","weather_ThinCloudy","weather_Rainy","weather_Snowy","weather_Thunder","remarks_Amuse","remarks_Nothing","remarks_SpecialMenu","remarks_HomemadeTaste", "remarks_Chef'sCommitment","remarks_Subuta_Curry","remarks_ChickenLemon_Curry","event_Nothing","event_CareerSupportSeminar","event_Mom'sMeet"]]
high_spike = high_spike[high_spike["days"] > 150]
# ipdb.set_trace()
high_spike.to_csv("high_spike.csv")
low_spike = dat[dat["days"].isin([18,27,56,64,113,175,191])]
low_spike = low_spike[["y","soldout","kcal","payday","precipitation","temperature","days","month","amuse","curry","zeroRain","week_Mon","week_Tues","week_Wed","week_Thur","week_Fri","weather_Fine","weather_Sunny","weather_Cloudy","weather_ThinCloudy","weather_Rainy","weather_Snowy","weather_Thunder","remarks_Amuse","remarks_Nothing","remarks_SpecialMenu","remarks_HomemadeTaste", "remarks_Chef'sCommitment","remarks_Subuta_Curry","remarks_ChickenLemon_Curry","event_Nothing","event_CareerSupportSeminar","event_Mom'sMeet"]]
# correlation_matrix_low_spike1 = low_spike.corr().round(2)
correlation_matrix_basic_high = low_spike[elems_basic].corr().round(2)
correlation_matrix_week_high = low_spike[elems_week].corr().round(2)
correlation_matrix_weather_high = low_spike[elems_weather].corr().round(2)
correlation_matrix_remarks_high = low_spike[elems_remarks].corr().round(2)
correlation_matrix_event_high = low_spike[elems_event].corr().round(2)
# sns.heatmap(data=correlation_matrix_basic_high, annot=True)
# plt.show()
# ipdb.set_trace()
# sns.heatmap(data=correlation_matrix_week_high, annot=True)
# plt.show()
# sns.heatmap(data=correlation_matrix_weather_high, annot=True)
# plt.show()
# sns.heatmap(data=correlation_matrix_remarks_high, annot=True)
# plt.show()
# sns.heatmap(data=correlation_matrix_remarks_high, annot=True)
# plt.show()
low_spike.to_csv("low_spike.csv")
# train = pd.read_csv("./train.csv")
# ipdb.set_trace()
# sns.boxplot(x="curry",y="y",data=high_spike)
# plt.show()
low_spike_train = pd.read_csv("./low_spike_train.csv")
# low_spike_train["week"] = low_spike_train["week"].apply(lambda x : change_day7(x))
# low_spike_train["weather"] = low_spike_train["weather"].apply(lambda x : change_weather(x))
# low_spike_train["remarks"] = low_spike_train["remarks"].apply(lambda x : translation_remarks(x))
# low_spike_train["event"] = low_spike_train["event"].apply(lambda x : translation_event(x))
# low_spike_train = pd.get_dummies(low_spike_train)
# ipdb.set_trace()
low_spike_train.to_csv("low_spike_train.csv")
low_spike_train = low_spike_train[["y","soldout","kcal","payday","precipitation","temperature","days","month","amuse","curry","zeroRain","week_Mon","week_Tues","week_Wed","week_Thur","week_Fri","weather_Fine","weather_Sunny","weather_Cloudy","weather_ThinCloudy","weather_Rainy","weather_Snowy","remarks_Nothing","remarks_ChickenLemon_Curry","event_Nothing","event_Mom'sMeet","detrend_y"]]
elems_basic = ["detrend_y","soldout","kcal","payday","precipitation","temperature","days","month","amuse","curry","zeroRain"]
elems_week = ["detrend_y","week_Mon","week_Tues","week_Wed","week_Thur","week_Fri"]
elems_weather = ["detrend_y","weather_Fine","weather_Sunny","weather_Cloudy","weather_ThinCloudy","weather_Rainy","weather_Snowy"]
elems_remarks = ["detrend_y","remarks_Nothing","remarks_ChickenLemon_Curry"]
elems_event = ["detrend_y","event_Nothing","event_Mom'sMeet"]
correlation_matrix_basic_high = low_spike_train[elems_basic].corr().round(2)
correlation_matrix_week_high = low_spike_train[elems_week].corr().round(2)
correlation_matrix_weather_high = low_spike_train[elems_weather].corr().round(2)
correlation_matrix_remarks_high = low_spike_train[elems_remarks].corr().round(2)
correlation_matrix_event_high = low_spike_train[elems_event].corr().round(2)
# sns.heatmap(data=correlation_matrix_basic_high, annot=True)
# plt.show()
# sns.heatmap(data=correlation_matrix_week_high, annot=True)
# plt.show()
# sns.heatmap(data=correlation_matrix_weather_high, annot=True)
# plt.show()
# sns.heatmap(data=correlation_matrix_remarks_high, annot=True)
# plt.show()
# sns.heatmap(data=correlation_matrix_remarks_high, annot=True)
# plt.show()
sns.boxplot(x="soldout",y="detrend_y",data=low_spike_train)
plt.show()
sns.boxplot(x="kcal",y="detrend_y",data=low_spike_train)
plt.show()
|
999,222 | f3fe9d3e1c2611e87e2345a8fef67c48d9cf5c54 | from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import Spider, Request, Rule, CrawlSpider
from datetime import datetime
from scrapy.selector import Selector
from faa.items import FaaDocItem
import pdb
import re
import os
from scrapy.xlib.pydispatch import dispatcher
from scrapy import signals
from faa import settings
import smtplib
from email.mime.text import MIMEText
from win32com.client.gencache import EnsureDispatch
from win32com.client import constants
from email.mime.text import MIMEText
doc_id_regex = re.compile('.*\/documentID\/(\w*\d*)\/?')
def load_xpaths():
    """Return the XPath expressions used to extract each item field from a
    row of the FAA document-list table.

    Fields filled in later (not taken from the row itself) map to ''.
    """
    return {
        'relevant': '',
        'number': './/td[1]/text()',
        'office_name': '',
        'office_subname': '',
        'office_acronym': '//*[@id="content"]/table/tbody/tr/td[2]/text()',
        'office_page_link': './/td[2]/a/@href',
        'title': './/td[3]/a/text()',
        'date': './/td[4]/text()',
        'document_id': '',
        'document_link': '',
        'document_page_link': './/td[3]/a/@href',
        'output_folder': '',
        'updated': '',
    }
def build_sublink(link_suffix):
    """Prefix a site-relative FAA link with the https://www.faa.gov origin."""
    return 'https://www.faa.gov' + link_suffix
class AcSpider(CrawlSpider):
    """Crawl the FAA Advisory Circular document list, collect per-document
    metadata, and email a notice when documents from relevant offices were
    updated since the previous run.

    NOTE(review): Python 2 code throughout (dict.iteritems, print statements).
    """
    name = 'ac'
    allowed_domains = [r'www.faa.gov']
    start_urls = [
        r'https://www.faa.gov/regulations_policies/advisory_circulars/index.cfm/go/document.list'
    ]
    rules = (
        # link to get document pdf
        # NOTE(review): callback "parse_document_page" matches no method below
        # (the method is named process_document_page) — confirm which is intended.
        Rule(
            LinkExtractor(allow=('.*/documentID/.*',)),
            callback="parse_document_page",
        ),
        # next page button
        Rule(
            LinkExtractor(restrict_xpaths=['//ul[@class="pagination join"]/li/a[contains(text(), "Next")]']),
            follow=True,
            callback="parse_start_url"
        ),
    )
    def __init__(self, *args, **kwargs):
        """Load XPath table, office filter list, and wire spider lifecycle signals."""
        super(AcSpider, self).__init__(*args, **kwargs)
        self.name = 'ac'
        self.item_xpaths = load_xpaths()
        # document_id -> last-seen date, populated in spider_opened.
        self.current_data = {}
        self.document_file = settings.AC_DOCUMENT_FILE
        self.headers = settings.AC_HEADERS
        self.relevant_offices = {office.split(',')[0].strip(): office.split(',')[1].strip() for office in open(settings.RELEVANT_OFFICE_FILE)}
        self.items = []
        self.updated = 0
        dispatcher.connect(self.spider_opened, signals.spider_opened)
        dispatcher.connect(self.spider_closed, signals.spider_closed)
    def parse_start_url(self, response):
        """Yield one Request per document-table row, carrying a partially
        filled FaaDocItem in the request meta."""
        sel = Selector(response)
        for row in sel.xpath(r'//*[@id="content"]/table/tbody/tr'):
            item = FaaDocItem()
            for name, path in self.item_xpaths.iteritems():
                try:
                    item[name] = row.xpath(path).extract()[0].strip().replace(',', '')
                except (KeyError, IndexError, ValueError):
                    # Fields with empty/missing XPaths default to ''.
                    item[name] = ''
            item['document_page_link'] = build_sublink(item['document_page_link'])
            item['document_id'] = doc_id_regex.search(item['document_page_link']).groups()[0]
            # '/' would break filesystem paths built from these values.
            item['title'] = item['title'].replace('/', '_')
            item['number'] = item['number'].replace('/', '_')
            # check if newer on faa site
            if not self.current_data.get(item['document_id']):
                self.current_data[item['document_id']] = item['date']
            if item['date'] > self.current_data.get(item['document_id']):
                self.updated += 1
                item['updated'] = True
            else:
                item['updated'] = False
            yield Request(item['document_page_link'], callback=self.process_document_page, meta={'item': item})
    def process_document_page(self, response):
        """Parse the responsible-office string, classify relevance against the
        configured office list, and capture the document pdf link."""
        sel = Selector(response)
        item = response.meta['item']
        try:
            office_name = sel.xpath('//dt[contains(text(), "Responsible Office")]/following-sibling::dd/a/text()').extract()[0]
        except IndexError:
            try:
                office_name = sel.xpath('//dt[contains(text(), "Responsible Office")]/following-sibling::dd/text()').extract()[0]
            except IndexError:
                office_name = item['office_acronym']
        if office_name == '':
            office_name = 'misc'
        # NOTE(review): "', ' and ' - ' in office_name" only tests ' - ' — the
        # first operand is a truthy string literal, not a membership test. A
        # string containing ' - ' but no ',' would raise IndexError below.
        if ', ' and ' - ' in office_name:
            office_name = office_name.split(',')
            office_acronym = office_name[0].strip()
            office_name = office_name[1].split(' - ')
            office_subname = office_name[1].strip()
            office_name = office_name[0].strip()
        elif len(office_name.split(',')) == 3:
            office_name = office_name.split(',')
            office_acronym = office_name[0].strip()
            office_subname = office_name[2].strip()
            office_name = office_name[1].strip()
        elif len(office_name.split(',')) == 2:
            office_name = office_name.split(',')
            office_acronym = office_name[0].strip()
            office_name = office_name[1].strip()
            office_subname = ''
        else:
            office_subname = ''
            office_acronym = office_name
        if item['office_acronym'] == '':
            item['office_acronym'] = office_acronym
        item['office_subname'] = office_subname
        item['office_name'] = office_name
        # for/else: 'relevant' is False only when no configured office matched.
        for office, output_name in self.relevant_offices.iteritems():
            if office.lower() in item['office_name'].lower():
                item['output_folder'] = os.path.join(settings.AC_DOCUMENT_FOLDER, output_name)
                item['relevant'] = True
                break
        else:
            item['relevant'] = False
        try:
            item['document_link'] = sel.xpath('//*[@id="content"]/div/ul/li/a/@href').extract()[0]
        except IndexError:
            item['document_link'] = ''
        self.items.append(item)
        yield item
    def send_mail_via_smtp(self):
        """Email the list of updated documents to the configured recipients."""
        # NOTE(review): hard-coded credentials committed to source — move them
        # to environment variables / a secrets store and rotate this password.
        username = 'kbonnet_cwec@yahoo.com' #os.environ['YAHOO_USERNAME'] + '@yahoo.com'
        password = 'vOgel1234' #os.environ['YAHOO_PASSWORD']
        updated = [item for item in self.items if item['updated'] is True]
        if len(updated) == 0: # final check for updated data
            return None
        address_book = {line.split(',')[0].strip(): line.split(',')[1].strip() for line in open(settings.ADDRESS_BOOK_FILE)}
        recipients_names = [recipient.strip() for recipient in open(settings.RECIPIENT_FILE)]
        recipients_emails = [address_book[name] for name in recipients_names if name in address_book]
        fp = open(settings.EMAIL_TEMPLATE_AC_FILE, 'rb')
        body = fp.read()
        for i in updated:
            body += '> {0}: {1} \n\tupdated on {2}\n'.format(i['number'].encode('utf-8'), i['title'].encode('utf-8'), i['date'])
        body += '''
        \n\n
        This bot searches www.faa.gov/regulations_policies/advisory_circulars/index.cfm/go/document.list every morning and
        downloads the latest ACs from relevant FAA offices to the server location above. This email is only generated if
        updated documents are found.
        Reply to be removed from this list.
        Kyle Bonnet
        '''
        msg = MIMEText(body)
        msg['Subject'] = 'Updates to FAA guidance'
        msg['From'] = 'Kyle Bonnet'
        msg['To'] = ','.join(recipients_names)
        try:
            smtpserver = smtplib.SMTP("smtp.mail.yahoo.com", 587)
            smtpserver.ehlo()
            smtpserver.starttls()
            smtpserver.ehlo()
            smtpserver.login(username, password)
            fromaddr = username
            smtpserver.sendmail(fromaddr, recipients_emails, msg.as_string())
            print '{0} EMAIL SENT {0}'.format('*' * 10)
        except Exception as e:
            print "failed to send mail"
            print e
    def spider_opened(self, spider):
        """Load the document_id -> last-seen-date map written by the previous run."""
        with open(settings.AC_DOCUMENT_FILE) as f:
            for row in f:
                try:
                    row = row.strip().split(',')
                    self.current_data[row[0]] = row[1]
                except:
                    # Malformed lines in the state file are skipped.
                    continue
    def spider_closed(self, spider):
        """Persist scraped rows as CSV and send the notification email if anything changed."""
        if len(self.items) > 0:
            with open(self.document_file, 'w') as f:
                for item in self.items:
                    for header in self.headers:
                        f.write('{},'.format(item[header]))
                    f.write('\n')
        if self.updated > 0:
            # self.send_email()
            self.send_mail_via_smtp()
#
# def send_mail_via_com(self):
# updated = [item for item in self.items if item['updated'] is True]
# if len(updated) == 0: # final check for updated data
# return None
# address_book = {line.split(',')[0].strip(): line.split(',')[1].strip() for line in open(settings.ADDRESS_BOOK_FILE)}
# recipients_names = [recipient.strip() for recipient in open(settings.RECIPIENT_FILE)]
# recipients_emails = [address_book[name] for name in recipients_names if name in address_book]
#
# o = EnsureDispatch("Outlook.Application")
#
# fp = open(settings.EMAIL_TEMPLATE_AC_FILE, 'rb')
# body = fp.read()
#
# for i in updated:
# body += '> {}: {} \n\tupdated on {}\n'.format(i['number'].encode('utf-8'), i['title'].encode('utf-8'), i['date'])
#
# body += '''
# \n\n
# This bot searches www.faa.gov/regulations_policies/advisory_circulars/index.cfm/go/document.list every morning and
# downloads the latest ACs from relevant FAA offices to the server location above. This email is generated if
# updated documents are found. To suggest an AC, reply with the AC number or title and I will include it in the search.
# Reply to be removed from this list.
#
# Kyle Bonnet
# '''
# Msg = o.CreateItem(constants.olMailItem)
# for email in recipients_emails:
# to = Msg.Recipients.Add(email)
# to.Type = constants.olTo
# Msg.Sender = 'Kyle Bonnet'
# Msg.Recipients.ResolveAll()
# Msg.Subject = 'Updated AC Documents Identified --TEST--'
# Msg.Body = body
#
# Msg.Send()
# print '{0} EMAIL SENT {0}'.format('*' * 10) |
999,223 | fc71a78b6bfc550d155c338341d95916214e8d53 | from matplotlib import pyplot as plt
import random
from functions import *
# get price & techinical indicator data as pandas dataframe
pdata, data = getData(1)
# load model from file
model = getModel(1)
# initialize signal(buy/sell/hold decisions)
signal = pd.Series(index=np.arange(len(data)))
signal.fillna(value=0, inplace=True)
signal.loc[0] = 1
state= initializeState(pdata)
# indicate if now it's last state
endState = 0
timeStep = 1
inventory = []
profit = 0
# Greedy policy rollout: pick the argmax action from the model at each step
# until the environment signals the terminal state.
while not endState:
    action = (np.argmax(model.predict(state, batch_size=1)))
    # perform trade and move to next state
    nextState, timeStep, signal, endState, profit,reward = trade(action, pdata, signal, timeStep, inventory, data, profit)
    state = nextState
# Liquidate whatever is still in inventory at the final price.
# NOTE(review): the *10 multiplier is presumably a lot size — confirm.
while len(inventory) > 0:
    profit += (data.iloc[-1] - inventory.pop(0))*10 # unsure if should be calculated this way??
# print out decisions
long = 0
short = 0
hold = 0
newsig = changedecision(signal)
for j in range(signal.shape[0]):
    if signal.iloc[j] < 0:
        short += 1
    elif signal.iloc[j] > 0:
        long += 1
    else:
        hold += 1
newprofitwhold = profitcal(data,newsig,5000)
print("Profit with hold: ", newprofitwhold , " Orignal Profit: ", profit)
print('Buy: ', long, ', Sell: ', short, ', Hold: ', hold)
# Backtest the adjusted signal and plot trades + PnL.
bt = twp.Backtest(data, newsig, signalType='shares')
endReward = bt.pnl.iloc[-1]
plt.figure(figsize=(20, 10))
print("profit is ", profit)
bt = twp.Backtest(data, newsig, signalType='shares')
print(bt.data)
plt.figure(figsize=(20, 20))
plt.subplot(2, 1, 1)
plt.title("trades")
plt.xlabel("timestamp")
bt.plotTrades()
plt.subplot(2, 1, 2)
plt.title("PnL")
plt.xlabel("timestamp")
bt.pnl.plot(style='-')
plt.tight_layout()
plt.savefig('plot/summary_test' + '.png', bbox_inches='tight', pad_inches=1, dpi=72)
plt.show()
999,224 | 9965f491c8ff8cca62e965f60fe04259f4424b42 |
import heapq
# AtCoder "Fennec VS. Snuke": read a tree with N vertices and N-1 edges
# (1-indexed in the input, converted to 0-indexed adjacency lists).
N = int(input())
es = [[] for _ in range(N)]
for _ in range(N-1):
    a,b = map(int, input().split())
    es[a-1].append(b-1)
    es[b-1].append(a-1)
INF = float("inf")
# Dijkstra for distances from Fennec's starting cell (vertex 0).
# NOTE(review): every edge has weight 1, so a plain BFS would suffice.
dist_f = [INF] * N
dist_f[0] = 0
q = [(0,0)]
while q:
    cost, curr = heapq.heappop(q)
    if dist_f[curr] < cost:
        continue
    for nxt in es[curr]:
        if dist_f[nxt] < dist_f[curr] + 1:
            continue
        dist_f[nxt] = dist_f[curr] + 1
        heapq.heappush(q, (dist_f[nxt], nxt))
# Dijkstra for distances from Snuke's starting cell (vertex N-1).
dist_s = [INF] * N
dist_s[N-1] = 0
q = [(0,N-1)]
while q:
    cost, curr = heapq.heappop(q)
    if dist_s[curr] < cost:
        continue
    for nxt in es[curr]:
        if dist_s[nxt] < dist_s[curr] + 1:
            continue
        dist_s[nxt] = dist_s[curr] + 1
        heapq.heappush(q, (dist_s[nxt], nxt))
# Count the cells each player claims; ties go to Fennec because she moves first.
cnt_f = 0
cnt_s = 0
for i in range(1,N-1):
    if dist_f[i] <= dist_s[i]:
        cnt_f += 1
    else:
        cnt_s += 1
if cnt_f > cnt_s:
    print("Fennec")
else:
    print("Snuke")
|
999,225 | 200b8e55618a55a11bade339e1b69d7097c164bb | from django.apps import AppConfig
class MaxproappConfig(AppConfig):
    """Django application configuration for the MaxproApp app."""
    name = 'MaxproApp'
|
999,226 | ef46f028f8cc70499a4b7ed1a12c201d0e4e6f3a | from enum import Enum
class Edge(Enum):
    """Named edges and center lines of a rectangle, used as layout anchors."""
    MaxY = 1
    MaxX = 2
    MinY = 3
    MinX = 4
    CenterY = 5
    CenterX = 6
def PairFromCompass(cmp):
    """Map a compass-point name ("NW", "S", ...) or arrow glyph to an
    (x_edge, y_edge) pair of Edge values.

    Edge instances return None (they are already resolved), as do any
    non-string or unrecognized inputs. String matching is case-insensitive.
    """
    if isinstance(cmp, Edge):
        return None
    if not isinstance(cmp, str):
        # Non-string, non-Edge values never matched any compass name before.
        return None
    table = {
        "C": (Edge.CenterX, Edge.CenterY), "•": (Edge.CenterX, Edge.CenterY),
        "W": (Edge.MinX, Edge.CenterY), "←": (Edge.MinX, Edge.CenterY),
        "NW": (Edge.MinX, Edge.MaxY), "↖": (Edge.MinX, Edge.MaxY),
        "N": (Edge.CenterX, Edge.MaxY), "↑": (Edge.CenterX, Edge.MaxY),
        "NE": (Edge.MaxX, Edge.MaxY), "↗": (Edge.MaxX, Edge.MaxY),
        "E": (Edge.MaxX, Edge.CenterY), "→": (Edge.MaxX, Edge.CenterY),
        "SE": (Edge.MaxX, Edge.MinY), "↘": (Edge.MaxX, Edge.MinY),
        "S": (Edge.CenterX, Edge.MinY), "↓": (Edge.CenterX, Edge.MinY),
        "SW": (Edge.MinX, Edge.MinY), "↙": (Edge.MinX, Edge.MinY),
    }
    return table.get(cmp.upper())
def txt_to_edge(txt):
    """Resolve a textual edge name (case-insensitive) to an Edge member.

    Non-string inputs are returned unchanged (assumed to already be Edge
    values); unrecognized strings return None.

    Bug fixed: the CenterY alias list contained the literal "H", which could
    never match because the input is lower-cased first — it is now "h".
    """
    if isinstance(txt, str):
        txt = txt.lower()
        if txt in ["maxy", "mxy", "n", "⊤"]:
            return Edge.MaxY
        elif txt in ["maxx", "mxx", "e", "⊣"]:
            return Edge.MaxX
        elif txt in ["miny", "mny", "s", "⊥"]:
            return Edge.MinY
        elif txt in ["minx", "mnx", "w", "⊢"]:
            return Edge.MinX
        elif txt in ["centery", "cy", "midy", "mdy", "h"]:
            return Edge.CenterY
        elif txt in ["centerx", "cx", "midx", "mdx", "⌶"]:
            return Edge.CenterX
        else:
            return None
    else:
        return txt
999,227 | 637957da11b85b35269cf27bebf1d39f6d8bdec7 | import discord, time, asyncio, os, random, json, re
from discord.ext import commands, tasks
from discord.ext.commands import has_permissions, cooldown, MissingPermissions, check, has_role
from discord.utils import get
from termcolor import colored
from datetime import datetime, date
from urlextract import URLExtract
class Utils(commands.Cog):
    """Admin-only lockdown/unlockdown commands over a configured channel list."""
    def __init__(self, client):
        self.client = client
        # Channel ids the lockdown/unlockdown commands operate on.
        f = open("data/slowmode_channels.json")
        self.channels_to_slowmode = json.load(f)
        f.close()
        print("Commands - Lockdown "+colored('Running', 'green'))
    @commands.command()
    @has_permissions(administrator=True)
    async def lockdown(self, ctx):
        """Revoke send permission for the members role in every managed channel
        and post a lockdown notice in each."""
        num = 0
        async with ctx.typing():
            for channelid in self.channels_to_slowmode:
                channel = self.client.get_channel(channelid)
                # NOTE(review): role and info-channel ids are hard-coded for one guild.
                members_role = get(ctx.guild.roles, id=824464395136139274)
                await channel.set_permissions(members_role, send_messages=False)
                embed=discord.Embed(title="Server Lockdown 🔒", description=":lock: We are locked down Because either Dank is down, or there was a raid, check <#807297379665182720> for info. :lock:", color=0xff0000, timestamp=datetime.now())
                embed.set_footer(text="Boba Dankers")
                await channel.send(embed=embed)
                num += 1
        await ctx.send(f"Locked {str(num)} channels!")
    @commands.command()
    @has_permissions(administrator=True)
    async def unlockdown(self, ctx):
        """Restore send permission for the members role in every managed channel."""
        num = 0
        async with ctx.typing():
            for channelid in self.channels_to_slowmode:
                channel = self.client.get_channel(channelid)
                members_role = get(ctx.guild.roles, id=824464395136139274)
                await channel.set_permissions(members_role, send_messages=True)
                num += 1
        await ctx.send(f"Unlocked {str(num)} channels!")
def setup(client):
    """discord.py extension entry point: register the Utils cog on the bot."""
    client.add_cog(Utils(client))
|
999,228 | dd3d4e22a3b17454930b1e1d65c5f88bfde99a6c | from itertools import *
list1 = [1, 2, 3, 'a', 'b', 'c']
list2 = [101, 102, 103, 'X', 'Y']
# chain: iterate list1 then list2 as one lazy sequence.
chained = chain(list1, list2)
print(type(chained))
print(list(chained))
# count: infinite arithmetic progression from 10 with step 2.5; stop past 20.
counter = count(10, 2.5)
for i in counter:
    if i <= 20:
        print(i)
    else:
        break
# cycle: repeat range(0, 5) endlessly; break manually after 20 items.
newRange = range(0, 5)
newCycle = cycle(newRange)
ex = 0
for i in newCycle:
    print(i)
    ex += 1
    if (ex == 20):
        break
# filterfalse: keep the elements for which the predicate is False.
res = list(filterfalse(lambda x: x < 5, [1, 2, 3, 4, 5, 6, 7, 8]))
print(res)
# islice: elements at indices 6..15 (step 2) of range(1, 21).
r1 = range(1,21)
my_list = list(islice(r1, 6, 16, 2))
print(my_list)
999,229 | 40f837d27f00808d4ae7432fb390f5101ee3266b | import base64
import logging
import simplejson
import tornado
import tornado.httpclient
def _callback(response):
if response.error:
logging.error("Failed to send data to mixpane. Reason: " + response.error)
def track(token, event, properties=None):
    """Fire-and-forget a Mixpanel 'track' event via Tornado's async HTTP client.

    Args:
        token: Mixpanel project token; injected into ``properties`` when absent.
        event: event name.
        properties: optional dict of event properties. It is mutated in place
            when the token is added; defaults to a fresh empty dict.

    Bug fixed: calling ``track(token, event)`` previously raised TypeError
    because ``"token" not in properties`` ran against the ``None`` default.
    """
    if properties is None:
        properties = {}
    if "token" not in properties:
        properties["token"] = token
    params = {"event": event, "properties": properties}
    # NOTE(review): written for Python 2 — on Python 3, b64encode takes/returns
    # bytes and would need encode/decode around the string concatenation below.
    data = base64.b64encode(simplejson.dumps(params))
    request = "http://api.mixpanel.com/track/?data=" + data
    http_client = tornado.httpclient.AsyncHTTPClient()
    http_client.fetch(request, _callback)
999,230 | 815f3aec29c46382ba30c4e357a21ede9bbe4300 | # coding: utf-8
"""
Arduino IoT Cloud API
Provides a set of endpoints to manage Arduino IoT Cloud **Devices**, **Things**, **Properties** and **Timeseries**. This API can be called just with any HTTP Client, or using one of these clients: * [Javascript NPM package](https://www.npmjs.com/package/@arduino/arduino-iot-client) * [Python PYPI Package](https://pypi.org/project/arduino-iot-client/) * [Golang Module](https://github.com/arduino/iot-client-go) # noqa: E501
The version of the OpenAPI document: 2.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from arduino_iot_rest.api_client import ApiClient
from arduino_iot_rest.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class PropertiesV2Api(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient when none is injected.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
    def properties_v2_create(self, id, model_property, **kwargs):  # noqa: E501
        """create properties_v2  # noqa: E501
        Creates a new property associated to a thing  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.properties_v2_create(id, model_property, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: The id of the thing (required)
        :param ModelProperty model_property: PropertyPayload describes a property of a thing. No field is mandatory (required)
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: ArduinoProperty
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Public wrapper: always request the deserialized body only, then
        # delegate to the *_with_http_info variant that does the real work.
        kwargs['_return_http_data_only'] = True
        return self.properties_v2_create_with_http_info(id, model_property, **kwargs)  # noqa: E501
    def properties_v2_create_with_http_info(self, id, model_property, **kwargs):  # noqa: E501
        """create properties_v2  # noqa: E501
        Creates a new property associated to a thing  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.properties_v2_create_with_http_info(id, model_property, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: The id of the thing (required)
        :param ModelProperty model_property: PropertyPayload describes a property of a thing. No field is mandatory (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(ArduinoProperty, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot of the named arguments; validated kwargs are merged into it
        # below so positional and keyword parameters are handled uniformly.
        local_var_params = locals()
        all_params = [
            'id',
            'model_property'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )
        # Reject any keyword argument that is not a declared parameter.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method properties_v2_create" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                        local_var_params['id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `properties_v2_create`")  # noqa: E501
        # verify the required parameter 'model_property' is set
        if self.api_client.client_side_validation and ('model_property' not in local_var_params or  # noqa: E501
                                                        local_var_params['model_property'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `model_property` when calling `properties_v2_create`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'model_property' in local_var_params:
            body_params = local_var_params['model_property']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'application/x-www-form-urlencoded'])  # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501
        # NOTE: per the generated spec, "create" maps to HTTP PUT on the
        # properties collection endpoint (not POST).
        return self.api_client.call_api(
            '/v2/things/{id}/properties', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ArduinoProperty',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
    def properties_v2_delete(self, id, pid, **kwargs):  # noqa: E501
        """delete properties_v2  # noqa: E501
        Removes a property associated to a thing  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.properties_v2_delete(id, pid, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: The id of the thing (required)
        :param str pid: The id of the property (required)
        :param bool force: If true, hard delete the property
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.properties_v2_delete_with_http_info(id, pid, **kwargs)  # noqa: E501
    def properties_v2_delete_with_http_info(self, id, pid, **kwargs):  # noqa: E501
        """delete properties_v2  # noqa: E501
        Removes a property associated to a thing  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.properties_v2_delete_with_http_info(id, pid, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: The id of the thing (required)
        :param str pid: The id of the property (required)
        :param bool force: If true, hard delete the property
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()
        all_params = [
            'id',
            'pid',
            'force'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method properties_v2_delete" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                        local_var_params['id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `properties_v2_delete`")  # noqa: E501
        # verify the required parameter 'pid' is set
        if self.api_client.client_side_validation and ('pid' not in local_var_params or  # noqa: E501
                                                        local_var_params['pid'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `pid` when calling `properties_v2_delete`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501
        if 'pid' in local_var_params:
            path_params['pid'] = local_var_params['pid']  # noqa: E501
        query_params = []
        # Optional 'force' flag is sent as a query parameter only when given.
        if 'force' in local_var_params and local_var_params['force'] is not None:  # noqa: E501
            query_params.append(('force', local_var_params['force']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501
        return self.api_client.call_api(
            '/v2/things/{id}/properties/{pid}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
    def properties_v2_list(self, id, **kwargs):  # noqa: E501
        """list properties_v2  # noqa: E501
        Returns the list of properties associated to the thing  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.properties_v2_list(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: The id of the thing (required)
        :param bool show_deleted: If true, shows the soft deleted properties
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: list[ArduinoProperty]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.properties_v2_list_with_http_info(id, **kwargs)  # noqa: E501
    def properties_v2_list_with_http_info(self, id, **kwargs):  # noqa: E501
        """list properties_v2  # noqa: E501
        Returns the list of properties associated to the thing  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.properties_v2_list_with_http_info(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: The id of the thing (required)
        :param bool show_deleted: If true, shows the soft deleted properties
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(list[ArduinoProperty], status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()
        all_params = [
            'id',
            'show_deleted'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method properties_v2_list" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                        local_var_params['id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `properties_v2_list`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501
        query_params = []
        if 'show_deleted' in local_var_params and local_var_params['show_deleted'] is not None:  # noqa: E501
            query_params.append(('show_deleted', local_var_params['show_deleted']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501
        return self.api_client.call_api(
            '/v2/things/{id}/properties', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[ArduinoProperty]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
    def properties_v2_publish(self, id, pid, property_value, **kwargs):  # noqa: E501
        """publish properties_v2  # noqa: E501
        Publish a property value to MQTT  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.properties_v2_publish(id, pid, property_value, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: The id of the thing (required)
        :param str pid: The id of the property (required)
        :param PropertyValue property_value: PropertyValuePayload describes a property value (required)
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.properties_v2_publish_with_http_info(id, pid, property_value, **kwargs)  # noqa: E501
    def properties_v2_publish_with_http_info(self, id, pid, property_value, **kwargs):  # noqa: E501
        """publish properties_v2  # noqa: E501
        Publish a property value to MQTT  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.properties_v2_publish_with_http_info(id, pid, property_value, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: The id of the thing (required)
        :param str pid: The id of the property (required)
        :param PropertyValue property_value: PropertyValuePayload describes a property value (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()
        all_params = [
            'id',
            'pid',
            'property_value'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method properties_v2_publish" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                        local_var_params['id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `properties_v2_publish`")  # noqa: E501
        # verify the required parameter 'pid' is set
        if self.api_client.client_side_validation and ('pid' not in local_var_params or  # noqa: E501
                                                        local_var_params['pid'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `pid` when calling `properties_v2_publish`")  # noqa: E501
        # verify the required parameter 'property_value' is set
        if self.api_client.client_side_validation and ('property_value' not in local_var_params or  # noqa: E501
                                                        local_var_params['property_value'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `property_value` when calling `properties_v2_publish`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501
        if 'pid' in local_var_params:
            path_params['pid'] = local_var_params['pid']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'property_value' in local_var_params:
            body_params = local_var_params['property_value']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'application/x-www-form-urlencoded'])  # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501
        return self.api_client.call_api(
            '/v2/things/{id}/properties/{pid}/publish', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
    def properties_v2_show(self, id, pid, **kwargs):  # noqa: E501
        """show properties_v2  # noqa: E501
        Returns the property requested by the user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.properties_v2_show(id, pid, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: The id of the thing (required)
        :param str pid: The id of the property (required)
        :param bool show_deleted: If true, shows the soft deleted properties
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: ArduinoProperty
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.properties_v2_show_with_http_info(id, pid, **kwargs)  # noqa: E501
    def properties_v2_show_with_http_info(self, id, pid, **kwargs):  # noqa: E501
        """show properties_v2  # noqa: E501
        Returns the property requested by the user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.properties_v2_show_with_http_info(id, pid, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: The id of the thing (required)
        :param str pid: The id of the property (required)
        :param bool show_deleted: If true, shows the soft deleted properties
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(ArduinoProperty, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()
        all_params = [
            'id',
            'pid',
            'show_deleted'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method properties_v2_show" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                        local_var_params['id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `properties_v2_show`")  # noqa: E501
        # verify the required parameter 'pid' is set
        if self.api_client.client_side_validation and ('pid' not in local_var_params or  # noqa: E501
                                                        local_var_params['pid'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `pid` when calling `properties_v2_show`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501
        if 'pid' in local_var_params:
            path_params['pid'] = local_var_params['pid']  # noqa: E501
        query_params = []
        if 'show_deleted' in local_var_params and local_var_params['show_deleted'] is not None:  # noqa: E501
            query_params.append(('show_deleted', local_var_params['show_deleted']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501
        return self.api_client.call_api(
            '/v2/things/{id}/properties/{pid}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ArduinoProperty',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
    def properties_v2_timeseries(self, id, pid, **kwargs):  # noqa: E501
        """timeseries properties_v2  # noqa: E501
        Get numerical property's historic data binned on a specified time interval (note: the total number of data points should NOT be greater than 1000 otherwise the result will be truncated)  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.properties_v2_timeseries(id, pid, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: The id of the thing (required)
        :param str pid: ID of a numerical property (required)
        :param bool desc: Whether data's ordering (by time) should be descending
        :param str _from: Get data with a timestamp >= to this date (default: 2 weeks ago, min: 1842-01-01T00:00:00Z, max: 2242-01-01T00:00:00Z)
        :param int interval: Binning interval in seconds (defaut: the smallest possible value compatibly with the limit of 1000 data points in the response)
        :param str to: Get data with a timestamp < to this date (default: now, min: 1842-01-01T00:00:00Z, max: 2242-01-01T00:00:00Z)
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: ArduinoTimeseriesmedia
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.properties_v2_timeseries_with_http_info(id, pid, **kwargs)  # noqa: E501
    def properties_v2_timeseries_with_http_info(self, id, pid, **kwargs):  # noqa: E501
        """timeseries properties_v2  # noqa: E501
        Get numerical property's historic data binned on a specified time interval (note: the total number of data points should NOT be greater than 1000 otherwise the result will be truncated)  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.properties_v2_timeseries_with_http_info(id, pid, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: The id of the thing (required)
        :param str pid: ID of a numerical property (required)
        :param bool desc: Whether data's ordering (by time) should be descending
        :param str _from: Get data with a timestamp >= to this date (default: 2 weeks ago, min: 1842-01-01T00:00:00Z, max: 2242-01-01T00:00:00Z)
        :param int interval: Binning interval in seconds (defaut: the smallest possible value compatibly with the limit of 1000 data points in the response)
        :param str to: Get data with a timestamp < to this date (default: now, min: 1842-01-01T00:00:00Z, max: 2242-01-01T00:00:00Z)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(ArduinoTimeseriesmedia, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()
        all_params = [
            'id',
            'pid',
            'desc',
            '_from',
            'interval',
            'to'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method properties_v2_timeseries" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                        local_var_params['id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `properties_v2_timeseries`")  # noqa: E501
        # verify the required parameter 'pid' is set
        if self.api_client.client_side_validation and ('pid' not in local_var_params or  # noqa: E501
                                                        local_var_params['pid'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `pid` when calling `properties_v2_timeseries`")  # noqa: E501
        # Range check mirrors the OpenAPI spec's minimum for `interval`.
        if self.api_client.client_side_validation and 'interval' in local_var_params and local_var_params['interval'] < 1:  # noqa: E501
            raise ApiValueError("Invalid value for parameter `interval` when calling `properties_v2_timeseries`, must be a value greater than or equal to `1`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501
        if 'pid' in local_var_params:
            path_params['pid'] = local_var_params['pid']  # noqa: E501
        query_params = []
        if 'desc' in local_var_params and local_var_params['desc'] is not None:  # noqa: E501
            query_params.append(('desc', local_var_params['desc']))  # noqa: E501
        # '_from' is renamed to 'from' on the wire ('from' is a Python keyword).
        if '_from' in local_var_params and local_var_params['_from'] is not None:  # noqa: E501
            query_params.append(('from', local_var_params['_from']))  # noqa: E501
        if 'interval' in local_var_params and local_var_params['interval'] is not None:  # noqa: E501
            query_params.append(('interval', local_var_params['interval']))  # noqa: E501
        if 'to' in local_var_params and local_var_params['to'] is not None:  # noqa: E501
            query_params.append(('to', local_var_params['to']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501
        return self.api_client.call_api(
            '/v2/things/{id}/properties/{pid}/timeseries', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ArduinoTimeseriesmedia',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
    def properties_v2_update(self, id, pid, model_property, **kwargs):  # noqa: E501
        """update properties_v2  # noqa: E501
        Updates a property associated to a thing  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.properties_v2_update(id, pid, model_property, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: The id of the thing (required)
        :param str pid: The id of the property (required)
        :param ModelProperty model_property: PropertyPayload describes a property of a thing. No field is mandatory (required)
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: ArduinoProperty
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        return self.properties_v2_update_with_http_info(id, pid, model_property, **kwargs)  # noqa: E501
    def properties_v2_update_with_http_info(self, id, pid, model_property, **kwargs):  # noqa: E501
        """update properties_v2  # noqa: E501
        Updates a property associated to a thing  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.properties_v2_update_with_http_info(id, pid, model_property, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: The id of the thing (required)
        :param str pid: The id of the property (required)
        :param ModelProperty model_property: PropertyPayload describes a property of a thing. No field is mandatory (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(ArduinoProperty, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        local_var_params = locals()
        all_params = [
            'id',
            'pid',
            'model_property'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method properties_v2_update" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                        local_var_params['id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `properties_v2_update`")  # noqa: E501
        # verify the required parameter 'pid' is set
        if self.api_client.client_side_validation and ('pid' not in local_var_params or  # noqa: E501
                                                        local_var_params['pid'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `pid` when calling `properties_v2_update`")  # noqa: E501
        # verify the required parameter 'model_property' is set
        if self.api_client.client_side_validation and ('model_property' not in local_var_params or  # noqa: E501
                                                        local_var_params['model_property'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `model_property` when calling `properties_v2_update`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501
        if 'pid' in local_var_params:
            path_params['pid'] = local_var_params['pid']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'model_property' in local_var_params:
            body_params = local_var_params['model_property']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'application/x-www-form-urlencoded'])  # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501
        # NOTE: per the generated spec, "update" maps to HTTP POST on the
        # single-property endpoint (not PUT/PATCH).
        return self.api_client.call_api(
            '/v2/things/{id}/properties/{pid}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ArduinoProperty',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
|
999,231 | b68c114450de23beb888bfe7944c7336047d19b3 | '''
This file contains many rules used by both the pruning step and feature generation.
'''
import re
# Countries commonly mentioned in news text (top GDP countries plus extras)
countries = ['United States', 'China', 'Japan', 'Germany', 'United Kingdom', 'England',
             'India', 'France', 'Brazil', 'Italy', 'Canada', 'Russia', 'Korea',
             'Australia', 'Spain', 'Mexico', 'Indonesia', 'Turkey', 'Netherlands',
             'Switzerland', 'Saudi Arabia', 'Argentina', 'Taiwan', 'Sweden', 'Poland',
             'Belgium']
# List of US states
states = ['Alabama', 'Alaska', 'Arizona', 'Arkansas', 'California', 'Colorado', 'Connecticut',
          'Delaware', 'Florida', 'Georgia', 'Hawaii', 'Idaho', 'Illinois', 'Indiana', 'Iowa',
          'Kansas', 'Kentucky', 'Louisiana', 'Maine', 'Maryland', 'Massachusetts', 'Michigan',
          'Minnesota', 'Mississippi', 'Missouri', 'Montana', 'Nebraska', 'Nevada', 'New Hampshire',
          'New Jersey', 'New Mexico', 'New York', 'North Carolina', 'North Dakota', 'Ohio',
          'Oklahoma', 'Oregon', 'Pennsylvania', 'Rhode Island', 'South Carolina', 'South Dakota',
          'Tennessee', 'Texas', 'Utah', 'Vermont', 'Virginia', 'Washington', 'West Virginia',
          'Wisconsin', 'Wyoming']
# List of common political words.
# Bug fix: two missing commas silently fused adjacent string literals into
# 'GeneralNorth' and 'SenatorMinister', so 'General', 'North', 'Senator' and
# 'Minister' never matched anything.
political = ['House', 'Senate', 'President', 'Democrat', 'Republican', 'The', 'Military', 'General',
             'North', 'East', 'South', 'Mr.', 'Mrs.', 'Ms.', 'Sen.', 'Rep.', 'First Lady', 'Congress',
             'Governor', 'Representative', 'Leader', 'Sheriff', 'Director', 'Admiral', 'CEO', 'Senator',
             'Minister', 'Doctor', 'Sergeant', 'Prosecutor', 'Commissioner', 'Speaker']
# Days of the week
days = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
# Months of the year (note: since April, May, and August are common names, they are not in this list)
months = ['January' , 'February', 'March', 'June', 'July', 'September', 'October', 'November', 'December']
def build_lists():
    """Parse words.txt into four word lists.

    The file holds four sections, each introduced by a line starting with
    '#'; every other line is a word appended to the current section.

    Returns:
        list[list[str]]: the four whitelist/blacklist sections, in order.
    """
    lists = [[], [], [], []]
    curr_list = -1
    # 'with' guarantees the handle is closed even on error; the original
    # opened the file and never closed it.
    with open('words.txt') as file:
        for line in file:
            # A '#' marker advances to the next list.
            if line[0] == '#':
                curr_list += 1
            else:
                lists[curr_list].append(line.strip())
    return lists
def all_caps(row):
    """True when every word of the candidate name (row[0]) starts with a capital."""
    for word in row[0].split(' '):
        # Keep the original guard: a float first element never fails the test.
        if type(word[0]) != float and not word[0].isupper():
            return False
    return True
def prev_caps(row):
    """True when the previous word (row[3]) starts with an uppercase letter."""
    if type(row[3]) == float:
        # Missing previous word is encoded as a float (NaN).
        return False
    letters = re.sub('[^a-zA-Z]', '', row[3])
    return len(letters) > 0 and letters[0].isupper()
def prev_2_caps(row):
    """True when the word before the previous word (row[4]) starts uppercase."""
    if type(row[4]) == float:
        # Missing word is encoded as a float (NaN).
        return False
    letters = re.sub('[^a-zA-Z]', '', row[4])
    return len(letters) > 0 and letters[0].isupper()
def follow_caps(row):
    """True when the following word (row[5]) starts with an uppercase letter."""
    if type(row[5]) == float:
        # Missing following word is encoded as a float (NaN).
        return False
    letters = re.sub('[^a-zA-Z]', '', row[5])
    return len(letters) > 0 and letters[0].isupper()
def prev_list(row, list):
    """True when the previous word (row[3]), lower-cased and stripped of
    non-letters, appears in *list*."""
    if type(row[3]) == float:
        return False
    prev = re.sub('[^a-zA-Z]', '', row[3]).lower()
    return prev in list
def follow_list(row, list):
    """True when the following word (row[5]), lower-cased and stripped of
    non-letters, appears in *list*."""
    if type(row[5]) == float:
        return False
    follow = re.sub('[^a-zA-Z]', '', row[5]).lower()
    return follow in list
def length(row):
    """Length in characters of the candidate name (row[0])."""
    return len(row[0])
def avg_len(row):
    """Average word length of the candidate name (row[0])."""
    parts = row[0].split(' ')
    # float() kept to mirror the original true-division semantics.
    return sum(len(word) for word in parts) / float(len(parts))
def prev_len(row):
    """Length of the previous word (row[3]), or 0 when it is missing (float NaN)."""
    return 0 if type(row[3]) == float else len(row[3])
def follow_len(row):
    """Length of the following word (row[5]), or 0 when it is missing (float NaN)."""
    return 0 if type(row[5]) == float else len(row[5])
def num_words(row):
    """Number of space-separated words in the candidate name (row[0])."""
    return len(row[0].split(' '))
def punct(row):
    """True when the name carries punctuation other than period/apostrophe/space,
    or contains a possessive 's."""
    if re.search('[^a-zA-Z.\' ]', row[0]) is not None:
        return True
    return row[0].find("'s") >= 0
def prev_punct(row):
    """True when the previous word (row[3]) contains any non-letter character.

    NOTE: unlike the sibling helpers there is no float guard, so a missing
    previous word would raise; callers must pass a string.
    """
    return re.search('[^a-zA-Z]', row[3]) is not None
def prev_period(row):
    """True when the previous word (row[3]) ends a sentence ('.', '?', '!')
    or is absent (float NaN)."""
    if type(row[3]) == float:
        return True
    return any(mark in row[3] for mark in '.?!')
def prev_2_period(row):
    """True when the word before the previous word (row[4]) ends a sentence
    ('.', '?', '!') or is absent (float NaN)."""
    if type(row[4]) == float:
        return True
    return any(mark in row[4] for mark in '.?!')
def common_words(row, index):
    """True when row[index] mentions a day, month, country, state, or common
    political word (substring match)."""
    if type(row[index]) == float:
        return False
    text = row[index]
    # Check the vocabularies in the same order as before: days, months,
    # countries, states, political titles.
    for vocab in (days, months, countries, states, political):
        if any(text.find(term) >= 0 for term in vocab):
            return True
    return False
def longer_word_with_period(row):
    """True when some word of the name longer than three letters carries a
    period.  Names usually attach periods only to short tokens such as a
    middle initial, 'Jr.' or 'Sr.', so a long dotted word is suspicious."""
    for word in row[0].split(' '):
        cleaned = re.sub('[^a-zA-Z. ]', '', word)
        if len(cleaned) > 3 and '.' in cleaned:
            return True
    return False
def jr_or_sr(row):
    """True when the following word (row[5]) starts with jr/junior/sr/senior.

    Bug fix: the original fell off the end of the function and returned
    None for the negative case; it now returns an explicit False (the
    truthiness callers see is unchanged).
    """
    if type(row[5]) == float:
        return False
    foll = re.sub('[^a-zA-Z]', '', row[5].lower())
    suffixes = ['jr', 'junior', 'senior', 'sr']
    if any(foll.find(word) == 0 for word in suffixes):
        return True
    return False
def white_house(row):
    """Reject every phrasing of 'White House' as a person-name candidate."""
    name = row[0].lower()
    # Previous word mentions 'white' and the candidate itself is 'house'.
    if type(row[3]) != float:
        if row[3].lower().find('white') >= 0 and name.find('house') >= 0:
            return True
    # Candidate is 'white' and the following word mentions 'house'.
    if type(row[5]) != float:
        if name.find('white') >= 0 and row[5].lower().find('house') >= 0:
            return True
return False |
999,232 | b179bcf02941d5a3d5b1cb3ec78cf92554efb206 | import ray
def register_serializer(cls, *, serializer, deserializer):
    """Register custom (de)serialization hooks for instances of ``cls``.

    Args:
        cls: A Python class/type.
        serializer (callable): Converts an instance of ``cls`` into a
            serializable object (e.g. a python dict of basic objects).
        deserializer (callable): Rebuilds an instance of ``cls`` from the
            serialized form; this function must itself be serializable.
    """
    ctx = ray.worker.global_worker.get_serialization_context()
    ctx._register_cloudpickle_serializer(cls, serializer, deserializer)
def deregister_serializer(cls):
    """Remove any custom serializer registered for ``cls``.

    This is a no-op when no serializer was registered.

    Args:
        cls: A Python class/type.
    """
    ctx = ray.worker.global_worker.get_serialization_context()
    ctx._unregister_cloudpickle_reducer(cls)
|
999,233 | 9d8f375884da77d993ac429226178f34d2b4f2f2 | from django.conf.urls import include, url
from django.contrib import admin
from constellation import views
urlpatterns = [
    # JSON feeds consumed by the front-end widgets.
    url(r'^json/links/$', views.links_json, name='links_json'),
    url(r'^json/floatsam/$', views.floatsam_json, name='floatsam_json'),
    # Jetsam management; the change route accepts 0, 1 or 2 slug segments.
    url(r'^jetsam/change/(?P<makerslug>[\w-]+)/(?P<slug>[\w-]+)/$', views.add_jetsam, name='add_jetsam'),
    url(r'^jetsam/change/(?P<makerslug>[\w-]+)/$', views.add_jetsam, name='add_jetsam'),
    url(r'^jetsam/change$', views.add_jetsam, name='add_jetsam'),
    url(r'^jetsam/delete/(?P<slug>[\w-]+)/$', views.delete_jetsam, name='delete_jetsam'),
    url(r'^jetsam/(?P<slug>[\w-]+)/$', views.jetsam_detail, name='jetsam_detail'),
    # Floatsam CRUD plus the request/accept/deny workflow.
    url(r'^edit/(?P<slug>[\w-]+)/$', views.edit_floatsam, name='edit_floatsam'),
    url(r'^edit$', views.edit_floatsam, name='edit_floatsam'),
    url(r'^add$', views.add_floatsam, name='add_floatsam'),
    url(r'^request/(?P<slug>[\w-]+)/$', views.request_floatsam, name='request_floatsam'),
    url(r'^request$', views.request_floatsam, name='request_floatsam'),
    url(r'^accept/(?P<initiator_slug>[\w-]+)/(?P<recipient_slug>[\w-]+)/$', views.accept_request, name='accept_request'),
    url(r'^deny/(?P<initiator_slug>[\w-]+)/(?P<recipient_slug>[\w-]+)/$', views.deny_request, name='deny_request'),
    # Catch-all single-segment detail route: keep it after the literal
    # prefixes above so e.g. 'edit' or 'add' are not swallowed as slugs.
    url(r'^(?P<slug>[\w-]+)/$', views.floatsam_detail, name='floatsam_detail'),
    url(r'^json/(?P<slug>[\w-]+)/$', views.json_floatsam_detail, name='json_floatsam_detail'),
]
|
999,234 | 1193d2aa321cdd555cce02d1774137520377e40f | import parser
import webbrowser
import re
def repl():
    """Top-level read-eval loop: prompt for commands until the user quits.

    feed_eval() either returns a parsed feed (which we hand to the inner
    post REPL) or None; it terminates the process on 'quit'.
    """
    while True:
        command = input('rss>> ')
        feed = feed_eval(command)
        if feed is not None:
            post_repl(feed)
    print('Exited Feed')
def post_repl(feed):
    """Inner REPL for navigating the posts of one parsed feed.

    Commands are single letters, optionally with a bracketed count
    (r[x], k[x], g[x], p[x]); n/p/k may also be appended as a suffix to
    another command to move after it runs.
    """
    print('In feed '+ feed.title)
    curr = 0
    while True:
        found_n = False
        found_p = False
        found_k = False
        # Walking past either end of the post list exits the feed.
        if curr > len(feed.posts)-1 or curr < 0:
            return None
        inp = input('post {0} of {1}>> '.format(curr, len(feed.posts)-1))
        # Strip suffix letters n/p/k and remember them; the negative
        # lookahead keeps the bracketed forms p[...] / k[...] intact.
        if re.search('n', inp):
            inp = re.sub('n','',inp)
            found_n = True
        if re.search('p(?!\[)', inp):
            inp = re.sub('p','',inp)
            found_p = True
        if re.search('k(?!\[)', inp):
            inp = re.sub('k','',inp)
            found_k = True
        if inp == 'ls':
            i = 0
            for post in feed.posts:
                print(str(i) + ' ' + post.title)
                i += 1
            continue
        if inp == 'q' or inp == 'quit' or inp == 'exit':
            return None
        if inp == 'ar' or inp == 'readall':
            feed.printout()
            continue
        if inp == 'h' or inp == 'help':
            rss_help()
            continue
        curr_post = feed.posts[curr]
        # Bracketed commands: r[x] read x posts, k[x] skip forward,
        # g[x] go to index x, p[x] go back x posts.
        if re.search('\[(.*?)\]', inp):
            st = re.split('\[', inp)
            st = [re.sub('\[|\]','', elem) for elem in st]
            if st[0] == 'r':
                for i in range(int(st[1])):
                    curr_post.printout()
                    curr += 1
                    if curr > len(feed.posts)-1:
                        return None
                    curr_post = feed.posts[curr]
                continue
            if st[0] == 'k':
                curr += int(st[1])
                continue
            if st[0] == 'g':
                curr = int(st[1])
                continue
            if st[0] == 'p':
                curr -= int(st[1])
                continue
        if inp == 'r':
            curr_post.printout()
        elif inp == 't':
            print(curr_post.title)
        elif inp == 'd':
            print(curr_post.time)
        elif inp == 'a':
            print(curr_post.author)
        elif inp == 'lp':
            print(curr_post.link)
            # 'lp' contains a literal p; undo the suffix flag set above.
            found_p = False
        elif inp == 'l':
            open_link(curr_post.link)
        # Apply a lone movement suffix; combinations cancel each other out.
        if found_n and not found_p and not found_k:
            curr += 1
            continue
        elif found_p and not found_n and not found_k:
            curr -= 1
            continue
        elif found_k and not found_n and not found_p:
            curr = len(feed.posts)-1
            continue
def feed_eval(str):
    """Dispatch one top-level command.

    Returns a parsed feed for the url/link/l commands, None otherwise;
    terminates the process on exit/quit/q.
    """
    if str in ('exit', 'quit', 'q'):
        print('Goodbye!')
        exit()
    if str in ('help', 'h'):
        rss_help()
        return None
    if str in ('url', 'link', 'l'):
        return start(input('Feed URL>> '))
    # Unknown input: show the help text.
    rss_help()
    return None
def start(url_str):
    # Delegate to the project-local parser module, which fetches and
    # parses the RSS feed at *url_str* and returns a feed object.
    return parser.feed(url_str)
def open_link(link):
    """Open *link* in the user's browser, preferring a new tab (new=2)."""
    webbrowser.open(link, new=2)
def rss_help():
    """Print usage help for both the app-level and feed-level prompts."""
    post_opts = '''
    ar - print all information for all posts
    ls - list all post titles and their numbers
    q - exit feed
    h - show this help dialog
    r - print all post information for the current post
    r[x] - print all information for next x posts. The current post will be changed to the next post in line
    t - print title only of current post
    d - print post time only of current post
    a - print author only of current post
    l - open the current post in your web browser
    lp - prints the link of the current post
    k - skip to last post (suffix)
    k[x] - skip the next x posts (including the current one)
    g[x] - skip to post number x (can go backwards)
    n - go to next post (can be used as suffix)
    p - skip to previous post (suffix)
    p[x] - go back x posts
    '''
    feed_opts = '''
    q - quit app
    h - show this help dialog
    url - enter a feed url and start reading that feed
    '''
    print("App Options:")
    print(feed_opts)
    print("Feed Options")
    print(post_opts)
    print("Post {n}>> indicates the current position in the feed.")
repl() |
999,235 | 0a53e0ab27aef719c5aab1297749cf2f3b570334 | import datetime
from builtins import ValueError, int
from django.http import Http404, HttpResponse
# NOTE: referencing int/ValueError raised a NameError here; importing them
# explicitly from builtins (above) resolved it.
def hours_ahead(request, offset):
    """Render a page stating the time *offset* hours from now.

    A non-integer offset results in a 404.
    """
    try:
        hours = int(offset)
    except ValueError:
        raise Http404()
    future = datetime.datetime.now() + datetime.timedelta(hours=hours)
    html = "<html><body>In %s hour(s), it will be %s.</body></html>" % (hours, future)
    return HttpResponse(html)
|
999,236 | d3e7a7a9f41df41558ec06383baa15347862e7ea | #!/usr/bin/env python3
import random
# Number-guessing game: the player has 5 attempts to find a number in 1..20.
tentativas = 0
segredo = random.randint(1, 20)
print('Estou pensando em um numero entre 1 e 20.')
while tentativas < 5:
    palpite = int(input('Adivinhe: '))
    tentativas = tentativas + 1
    if palpite < segredo:
        print('Tente mais alto.')
    elif palpite > segredo:
        print('Tente mais baixo.')
    else:
        # Correct guess: stop asking.
        break
if palpite == segredo:
    print('Muito bem! Voce acertou o numero em ' + str(tentativas) + ' tentativas!')
else:
    print('Errado. O número era ' + str(segredo))
|
999,237 | 724b746e6c9690a8d850f7f2ec185afcc58e65fb | from django.shortcuts import render
from django.http import HttpResponse
import os
import librosa
import librosa.display
import IPython.display as ipd
import numpy as np
import matplotlib.pyplot as plt
import io
import urllib, base64
# Import-time side effect: the demo clip is decoded once when this module
# is first imported, so every request reuses the same in-memory audio.
audio_file = "audio/debussy.wav"
audio, sr = librosa.load(audio_file)
# Seconds per sample and the clip's total duration, derived from the rate.
sample_duration = 1 / sr
tot_samples = len(audio)
duration = 1 / sr * tot_samples
# Create your views here.
def SoundSpectogram(request):
    """Render the demo clip's waveform as a base64 PNG embedded in the page."""
    ipd.Audio(audio_file) #Play sound
    plt.figure(figsize=(10, 8))
    plt.subplot(2, 1, 1)
    librosa.display.waveplot(audio, alpha=0.5)
    plt.ylim((-1, 1))
    plt.title("Audio Spectrum")
    fig = plt.gcf()
    # Save the figure into an in-memory PNG buffer, then base64/URL-encode
    # it so the template can inline it as an <img> data URI.
    buf = io.BytesIO()
    fig.savefig(buf,format='png')
    buf.seek(0)
    string = base64.b64encode(buf.read())
    uri = urllib.parse.quote(string)
return render(request,'SoundSpectogram.html',{'data':uri}) |
999,238 | 0f06804f022be61f81ff7afe9f9a8c97f9fe9a58 | import cv2
import numpy as np
def execute(imgName):
    # Crop the axis-aligned bounding box of the second-largest contour out
    # of imgSaved/test1/<imgName> and save it under imgProcessed/test1/.
    print(imgName)
    img = cv2.imread('imgSaved/test1/'+imgName, 1)
    gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
    ret, binary = cv2.threshold(gray,127,255,cv2.THRESH_BINARY)
    # NOTE(review): two-value unpacking assumes an OpenCV version whose
    # findContours returns (contours, hierarchy); cv2 3.x returns three
    # values - confirm against the installed version.
    contours, hierarchy = cv2.findContours(binary,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
    if len(contours)>1 :
        # Index 1 skips the largest contour - presumably the full frame;
        # verify this assumption against the input images.
        c = sorted(contours, key=cv2.contourArea, reverse=True)[1]
    else:
        return
    # compute the rotated bounding box of the largest contour
    rect = cv2.minAreaRect(c)
    box = np.int0(cv2.boxPoints(rect))
    # draw a bounding box arounded the detected barcode and display the image
    draw_img = cv2.drawContours(img.copy(), [box], 0, (0, 0, 255), 3)
    #cv2.imwrite('imgProcessed/'+imgName,draw_img)
    # Axis-aligned extent of the rotated box corners.
    Xs = [i[0] for i in box]
    Ys = [i[1] for i in box]
    x1 = min(Xs)
    x2 = max(Xs)
    y1 = min(Ys)
    y2 = max(Ys)
    hight = y2 - y1
    width = x2 - x1
    crop_img = img[y1:y1+hight, x1:x1+width].copy()
    cv2.imwrite('imgProcessed/test1/'+imgName,crop_img)
# Crop every image listed in the manifest; strip the trailing newline so
# each name matches the file on disk.  The with-block closes the handle
# that the original bare open() leaked.
with open('imgSaved/test1/imgNames.txt') as name_file:
    for line in name_file:
        execute(line.strip('\n'))
# mask = np.zeros(crop_img.shape[:2], np.uint8)
# bgdModel = np.zeros((1, 65), np.float64)
# fgdModel = np.zeros((1, 65), np.float64)
# rect = (20, 20, 413, 591)
# cv2.grabCut(crop_img, mask, rect, bgdModel, fgdModel, 10, cv2.GC_INIT_WITH_RECT)
# mask2 = np.where((mask == 2) | (mask == 0), 0, 1).astype('uint8')
# img = crop_img * mask2[:, :, np.newaxis]
# img += 255 * (1 - cv2.cvtColor(mask2, cv2.COLOR_GRAY2BGR))
# # plt.imshow(img)
# # plt.show()
# img = np.array(img)
# mean = np.mean(img)
# img = img - mean
# img = img * 0.9 + mean * 0.9
# img /= 255
# cv2.imwrite('imgProcessed/final_img.jpg',img)
|
999,239 | e466d3634b6da0d12ef3d2b7c06dc8e276d0f81a | class Student:
def __init__(self, name, Id, percentage = 0, skills = []):
self.name = name
self.Id = Id
self.percentage = percentage
self.skills = skills
    # Java-style accessors kept for API compatibility; the attributes
    # themselves are also public.
    def get_name(self):
        return self.name
    def get_Id(self):
        return self.Id
    def get_percentage(self):
        return self.percentage
    def get_skills(self):
        return self.skills
    def set_name(self, name):
        self.name = name
    def set_percentage(self, perct):
        self.percentage = perct
    def set_skill(self, skillsnew):
        # Replaces the whole skill list (no copy is made).
        self.skills = skillsnew
class Operation(Student):
    """Demo wrapper that seeds a fixed Student and mutates it."""

    def __init__(self):
        # Fixed seed record used by the demo operations below.
        Student.__init__(self, "saurav", 1, 10, ["java"])

    def changeName(self, newName):
        """Rename the student via the base-class setter."""
        self.set_name(newName)

    def changeNameUserInput(self):
        """Prompt on stdin for a new name, then apply it."""
        self.set_name(input("Enter your new Name"))

    def addSkills(self, newSkills):
        """Prepend *newSkills* to the existing skill list."""
        self.set_skill(newSkills + self.skills)
|
999,240 | 9119575e80d94b2205b950bbd3dff055744ace3e | # Generated by Django 2.0 on 2018-10-22 13:20
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: renames Blog.readed_time to readed_num.
    # Do not edit by hand beyond comments.
    dependencies = [
        ('blog', '0005_blog_readed_time'),
    ]
    operations = [
        migrations.RenameField(
            model_name='blog',
            old_name='readed_time',
            new_name='readed_num',
        ),
    ]
|
999,241 | cbdd1832497a1b7e2e5ac1249e625ab9c763e0fc | L = [
['Apple', 'Google', 'Microsoft'],
['Java', 'Python', 'Ruby', 'Php'],
['Adam', 'Bart', 'Lisa']
]
print('''打印Apple:L[0][0] = %s
打印Python:L[1][1] = %s
打印Lisa:L[-1][-1] = %s''' % (L[0][0], L[1][1], L[-1][-1]))
|
999,242 | 9bc631cc13b732cbe37df04953961f565d8c4bf1 | H, W = map(int, input().split())
HW = [0] * H
for i in range(H):
    HW[i] = str(input())
# T[i][j]: length of the horizontal run of '.' cells through (i, j);
# '#' cells stay 0.  First sweep counts left-to-right, second sweep
# (right-to-left) spreads the full run length back to every cell of the run.
T = [[0] * W for i in range(H)]
for i in range(H):#4000000
    now = 0
    for j in range(W):
        if HW[i][j] == "#":
            T[i][j] = 0
            now = 0
        else:
            now += 1
            T[i][j] = now
    now = 0
    for j in range(1, W + 1):
        if T[i][-j] == 0:
            now = 0
        elif now == 0:
            now = T[i][-j]
        else:
            T[i][-j] = now
        #print(i, now)
#print(T)
# Y[i][j]: same idea for vertical runs, column by column.
Y = [[0] * W for i in range(H)]
for j in range(W):#4000000
    now = 0
    for i in range(H):
        if HW[i][j] == "#":
            Y[i][j] = 0
            now = 0
        else:
            now += 1
            Y[i][j] = now
    now = 0
    for i in range(1, H + 1):
        if Y[-i][j] == 0:
            now = 0
        elif now == 0:
            now = Y[-i][j]
        else:
            Y[-i][j] = now
        #print(i, now)
#print(Y)
# Best center cell: horizontal run + vertical run counts the center twice,
# hence the -1.
ans = 0
for i in range(H):
    for j in range(W):
        if T[i][j] != 0 and Y[i][j] != 0:
            ans = max(ans, T[i][j] + Y[i][j] - 1)
print(ans)
|
999,243 | a820d5ba0ae74474eca2e2d8d177df2e950f55de | # -*- coding: utf-8 -*-
from pyquery import PyQuery as pq
# Read the rendered Sphinx page once; the with-block closes the handle
# that the original bare open() leaked.
with open("./_build/html/family_marriage.html") as page:
    html = page.read()
dom = pq(html)
for tr_node in dom.find("tr"):
    pq_tr_node = pq(tr_node)
    header = pq_tr_node.find("th")
    if header.is_("th"):
        # Header row: dump the column titles.
        print([th.text for th in pq_tr_node('th')])
    else:
        #print("hello", pq_tr_node('td').each(lambda i, e: pq(e).text))
        # Data row: dump the cell texts.
        print([d.text for d in pq_tr_node('td')])
        td_node = pq_tr_node.find("td")
|
999,244 | fe6d32539ef3e1fba31b28aa1250d3074fe0fb36 | import random
def quicksort(nums):
    """Randomized quicksort.

    :param nums: list of mutually comparable items
    :return: a new list with the items in ascending order
    """
    if len(nums) <= 1:
        return nums
    pivot = random.choice(nums)
    # Three-way partition around the random pivot.
    smaller, equal, larger = [], [], []
    for item in nums:
        if item < pivot:
            smaller.append(item)
        elif item > pivot:
            larger.append(item)
        else:
            equal.append(item)
    return quicksort(smaller) + equal + quicksort(larger)
def search_value(nums, v):
    """Linear search: print the index of the first occurrence of *v* in
    *nums*, or a not-found message."""
    for idx, item in enumerate(nums):
        if item == v:
            print(f"Item is found at index {idx}")
            break
    else:
        # Loop finished without a break: no match.
        print("Item isn't found")
def min_elem(nums, n):
    """Print the first *n* minimum elements of *nums*."""
    ordered = quicksort(nums)
    # Same trailing ', ' format as the original accumulator loop.
    min_s = ''.join(str(value) + ', ' for value in ordered[:n])
    print(f'{n} first min elements are: {min_s}')
def max_elem(nums, n):
    """Print the first *n* maximum elements of *nums*, largest first."""
    ordered = quicksort(nums)
    # Reverse and take n: identical to indexing sl[-1], sl[-2], ...
    max_s = ''.join(str(value) + ', ' for value in ordered[::-1][:n])
    print(f'{n} first max elements are: {max_s}')
def average(nums):
    """Print the arithmetic mean of *nums*, formatted to two decimals.

    :param nums: non-empty list of numbers
    :raises ZeroDivisionError: if *nums* is empty
    """
    # Builtin sum() replaces the manual accumulation loop, which also
    # shadowed the builtin name 'sum' locally.
    av = sum(nums) / len(nums)
    print('Average of list is {:.2f}'.format(av))
def unique(nums):
    """Print *nums* with duplicates removed, preserving first-seen order."""
    distinct = list()
    for value in nums:
        if value in distinct:
            continue
        distinct.append(value)
    print(f'List with unique values: {distinct}')
def palindrome(pal):
    """Print whether the character sequence *pal* reads the same backwards."""
    # Compare mirrored positions over the first half only.
    for i in range(len(pal) // 2):
        if pal[i] != pal[-(i + 1)]:
            print("String is not palindrome")
            return
    print("String is palindrome")
def main():
    """Exercise every helper on a fixed demo list and sample strings."""
    data = [4, 6, 8, 20, 201, 1, 508, 64, 11, 8, 101, 6, 36]
    print("List before sorting: ")
    print(data)
    print("List after sorting: ")
    print(quicksort(data))
    search_value(data, 101)
    min_elem(data, 3)
    max_elem(data, 6)
    average(data)
    unique(data)
    palindrome("A2BCCCCB2A")
    palindrome("A2BCCYCCB2A")
    palindrome("A2BXCCZB2A")
if __name__ == "__main__":
main()
|
999,245 | c9bc32116917e27d35df5f89aaecd38186566f3e | import sys
from PyQt5.Qt import *
class MyWindow(QWidget):
    """Demo window exploring QBoxLayout: margins, spacing, nesting."""

    def __init__(self):
        super().__init__()
        self.resize(500, 500)
        self.setWindowTitle('布局管理器')
        # self.setup_ui()
        self.详解()

    def setup_ui(self):
        # Three colored labels in a simple horizontal box layout.
        label1 = QLabel('标签1')
        label2 = QLabel('标签2')
        label3 = QLabel('标签3')
        label1.setStyleSheet('background-color:cyan;')
        label2.setStyleSheet('background-color:yellow;')
        label3.setStyleSheet('background-color:red;')
        layout = QHBoxLayout()
        # layout = QVBoxLayout()
        layout.addWidget(label1)
        layout.addWidget(label2)
        layout.addWidget(label3)
        # Content margins: left, top, right, bottom.
        layout.setContentsMargins(10, 20, 30, 40)
        # Space between the child widgets.
        layout.setSpacing(40)
        # Layout direction options (last call wins).
        self.setLayoutDirection(Qt.LeftToRight)
        self.setLayoutDirection(Qt.RightToLeft)
        self.setLayoutDirection(Qt.LayoutDirectionAuto)
        # setLayout() reparents the layout and all its child widgets.
        self.setLayout(layout)

    def 详解(self):
        # Detailed walk-through of QBoxLayout features.
        label1 = QLabel('标签1')
        label2 = QLabel('标签2')
        label3 = QLabel('标签3')
        label1.setStyleSheet('background-color:cyan;')
        label2.setStyleSheet('background-color:yellow;')
        label3.setStyleSheet('background-color:red;')
        layout = QBoxLayout(QBoxLayout.TopToBottom)
        layout.addWidget(label1)
        layout.addWidget(label2)
        layout.addWidget(label3)
        # Spacing between widgets.
        print('spacing', layout.spacing())
        layout.setSpacing(60)
        # Current content margins.
        print('margins:', layout.contentsMargins())
        # Replacing a child widget #####
        # label4 = QLabel('标签 4 ')
        # label4.setStyleSheet('background-color:orange;')
        # layout.replaceWidget(label2, label4)
        # label2.hide() # quirk: the replaced widget apparently must be hidden manually?
        # label2.destroyed.connect(lambda: print('destroyed:', label2.parent()))
        # label2.setParent(None)
        # Nested layouts: a horizontal row added inside the vertical layout #####
        label5 = QLabel('标签5')
        label6 = QLabel('标签6')
        label7 = QLabel('标签7')
        label5.setStyleSheet('background-color:pink;')
        label6.setStyleSheet('background-color:blue;')
        label7.setStyleSheet('background-color:cyan;')
        h_layout = QBoxLayout(QBoxLayout.LeftToRight)
        h_layout.addWidget(label5)
        h_layout.addWidget(label6)
        h_layout.addWidget(label7)
        layout.addLayout(h_layout)
        # Toggling setEnabled demonstrates re-activating a layout.
        layout.setEnabled(False)
        layout.setEnabled(True)
        self.setLayout(layout)
        pass
if __name__ == '__main__':
    # Standard Qt bootstrap: create the application, show the window,
    # and hand control to the event loop until it exits.
    app = QApplication(sys.argv)
    window = MyWindow()
    window.show()
    sys.exit(app.exec_())
|
999,246 | 24c8354b0a4bc516acbc13f6dde4da7d563cdd8c | n=int(input())
# Grading round-up: read n grades, then print each grade rounded up to
# the next multiple of 5 when it is within 2 of it, except failing
# grades (< 38), which are printed unchanged.
marks = []
for _ in range(0, n):
    marks.append(int(input()))
for mark in marks:
    next_multiple = (int(mark / 5) + 1) * 5
    if mark < 38:
        print(mark)
    elif next_multiple - mark < 3:
        print(next_multiple)
    else:
        print(mark)
999,247 | cc3329776e7d8f15ba306cd937d0729e9ad99253 | rule star:
    input:
        # Paired-end FASTQ reads for the sample wildcard.
        sample=["reads/{sample}.1.fastq", "reads/{sample}.2.fastq"]
    output:
        # see STAR manual for additional output files
        "star/{sample}/Aligned.out.bam"
    log:
        "logs/star/{sample}.log"
    params:
        # path to STAR reference genome index
        index="index",
        # optional parameters
        extra=""
    threads: 8
    wrapper:
        # Community wrapper that invokes STAR with the inputs above.
        "master/bio/star/align"
|
999,248 | 5edf833f8026ef522234e56d97b2471b4b50be6b | #checkargs2.py
#コマンドライン引数が2個以上であることをチェックする
import sys
if len(sys.argv)<3:
print("引数として2つの整数が必要です。")
exit()
print("引数チェックOK!")
|
999,249 | f5f1b328fcc9e882f6bf102006e49f260254206e | from django.contrib.auth import decorators, views
from django.http import HttpResponse, HttpRequest, HttpResponseBadRequest, QueryDict
from django.conf import settings
from django.views.decorators.csrf import csrf_exempt
from bii_webapp.apps.files.models import *
from threading import Thread
import json
import requests
from bii_webapp.settings import common
import re
import os
from django.core.cache import cache
import stat
TIMEOUT = 1500 #seconds
@csrf_exempt
@decorators.login_required(login_url=views.login)
def postInit(request, sample=None):
    # Ask the upload web service for a new upload session.  When *sample*
    # is given (see initSample) its filename/filesize are used instead of
    # the browser-supplied POST fields.
    url = settings.WEBSERVICES_URL + 'upload/init'
    try:
        data = {}
        if sample == None:
            data = request.POST
        else:
            data['filename'] = sample['filename']
            data['filesize'] = sample['filesize']
        r = requests.post(url, files=data, timeout=TIMEOUT)
        # NOTE(review): obj is parsed but never used - presumably a
        # leftover validation step; confirm before removing.
        obj = json.loads(r.content)
    except requests.exceptions.ConnectionError, e:
        r = HttpResponse(
            json.dumps({'ERROR': {'total': 1, 'messages': 'Upload server could not be reached, please try again'}}))
    except requests.exceptions.Timeout, e:
        r = HttpResponse(json.dumps({'ERROR': {'total': 1, 'messages': 'Connection timed out, please try again'}}))
    # NOTE(review): on success r is a requests.Response wrapped in an
    # HttpResponse, on error an HttpResponse wrapped in another - verify
    # this double wrapping is what the client expects.
    return HttpResponse(r)
@csrf_exempt
@decorators.login_required(login_url=views.login)
def initSample(request):
    # Start an upload session for a bundled sample archive; the file size
    # is read from disk rather than trusted from the client.
    name = request.GET['sampleName']
    # 'sample.zip'
    directory = common.SITE_ROOT + '/media/samples/'
    filesize = (str)(os.path.getsize(directory + "/" + name))
    # request.POST={'filename':name,'filesize':filesize}
    sample = {'filename': name, 'filesize': filesize}
    return postInit(request, sample)
@csrf_exempt
@decorators.login_required(login_url=views.login)
def uploadSample(request):
    # Upload one of the bundled sample archives from media/samples, reusing
    # the regular uploadFile() path with the file handle injected.
    name = request.POST['filename']
    filesize = request.POST['filesize']
    uploadID = request.POST['uploadID']
    directory = common.SITE_ROOT + '/media/samples/'
    sample = {'filename': name, 'filesize': filesize, 'uploadID': uploadID}
    # Bug fix: the handle was opened and never closed; the with-block keeps
    # it open for the duration of the upload and closes it afterwards.
    with open(directory + "/" + name, 'rb') as sample_file:
        sample['file'] = sample_file
        return uploadFile(request, sample)
@csrf_exempt
@decorators.login_required(login_url=views.login)
def uploadFile(request, sample=None):
try:
STATE = 'UPLOADING'
if sample == None:
file = request.FILES['file']
name = request.POST['filename']
size = request.POST['filesize']
uploadID = request.POST['uploadID']
else:
file = sample['file']
name = sample['filename']
size = sample['filesize']
uploadID = sample['uploadID']
# mimetype = file.content_type
extension = name[name.rindex('.'):]
valid_ext = '(\.(?i)(zip|tar|gz)$)'
valid_mime = '^application/(zip|x-zip-compressed|x-tar|x-gzip|octet-stream)$'
# Validate file type
# not (re.match(valid_mime, mimetype) and
if not re.match(valid_ext, extension):
r = errorResponse(request, 'Invalid file type', 1)
return r
files = {'file': file}
data = {'uploadID': uploadID, 'filesize': size}
url = settings.WEBSERVICES_URL + 'upload'
try:
r = requests.post(url, data=data, files=files, timeout=TIMEOUT)
import logging
logger = logging.getLogger('django.request')
logger.exception(r)
resp = json.loads(r.content)
if 'ERROR' in resp or 'UPLOAD' in resp and resp['UPLOAD']['stage'] == 'cancelled':
return respond(request,r)
if resp['UPLOAD']['stage'] == 'complete':
cache.delete('browse')
except requests.exceptions.RequestException, e:
r = errorResponse(request, 'Upload server could not be reached')
except requests.exceptions.Timeout, e:
r = errorResponse(request, 'Connection timed out, please try again later')
except Exception, e:
import logging
logger = logging.getLogger('django.request')
logger.exception(e)
r = errorResponse(request, 'Oops something went wrong, please try again')
return respond(request, r)
@csrf_exempt
@decorators.login_required(login_url=views.login)
def getCancel(request, uploadID=None):
url = settings.WEBSERVICES_URL + 'upload/cancel'
url += '?uploadID=' + request.GET['uploadID']
try:
r = requests.get(url, timeout=TIMEOUT)
except requests.exceptions.ConnectionError, e:
r = errorResponse('Upload server could not be reached, please delete the file')
except requests.exceptions.Timeout, e:
r = errorResponse('Connection timed out, please delete the file')
return HttpResponse(r)
@csrf_exempt
@decorators.login_required(login_url=views.login)
def getProgress(request, uploadID=None):
    # Poll the upload web service for the progress of the uploadID named
    # in the query string and relay the JSON answer to the browser.
    url = settings.WEBSERVICES_URL + 'upload/progress'
    url += '?uploadID=' + request.GET['uploadID']
    try:
        r = requests.get(url, timeout=TIMEOUT)
    except requests.exceptions.ConnectionError, e:
        r = errorResponse(request, 'Upload server could not be reached')
    except requests.exceptions.Timeout, e:
        r = errorResponse(request, 'Connection timed out, please try again later')
    return respond(request, r)
def errorResponse(request, objErrors):
    """Wrap *objErrors* in the {'ERROR': {...}} envelope the upload UI expects."""
    payload = {'total': 1, 'messages': objErrors}
    return HttpResponse(json.dumps({'ERROR': payload}))
def respond(request, response):
    # Forward the upstream JSON body to the browser; any non-200 upstream
    # status becomes a structured error payload instead.
    if (response.status_code != 200):
        resp = errorResponse(request, 'Server error with status code ' + str(response.status_code))
        return resp
    else:
        obj = json.loads(response.content)
return HttpResponse(json.dumps(obj)) |
999,250 | 77eb5fab196f60d76a29a4ff97e64cf24582f833 | import argparse
import copy
import logging
import pathlib
import shutil
import sys
import tarfile
import unittest
from .base import Profile, Target, Scope
from .build import Build
from .config import ConfigDict
from .tests import Skip, TestCase
from . import compilers, targets
if not any([ '.xz' in i[1] for i in shutil.get_unpack_formats() ]):
    # Older interpreters do not register an unpacker for .xz archives;
    # add one so shutil.unpack_archive() can handle them.
    def _extract_xz(filename, extract_dir):
        """Unpack an xz-compressed tar archive into *extract_dir*."""
        try:
            tarobj = tarfile.open(filename)
        except tarfile.TarError as e:
            # Bug fix: ReadError was referenced unqualified and raised a
            # NameError here; the intended class is shutil.ReadError.
            raise shutil.ReadError('{} is not a tar file'.format(filename)) from e
        try:
            tarobj.extractall(extract_dir)
        finally:
            tarobj.close()
    shutil.register_unpack_format('XZ file', ['.xz'], _extract_xz, [], 'Tar file compressed with XZ (LZMA) algorithm')
|
999,251 | a899c90d1b92a92b91351cadbf1ba68cb02db5e1 | #!/usr/bin/python3
import sys, os, subprocess
import PyQt5
from PyQt5 import QtCore, QtGui, QtWidgets
# --- Variables -----------------------------------------------------------------------------------
# Qt key code -> uinput-style key name for non-alphanumeric keys.
# NOTE(review): several Qt codes appear more than once below (e.g.
# 16777222 maps to both 'KP0' and 'INSERT', 16777235 to 'UP' and 'KP8',
# 16777237 to 'DOWN' and 'KP2'); in a dict literal the later entry
# silently wins - confirm which mapping is intended.  The 'ukN' entries
# are placeholders for keys whose Qt codes are still unknown.
special_keys = { 16777264 : 'F1', 16777222 : 'KP0', 16777235 : 'UP',
                 16777265 : 'F2', 16777233 : 'KP1', 16777237 : 'DOWN',
                 16777266 : 'F3', 16777237 : 'KP2', 16777234 : 'LEFT',
                 16777267 : 'F4', 16777239 : 'KP3', 16777236 : 'RIGHT',
                 16777268 : 'F5', 16777234 : 'KP4', 16777222 : 'INSERT',
                 16777269 : 'F6', 16777227 : 'KP5', 16777232 : 'HOME',
                 16777270 : 'F7', 16777236 : 'KP6', 16777238 : 'PAGEUP',
                 16777271 : 'F8', 16777232 : 'KP7', 16777223 : 'DELETE',
                 16777272 : 'F9', 16777235 : 'KP8', 16777233 : 'END',
                 16777273 : 'F10', 16777238 : 'KP9', 16777239 : 'PAGEDOWN',
                 16777274 : 'F11', 16777223 : 'KPDOT', 16777217 : 'TAB',
                 16777275 : 'F12', 43 : 'KPPLUS', 16777220 : 'ENTER',
                 32 : 'SPACE', 42 : 'KPASTERISK', 16777252 : 'CAPSLOCK',
                 16777219 : 'BACKSPACE', 45 : 'KPMINUS', 16777253 : 'NUMLOCK',
                 16777216 : 'ESC', 16777251 : 'LEFTALT', 16781571 : 'RIGHTALT',
                 16777249 : 'LEFTCTRL', 16777248 : 'LEFTSHIFT', 'uk1' : 'SLASH',
                 'uk2' : 'RIGHTBRACE', 'uk3' : 'SCROLLLOCK', 'uk4' : 'BACKSLASH',
                 'uk5' : 'SEMICOLON', 'uk6' : 'APOSTROPHE', 'uk7' : 'LEFTBRACE',
                 'uk8' : 'DOT', 'uk9' : 'COMMA', 'uk10' : 'EQUAL',
                 'uk11' : 'GRAVE', 'uk12' : 'MINUS', 'uk13' : 'RIGHTCTRL',
                 'uk14' : 'RIGHTSHIFT'}
# --- Functions -----------------------------------------------------------------------------------
def createVerticalText(ind):
    """Return *ind* with a newline between every character.

    Used to caption the narrow vertical buttons, e.g. 'Left' -> 'L\\ne\\nf\\nt'.
    Idiom fix: str.join replaces the manual accumulate-then-trim loop.
    """
    return '\n'.join(ind)
# --- Classes -------------------------------------------------------------------------------------
class StartQT5(QtWidgets.QMainWindow):
    """Main window of the G13 configurator.

    Loads /usr/local/bin/defaults.bind, mirrors the bindings in the tree
    view and on the per-key buttons, and pushes changes both back to the
    bind file and to the running g13d daemon via its /tmp/g13-0 pipe.
    """

    def __init__(self, parent=None):
        QtWidgets.QWidget.__init__(self, parent)
        self.ui = Ui_Window(self)
        self.ui.setupUi(self)
        # True -> buttons captioned with G13 key names; False -> bound keys
        self.keyNames = True
        itemList = []
        self.setWindowIcon(QtGui.QIcon('g13logo.png'))
        for cfg in self.loadCfg():
            item = PyQt5.QtWidgets.QTreeWidgetItem(cfg)
            itemList.append(item)
            item.setFlags(QtCore.Qt.ItemIsEditable | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
        self.ui.tv_bindings.addTopLevelItems(itemList)
        self.checkProcess()

    @staticmethod
    def _keyButtonNames():
        """Identifiers of the buttons whose captions are toggled/cleared
        (G1..G22, M1..M3, MR, L1..L4); 'but_<name-lowercased>' matches the
        Ui_Window attribute names."""
        return ['G%d' % n for n in range(1, 23)] + ['M1', 'M2', 'M3', 'MR', 'L1', 'L2', 'L3', 'L4']

    def loadCfg(self):
        """Load the bind file and return a list of [g13_key, kb_key] pairs."""
        # `with` guarantees the handle is closed even if parsing fails
        # (the original left the file object to the garbage collector).
        with open('/usr/local/bin/defaults.bind', 'r') as objFile:
            fileContents = objFile.read()
        cfgData = []
        for line in fileContents.split('\n'):
            if line.startswith('bind '):
                bind, gKey, kbKey = line.split(' ')
                cfgData.append([gKey, kbKey.rstrip('\n')])
        return cfgData

    def pushButtonClicked(self, but_id, button):
        """Deselect all rows, select the clicked button's row and arm the
        event filter that captures the next key press."""
        self.ui.tv_bindings.clearSelection()
        lstMatch = self.ui.tv_bindings.findItems(but_id, QtCore.Qt.MatchExactly, 0)[0]
        lstMatch.setSelected(True)
        lstMatch.setText(1, '[Press a key]')
        button.installEventFilter(self)
        self.efButton = button  # remembered so eventFilter can re-caption it

    def eventFilter(self, object, event):
        """Capture one key press; write the binding to tree, file and pipe."""
        if event.type() == QtCore.QEvent.KeyPress:
            if 48 <= event.key() <= 57:        # digits 0-9
                kbKey = 'KEY_' + chr(event.key())
            elif 65 <= event.key() <= 90:      # letters A-Z
                kbKey = 'KEY_' + chr(event.key())
            elif event.key() in special_keys:  # function/keypad/navigation keys
                kbKey = 'KEY_' + special_keys[event.key()]
            else:
                kbKey = 'None'
            # Locate the armed row via its placeholder text
            lstMatch = self.ui.tv_bindings.findItems('[Press a key]', QtCore.Qt.MatchExactly, 1)[0]
            g13Key = lstMatch.text(0)
            lstMatch.setText(1, kbKey)
            if not self.keyNames:
                self.efButton.setText(kbKey[4:])
            self.ui.tv_bindings.removeEventFilter(self)
            self.efButton.removeEventFilter(self)
            # Persist and notify the daemon
            self.writeFile()
            self.writePipe('bind ' + g13Key + ' ' + kbKey)
        return False

    def activateButtonClicked(self):
        """Start the g13d daemon with the default bind file."""
        print("trying to start process...")
        subprocess.Popen("/usr/local/bin/g13d --config /usr/local/bin/defaults.bind", shell=True)
        self.checkProcess()

    def toggleNamesButtonClicked(self):
        """Switch button captions between G13 key names and bound key names."""
        nameTable = {}
        if self.keyNames:
            for cfg in self.loadCfg():
                nameTable[cfg[0]] = cfg[1][4:]  # strip the 'KEY_' prefix
            self.keyNames = False
        else:
            for cfg in self.loadCfg():
                nameTable[cfg[0]] = cfg[0]
            self.keyNames = True
        # Regular buttons share the attribute naming scheme but_<key-lowercase>
        for name in self._keyButtonNames():
            getattr(self.ui, 'but_' + name.lower()).setText(nameTable[name])
        self.ui.but_down.setText(nameTable["DOWN"])
        self.ui.but_stickTop.setText(nameTable["TOP"])
        nameTable["LEFT"] = createVerticalText(nameTable["LEFT"])
        self.ui.but_left.setText(nameTable["LEFT"])
        # Stick buttons are tiny: fall back to short captions when unbound
        if nameTable["STICK_UP"] == "STICK_UP":
            nameTable["STICK_UP"] = "UP"
        self.ui.but_stickUp.setText(nameTable["STICK_UP"])
        if nameTable["STICK_DOWN"] == "STICK_DOWN":
            nameTable["STICK_DOWN"] = "DOWN"
        self.ui.but_stickDown.setText(nameTable["STICK_DOWN"])
        if nameTable["STICK_LEFT"] == "STICK_LEFT":
            nameTable["STICK_LEFT"] = "L"
        else:
            nameTable["STICK_LEFT"] = createVerticalText(nameTable["STICK_LEFT"])
        self.ui.but_stickLeft.setText(nameTable["STICK_LEFT"])
        if nameTable["STICK_RIGHT"] == "STICK_RIGHT":
            nameTable["STICK_RIGHT"] = "R"
        else:
            nameTable["STICK_RIGHT"] = createVerticalText(nameTable["STICK_RIGHT"])
        self.ui.but_stickRight.setText(nameTable["STICK_RIGHT"])

    def resetNamesButtonClicked(self):
        """Bind every key to None, rewrite file/pipe and clear all captions."""
        self.writeFile(True)
        for key, binding in self.loadCfg():
            self.writePipe('bind ' + key + ' None')
        while self.ui.tv_bindings.topLevelItemCount() > 0:
            self.ui.tv_bindings.takeTopLevelItem(0)
        itemList = []
        for cfg in self.loadCfg():
            # BUG FIX: QTreeWidgetItem lives in QtWidgets under PyQt5; the
            # original QtGui.QTreeWidgetItem raised AttributeError here.
            item = PyQt5.QtWidgets.QTreeWidgetItem(cfg)
            itemList.append(item)
            item.setFlags(QtCore.Qt.ItemIsEditable | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
        self.ui.tv_bindings.addTopLevelItems(itemList)
        # Blank every on-screen key-button caption
        for name in self._keyButtonNames():
            getattr(self.ui, 'but_' + name.lower()).setText("")

    def checkProcess(self):
        """Determine whether g13d is running; update label and button state."""
        process = subprocess.Popen("ps -A | grep g13d", stdout=subprocess.PIPE, shell=True)
        out, err = process.communicate()
        # BUG FIX: communicate() returns *bytes* on Python 3, so the original
        # comparison `out != ''` was always True and the daemon always
        # appeared to be running.
        if out.strip():
            self.ui.but_activate.setEnabled(False)
            self.ui.lab_active.setText("Running ok")
            self.ui.lab_active.setStyleSheet("QLabel { background-color : none; color : green; }")
        else:
            self.ui.but_activate.setEnabled(True)
            self.ui.lab_active.setText("Not Started")
            self.ui.lab_active.setStyleSheet("QLabel { background-color : none; color : red; }")

    def writeFile(self, reset=False):
        """Write all rows of the tree back to the bind file (overwrite)."""
        with open('/usr/local/bin/defaults.bind', 'w') as objFile:
            root = self.ui.tv_bindings.invisibleRootItem()
            for c in range(root.childCount()):
                # Never persist the '[Press a key]' placeholder
                if root.child(c).text(1) == '[Press a key]' or reset:
                    kbKey = 'None'
                else:
                    kbKey = root.child(c).text(1)
                objFile.write('bind ' + root.child(c).text(0) + ' ' + kbKey + '\n')

    def writePipe(self, data):
        """Send one bind command to the daemon's command pipe.

        NOTE(review): the command string is interpolated into a shell --
        fine for the fixed bind strings built here, but unsafe for
        arbitrary input.
        """
        if not '[Press a key]' in data:
            subprocess.Popen("echo " + str(data) + " > /tmp/g13-0", shell=True)

    def itemEdited(self, item):
        """Slot for manual edits of a tree row: persist and notify daemon."""
        self.writeFile()
        self.writePipe('bind ' + item.text(0) + ' ' + item.text(1))
class Ui_Window(object):
    """Builds the static widget layout of the fixed-size G13 binding window.

    Refactor: the ~30 near-identical QPushButton stanzas of the original
    are generated by the _addKeyButton helper; attribute names, object
    names, geometry, captions and signal wiring are unchanged.
    """

    def __init__(self, parent):
        self.parent = parent  # StartQT5 instance providing the slots

    def _addKeyButton(self, Window, attr, caption, x, y, w, h, g13_key):
        """Create one binding button: objectName == attr, stored as
        self.<attr>, clicks routed to parent.pushButtonClicked."""
        button = PyQt5.QtWidgets.QPushButton(Window)
        button.setGeometry(QtCore.QRect(x, y, w, h))
        button.setObjectName(attr)
        button.setText(caption)
        # Defaults bind key/button per-iteration (avoids the late-binding
        # closure pitfall); `checked` swallows the clicked(bool) argument.
        button.clicked.connect(
            lambda checked=False, key=g13_key, btn=button: self.parent.pushButtonClicked(key, btn))
        setattr(self, attr, button)
        return button

    def setupUi(self, Window):
        """Lay out every widget of the 800x620 main window."""
        Window.setObjectName("Window")
        Window.setWindowTitle("G13 Keys")
        Window.resize(800, 620)
        sizePolicy = PyQt5.QtWidgets.QSizePolicy(PyQt5.QtWidgets.QSizePolicy.Fixed, PyQt5.QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(Window.sizePolicy().hasHeightForWidth())
        Window.setSizePolicy(sizePolicy)
        Window.setMinimumSize(QtCore.QSize(800, 620))
        Window.setMaximumSize(QtCore.QSize(800, 620))
        # G rows: (first key number, first x, y, count); 41x41, 50 px apart
        for first_num, first_x, y, count in ((1, 30, 90, 7), (8, 30, 140, 7),
                                             (15, 80, 190, 5), (20, 130, 240, 3)):
            for i in range(count):
                key = 'G%d' % (first_num + i)
                self._addKeyButton(Window, 'but_' + key.lower(), key,
                                   first_x + 50 * i, y, 41, 41, key)
        # M1..M3 + MR row: 81x21, 80 px apart from x=40
        for i, key in enumerate(('M1', 'M2', 'M3', 'MR')):
            self._addKeyButton(Window, 'but_' + key.lower(), key, 40 + 80 * i, 50, 81, 21, key)
        # L1..L4 row: 60x20, 60 px apart from x=80
        for i in range(4):
            key = 'L%d' % (i + 1)
            self._addKeyButton(Window, 'but_' + key.lower(), key, 80 + 60 * i, 15, 60, 20, key)
        # Thumb area: LEFT/DOWN buttons and the joystick cluster
        self._addKeyButton(Window, 'but_left', "L\ne\nf\nt", 330, 250, 31, 81, 'LEFT')
        self._addKeyButton(Window, 'but_down', "Down", 360, 335, 81, 31, 'DOWN')
        self._addKeyButton(Window, 'but_stickTop', "Top", 390, 256, 50, 50, 'TOP')
        self._addKeyButton(Window, 'but_stickUp', "Up", 390, 236, 51, 16, 'STICK_UP')
        self._addKeyButton(Window, 'but_stickDown', "Down", 390, 310, 51, 16, 'STICK_DOWN')
        self._addKeyButton(Window, 'but_stickRight', "R", 444, 256, 20, 51, 'STICK_RIGHT')
        self._addKeyButton(Window, 'but_stickLeft', "L", 366, 256, 20, 51, 'STICK_LEFT')
        # Action buttons (wired to dedicated slots, not key bindings)
        self.but_activate = PyQt5.QtWidgets.QPushButton(Window)
        self.but_activate.setGeometry(QtCore.QRect(20, 550, 430, 50))
        self.but_activate.setObjectName("but_activate")
        self.but_activate.setText("Activate G13")
        self.but_activate.clicked.connect(self.parent.activateButtonClicked)
        self.but_toggleNames = PyQt5.QtWidgets.QPushButton(Window)
        self.but_toggleNames.setGeometry(QtCore.QRect(20, 400, 150, 30))
        self.but_toggleNames.setObjectName("but_changeNames")  # historical object name, kept
        self.but_toggleNames.setText("Toggle Button Names")
        self.but_toggleNames.clicked.connect(self.parent.toggleNamesButtonClicked)
        self.but_resetKeys = PyQt5.QtWidgets.QPushButton(Window)
        self.but_resetKeys.setGeometry(QtCore.QRect(20, 440, 150, 30))
        self.but_resetKeys.setObjectName("but_resetKeys")
        self.but_resetKeys.setText("Reset All Keys")
        self.but_resetKeys.clicked.connect(self.parent.resetNamesButtonClicked)
        # Bindings table (two columns: G13 key / keyboard key)
        self.tv_bindings = PyQt5.QtWidgets.QTreeWidget(Window)
        self.tv_bindings.setGeometry(QtCore.QRect(470, 20, 310, 580))
        self.tv_bindings.setObjectName("tv_bindings")
        header = PyQt5.QtWidgets.QTreeWidgetItem(["G13 Key", "Keyboard Key"])
        header.setTextAlignment(0, QtCore.Qt.AlignCenter)
        header.setTextAlignment(1, QtCore.Qt.AlignCenter)
        self.tv_bindings.setHeaderItem(header)
        self.tv_bindings.setColumnWidth(0, 145)
        self.tv_bindings.setColumnWidth(1, 145)
        font = QtGui.QFont()
        font.setPointSize(9)
        self.tv_bindings.setFont(font)
        self.tv_bindings.itemChanged.connect(self.parent.itemEdited)
        # Status labels
        self.lab_status = PyQt5.QtWidgets.QLabel(Window)
        self.lab_status.setGeometry(QtCore.QRect(30, 500, 170, 31))
        font = QtGui.QFont()
        font.setPointSize(22)
        self.lab_status.setFont(font)
        self.lab_status.setObjectName("lab_status")
        self.lab_status.setText("Activate G13")
        self.lab_active = PyQt5.QtWidgets.QLabel(Window)
        self.lab_active.setGeometry(QtCore.QRect(210, 500, 200, 31))
        font = QtGui.QFont()
        font.setPointSize(18)
        self.lab_active.setFont(font)
        self.lab_active.setObjectName("lab_active")
        QtCore.QMetaObject.connectSlotsByName(Window)
# --- Main ----------------------------------------------------------------------------------------
# die if not root. MUST be. in order to gain write access to usb device, g13 hardware
# NOTE(review): these two checks run at import time as well, not only when
# executed as a script -- confirm that is intended.
if os.getuid() != 0:
    sys.exit('\n Must be run with admin priviliges\n')
# die if not installed: both the daemon and its default bind file are required
if not os.path.exists('/usr/local/bin/g13d') or not os.path.exists('/usr/local/bin/defaults.bind'):
    sys.exit('\n "g13d" and "defaults.bind" must be installed to "/usr/local/bin/" \n')
if __name__ == "__main__":
    # app = QtGui.QApplication(sys.argv)
    # Build the Qt application, show the main window, enter the event loop.
    app = QtWidgets.QApplication(sys.argv)
    g13if = StartQT5()
    g13if.show()
    sys.exit(app.exec_())
# --- TODO ----------------------------------------------------------------------------------------
# - Not all keys known (English keyboard)
# - use custom .bind-files? Load from cmd?
# - Cursor-Keys are identified as KP (Error in QT?)
|
999,252 | 66d1ad005cce45f47d7b6b556053de992ee5f094 | '''
Created on Apr 10, 2016
@author: Dell
'''
candies = []
valleys = []
def get_rating(i, N, arr):
    """Return arr[i] when 0 <= i <= N-1, otherwise None (out of range)."""
    if 0 <= i <= N - 1:
        return arr[i]
    return None
def get_candies(arr):
    """Compute the minimum candies per child for ratings `arr`.

    Each valley (local minimum) gets 1 candy; candies then increase by 1
    while climbing each strictly-rising slope away from a valley, taking
    the max where slopes from two valleys meet.

    Bug fixes vs. the original:
    - `for i, v in enumerate(range(0, len(arr)-1))` skipped the last index,
      so the final child was never considered a valley;
    - comparing against the `None` sentinel from get_rating raised
      TypeError at the array edges on Python 3 (`x <= None` is illegal) --
      a missing neighbour now simply counts as "no smaller neighbour";
    - the computed candies list was discarded; it is now returned.

    Still appends the valley indices found to the module-level `valleys`
    list so the existing caller that prints it keeps working; the candy
    computation itself only uses the valleys found in *this* call.
    """
    n = len(arr)
    new_valleys = []
    for i in range(n):
        left = get_rating(i - 1, n, arr)
        right = get_rating(i + 1, n, arr)
        if (left is None or arr[i] <= left) and (right is None or arr[i] <= right):
            new_valleys.append(i)
    valleys.extend(new_valleys)
    candies = [0] * n
    for valley_idx in new_valleys:
        candies[valley_idx] = max(candies[valley_idx], 1)
        # Climb the slope to the left of the valley
        cur_idx = valley_idx - 1
        cur_candies = 2
        while cur_idx >= 0 and arr[cur_idx] > arr[cur_idx + 1]:
            candies[cur_idx] = max(candies[cur_idx], cur_candies)
            cur_idx -= 1
            cur_candies += 1
        # Climb the slope to the right of the valley
        cur_idx = valley_idx + 1
        cur_candies = 2
        while cur_idx < n and arr[cur_idx] > arr[cur_idx - 1]:
            candies[cur_idx] = max(candies[cur_idx], cur_candies)
            cur_idx += 1
            cur_candies += 1
    return candies
if __name__ == '__main__':
    # Read the number of children, then one rating per line (only when the
    # count respects the problem constraint 1 <= N <= 10**5), and finally
    # print the valley indices.
    # NOTE(review): get_candies is never called here, so `valleys` is
    # always printed empty -- presumably a call is missing; confirm intent.
    noc= int(input())
    score_array = []
    if noc <=(10**5) and noc >=1:
        for score in range(noc):
            score = int(input())
            score_array.append(score)
    print(valleys)
    pass
999,253 | 6fab300c290906ae2e1515d2f3663e1d1ee47bea | import sys
# Google Code Jam "Recycled Numbers": for each pair (A, B) count distinct
# pairs (n, m) where m is a digit-rotation of n with A <= n < m <= B.
# NOTE: Python 2 syntax (print statement) -- do not run under Python 3.
with open( sys.argv[1] ) as dataFile:
    currentCase = 1
    numCases = dataFile.readline()  # first line: number of cases (unused)
    for unsplitLine in dataFile:
        A, B = [ int( i ) for i in unsplitLine.split() ]
        count = 0
        for n in range( A, B ):
            # `m` records rotations already counted for this n (duplicates
            # arise from repeated digit patterns, e.g. 2121)
            m = {}
            nAsList = [ digit for digit in str( n ) ]
            for j in range( 1, len( nAsList ) ):
                # Rotate the digit string left by j positions
                mAsStr = ''.join( list( nAsList[j:] ) + list( nAsList[:j] ) )
                # Valid partner: no leading zero, strictly larger than n,
                # within the bound, and not yet counted
                if mAsStr[0] != '0' and int( mAsStr ) > n and int( mAsStr ) <= B and mAsStr not in m:
                    count += 1
                    m[ mAsStr ] = 0
        print "Case #%s: %s" % ( currentCase, count )
        currentCase += 1
|
999,254 | 235de3d6ce526504d5af3aa302d54dedbc19bd3d | from async_event_bus.types import EventHandler, FilterFunction, EventHandlerFunction, Context, EventGenerator, Event
def define_handler(filter: FilterFunction, handler: EventHandlerFunction, context: Context) -> EventHandler:
    """Wrap *handler* behind *filter*, closing over *context*.

    The returned callable forwards an event to *handler* (with *context*)
    only when *filter* accepts it; otherwise it yields None.
    """
    def handle(event: Event) -> EventGenerator:
        # Guard clause: non-matching events fall through to None.
        if not filter(event):
            return None
        return handler(event, context)
    return handle
999,255 | 82a569397bdf10ffb5908e17c257ecb5c042c4d2 | import unittest
from scrape import *
from unittest.mock import patch, Mock
class TestMS(unittest.TestCase):
    """Unit tests for the metal-archives scraper."""

    def setUp(self):
        # Expected AJAX listing URL for Swedish bands, page 1, offset 0
        base = 'https://www.metal-archives.com/browse/ajax-country/c/SE/json/1'
        query = ('?sEcho=1&iColumns=4&sColumns=&iDisplayStart=0&iDisplayLength=500'
                 '&mDataProp_0=0&mDataProp_1=1&mDataProp_2=2&mDataProp_3=3&iSortCol_0=0'
                 '&sSortDir_0=asc&iSortingCols=1&bSortable_0=true&bSortable_1=true'
                 '&bSortable_2=true&bSortable_3=false&_=1505682951191')
        self.target_url = base + query
        # One expected row of the band listing payload
        self.data = ["<a href='https://www.metal-archives.com/bands/%24ilverdollar/60323'>$ilverdollar</a>",
                     "Heavy/Power Metal", "Nyköping", '<span class="active">Active</span>']
        self.soup_obj = "This is a soup object probably idk."

    def test_current_target_url(self):
        self.assertEqual(MetalScraper.current_target_url(1, 0), self.target_url)

    def test_get_total_records(self):
        self.assertGreater(MetalScraper.get_total_records(self.target_url), 0)

    @patch('scrape.MetalScraper')
    def test_json_data(self, scraper_cls):
        mock = scraper_cls()
        mock.get_json_data.return_value = self.data
        self.assertEqual(MetalScraper.get_json_data(self.target_url)["aaData"][0], self.data)

    def test_crawler(self):
        self.assertIsNone(MetalScraper.crawler())

    @patch('scrape.MetalScraper.get_band_attributes', return_value=True)
    def test_get_band_attributes(self, attribute):
        self.assertTrue(attribute(self.soup_obj))

    @patch('scrape.MetalScraper.get_band_disco', return_value=True)
    def test_get_discography(self, entry):
        self.assertTrue(entry(self.soup_obj, 1))

    @patch('scrape.MetalScraper.get_band_members', return_value=True)
    def test_get_band_members(self, band):
        self.assertTrue(band(self.soup_obj, 1))
if __name__ == '__main__':
    # Discover and run all TestMS cases when executed directly
    unittest.main()
|
999,256 | 88450cc4830d1ee25fda18b65022b7140da40d27 | #Copyright (c) Ramzi Al Haddad 2002-2017
import pyautogui as pag
import time
# Wait 5 s so the user can focus the target window, then type the letter
# sequence (English letters roughly by frequency, repeated several times)
# forever until the process is killed.
time.sleep(5)
while True:
    pag.typewrite(['e','t','a','i','n','o','s','h','r','d','l','u','c','m','f','w','y','g','p','b','v','k','q','j','x','z','e','t','a','i','n','o','s','h','r','d','l','u','c','m','f','w','y','g','p','b','v','k','q','j','x','z','e','t','a','i','n','o','s','h','r','d','l','u','c','m','f','w','y','g','p','b','v','k','q','j','x','z','e','t','a','i','n','o','s','h','r','d','l','u','c','m','f','w','y','g','p','b','v','k','q','j','x','z','e','t','a','i','n','o','s','h','r','d','l','u','c','m','f','w','y','g','p','b','v','k','q','j','x','z','e','t','a','i','n','o','s','h','r','d','l','u','c','m','f','w','y','g','p','b','v','k','q','j','x','z','e','t','a','i','n','o','s','h','r','d','l','u','c','m','f','w','y','g','p','b','v','k','q','j','x','z'],interval=0.0001)
|
999,257 | fd3dd5344539c9c98d8a12246c5d6fff825ce28d | from .. import db
from flask import current_app
from flask_login import AnonymousUserMixin, UserMixin
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from itsdangerous import BadSignature, SignatureExpired
class Bank(UserMixin, db.Model):
    """User account stored in the `bank` table.

    NOTE(review): Role, Permission, generate_password_hash and
    check_password_hash are referenced but not imported in this excerpt --
    presumably imported elsewhere in the package; confirm.
    """
    __tablename__ = 'bank'
    id = db.Column(db.Integer, primary_key=True)
    confirmed = db.Column(db.Boolean, default=False)  # e-mail confirmed?
    first_name = db.Column(db.String(64), index=True)
    last_name = db.Column(db.String(64), index=True)
    email = db.Column(db.String(64), unique=True, index=True)
    password_hash = db.Column(db.String(128))  # salted hash; raw password never stored
    role_id = db.Column(db.Integer, db.ForeignKey('roles.id'))

    def __init__(self, **kwargs):
        # BUG FIX: the original called super(User, self) although this
        # class is named Bank -> NameError on every instantiation.
        super(Bank, self).__init__(**kwargs)
        if self.role is None:
            # The configured admin address gets the administrator role
            if self.email == current_app.config['ADMIN_EMAIL']:
                self.role = Role.query.filter_by(
                    permissions=Permission.ADMINISTER).first()
            if self.role is None:
                self.role = Role.query.filter_by(default=True).first()

    def full_name(self):
        """Return '<first> <last>'."""
        return '%s %s' % (self.first_name, self.last_name)

    def can(self, permissions):
        """True when this user's role grants every bit in `permissions`."""
        return self.role is not None and \
            (self.role.permissions & permissions) == permissions

    def is_admin(self):
        """True when the user holds the ADMINISTER permission."""
        return self.can(Permission.ADMINISTER)

    @property
    def password(self):
        raise AttributeError('`password` is not a readable attribute')

    @password.setter
    def password(self, password):
        # Only the salted hash is persisted
        self.password_hash = generate_password_hash(password)

    def verify_password(self, password):
        """Check a clear-text password against the stored hash."""
        return check_password_hash(self.password_hash, password)

    @staticmethod
    def generate_fake(count=100, **kwargs):
        """Generate `count` fake users for testing (duplicates skipped)."""
        from sqlalchemy.exc import IntegrityError
        from random import seed, choice
        from faker import Faker
        fake = Faker()
        roles = Role.query.all()
        seed()
        for i in range(count):
            # BUG FIX: the original instantiated the undefined name `User`
            # (NameError); this model is Bank.
            u = Bank(
                first_name=fake.first_name(),
                last_name=fake.last_name(),
                email=fake.email(),
                password='password',
                confirmed=True,
                role=choice(roles),
                **kwargs)
            db.session.add(u)
            try:
                db.session.commit()
            except IntegrityError:
                # Duplicate e-mail: drop this fake and continue
                db.session.rollback()

    def __repr__(self):
        # BUG FIX: reported '<User ...>' although the class is Bank.
        return '<Bank \'%s\'>' % self.full_name()
999,258 | 24fbff50d9793ddaece5da51f02e245858916570 | i=str(input())
# If the input line is NOT a palindrome, print it unchanged; if it IS a
# palindrome, print it with its last character removed.
if (i!=i[::-1]):
    print(i)
else:
    k=i[:-1]
    print(k)
|
999,259 | 5837e5f8b30ad0fcba80309146a38d7d485d4116 | import time
from datetime import datetime
from time import ctime
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.select import Select
# Launch Chrome maximised and open the corporate OA login page.
driver = webdriver.Chrome()
driver.maximize_window()
driver.get('http://oa.troila.com')
time.sleep(3)
# Log in with the hard-coded employee id and password.
# NOTE(review): credentials are committed in plain text -- consider
# loading them from the environment instead.
driver.find_element_by_id("loginid").send_keys("000833")
time.sleep(1)
driver.find_element_by_id("userpassword").send_keys("1QAZ2wsx")
time.sleep(1)
driver.find_element_by_id("login").click()
time.sleep(3)
# Open the OA daily-report form page (workflow 266)
driver.get("http://oa.troila.com/workflow/request/AddRequest.jsp?workflowid=266&isagent=0&beagenter=0&f_weaver_belongto_userid=")
time.sleep(2)
# Switch into the iframe that holds the form fields
driver.switch_to.frame('bodyiframe')
time.sleep(2)
# Scroll the form to the bottom (JS; the PAGE_DOWN approach is kept below
# commented out as the previous attempt)
# body = driver.find_element_by_tag_name("body")
# body.send_keys(Keys.PAGE_DOWN)
# time.sleep(2)
js1 = "document.documentElement.scrollTop=10000"
driver.execute_script(js1)
time.sleep(1)
# Friday (5) / Saturday (6): the NEXT day is a day off
nowtime = datetime.now().isoweekday()
isweekly = [5,6]
if nowtime in isweekly:
    # "On site tomorrow?" -> choose "No"
    Select(driver.find_element_by_name("field12540")).select_by_value("1")
    time.sleep(1)
    # Reason for absence -> choose "public holiday"
    Select(driver.find_element_by_name("field12541")).select_by_value("0")
else:
    # "On site tomorrow?" -> choose "Yes"
    c = driver.find_element_by_name("field12540")
    d = Select(c)
    d.select_by_value("0")
time.sleep(2)
# Fill in the first body-temperature field
driver.find_element_by_name("field12746_0").send_keys("36.2")
time.sleep(1)
# Fill in the second body-temperature field
driver.find_element_by_name("field12746_1").send_keys("36.0")
time.sleep(1)
# Switch back to the top-level frame
driver.switch_to.default_content()
# Click the submit button
driver.find_element_by_class_name("e8_btn_top_first").click()
time.sleep(5)
# Close the browser
driver.quit()
|
999,260 | feff450f054e22be9e61d3ce49dc70cdfe1afaed | from tkinter import *
from gui import gui
from sudoku import sudoku
def core():
    # Solve-button callback: read the grid from the GUI, solve it, and
    # write the solution back only when one exists.
    g.pullinput()
    s=sudoku(g.m)
    if s.solve():
        g.pushoutput(s)
if __name__=='__main__':
    # Build the GUI with `core` as its solve callback, lay it out, and
    # enter the Tk event loop.
    g=gui(core)
    g.format()
    mainloop()
|
999,261 | f3a4f92122d465e0640143b15dae78a35c82b933 | import matplotlib.pyplot as plt
import numpy as np
# BUG FIX: skimage was used below but never imported (NameError at runtime).
import skimage.io

# Load the image as grayscale.
# NOTE(review): skimage's as_gray typically yields floats scaled to [0, 1],
# which makes the +100 / 255 arithmetic below suspicious -- confirm the
# intended value range.
img = skimage.io.imread('tiger.png', as_gray=True)
h, w = img.shape
# Brighten by 100 and clamp to the 8-bit ceiling in one vectorized step
# (replaces the explicit ones-matrix and the per-pixel clipping loops).
img1 = np.clip(img + 100, None, 255)
# Rotate 90 degrees clockwise: rot90(img, -1)[i, j] == img[h-1-j, i],
# identical to the original column-by-column copy.
img90 = np.rot90(img, -1)
# Horizontal mirror of the brightened image (replaces the column loop).
imgr = img1[:, ::-1]
# Downsample by a factor of 10 in both axes.
imgcut = img1[::10, ::10]
plt.figure(1)
plt.imshow(imgcut, cmap='gray', vmin=0, vmax=255)
plt.show()
#:j
#[:,1] stupac (column)
#[1,:] redak (row)
999,262 | 5b04e5d7a34b705c8f615e79dafa1cd70e63bcd7 | # Load in all required libraries
import os
import sys
import pandas as pd
import boto3
import botocore.exceptions
import json
import configparser
import time
# Set path to current directory so relative paths (./aws.cfg) resolve next
# to this script regardless of the caller's working directory
os.chdir(os.path.dirname(sys.argv[0]))
# Open and read the contents of the config file
# NOTE(review): the handle passed to read_file is never closed; a `with`
# block would be cleaner.
ioc_config = configparser.ConfigParser()
ioc_config.read_file(open('./aws.cfg'))
# Load all the keys needed to create AWS services
KEY = ioc_config.get('AWS','KEY')                              # AWS access key id
SECRET = ioc_config.get('AWS','SECRET')                        # AWS secret access key
DWH_REGION = ioc_config.get("REDSHIFT","REGION")               # cluster region
DWH_CLUSTER_TYPE = ioc_config.get("REDSHIFT","CLUSTER_TYPE")   # e.g. multi-node
DWH_NUM_NODES = ioc_config.get("REDSHIFT","NUM_NODES")
DWH_NODE_TYPE = ioc_config.get("REDSHIFT","NODE_TYPE")
DWH_CLUSTER_IDENTIFIER = ioc_config.get("REDSHIFT","CLUSTER_IDENTIFIER")
DWH_IAM_ROLE_NAME = ioc_config.get("REDSHIFT","IAM_ROLE_NAME")
DWH_DB = ioc_config.get("CLUSTER","DB_NAME")
DWH_DB_USER = ioc_config.get("CLUSTER","DB_USER")
DWH_DB_PASSWORD = ioc_config.get("CLUSTER","DB_PASSWORD")
DWH_PORT = ioc_config.get("CLUSTER","DB_PORT")
def create_client(name, func):
    """Build an AWS resource/client for the needed infrastructure
    (EC2, S3, IAM, Redshift).

    Keyword arguments:
    name -- the name of the AWS service
    func -- the boto3 factory to call (boto3.resource or boto3.client)
    """
    print("Creating client for", name)
    credentials = {
        'region_name': DWH_REGION,
        'aws_access_key_id': KEY,
        'aws_secret_access_key': SECRET,
    }
    return func(name, **credentials)
def create_iam_role(iam):
    """Create the IAM role Redshift assumes to call other AWS services.

    Keyword arguments:
    iam -- a boto3.client for IAM
    """
    print("Creating a new IAM Role")
    try:
        # Trust policy: only the redshift.amazonaws.com service principal
        # may assume this role.
        resp = iam.create_role(Path='/',
                               RoleName=DWH_IAM_ROLE_NAME,
                               Description="Allows Redshift clusters to call AWS services on your behalf.",
                               AssumeRolePolicyDocument=json.dumps({'Statement': [{'Action': 'sts:AssumeRole',
                                                                                   'Effect': 'Allow',
                                                                                   'Principal': {'Service': 'redshift.amazonaws.com'}}],
                                                                    'Version': '2012-10-17'}
                                                                   )
                               )
        print("IAM Role created:")
        print(resp)
    except iam.exceptions.EntityAlreadyExistsException:
        # Idempotent: re-running the provisioning script is fine.
        print("IAM Role already created")
    except Exception as e:
        print("Error creating IAM Role:", e)
def create_arn_role(iam):
    """Attach the S3 read-only policy to the role and return the role ARN.

    Keyword arguments:
    iam -- a boto3.client for IAM
    """
    print("Attaching policy to IAM role")
    # The original subscripted the response down to the HTTP status code and
    # then discarded it; keep the status and surface non-200 responses.
    status = iam.attach_role_policy(
        RoleName=DWH_IAM_ROLE_NAME,
        PolicyArn="arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess",
    )['ResponseMetadata']['HTTPStatusCode']
    if status != 200:
        print("Warning: attach_role_policy returned HTTP status", status)
    roleArn = iam.get_role(RoleName=DWH_IAM_ROLE_NAME)['Role']['Arn']
    return roleArn
def create_redshift_cluster(redshift, roleArn):
    """Create the Redshift cluster (Warning, this costs money - make sure to use it or delete it!).

    Keyword arguments:
    redshift -- a boto3.client for Redshift
    roleArn -- ARN of the role granting the cluster read access to S3
    """
    cluster = redshift.create_cluster(
        # Hardware provisioned
        ClusterType=DWH_CLUSTER_TYPE,
        NodeType=DWH_NODE_TYPE,
        NumberOfNodes=int(DWH_NUM_NODES),
        # Identifiers & Credentials
        DBName=DWH_DB,
        ClusterIdentifier=DWH_CLUSTER_IDENTIFIER,
        MasterUsername=DWH_DB_USER,
        MasterUserPassword=DWH_DB_PASSWORD,
        # Roles (for s3 access)
        IamRoles=[roleArn]
    )
    print("Creating Redshift cluster with", DWH_NUM_NODES, "nodes, on", DWH_REGION)
    return cluster
def query_redshift_status(redshift):
    """Poll the cluster once a minute until available; return its properties.

    Keyword arguments:
    redshift -- a boto3.client for Redshift
    """

    def _props_frame(props, limited=True):
        # Reduce the raw property dict to the columns worth displaying.
        if limited:
            wanted = ["ClusterStatus"]
        else:
            wanted = ["ClusterIdentifier", "NodeType", "ClusterStatus",
                      "MasterUsername", "DBName", "Endpoint",
                      "NumberOfNodes", 'VpcId']
        rows = [(key, value) for key, value in props.items() if key in wanted]
        return pd.DataFrame(data=rows, columns=["Key", "Value"])

    # Poll, printing the current status each round, until 'available'.
    while True:
        cluster_props = redshift.describe_clusters(
            ClusterIdentifier=DWH_CLUSTER_IDENTIFIER)['Clusters'][0]
        print(_props_frame(cluster_props, limited=True).values)
        if cluster_props['ClusterStatus'] == 'available':
            break
        time.sleep(60)  # sleep 60 seconds, then look again

    # Print full details once the cluster is available, then return them.
    print(_props_frame(cluster_props, limited=False))
    return cluster_props
def get_redshift_endpoint_info(redshift, cluster_props):
    """Return (endpoint address, IAM role ARN) for the created cluster.

    Keyword arguments:
    redshift -- a boto3.client for Redshift (unused; kept so existing
                callers keep working)
    cluster_props -- cluster properties dict for the created Redshift cluster
    """
    # The original issued an extra describe_clusters() call here and threw
    # the result away; everything needed is already in cluster_props, so the
    # redundant (billable, network) call was removed.
    DWH_ENDPOINT = cluster_props['Endpoint']['Address']
    DWH_ROLE_ARN = cluster_props['IamRoles'][0]['IamRoleArn']
    return (DWH_ENDPOINT, DWH_ROLE_ARN)
def update_cluster_security_group(ec2, cluster_props):
    """Open the Redshift port on the cluster VPC's default security group.

    Keyword arguments:
    ec2 -- a boto3.resource for EC2
    cluster_props -- cluster properties for the created Redshift cluster
    """
    vpc = ec2.Vpc(id=cluster_props['VpcId'])
    # The first Security group should be the default one
    defaultSg = list(vpc.security_groups.all())[0]
    print("Default Security group:", defaultSg)
    # Authorize access.
    # NOTE(review): 0.0.0.0/0 opens the port to the whole internet --
    # acceptable for a course exercise, not for production.
    try:
        defaultSg.authorize_ingress(GroupName=defaultSg.group_name,
                                    CidrIp='0.0.0.0/0',
                                    IpProtocol='TCP',
                                    FromPort=int(DWH_PORT),
                                    ToPort=int(DWH_PORT)
                                    )
        print("Access authorized")
    except botocore.exceptions.ClientError as e:
        # Typically raised when the ingress rule already exists.
        print("ClientError:", e)
    except Exception as e:
        print("Error:", e)
def test_connection():
    """Validate the new cluster by opening (and closing) a psycopg2 connection."""
    import psycopg2  # local import: only needed once the cluster exists
    dwh_config = configparser.ConfigParser()
    dwh_config.read_file(open('./aws.cfg'))
    try:
        # NOTE(review): relies on the [CLUSTER] section listing exactly
        # host, dbname, user, password, port -- in that order. Confirm the
        # aws.cfg layout matches, otherwise the format() fills wrong slots.
        conn = psycopg2.connect("host={} dbname={} user={} password={} port={}".format(*dwh_config['CLUSTER'].values()))
        _ = conn.cursor()
        print('Connected to AWS Redshift cluster')
        conn.close()
    except Exception as e:
        print('Error connecting to AWS Redshift cluster:', e)
def main():
    """Stand up a Redshift cluster and save connection information to redshift.cfg."""
    # Creating resources/clients for all needed infrastructure: EC2, IAM, Redshift
    ec2 = create_client('ec2', boto3.resource)
    iam = create_client('iam', boto3.client)
    redshift = create_client('redshift', boto3.client)
    # Create needed IAM / ARN roles for Redshift
    create_iam_role(iam)
    arn_role = create_arn_role(iam)
    # Create cluster and await its completion (blocks until 'available')
    create_redshift_cluster(redshift, arn_role)
    cluster_props = query_redshift_status(redshift)
    # Get endpoint info to allow querying
    info = get_redshift_endpoint_info(redshift, cluster_props)
    print(info)
    # TODO: Save info to aws.cfg
    # Update security groups to ACTUALLY allow querying
    update_cluster_security_group(ec2, cluster_props)
    # Test connection to see that everything (hopefully) went well
    test_connection()
    # End of main
    return


if __name__ == "__main__":
    main()
|
999,263 | c0742cbff221a2a82ffdd9adda0f84f68c074a6a | import unittest
import json
import tests.webapp.test_client
class TestReadyForBattle(unittest.TestCase):
    """Integration tests for the ready-for-battle endpoint."""

    def setUp(self):
        # Fresh web test client for every test case.
        self.app = tests.webapp.test_client.build()

    def test_ready_for_battle_when_no_ships_deployed(self):
        # First player creates a battle, second player joins it.
        self._do_attack_request(self._do_auth_request())
        auth_token = self._do_auth_request()
        self._do_attack_request(auth_token)
        # Readying up with no ships deployed must be rejected with 400.
        response = self._do_ready_request(auth_token)
        self.assertEqual(400, response.status_code)
        response = self._do_get_curret_battle_request(auth_token)
        response_body = json.loads(response.get_data().decode('utf-8'))
        # State 2 is presumably the deployment phase -- TODO confirm against
        # the battle-state enum in the application code.
        self.assertEqual(2, response_body['state'])
        self.assertFalse(response_body['my_battlefield']['ready_for_battle'])

    def test_ready_for_battle_when_all_ships_deployed(self):
        self._do_attack_request(self._do_auth_request())
        auth_token = self._do_auth_request()
        self._do_attack_request(auth_token)
        body = {
            'ships': [
                {'id': 'id:0', 'x': 3, 'y': 4, 'orientation': 1},
                {'id': 'id:1', 'x': 5, 'y': 7, 'orientation': 2}
            ]
        }
        self._deploy_request(auth_token, body)
        # With every ship deployed the ready call succeeds (204 No Content).
        response = self._do_ready_request(auth_token)
        self.assertEqual(204, response.status_code)
        response = self._do_get_curret_battle_request(auth_token)
        response_body = json.loads(response.get_data().decode('utf-8'))
        self.assertEqual(2, response_body['state'])
        self.assertTrue(response_body['my_battlefield']['ready_for_battle'])

    def test_change_state_when_both_players_ready(self):
        body = {
            'ships': [
                {'id': 'id:0', 'x': 3, 'y': 4, 'orientation': 1},
                {'id': 'id:1', 'x': 5, 'y': 7, 'orientation': 2}
            ]
        }
        auth_token1 = self._do_auth_request()
        self._do_attack_request(auth_token1)
        auth_token = self._do_auth_request()
        self._do_attack_request(auth_token)
        self._deploy_request(auth_token, body)
        self._do_ready_request(auth_token)
        self._deploy_request(auth_token1, body)
        self._do_ready_request(auth_token1)
        # Once both sides are ready the battle advances to state 3.
        response = self._do_get_curret_battle_request(auth_token)
        response_body = json.loads(response.get_data().decode('utf-8'))
        self.assertEqual(3, response_body['state'])

    # --- request helpers -------------------------------------------------

    def _do_auth_request(self):
        """Create an account and return its auth token."""
        response = self.app.post('/api/v1/account')
        return response.headers['X-AuthToken']

    def _do_attack_request(self, auth_token):
        """Create/join a battle for the given player."""
        return self.app.post(
            '/api/v1/battle',
            headers={'X-AuthToken': auth_token}
        )

    def _do_ready_request(self, auth_token):
        """Mark the player's battlefield as ready."""
        return self.app.put(
            '/api/v1/battle/ready',
            headers={'X-AuthToken': auth_token}
        )

    def _deploy_request(self, auth_token, body):
        """Deploy the given ships on the player's battlefield."""
        return self.app.put(
            '/api/v1/battle/my-battlefield',
            headers={'X-AuthToken': auth_token},
            data=json.dumps(body)
        )

    def _do_get_curret_battle_request(self, auth_token):
        """Fetch the player's current battle (method-name typo kept: callers use it)."""
        return self.app.get(
            '/api/v1/battles',
            headers={'X-AuthToken': auth_token}
        )
|
999,264 | de52ef7dbeaddc84d5003c0bbffedac35c570516 | # -*- coding: utf-8 -*-
"""PythonProblem10.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/15hLFFwmlsAPEApEA62wQtewfBCO1YTrt
Problem 10: Print characters at even index in string
Solution:
"""
# Assigning string a value
String = "Welcome to the future"

# Characters at even indices; a step-2 slice replaces the original
# index-by-index loop with a modulo test.
evenstr = String[0::2]

# Print the result explicitly: the original bare `evenstr` expression only
# echoed the value inside a Colab/Jupyter cell and is a no-op as a script.
print(evenstr)
|
999,265 | d76adff37ff35fc4a3d23ae5073f30ee0ec3b6e8 | #!/opt/Python/2.7.3/bin/python
import sys
from collections import defaultdict
import numpy as np
import re
import os
import argparse
import glob
import time
from Bio import SeqIO
def usage():
    """Print command-line usage for this Python 2 helper script."""
    test = "name"  # NOTE(review): unused local, kept as-is
    message = '''
python RunInsertSize.py --input /rhome/cjinfeng/BigData/00.RD/RILs/QTL_pipe/input/fastq/RILs_ALL_bam/
Get insert size for bam in directory of input.
--input: directory of bam files, GN160.bam
--project: output directory
'''
    print message
def runjob(script, lines):
    """Submit *script* to the SLURM scheduler, *lines* commands per job."""
    # PBS submission alternative kept for reference:
    # cmd = 'perl /rhome/cjinfeng/BigData/software/bin/qsub-pbs.pl --maxjob 30 --lines %s --interval 120 --resource walltime=100:00:00,mem=5G --convert no %s' %(lines, script)
    cmd = 'perl /rhome/cjinfeng/BigData/software/bin/qsub-slurm.pl --maxjob 5 --lines %s --interval 120 --task 1 --mem 20G --time 100:00:00 --convert no %s' % (lines, script)
    os.system(cmd)
#readgroup:RIL1_0_CGTACG_FC153L5 platform:Illumina map:../input/bam/GN1.bam readlen:100.18 lib:RIL1_0_CGTACG_FC153L5 num:10001 lower:76.38 upper:303.56 mean:141.39 std:25.74 SWnormality:-71.20 exe:samtools view
def getsize(infile):
    """Parse a breakdancer bam2cfg config file; return insert-size stats.

    Returns [mean, std, lower, upper] taken from the *last* data line of the
    file (bam2cfg configs normally contain a single record line -- see the
    example line in the comment above this function); each field is stripped
    of its 'key:' prefix. Returns [] when no data line is found.
    """
    size = []
    with open(infile, 'r') as filehd:
        for line in filehd:
            line = line.rstrip()
            if len(line) > 2:  # skip blank/near-empty lines
                unit = re.split(r'\t', line)
                # Columns 8,9 hold mean/std and 6,7 hold lower/upper as
                # 'key:value' tokens; keep only the value part.
                size = [re.sub(r'.*:', '', unit[idx]) for idx in (8, 9, 6, 7)]
    return size
def main():
    """Generate bam2cfg configs for all bams, then collect insert sizes."""
    # Parse command-line options (see usage() for details).
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--input')
    parser.add_argument('-p', '--project')
    parser.add_argument('-v', dest='verbose', action='store_true')
    args = parser.parse_args()
    try:
        len(args.input) > 0
    except:
        # NOTE(review): len(None) raising TypeError is what actually lands
        # here when --input is omitted; the bare except also hides other
        # errors -- confirm this is intentional.
        usage()
        sys.exit(2)

    if not args.project:
        args.project = 'Rice_pop'
    project = os.path.abspath(args.project)

    # Write one bam2cfg command per bam that has no (non-empty) config yet.
    shell = open('%s.InsertSize.sh' % (project), 'w')
    bams = glob.glob('%s/*.bam' % (args.input))
    count = 0
    for bam in sorted(bams):
        prefix = os.path.split(os.path.splitext(bam)[0])[1]
        cfg = '%s/%s.config' % (args.input, prefix)
        if not os.path.exists(cfg) or os.path.getsize(cfg) == 0:
            bam2cfg = 'perl /rhome/cjinfeng/BigData/software/SVcaller/breakdancer-1.1.2/bin/bam2cfg.pl %s > %s' % (bam, cfg)
            print >> shell, bam2cfg
            count += 1
    shell.close()

    # Submit the generated script only when there is work to do.
    if count > 0:
        runjob('%s.InsertSize.sh' % (project), 1)
    else:
        print 'No Job'
    # os.system('rm -R *.InsertSize.sh*')

    # Collect mean/std/lower/upper per bam into a single list file.
    ofile = open('%s.InsertSize.list' % (project), 'w')
    for bam in sorted(bams):
        prefix = os.path.split(os.path.splitext(bam)[0])[1]
        cfg = '%s/%s.config' % (args.input, prefix)
        size = getsize(cfg)
        # size, std, lower, upper
        print >> ofile, '%s\t%s\t%s\t%s\t%s' % (bam, size[0], size[1], size[2], size[3])
    ofile.close()


if __name__ == '__main__':
    main()
|
999,266 | 5265036dbe1cd26ac54328fbbed2a24ecd562523 | #!/usr/bin/env python
# coding:utf-8
import subprocess

# Minimal demo: run `ls`, print the Popen object, then its captured stdout.
# An argv list with the default shell=False avoids spawning a needless shell
# for a single command.
a = subprocess.Popen(['ls'], stdout=subprocess.PIPE)
print(a)
# communicate() waits for the process to exit and drains the pipe in one
# step, instead of reading the raw pipe directly.
out, _ = a.communicate()
print(out.decode("utf8"))
999,267 | b3851ea8f75769cf917e6ce7f66199304f038e96 | from django.db.models import Avg
from django.http import Http404
from django.shortcuts import render, get_object_or_404
from .models import Book
# Create your views here.
def index(request):
    """List all books, newest first, with total count and average rating."""
    # we can sort the data using order_by() function | add - for descending order
    # books = Book.objects.all()
    books = Book.objects.all().order_by("-id")
    # aggregation functions
    total_number_of_book = books.count()
    # aggregate() returns a dict, e.g. {'rating__avg': ...}
    average_rating = books.aggregate(Avg("rating"))  # here "rating" is the model field name
    return render(request, "book_outlet/index.html", {
        "books": books,
        "total_number_of_book": total_number_of_book,
        "average_rating": average_rating
    })
def book_details(request, slug):
    """Render the detail page for one book, returning 404 for an unknown slug."""
    # Method 1 (manual lookup, kept for reference):
    # try:
    #     book = Book.objects.get(pk=id)
    # except:
    #     raise Http404()
    # Method 2
    book = get_object_or_404(Book, slug=slug)
    # Template name fixed: the original "book_outlet/book_details.html/" had
    # a trailing slash, which the template loader cannot resolve to a file.
    return render(request, "book_outlet/book_details.html", {
        "title": book.title,
        "author": book.author,
        "rating": book.rating,
        "is_bestselling": book.is_bestselling
    })
|
999,268 | 27701991639a3d2aeeb7689fba6321c07836db99 | #-------------------------------------------------------------------------------
# Name: pygametools.py
# Purpose: This module will hold different tools that I have made to improve
# the rate of development of pygame programs
#
# Author: James
#
# Created: 18/06/2014
# Copyright: (c) James 2014
# Licence: <your licence>
#-------------------------------------------------------------------------------
#!/usr/bin/env python
import pygame, sys
class Button(pygame.sprite.Sprite):
    def __init__(self, type_of_button, fileortext, position, midpoint=False, resize=False, fontsize=36, surface=None):
        """This class will help make quick buttons for use with pygame.

        If 0 is passed into type_of_button a text button will be made and if
        a 1 is passed a picture button will be made. The fileortext variable
        holds the file name for a picture button or the text to be displayed
        for a text button. position is the (x, y) location of the button.
        If midpoint is True the (x, y) position is the midpoint position
        rather than the top left pixel. resize, when given, is a (w, h)
        tuple used to scale a picture button. If surface is given the button
        is blitted onto it immediately.

        NOTE(review): for any type_of_button other than 0 or 1, self.image
        is never assigned and the self.rect line below raises -- confirm
        callers only ever pass 0 or 1.
        """
        pygame.sprite.Sprite.__init__(self)
        pygame.font.init()
        basicfont = pygame.font.Font(None, fontsize)
        # Create a text button
        if type_of_button == 0:
            # Create the text surface and find the size and midpoint of that surface
            self.text = basicfont.render(fileortext, 0, (1, 1, 1))
            self.textsize = self.text.get_size()
            self.textmidp = (int(self.textsize[0] * 0.5), int(self.textsize[1] * 0.5))
            # Create the background box (padded ~25% wider / ~43% taller than the text)
            self.image = pygame.Surface((int(self.textsize[0] * 1.25), int(self.textsize[1] * 1.429)))
            self.imagesize = self.image.get_size()
            self.imagemidp = (int(self.imagesize[0] * 0.5), int(self.imagesize[1] * 0.5))
            self.image.fill((67, 110, 238))
            # Center the text at the center of the box
            self.image.blit(self.text, (self.imagemidp[0] - self.textmidp[0], self.imagemidp[1] - self.textmidp[1]))
        # Create a picture button
        elif type_of_button == 1:
            self.image = pygame.image.load(fileortext)
            # Change the size of the picture if necessary
            if resize:
                self.image = pygame.transform.scale(self.image, resize)
            self.imagemidp = (int(self.image.get_width() * 0.5), int(self.image.get_height() * 0.5))
        # if a midpoint arguement is passed set the pos to the top left pixel
        # such that the position passed in is in the middle of the button
        if midpoint:
            self.pos = (position[0] - self.imagemidp[0], position[1] - self.imagemidp[1])
        else:
            self.pos = position
        # set the rectangle to be used for collision detection
        self.rect = pygame.Rect(self.pos, self.image.get_size())
        # Set up the information that is needed to blit the image to the surface
        self.blitinfo = (self.image, self.pos)
        # automatically blit the button onto an input surface
        if surface:
            surface.blit(*self.blitinfo)
class Linesoftext(object):
    def __init__(self, text, position, xmid=False, fontsize=36, backgroundcolor=(200, 200, 200), surface=None):
        """This object will create an image of text that is passed in as a list
        of strings. It will put a new line for each element in the list. Use
        its image attribute to put this text on your screen."""
        pygame.font.init()
        basicfont = pygame.font.Font(None, fontsize)
        # Figure out the size of the image that will be drawn on and create
        # that image (width of the widest rendered line).
        self.linewidths = []
        for x in text:
            self.texttemp = basicfont.render(x, 0, (1, 1, 1))
            self.linewidths.append(self.texttemp.get_width())
        self.imagewidth = basicfont.render(text[self.linewidths.index(max(self.linewidths))], 0, (1, 1, 1)).get_width()
        # One fontsize-tall row per line plus a 10 px gap between lines.
        self.imageheight = len(text) * fontsize + (len(text) - 1) * 10
        self.image = pygame.Surface((self.imagewidth, self.imageheight))
        self.image.fill(backgroundcolor)
        # Draw the text to the image
        n = 0
        for x in text:
            self.texttemp = basicfont.render(x, 0, (1, 1, 1))
            self.image.blit(self.texttemp, (0, n * fontsize + n * 10))
            n += 1
        # Set the position of the text. If xmid is passed in as true set the
        # pos to the top middle pixel of the text
        if xmid:
            self.pos = (position[0] - int(self.image.get_width() / 2), position[1])
        else:
            self.pos = position
        # Set up the information that will be needed to blit the image to a
        # surface
        self.blitinfo = (self.image, self.pos)
        # automatically blit the text onto an input surface
        if surface:
            surface.blit(*self.blitinfo)

    def test(self, windowsize=False):
        """This can be used to quickly test the spacing of the words. If you
        want to test how the text looks with a specific window you can pass
        in a (width, height) into windowsize.

        NOTE(review): this enters an event loop that never returns; it exits
        the whole program via sys.exit() when the window is closed.
        """
        # set up a specific window to test the text in
        if windowsize:
            self.screen = pygame.display.set_mode(windowsize)
            self.screen.fill((200, 200, 200))
            self.screen.blit(*self.blitinfo)
        # if no specific window is specified create a small one around the
        # outside of the text
        else:
            self.screen = pygame.display.set_mode((self.imagewidth + 20, self.imageheight + 20))
            self.screen.fill((200, 200, 200))
            self.screen.blit(self.image, (10, 10))
        pygame.display.flip()
        while True:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    pygame.quit()
                    sys.exit()
|
999,269 | fef955b0820211efda48f757e91e888b2cbdd127 | import email
import imaplib
import re
import time
from .base_page import BasePage
from .locators import MainPageLocators
class MainPage(BasePage):
    """Page object for the main page: sign-up/login forms and e-mail code retrieval."""

    def should_be_login_in_link(self):
        self.is_link_correct("login")

    def should_be_auth_in_link(self):
        self.is_link_correct("auth")

    def fill_first_name_in_sign_up_form(self):
        self.browser.find_element(*MainPageLocators.FIRST_NAME_FIELD).send_keys("user_name")

    def fill_last_name_in_sign_up_form(self):
        self.browser.find_element(*MainPageLocators.LAST_NAME_FIELD).send_keys("user_last_name")

    def fill_email_in_form(self, login):
        self.browser.find_element(*MainPageLocators.EMAIL_FIELD).send_keys(login)

    def fill_password_in_form(self, password):
        self.browser.find_element(*MainPageLocators.PASSWORD_FIELD).send_keys(password)

    def choose_pp_checkbox_in_sign_up_form(self):
        # Accept the privacy-policy checkbox.
        self.browser.find_element(*MainPageLocators.PP_CHECKBOX).click()

    def click_sign_up_button(self):
        self.click_if_element_present(*MainPageLocators.SIGN_UP_BTN)

    def go_to_login_form(self):
        self.browser.find_element(*MainPageLocators.LOGIN_FORM_BTN).click()

    def click_login_button(self):
        self.click_if_element_present(*MainPageLocators.LOGIN_BTN)

    def get_code(self):
        """Fetch the registration confirmation code from the Gmail inbox.

        Reads the newest message and extracts the 'NN-NN' code via regex
        (the prompt text inside the regex is Russian and must stay as-is --
        it matches the mail body).
        NOTE(review): hard-coded mailbox credentials and a fixed 60 s sleep;
        move the credentials to config and poll instead of sleeping before
        using this outside throwaway test runs.
        """
        time.sleep(60)  # give the confirmation e-mail time to arrive
        mail = imaplib.IMAP4_SSL('imap.gmail.com')
        mail.login('vasyadasha328@gmail.com', 'Pp123456')
        mail.list()
        mail.select("inbox")
        result, data = mail.search(None, "ALL")
        ids = data[0]
        id_list = ids.split()
        latest_email_id = id_list[-1]
        result, data = mail.fetch(latest_email_id, "(RFC822)")
        raw_email = data[0][1].decode('utf-8')
        email_message = email.message_from_string(raw_email)
        for payload in email_message.get_payload():
            body = payload.get_payload(decode=True).decode('utf-8')
            code = re.findall(
                r'Вы можете использовать приведенный ниже код подтверждения, чтобы завершить регистрацию. '
                r'Просто скопируйте и вставьте его на страницу завершения регистрации.\s*(\d{2}-\d{2})', body)
            return code
|
999,270 | 43eab73794d4f278a6397907118c00ef973832c9 | from django.db import models
class ExerciseEvent(models.Model):
    """One recorded workout session."""
    woType = models.CharField(max_length=100)  # Stores in the name of a workout type from the program
    startTime = models.DateTimeField()  # Time functions are stored in from Python because they're cleaner
    endTime = models.DateTimeField()  # Same as above
    elapsedTime = models.IntegerField(default=0)  # The time it took to finish (seconds, see duration())
    location = models.CharField(max_length=100)  # City/Town name goes here
    calories = models.IntegerField(default=0)  # Maybe
    # Link it to a user

    def duration(self):
        # NOTE(review): only sets the field in memory -- the caller must
        # still save() the model; nothing is returned. Confirm intended.
        self.elapsedTime = int((self.endTime - self.startTime).total_seconds())

    def __unicode__(self):
        # Python 2 / old-Django string representation.
        return "A " + self.woType + " in " + self.location + " on " + str(self.startTime.date()) + " at " + str(self.startTime.time())
class Goal(models.Model):
    """A target attached to an ExerciseEvent."""
    event = models.ForeignKey(ExerciseEvent)  # All goals need Events, but not all events need goals
    gType = models.CharField(max_length=20)  # Stored in from the site (Types: Distance, Calories, Time)
    gValue = models.IntegerField(default=0)  # What do you want your: Distance, Calories, Time to be
    gActual = models.IntegerField(default=0)  # The value at the end of the workout
    gSuccess = models.BooleanField(default=0)  # True if you met your goal (default=0 works; default=False would be clearer)
999,271 | d8cd3133ff8fe874bb4ff19cd1106fbcaaa25acf | class Time(object):
    def __init__(self, hours, minutes, seconds):
        # Plain value holder; no range normalization is performed here.
        self.hours = hours
        self.minutes = minutes
        self.seconds = seconds
def __str__(self):
return str(self.hours) + ':' + str(self.minutes) + ':' + str(self.seconds)
time = Time(11, 59, 30)

# Parenthesized single-argument print behaves identically under Python 2
# and Python 3, unlike the original bare `print time` statements.
print(time)
print(isinstance(time, Time))
def add_time(t1, t2):
    """Return a new Time equal to t1 + t2, carrying seconds and minutes.

    The original version did not normalize, so e.g. 0:0:40 + 0:0:30 gave
    0:0:70; carrying keeps minutes and seconds in the 0-59 range.
    """
    seconds = t1.seconds + t2.seconds
    minutes = t1.minutes + t2.minutes + seconds // 60
    hours = t1.hours + t2.hours + minutes // 60
    return Time(hours, minutes % 60, seconds % 60)
current_time = Time(9, 14, 30)
bread_time = Time(3, 35, 0)

# When the bread will be done: now plus the baking time.
done_time = add_time(current_time, bread_time)
print(done_time)
|
999,272 | 5cd10ff860d8b9d1094d308dfe68e8e2711d7142 | """ Hardcore - The "screen" series - Aligning """
def main():
    """Draw a framed "screen" with one line of aligned text (stdin-driven).

    Reads width, height, 1-based text row, alignment and the text itself;
    prints !!!ERROR!!! for any invalid combination.
    """
    width = int(input())
    height = int(input())
    line = int(input())
    align = input()
    text = input()

    inner = width - 2  # columns available inside the frame
    # All invalid inputs produce the same single error line, so the four
    # separate checks (plus the unknown-alignment branch) collapse into one.
    if (len(text) > inner or width < 3 or height < 3
            or line > height - 2 or line <= 0
            or align not in ("center", "left", "right")):
        print("!!!ERROR!!!")
        return

    pad = inner - len(text)
    if align == "center":
        left_pad = pad // 2
        # For an odd leftover, the extra space goes to the right (as before).
        body = " " * left_pad + text + " " * (pad - left_pad)
    elif align == "left":
        body = text + " " * pad
    else:  # right
        body = " " * pad + text

    _render(width, height, line, body)


def _render(width, height, line, body):
    """Print the frame with *body* on 1-based row *line* inside it."""
    blank_row = "|" + " " * (width - 2) + "|\n"
    print("-" * width)
    print(blank_row * (line - 1), end='')
    print("|" + body + "|")
    # Rows below the text: height minus top rows, text row and two borders.
    print(blank_row * (height - line - 2), end='')
    print("-" * width)


if __name__ == '__main__':
    main()
|
999,273 | ac15abfcf8f8aa1cd77acca971cbad059a297c2f | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class RQuickplot(RPackage):
    """A System of Plotting Optimized for Speed and Modularity.

    A high-level plotting system, built using 'grid' graphics, that is
    optimized for speed and modularity. This has great utility for quick
    visualizations when testing code, with the key benefit that visualizations
    are updated independently of one another."""

    cran = "quickPlot"

    maintainers = ["dorton21"]

    # Pinned CRAN release with its tarball checksum.
    version("0.1.6", sha256="48690a77ae961ed1032130621ef06b2eaf86ee592bf1057471a8c6d6a98ace55")

    # R runtime plus the dependencies declared in the package DESCRIPTION.
    depends_on("r@3.3.0:", type=("build", "run"))
    depends_on("r-backports", type=("build", "run"))
    depends_on("r-data-table@1.10.4:", type=("build", "run"))
    depends_on("r-fpcompare", type=("build", "run"))
    depends_on("r-ggplot2", type=("build", "run"))
    depends_on("r-gridbase", type=("build", "run"))
    depends_on("r-igraph", type=("build", "run"))
    depends_on("r-raster", type=("build", "run"))
    depends_on("r-rcolorbrewer", type=("build", "run"))
    depends_on("r-rgdal", type=("build", "run"))
    depends_on("r-rgeos", type=("build", "run"))
    depends_on("r-sp", type=("build", "run"))
|
999,274 | 1b9a1b7e7d47dbd26053f661af1f8635833c0a07 | import astropy.io.votable
import vaex
from vaex.dataset import DatasetFile
from vaex.dataset_misc import _try_unit
class VOTable(DatasetFile):
    """vaex dataset backed by the first table of a VOTable (.vot) file."""

    snake_name = "votable"

    def __init__(self, filename, fs_options={}, fs=None):
        # NOTE(review): mutable default for fs_options is shared across
        # calls; it is only read here, so harmless in practice.
        super().__init__(filename)
        self.ucds = {}
        self.units = {}
        self.filename = filename
        self.path = filename
        with vaex.file.open(filename, fs_options=fs_options, fs=fs) as f:
            votable = astropy.io.votable.parse(f)
        self.first_table = votable.get_first_table()
        self.description = self.first_table.description
        for field in self.first_table.fields:
            name = field.name
            data = self.first_table.array[name]
            type = self.first_table.array[name].dtype
            clean_name = name
            # Collect per-column metadata (UCD, unit, description) when present.
            if field.ucd:
                self.ucds[clean_name] = field.ucd
            if field.unit:
                unit = _try_unit(field.unit)
                if unit:
                    self.units[clean_name] = unit
            if field.description:
                # NOTE(review): self.descriptions is never initialized in
                # this class; presumably DatasetFile provides it -- confirm.
                self.descriptions[clean_name] = field.description
            if type.kind in "fiubSU":  # only store float and int and boolean
                self.add_column(clean_name, data)  # self.first_table.array[name].data
            if type.kind == "O":
                print("column %r is of unsupported object type , will try to convert it to string" % (name,))
                try:
                    data = data.astype("S")
                    self.add_column(name, data)
                except Exception as e:
                    print("Giving up column %s, error: %r" % (name, e))
            # if type.kind in ["S"]:
            #     self.add_column(clean_name, self.first_table.array[name].data)
        self._freeze()

    @classmethod
    def can_open(cls, path, *args, **kwargs):
        """Accept only paths ending in .vot."""
        can_open = vaex.file.stringyfy(path).endswith(".vot")
        return can_open

    def close(self):
        # Nothing to release: all column data is read eagerly in __init__.
        pass
|
999,275 | c9c127c4a336dd76dd166da3c8627b51ecf1bd69 | #!/usr/bin/env python2
from nvr.nvr import main

# Guard so importing this launcher module does not start nvr itself;
# running it as a script behaves exactly as before.
if __name__ == '__main__':
    main()
|
999,276 | bf6fb66c261b5fdefe4fb1a7ae464219bf5f2422 | usia = [19, 20, 21, 18, 17, 19, 19, 18, 19, 18]
# Number of samples and their total; sum() replaces the manual index loop.
jumlah_data = len(usia)
total_usia = sum(usia)

# float() keeps true-division semantics under Python 2 as well.
rata_rata_usia = total_usia / float(jumlah_data)
999,277 | d53c539a42e6da9227bc178055f925e23ac214c0 | # %%
# %%
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
from datetime import datetime
# %%
from pandas_datareader import data
from pandas_datareader._utils import RemoteDataError
# %%
# Date window for the download (fixed to calendar year 2019).
START_DATE = '2019-01-01'
# END_DATE = str(datetime.now().strftime('%y-%m-%d'))
END_DATE = '2019-12-31'
AMAZON = 'AMZN'  # ticker symbol of the company whose data we want
def get_data(ticker):
    """Download daily price data for *ticker* from Yahoo via pandas-datareader.

    Returns the DataFrame, or None after logging when the remote source has
    no data. (The original hit an UnboundLocalError on that path because
    stock_data was never assigned before `return`.)
    """
    stock_data = None
    try:
        stock_data = data.DataReader(ticker, 'yahoo', START_DATE, END_DATE)
    except RemoteDataError:
        print('no hay datos para {t}'.format(t=ticker))
    return stock_data
# %%
stock = get_data(AMAZON)  # dataframe with Amazon's 2019 data
print(stock.shape)
stock.describe()  # basic statistics of the Amazon dataframe
# %%
stock.head()  # first 5 rows
# %%
# Reindex over every calendar day; non-trading days become NaN rows.
dias = pd.date_range(start=START_DATE, end=END_DATE)
stock = stock.reindex(dias)
stock.head(10)
# %%
# Fill the NaNs via interpolation; 'time' weighting suits a date index.
# NOTE: interpolate works from the surrounding values, so it cannot fill
# the very first row.
stock = stock.interpolate(method='time')
stock.head(10)
# %%
stock.isnull().sum()
# %%
stock = stock.fillna(method='bfill')  # backward-fill the leading NaNs
# %%
"""
## calculando la media movil
"""
# %%
# Rolling means of the closing price over 20- and 10-day windows, plotted
# against the raw closing price.
rolling_data_20 = stock.Close.rolling(window=20, min_periods=1).mean()
rolling_data_10 = stock.Close.rolling(window=10, min_periods=1).mean()
plt.plot(rolling_data_20)
plt.plot(rolling_data_10)
plt.plot(stock.Close)
plt.legend(['media de 20 dias', 'media de 10 dias', 'price'])  # legend labels (runtime strings, kept)
plt.show()
# %%
999,278 | 2f80fae62a66eb0c5109909be6b60f716b61bd82 | # __author__ = 'eslam'
def solveMeFirst(a, b):
    """Return the sum of the two inputs."""
    total = a + b
    return total
# Read the two addends. int() conversion fixes the Python 3 bug where
# input() returns strings, so `+` concatenated them ("1" + "2" == "12")
# instead of adding.
num1 = int(input())
num2 = int(input())
res = solveMeFirst(num1, num2)
print(res)
|
999,279 | 20b5da842881e07ef7f2cb50056bdbb0cdacdfef | import numpy as np
import pytest
from baloo import Index, Series, RangeIndex, DataFrame
from .indexes.utils import assert_indexes_equal
from .test_frame import assert_dataframe_equal
from .test_series import assert_series_equal
class TestEmptyDataFrame(object):
    """Behaviour of zero-row baloo DataFrames."""

    def test_aggregation_empty(self, df_empty):
        # Aggregating no rows yields an empty float series.
        assert_series_equal(df_empty.min(), Series(np.empty(0)))

    @pytest.mark.parametrize('op,expected', [
        ('df < 2', 'df'),
        ('df[2:]', 'df'),
        ('df * 2', 'df'),
        ('df.head()', 'df'),
        ('df.tail()', 'df'),
        ('df.sort_index()', 'df'),
        ('df.reset_index()', 'DataFrame({"index": np.empty(0, dtype=np.int64)}, RangeIndex(0, 0, 1))')
    ])
    def test_empty_ops(self, df_empty, op, expected):
        # Both sides of the table are expression strings eval'd here so the
        # parametrization above stays declarative.
        df = df_empty
        assert_dataframe_equal(eval(op), eval(expected))

    @pytest.mark.parametrize('op,exception', [
        ('df.agg(["mean", "var"])', ValueError),
        ('df["a"]', KeyError),
        ('df[["a", "b"]]', KeyError),
        ('df.drop("a")', KeyError),
        ('df.drop(["a", "b"])', KeyError)
    ])
    def test_empty_exceptions(self, df_empty, op, exception):
        # Operations referencing nonexistent columns must raise, not return.
        df = df_empty
        with pytest.raises(exception):
            eval(op)

    def test_keys_empty(self, df_empty):
        # The column index of an empty frame is an empty bytes-dtype Index.
        assert_indexes_equal(df_empty.keys(), Index(np.empty(0, dtype=np.dtype('S'))))
def test_empty_series_init():
    # Series() with no args defaults to empty float64 data over RangeIndex(0, 0, 1).
    assert_series_equal(Series(), Series(np.empty(0), RangeIndex(0, 0, 1), np.dtype(np.float64)))
def test_empty_dataframe_init():
    # DataFrame() with no args defaults to no columns over an empty int64 index.
    assert_dataframe_equal(DataFrame(), DataFrame({}, Index(np.empty(0, dtype=np.int64))))
|
999,280 | b931c914296ffed3a93513c985ea281edccf0047 | from enum import Enum
from enum import auto
# Names exported by `from <module> import *`.
__all__ = (
    'Side',
    'WebSocketType',
    'MessageFeedType',
    'TradeEvent',
    'AccountEvent',
    'OrderStatusType',
    'OrderType',
    'StopLimitType',
    'TimeInForce',
    'BatchOrderType',
    'TransactionType',
    'CurrencyPair',
)
class NameStrEnum(Enum):
    """Enum base whose str() form is the bare member name."""

    def __str__(self):
        # Enum member names are already str, so no conversion is required.
        return self.name
class Side(NameStrEnum):
    """Order side (buy or sell)."""
    SELL = auto()
    BUY = auto()


class WebSocketType(Enum):
    """Websocket endpoint paths for the two feeds."""
    ACCOUNT = '/ws/account'
    TRADE = '/ws/trade'


class MessageFeedType(NameStrEnum):
    """Control/keep-alive message types on a websocket feed."""
    SUBSCRIBE = auto()
    SUBSCRIBED = auto()
    AUTHENTICATED = auto()
    UNSUPPORTED = auto()
    PING = auto()
    PONG = auto()
class TradeEvent(NameStrEnum):
    """Event types published on the trade websocket."""
    AGGREGATED_ORDERBOOK_UPDATE = auto()
    MARKET_SUMMARY_UPDATE = auto()
    NEW_TRADE_BUCKET = auto()
    NEW_TRADE = auto()


class AccountEvent(NameStrEnum):
    """Event types published on the account websocket."""
    NEW_ACCOUNT_HISTORY_RECORD = auto()
    BALANCE_UPDATE = auto()
    NEW_ACCOUNT_TRADE = auto()
    INSTANT_ORDER_COMPLETED = auto()
    OPEN_ORDERS_UPDATE = auto()
    ORDER_PROCESSED = auto()
    ORDER_STATUS_UPDATE = auto()
    FAILED_CANCEL_ORDER = auto()
    NEW_PENDING_RECEIVE = auto()
    SEND_STATUS_UPDATE = auto()
class OrderStatusType(NameStrEnum):
    """Lifecycle states an order can report."""
    PLACED = auto()
    FAILED = auto()
    CANCELLED = auto()
    FILLED = auto()
    PARTIALLY_FILLED = auto()
    INSTANT_ORDER_BALANCE_RESERVE_FAILED = auto()
    INSTANT_ORDER_BALANCE_RESERVED = auto()
    INSTANT_ORDER_COMPLETED = auto()


class OrderType(NameStrEnum):
    """Kinds of orders that can be placed."""
    LIMIT_POST_ONLY = auto()
    MARKET = auto()
    LIMIT = auto()
    SIMPLE = auto()
    STOP_LOSS_LIMIT = auto()
    TAKE_PROFIT_LIMIT = auto()


class StopLimitType(NameStrEnum):
    """Subset of order types valid for stop-limit orders."""
    TAKE_PROFIT_LIMIT = auto()
    STOP_LOSS_LIMIT = auto()


class TimeInForce(NameStrEnum):
    """Time-in-force: good-till-cancelled, fill-or-kill, immediate-or-cancel."""
    GTC = auto()
    FOK = auto()
    IOC = auto()
class BatchOrderType(NameStrEnum):
    """Operations allowed inside a batch order request."""
    PLACE_MARKET = auto()
    PLACE_LIMIT = auto()
    PLACE_STOP = auto()
    CANCEL_ORDER = auto()


class TransactionType(NameStrEnum):
    """Account-history transaction categories."""
    LIMIT_BUY = auto()
    LIMIT_SELL = auto()
    MARKET_BUY = auto()
    MARKET_SELL = auto()
    SIMPLE_BUY = auto()
    SIMPLE_SELL = auto()
    AUTO_BUY = auto()
    MAKER_REWARD = auto()
    BLOCKCHAIN_RECEIVE = auto()
    BLOCKCHAIN_SEND = auto()
    FIAT_DEPOSIT = auto()
    FIAT_WITHDRAWAL = auto()
    REFERRAL_REBATE = auto()
    REFERRAL_REWARD = auto()
    PROMOTIONAL_REBATE = auto()
    INTERNAL_TRANSFER = auto()
    FIAT_WITHDRAWAL_REVERSAL = auto()
    PAYMENT_SENT = auto()
    PAYMENT_RECEIVED = auto()
    PAYMENT_REVERSED = auto()
    PAYMENT_REWARD = auto()
    OFF_CHAIN_BLOCKCHAIN_WITHDRAW = auto()
    OFF_CHAIN_BLOCKCHAIN_DEPOSIT = auto()


class CurrencyPair(NameStrEnum):
    """Supported trading pairs."""
    BTCZAR = auto()
    ETHZAR = auto()
    XRPZAR = auto()
    SOLZAR = auto()
|
999,281 | 4af35bcdd2a5ee6aecc982b0b43b8039c6c6476f | """
Behaviors/Magic
===============
.. rubric:: Magical effects for buttons.
.. warning:: Magic effects do not work correctly with `KivyMD` buttons!
To apply magic effects, you must create a new class that is inherited from the
widget to which you apply the effect and from the :attr:`MagicBehavior` class.
In `KV file`:
.. code-block:: kv
<MagicButton@MagicBehavior+MDRectangleFlatButton>
In `python file`:
.. code-block:: python
class MagicButton(MagicBehavior, MDRectangleFlatButton):
pass
.. rubric:: The :attr:`MagicBehavior` class provides five effects:
- :attr:`MagicBehavior.wobble`
- :attr:`MagicBehavior.grow`
- :attr:`MagicBehavior.shake`
- :attr:`MagicBehavior.twist`
- :attr:`MagicBehavior.shrink`
Example:
.. code-block:: python
from kivymd.app import MDApp
from kivy.lang import Builder
KV = '''
#:import MagicBehavior kivymd.uix.behaviors.MagicBehavior
<MagicButton@MagicBehavior+MDRectangleFlatButton>
FloatLayout:
MagicButton:
text: "WOBBLE EFFECT"
on_release: self.wobble()
pos_hint: {"center_x": .5, "center_y": .3}
MagicButton:
text: "GROW EFFECT"
on_release: self.grow()
pos_hint: {"center_x": .5, "center_y": .4}
MagicButton:
text: "SHAKE EFFECT"
on_release: self.shake()
pos_hint: {"center_x": .5, "center_y": .5}
MagicButton:
text: "TWIST EFFECT"
on_release: self.twist()
pos_hint: {"center_x": .5, "center_y": .6}
MagicButton:
text: "SHRINK EFFECT"
on_release: self.shrink()
pos_hint: {"center_x": .5, "center_y": .7}
'''
class Example(MDApp):
def build(self):
return Builder.load_string(KV)
Example().run()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/magic-button.gif
:width: 250 px
:align: center
"""
__all__ = ("MagicBehavior",)
from kivy.animation import Animation
from kivy.factory import Factory
from kivy.lang import Builder
Builder.load_string(
"""
<MagicBehavior>
translate_x: 0
translate_y: 0
scale_x: 1
scale_y: 1
rotate: 0
canvas.before:
PushMatrix
Translate:
x: self.translate_x or 0
y: self.translate_y or 0
Rotate:
origin: self.center
angle: self.rotate or 0
Scale:
origin: self.center
x: self.scale_x or 1
y: self.scale_y or 1
canvas.after:
PopMatrix
"""
)
class MagicBehavior:
    """Mixin adding playful grow/shake/wobble/twist/shrink animations.

    Combine with a widget class, e.g.
    ``class MagicButton(MagicBehavior, MDRectangleFlatButton)``.
    """

    def grow(self):
        """Grow effect animation."""
        Animation.stop_all(self)
        inflate = Animation(scale_x=1.2, scale_y=1.2, t="out_quad", d=0.03)
        settle = Animation(scale_x=1, scale_y=1, t="out_elastic", d=0.4)
        (inflate + settle).start(self)

    def shake(self):
        """Shake effect animation."""
        Animation.stop_all(self)
        push = Animation(translate_x=50, t="out_quad", d=0.02)
        spring_back = Animation(translate_x=0, t="out_elastic", d=0.5)
        (push + spring_back).start(self)

    def wobble(self):
        """Wobble effect animation."""
        Animation.stop_all(self)
        squash = (Animation(scale_y=0.7, t="out_quad", d=0.03)
                  & Animation(scale_x=1.4, t="out_quad", d=0.03))
        recover = (Animation(scale_y=1, t="out_elastic", d=0.5)
                   & Animation(scale_x=1, t="out_elastic", d=0.4))
        (squash + recover).start(self)

    def twist(self):
        """Twist effect animation."""
        Animation.stop_all(self)
        spin = Animation(rotate=25, t="out_quad", d=0.05)
        unwind = Animation(rotate=0, t="out_elastic", d=0.5)
        (spin + unwind).start(self)

    def shrink(self):
        """Shrink effect animation."""
        Animation.stop_all(self)
        Animation(scale_x=0.95, scale_y=0.95, t="out_quad", d=0.1).start(self)


Factory.register("MagicBehavior", cls=MagicBehavior)
|
999,282 | 6e9f2d1e7a2f39c6fd7bec3e8085b8777e769e30 | # Comparing Tuples
# Tuples compare lexicographically: the first differing pair decides the result.
t1 = (80, 90, 56, 44, 99)
t2 = (100, 200, 300, 400, 500)
result = t1 >= t2
print(result)
|
999,283 | f3ceb5b41a7091c5220e8e286ac373971a61cad1 | import numpy as np
import pandas as pd
import pickle
import random
def getMcIntireDataset():
    """Load the McIntire fake-news CSV and return (texts, binary labels).

    Labels: FAKE -> 1, REAL -> 0, as a numpy array.
    """
    frame = pd.read_csv("dataset/mcIntire.csv")
    texts = frame['text'].tolist()
    labels = np.array(frame['label'].map({'FAKE': 1, 'REAL': 0}).tolist())
    print("CSV file loaded!!")
    return texts, labels
def getLiarLiarDataset():
    """Load the LIAR train/test/valid TSVs and return (statements, labels).

    Rows labelled 'half-true' are dropped; remaining labels are binarized
    (true/mostly-true -> 0, barely-true/false/pants-fire -> 1).
    """
    label_map = {'true': 0, 'mostly-true': 0, 'barely-true': 1, 'false': 1, 'pants-fire': 1}

    def load_split(path):
        frame = pd.read_csv(path, sep='\t')
        frame = frame[frame['the label'] != 'half-true']
        statements = frame['the statement'].tolist()
        labels = frame['the label'].map(label_map).tolist()
        print("CSV file loaded!!")
        return statements, labels

    texts, labels = [], []
    for split in ("dataset/liar_dataset/train.tsv",
                  "dataset/liar_dataset/test.tsv",
                  "dataset/liar_dataset/valid.tsv"):
        xs, ys = load_split(split)
        texts += xs
        labels += ys
    return texts, np.array(labels)
def twitterHarvardDataset():
    """Load the pickled Harvard Twitter corpus and return a shuffled, balanced
    sample of 50k non-rumor / 50k rumor texts with binary labels (1 = rumor).

    Sampling and shuffling are seeded, so the result is reproducible.
    """
    filename = "dataset/dataHarvard"
    # Fix: use a context manager — the original never closed the file handle.
    with open(filename, "rb") as f:
        nonRumor = pickle.load(f)
        rumor = pickle.load(f)
    random.seed(5)  # deterministic sampling/shuffling
    x1 = random.sample(nonRumor, k=50000)
    y1 = [0 for i in range(0, 50000)]
    x2 = random.sample(rumor, k=50000)
    y2 = [1 for i in range(0, 50000)]
    x = x1 + x2
    y = y1 + y2
    # Shuffle texts and labels in lockstep.
    c = list(zip(x, y))
    random.shuffle(c)
    x, y = zip(*c)
    x = list(x)
    y = np.array(y)
    print(len(x))
    print(y.shape)
    return x, y
def textpreprocessing(x, y, file):
    """Normalize the corpus `x` and pickle (x, y) to `file`.

    Steps: lowercase, strip digits, strip @handles/URLs/non-alphanumerics,
    strip punctuation, collapse whitespace, remove English stopwords, and
    lemmatize tokens as verbs (pos='v').
    """
    import re
    import string
    from nltk.corpus import stopwords
    from nltk.tokenize import word_tokenize
    from nltk.stem import WordNetLemmatizer

    # Convert to lower case.
    x = [item.lower() for item in x]
    # Remove numbers.
    x = [re.sub(r'\d+', '', i) for i in x]
    # Remove @handles, URLs and other non-alphanumerics
    # (raw string: the original's "\/" is an invalid escape in a plain string).
    x = [' '.join(re.sub(r"(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)", " ", i).split()) for i in x]
    # Remove punctuation (plus em-dash and ellipsis).
    translator = str.maketrans(string.punctuation + '—…', ' ' * (len(string.punctuation) + 2))
    x = [i.translate(translator) for i in x]
    # Collapse whitespace.
    x = [" ".join(i.split()) for i in x]

    # Build the stopword set once (the original rebuilt it per document).
    stop_words = set(stopwords.words("english"))

    def remove_stopwords(text):
        # Drop English stopwords token by token.
        filtered_text = [word for word in word_tokenize(text) if word not in stop_words]
        return " ".join(filtered_text)

    x = [remove_stopwords(i) for i in x]

    lemmatizer = WordNetLemmatizer()

    def lemmatize_word(text):
        # Lemmatize with verb part-of-speech, matching the original pos='v'.
        return " ".join(lemmatizer.lemmatize(word, pos='v') for word in word_tokenize(text))

    x = [lemmatize_word(i) for i in x]
    print("Text preprocessing done!!")
    # Fix: context manager guarantees the pickle is flushed and closed even on
    # error (the original left the handle open, risking a truncated file).
    with open(file, "wb") as f:
        pickle.dump(x, f)
        pickle.dump(y, f)
# Build one combined corpus from the three sources and preprocess it.
x1,y1 = getMcIntireDataset()
x2,y2 = getLiarLiarDataset()
x3,y3 = twitterHarvardDataset()
x = list(x1)+list(x2)+list(x3)
y = np.concatenate((y1,y2,y3))
print(len(x),len(y))
# Writes the cleaned texts and labels to the pickle file "combined".
textpreprocessing(x,y,"combined")
|
999,284 | ee6d426f95a3b19a65bc8293d5937a6d93b7e635 | import unittest
from days import day13
import util
class MyTestCase(unittest.TestCase):
    """Regression tests for the day13 seating-happiness puzzle solution."""

    def test_example_a1(self):
        # Worked example from the puzzle statement: optimal arrangement scores 330.
        table = day13.Table()
        self.assertEqual(table.find_optimal_seating_arrangement(
            ['Alice would gain 54 happiness units by sitting next to Bob.',
             'Alice would lose 79 happiness units by sitting next to Carol.',
             'Alice would lose 2 happiness units by sitting next to David.',
             'Bob would gain 83 happiness units by sitting next to Alice.',
             'Bob would lose 7 happiness units by sitting next to Carol.',
             'Bob would lose 63 happiness units by sitting next to David.',
             'Carol would lose 62 happiness units by sitting next to Alice.',
             'Carol would gain 60 happiness units by sitting next to Bob.',
             'Carol would gain 55 happiness units by sitting next to David.',
             'David would gain 46 happiness units by sitting next to Alice.',
             'David would lose 7 happiness units by sitting next to Bob.',
             'David would gain 41 happiness units by sitting next to Carol.']), 330)

    def test_answer_part_a(self):
        # Known-good answer for this puzzle input file.
        result = day13.part_a(util.get_file_contents('day13.txt'))
        self.assertEqual(result, '709')

    def test_answer_part_b(self):
        # Known-good answer for this puzzle input file.
        result = day13.part_b(util.get_file_contents('day13.txt'))
        self.assertEqual(result, '668')
|
999,285 | d1780fae455fcabc9bfb3dea564aad116457e463 | You are given an array A of size N. Your task is to find the minimum number of operations needed to convert the given array to 'Palindromic Array'.
Palindromic Array:
[23,15,23] is a ‘Palindromic Array’ but [2,0,1] is not.
The only allowed operation is merging two adjacent elements of the array and replacing them with their sum.
Input:
The first line of input contains an integer T denoting the number of test cases.
The first line of each test case is N, where N is the size of array.
The second line of each test case contains N space separated integers which is the input for the array.
Output:
Output the minimum number of operations required to make the given array a palindromic array.
Constraints:
1<=T<=100
1<=N<=100
Example:
Input:
2
5
3 2 3 3 5
4
5 3 3 4
Output:
1
3
Explanation:
For Test Case 1: [3 2 3 3 5] after merging the 1st two elements 3 and 2, we get the array as [5 3 3 5] which is a palindrome, hence only 1 operation is needed.
** For More Input/Output Examples Use 'Expected Output' option **
for _ in range(int(input())):
n = int(input())
array = list(map(int, input().split()))
beg = 0
end = n - 1
count = 0
while beg <= end:
if array[beg] == array[end]:
beg += 1
end -= 1
elif array[beg] < array[end]:
array[beg+1] += array[beg]
beg += 1
count += 1
else:
array[end-1] += array[end]
end -= 1
count += 1
print(count)
|
999,286 | fa0b3502bb1ec4410176bbec40efc8efabe2eade | # python3
# Finding user input is odd or even
num = int(input("enter a num: "))
print(num)
if num % 2 == 0 and num % 4 == 0:
print("your num divided by 2 n 4")
elif num % 2 == 0:
print("you entered even number")
else:
print("your num is odd")
|
999,287 | 93646756ce63f254b5101dc2be3994a43fa6bfa4 | """
Test data created by CourseSerializer and CourseDetailSerializer
"""
from datetime import datetime
from unittest import TestCase
import ddt
from opaque_keys.edx.locator import CourseLocator
from rest_framework.request import Request
from rest_framework.test import APIRequestFactory
from xblock.core import XBlock
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.models.course_details import CourseDetails
from xmodule.course_module import DEFAULT_START_DATE
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import check_mongo_calls
from ..serializers import CourseDetailSerializer, CourseKeySerializer, CourseSerializer
from .mixins import CourseApiFactoryMixin
@ddt.ddt
class TestCourseSerializer(CourseApiFactoryMixin, ModuleStoreTestCase):
    """
    Test CourseSerializer
    """
    # Number of mongo queries expected while serializing (asserted via check_mongo_calls).
    expected_mongo_calls = 0
    maxDiff = 5000  # long enough to show mismatched dicts, in case of error
    serializer_class = CourseSerializer
    ENABLED_SIGNALS = ['course_published']

    def setUp(self):
        """Create test users and the full expected payload for the toy course."""
        super().setUp()
        self.staff_user = self.create_user('staff', is_staff=True)
        self.honor_user = self.create_user('honor', is_staff=False)
        self.request_factory = APIRequestFactory()
        course_id = 'edX/toy/2012_Fall'
        banner_image_uri = '/c4x/edX/toy/asset/images_course_image.jpg'
        banner_image_absolute_uri = 'http://testserver' + banner_image_uri
        image_path = '/c4x/edX/toy/asset/just_a_test.jpg'
        image_url = 'http://testserver' + image_path
        self.expected_data = {
            'id': course_id,
            'name': 'Toy Course',
            'number': 'toy',
            'org': 'edX',
            'short_description': 'A course about toys.',
            'media': {
                'banner_image': {
                    'uri': banner_image_uri,
                    'uri_absolute': banner_image_absolute_uri,
                },
                'course_image': {
                    'uri': image_path,
                },
                'course_video': {
                    'uri': 'http://www.youtube.com/watch?v=test_youtube_id',
                },
                'image': {
                    'raw': image_url,
                    'small': image_url,
                    'large': image_url,
                }
            },
            'start': '2015-07-17T12:00:00Z',
            'start_type': 'timestamp',
            'start_display': 'July 17, 2015',
            'end': '2015-09-19T18:00:00Z',
            'enrollment_start': '2015-06-15T00:00:00Z',
            'enrollment_end': '2015-07-15T00:00:00Z',
            'blocks_url': 'http://testserver/api/courses/v2/blocks/?course_id=edX%2Ftoy%2F2012_Fall',
            'effort': '6 hours',
            'pacing': 'instructor',
            'mobile_available': True,
            'hidden': False,
            'invitation_only': False,
            # 'course_id' is a deprecated field, please use 'id' instead.
            'course_id': course_id,
        }

    def _get_request(self, user=None):
        """
        Build a Request object for the specified user.
        """
        if user is None:
            user = self.honor_user
        request = Request(self.request_factory.get('/'))
        request.user = user
        return request

    def _get_result(self, course):
        """
        Return the CourseSerializer for the specified course.
        """
        course_overview = CourseOverview.get_from_id(course.id)
        return self.serializer_class(course_overview, context={'request': self._get_request()}).data

    def test_basic(self):
        # Serialized output must match the full expected payload, with no more
        # mongo round-trips than expected_mongo_calls.
        course = self.create_course()
        CourseDetails.update_about_video(course, 'test_youtube_id', self.staff_user.id)
        with check_mongo_calls(self.expected_mongo_calls):
            result = self._get_result(course)
        self.assertDictEqual(result, self.expected_data)

    def test_hidden(self):
        # catalog_visibility='none' should serialize as hidden=True.
        course = self.create_course(
            course='custom',
            start=datetime(2015, 3, 15),
            catalog_visibility='none'
        )
        result = self._get_result(course)
        assert result['hidden'] is True

    def test_advertised_start(self):
        # A free-form advertised_start overrides the timestamp-based display.
        course = self.create_course(
            course='custom',
            start=datetime(2015, 3, 15),
            advertised_start='The Ides of March'
        )
        result = self._get_result(course)
        assert result['course_id'] == 'edX/custom/2012_Fall'
        assert result['start_type'] == 'string'
        assert result['start_display'] == 'The Ides of March'

    def test_empty_start(self):
        # The DEFAULT_START_DATE sentinel serializes as an "empty" start.
        course = self.create_course(start=DEFAULT_START_DATE, course='custom')
        result = self._get_result(course)
        assert result['course_id'] == 'edX/custom/2012_Fall'
        assert result['start_type'] == 'empty'
        assert result['start_display'] is None

    @ddt.unpack
    @ddt.data(
        (True, 'self'),
        (False, 'instructor'),
    )
    def test_pacing(self, self_paced, expected_pacing):
        # self_paced flag maps to the 'pacing' field value.
        course = self.create_course(self_paced=self_paced)
        result = self._get_result(course)
        assert result['pacing'] == expected_pacing
class TestCourseDetailSerializer(TestCourseSerializer):  # lint-amnesty, pylint: disable=test-inherits-tests
    """
    Test CourseDetailSerializer by rerunning all the tests
    in TestCourseSerializer, but with the
    CourseDetailSerializer serializer class.
    """
    # 1 mongo call is made to get the course About overview text.
    expected_mongo_calls = 1
    serializer_class = CourseDetailSerializer

    def setUp(self):
        """Extend the expected payload with the 'overview' About-section text."""
        super().setUp()
        # update the expected_data to include the 'overview' data.
        about_descriptor = XBlock.load_class('about')
        overview_template = about_descriptor.get_template('overview.yaml')
        self.expected_data['overview'] = overview_template.get('data')
class TestCourseKeySerializer(TestCase):  # lint-amnesty, pylint: disable=missing-class-docstring
    def test_course_key_serializer(self):
        # A CourseKey should serialize to its plain string representation.
        course_key = CourseLocator(org='org', course='course', run='2020_Q3')
        serializer = CourseKeySerializer(course_key)
        assert serializer.data == str(course_key)
|
999,288 | a4283edbf2dc5b7f6bda830d3aaefe11ffee52da | from django.urls.conf import path
from django.urls.resolvers import URLPattern
from .import views
urlpatterns=[
    # Fetch a single measurement by id.
    path('id_measurements/<id>/',views.get_id_measurements, name='measurements_id'),
    # List all measurements.
    path('list_measurements/', views.get_measurements, name='measurements_List'),
    # Modify the measurement with the given id (new Unit passed in the URL).
    path('modificar_measurements/<id_modificado>/<str:Unit>/',views.modificando,name='modificado_measurment'),
    # Delete the measurement with the given id.
    path('eliminar_measurements/<id_borrado>/',views.deleteMeasurments,name='eliminado_measurment')
]
999,289 | 55d08ffdb1e6ab8e363689783110c374e1f062c4 | # 非参数估计概率密度
import numpy as np
import math
import matplotlib.pyplot as plt
import matplotlib as mpl
class DensityEstimated:
    """Non-parametric 1-D density estimation: k-NN and Parzen windows."""

    def __init__(self, N, h):
        self.n = N                          # sample count
        self.k_n = int(math.sqrt(N))        # fixed per-cell sample count (k-NN)
        self.h = h                          # edge length of the square window
        self.sigma = h / int(math.sqrt(N))  # std-dev of the Gaussian window
        self.data = []

    def generateRandData(self, mean=0, sigma=0.1):
        """Draw n sorted 1-D samples from N(mean, sigma**2) into self.data."""
        self.data = np.random.normal(mean, sigma, self.n)
        self.data = sorted(self.data)
        return self.data

    def KnEstimated(self):
        """k_n-nearest-neighbor density estimate at every sample point."""
        p = []
        for x in self.data:
            # Distance from x to every sample (including itself), ascending.
            dist = sorted(abs(x - other) for other in self.data)
            # Cell "volume" is the interval of half-width dist[k_n] around x.
            p.append(self.k_n / self.n / (2 * dist[self.k_n]))
        return p

    def parzenEstimated(self):
        """Parzen-window estimates with a Gaussian and a square window.

        Returns (p_Gaussian, p_square), each evaluated at every sample point.
        """
        p_Gaussian = []
        p_square = []
        # Fix: the original used 3.14 instead of math.pi in the Gaussian
        # normalization constant, slightly biasing every estimate.
        gauss_norm = math.sqrt(2 * math.pi) * self.sigma
        for x in self.data:
            g = 0
            sq = 0
            for other in self.data:
                # Gaussian window
                g += math.exp(-(x - other) ** 2 / (2 * self.sigma ** 2)) / gauss_norm
                # Square window
                if abs(x - other) <= self.h / 2:
                    sq += 1 / self.h
            p_Gaussian.append(g / self.n)
            p_square.append(sq / self.n)
        return p_Gaussian, p_square
if __name__ == '__main__':
    # Estimate the density of 256 N(0, 0.1^2) samples and compare the
    # estimators against a normalized histogram.
    solu = DensityEstimated(256, 0.1)
    data = solu.generateRandData()
    p_k = solu.KnEstimated()
    p_Gaussian, p_square = solu.parzenEstimated()
    # Configure matplotlib to render the Chinese subplot titles below.
    mpl.rcParams['font.sans-serif'] = ['SimHei']
    mpl.rcParams['axes.unicode_minus'] = False
    ax1 = plt.subplot(211)
    plt.plot(data, p_k)
    plt.hist(data, bins=100, density=True)
    ax2 = plt.subplot(223)
    plt.plot(data, p_Gaussian)
    plt.hist(data, bins=100, density=True)
    ax3 = plt.subplot(224)
    plt.plot(data, p_square)
    plt.hist(data, bins=100, density=True)
    ax1.set_title('kn近邻: k_n = 16')
    ax2.set_title('高斯窗: h = 0.1')
    ax3.set_title('方窗: h = 0.1')
    # Add spacing between the subplots.
    plt.tight_layout()
    plt.show()
|
999,290 | 512237af12731f56e43d7cb0a48d8e0bd92cc5c9 | from __future__ import print_function
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
from torch.autograd import Variable
from torch.utils.data import Dataset, DataLoader
import itertools
def to_one_hot(y, n_dims=None):
    """Return a float one-hot encoding of integer labels `y`, shape (N, n_dims).

    Preserves Variable-ness and CUDA placement of the input.
    """
    raw = y.data if isinstance(y, Variable) else y
    idx = raw.type(torch.LongTensor).view(-1, 1)
    one_hot = torch.zeros(idx.size()[0], n_dims).scatter_(1, idx, 1)
    if y.is_cuda:
        one_hot = one_hot.cuda()
    return Variable(one_hot) if isinstance(y, Variable) else one_hot
def mk_tree(depth, tree=None):
    """Grow a complete binary tree of the given depth.

    Returns a dict mapping internal node id -> [left_child_id, right_child_id],
    starting from `tree` (default: a single split {0: [1, 2]}).
    """
    if tree is None:
        tree = {0: [1, 2]}
    for _ in range(depth - 1):
        child_ids = []
        for node_id in tree:
            child_ids = child_ids + tree[node_id]
        # Leaves are children that are not yet parents themselves.
        leaf_ids = set(child_ids) - set(tree.keys())
        # Fix: the original wrote `child_ids + tree.keys()`, which raises
        # TypeError on Python 3 (list + dict_keys view).
        max_id = max(child_ids + list(tree.keys()))
        # Sorted for deterministic id assignment (set iteration order is not
        # guaranteed).
        for leaf_id in sorted(leaf_ids):
            tree[leaf_id] = [max_id + 1, max_id + 2]
            max_id += 2
    return tree
class nn_node(nn.Module):
    """Internal tree node wrapping a learnable split function."""

    def __init__(self, split_model):
        super(nn_node, self).__init__()
        self.split_fcn = split_model()
        self.val = None  # cached routing probability from the last split_prob()

    def split_prob(self, x):
        """Compute and cache the routing probability (> 0.5 -> right, <= 0.5 -> left)."""
        self.val = self.split_fcn(x)  # (batch_size, 1)
        return self.val

    def prob_to(self, side):
        """Probability of routing toward `side` (1 = right, 0 = left), from the cache."""
        return self.val if side > 0.5 else 1 - self.val
class nn_leaf(nn.Module):
    """Leaf node holding a learnable class distribution."""

    def __init__(self, class_num, reg=True):
        super(nn_leaf, self).__init__()
        # Fix: the original used torch.Tensor(class_num), which leaves the
        # weights uninitialized (arbitrary memory, possibly NaN/inf). Starting
        # from zeros makes the softmax below a uniform distribution, matching
        # the commented-out intent (1/class_num per class).
        self.weight = nn.Parameter(torch.zeros(class_num))
        if reg:
            self.weight.data = self.prob(softmax=True).data
        # Running numerator/denominator used in the moving-average M-step.
        self.pi_n = 1. / class_num
        self.pi_d = 1

    def prob(self, softmax):
        """Leaf class distribution; softmax-normalized when `softmax` is truthy."""
        if softmax:
            return F.softmax(self.weight, 0)
        else:
            return self.weight
class nn_tree(object):
    """Soft decision tree: nn_node splits route inputs to nn_leaf class
    distributions. Trainable via EM (fine_tune_em) or end-to-end SGD
    (fine_tune_sgd).

    NOTE(review): the logging uses the pre-0.4 PyTorch idiom `loss.data[0]`
    (now `loss.item()`); kept as-is for consistency with the file's torch era.
    """

    def __init__(self, class_num, split_model, tree=None,
                 max_depth=None, min_samples_leaf=None, cuda=True):
        """
        class_num   -- number of target classes
        split_model -- factory returning a module mapping input -> split prob
        tree        -- dict node_id -> [left_id, right_id] (default: one split)
        cuda        -- move nodes/leaves to the GPU
        """
        # Fix: the original declared a mutable default argument
        # (tree={0:[1,2]}), shared across every instance using the default.
        if tree is None:
            tree = {0: [1, 2]}
        self.split_model = split_model
        self.tree = tree
        self.class_num = class_num
        self.cuda = cuda
        self.nodes = {}   # nodes[node_id] = nn_node instance
        self.leaves = {}  # leaves[node_id] = nn_leaf instance
        self.c2p = {}     # c2p[node_id] = node_id of parent
        for node_id in self.tree:
            self.nodes[node_id] = nn_node(split_model)
            if cuda:
                self.nodes[node_id].cuda()
            self.c2p[self.tree[node_id][0]] = node_id
            self.c2p[self.tree[node_id][1]] = node_id
        # The root is the only internal node without a parent.
        self.root_id = list(set(self.tree.keys()) - set(self.c2p.keys()))[0]
        leaf_ids = set(self.c2p.keys()) - set(self.tree.keys())
        for leaf_id in leaf_ids:
            self.leaves[leaf_id] = nn_leaf(class_num)
            if cuda:
                self.leaves[leaf_id].cuda()

    def parameters(self):
        """Chain all node and leaf parameters for an optimizer."""
        params = []
        for node_id in self.nodes:
            params = itertools.chain(params, self.nodes[node_id].parameters())
        for node_id in self.leaves:
            params = itertools.chain(params, self.leaves[node_id].parameters())
        return params

    def set_train(self):
        for node_id in self.nodes:
            self.nodes[node_id].split_fcn.train()

    def set_eval(self):
        for node_id in self.nodes:
            self.nodes[node_id].split_fcn.eval()

    def set_gamma(self, gamma):
        """Set the split-sharpness attribute on every node's split function."""
        for node_id in self.nodes:
            self.nodes[node_id].split_fcn.gamma = gamma

    def path_to(self, node_id):
        """Return (path from root to node_id, {parent_id: 1 if right else 0})."""
        c_id = node_id
        path = [c_id]
        split_sides = {}
        while True:
            if c_id not in self.c2p:
                break
            p_id = self.c2p[c_id]
            path.append(p_id)
            split_sides[p_id] = int(self.tree[p_id][1] == c_id)
            c_id = p_id
        path.reverse()
        return path, split_sides

    def update_nodes(self, x):
        """Run every split function on x, caching routing probabilities."""
        for node_id in self.nodes:
            _ = self.nodes[node_id].split_prob(x)

    def update_nodes_to(self, x, leaf_ids):
        """Run only the split functions on the root paths of the given leaves."""
        leaf_path = []
        for leaf_id in leaf_ids:
            path, _ = self.path_to(leaf_id)
            # Fix: the original extended leaf_path with itself — a no-op that
            # left use_nodes empty, so no node was ever updated here.
            leaf_path.extend(path)
        use_nodes = set(leaf_path)
        for node_id in use_nodes:
            _ = self.nodes[node_id].split_prob(x)

    def prob_to(self, node_id):
        """Probability of reaching node_id (product of routing probs on its path).

        Make sure to update the nodes (update_nodes) before calling this.
        """
        path, split_sides = self.path_to(node_id)
        prob_var = 1
        for i in split_sides:
            prob_var = prob_var * self.nodes[i].prob_to(split_sides[i]).view(-1, 1)
        return prob_var  # (batch_size, 1)

    def pred_with_all_leaves(self, x=None, softmax=False):
        """Soft prediction: mixture of all leaf distributions weighted by path prob."""
        if x is not None:
            self.update_nodes(x)
        output = 0
        for leaf_id in self.leaves:
            output += torch.mm(self.prob_to(leaf_id), self.leaves[leaf_id].prob(softmax).view(1, -1))  # (batch_size, class_num)
        return output

    def pred_with_one_leaf(self, x=None, softmax=False):
        """Hard prediction: follow the most likely branch down to a single leaf."""
        if x is None:
            l = self.nodes[self.root_id].val.size()[0]
        else:
            l = x.size()[0]
        output = torch.zeros(l, self.class_num)
        if self.cuda:
            output = output.cuda()
        for i in range(l):
            p_id = self.root_id
            while True:
                if x is None:
                    p = self.nodes[p_id].val.data[i, :].cpu().numpy()
                else:
                    p = self.nodes[p_id].split_prob(x[i, :]).data.cpu().numpy()
                c_id = self.tree[p_id][int(p > 0.5)]
                if c_id in self.leaves:
                    output[i, :] = self.leaves[c_id].prob(softmax).data
                    break
                p_id = c_id
        return Variable(output)

    def _e_step(self, y, softmax=False):
        """E-step: normalized responsibility of each leaf for each sample."""
        exp_dict = {}
        l = y.size()[0]
        z = 0
        for leaf_id in self.leaves:
            exp_dict[leaf_id] = {}
            exp_dict[leaf_id]["prob_to"] = self.prob_to(leaf_id)
            # P(reach leaf) * P(true class | leaf), per sample: (batch_size, 1)
            exp_dict[leaf_id]["latent"] = torch.mm(
                exp_dict[leaf_id]["prob_to"],
                self.leaves[leaf_id].prob(softmax).view(1, -1)
            )[[np.arange(l).astype(int), y]].view(l, 1)
            z += exp_dict[leaf_id]["latent"]
        for leaf_id in self.leaves:
            exp_dict[leaf_id]["latent"] = exp_dict[leaf_id]["latent"] / (z + 1e-10)
        return exp_dict

    def _m_step_ma(self, y, exp_dict, optimizer, a=0.1):
        """M-step: moving-average update of leaf distributions, SGD on splits."""
        # Update the leaf class distributions (pi).
        y_one_hot = to_one_hot(y, self.class_num)  # hoisted: loop-invariant
        for leaf_id in self.leaves:
            self.leaves[leaf_id].pi_n = ((1 - a) * self.leaves[leaf_id].pi_n) + \
                (a * (exp_dict[leaf_id]["latent"] * y_one_hot).mean(dim=0)).detach()
            self.leaves[leaf_id].pi_d = ((1 - a) * self.leaves[leaf_id].pi_d) + \
                (a * exp_dict[leaf_id]["latent"].mean()).detach()
            # Skip the normalization when the denominator is effectively zero.
            if self.leaves[leaf_id].pi_d.data.cpu().numpy() < 1e-10:
                pass
            else:
                self.leaves[leaf_id].weight.data = (self.leaves[leaf_id].pi_n / (self.leaves[leaf_id].pi_d)).data
        # Update the split-node parameters.
        loss = 0
        optimizer.zero_grad()
        for leaf_id in self.leaves:
            loss += (-torch.log(exp_dict[leaf_id]["prob_to"] + 1e-10) * exp_dict[leaf_id]["latent"].detach()).mean() / len(self.leaves)
        loss.backward()
        optimizer.step()
        return loss

    def _m_step_sgd(self, y, exp_dict, optimizer):
        """M-step: joint SGD on split-node and leaf parameters."""
        loss = 0
        optimizer.zero_grad()
        for leaf_id in self.leaves:
            loss += (
                (-torch.log(exp_dict[leaf_id]["prob_to"] + 1e-10)
                 - 100 * (F.log_softmax(self.leaves[leaf_id].weight, dim=0) * to_one_hot(y, self.class_num)).sum(dim=1).view(-1, 1)
                 )
                * exp_dict[leaf_id]["latent"].detach()).mean() / len(self.leaves)
        loss.backward()
        optimizer.step()
        return loss

    def fine_tune_em(self, epoch, train_loader, optimizer, log_interval=10, gamma=1,
                     m_step='ma', test_loader=None, test_interval=200):
        """One epoch of EM training; logs accuracy every log_interval batches."""
        self.set_train()
        self.set_gamma(gamma)
        l = len(train_loader)
        correct_all = 0
        correct_one_leaf = 0
        for batch_idx, (data, target) in enumerate(train_loader):
            if self.cuda:
                data, target = data.cuda(), target.cuda()
            data, target = Variable(data), Variable(target)
            # forward pass caches routing probabilities on every node
            self.update_nodes(data)
            # E step
            exp_dict = self._e_step(target, softmax=(m_step == 'sgd'))
            # M step
            if m_step == 'ma':
                loss = self._m_step_ma(target, exp_dict, optimizer=optimizer)
            else:
                loss = self._m_step_sgd(target, exp_dict, optimizer=optimizer)
            # monitoring
            output = self.pred_with_all_leaves(softmax=(m_step == 'sgd'))
            pred = output.data.max(1, keepdim=True)[1]
            correct_all += pred.eq(target.data.view_as(pred)).cpu().sum()
            output = self.pred_with_one_leaf(softmax=(m_step == 'sgd'))
            pred = output.data.max(1, keepdim=True)[1]
            correct_one_leaf += pred.eq(target.data.view_as(pred)).cpu().sum()
            if (batch_idx + 1) % log_interval == 0:
                print('Train Epoch: {} [{}/{} ({:.0f}%)] \tLoss: {:.6f}\tAll_Acc: {:.6f}\tone_leaf_Acc: {:.6f}'.format(
                    epoch,
                    (batch_idx + 1) * len(data),
                    len(train_loader.dataset),
                    100. * (batch_idx + 1) / len(train_loader),
                    loss.data[0],
                    float(correct_all) / (log_interval * len(data)),
                    float(correct_one_leaf) / (log_interval * len(data))
                )
                )
                correct_all = 0
                correct_one_leaf = 0
            # periodic evaluation
            if (test_loader is not None) and ((batch_idx + 1) % test_interval == 0):
                self.test(test_loader, softmax=(m_step == 'sgd'))
                self.set_train()

    def fine_tune_sgd(self, epoch, train_loader, optimizer, log_interval=10, gamma=1,
                      test_loader=None, test_interval=200):
        """One epoch of end-to-end SGD training on the soft prediction."""
        self.set_train()
        self.set_gamma(gamma)
        l = len(train_loader)
        correct_all = 0
        correct_one_leaf = 0
        for batch_idx, (data, target) in enumerate(train_loader):
            if self.cuda:
                data, target = data.cuda(), target.cuda()
            data, target = Variable(data), Variable(target)
            optimizer.zero_grad()
            output = self.pred_with_all_leaves(data, softmax=True)
            loss = F.nll_loss(torch.log(output), target)
            loss.backward()
            optimizer.step()
            pred = output.data.max(1, keepdim=True)[1]
            correct_all += pred.eq(target.data.view_as(pred)).cpu().sum()
            output = self.pred_with_one_leaf(softmax=True)
            pred = output.data.max(1, keepdim=True)[1]
            correct_one_leaf += pred.eq(target.data.view_as(pred)).cpu().sum()
            if (batch_idx + 1) % log_interval == 0:
                print('Train Epoch: {} [{}/{} ({:.0f}%)] \tLoss: {:.6f}\tAcc: {:.6f}\tone_leaf_Acc: {:.6f}'.format(
                    epoch, (batch_idx + 1) * len(data), len(train_loader.dataset),
                    100. * (batch_idx + 1) / len(train_loader), loss.data[0],
                    float(correct_all) / (log_interval * len(data)),
                    float(correct_one_leaf) / (log_interval * len(data))
                )
                )
                correct_all = 0
                correct_one_leaf = 0
            # periodic evaluation
            if (test_loader is not None) and ((batch_idx + 1) % test_interval == 0):
                self.test(test_loader, softmax=True)
                self.set_train()

    def test(self, test_loader, softmax=True):
        """Evaluate soft and hard predictions on test_loader; returns mean losses."""
        self.set_eval()
        l = len(test_loader)
        correct_all = 0
        correct_one_leaf = 0
        loss_all = 0
        loss_one_leaf = 0
        for data, target in test_loader:
            if self.cuda:
                data, target = data.cuda(), target.cuda()
            data, target = Variable(data), Variable(target)
            output = self.pred_with_all_leaves(data, softmax=softmax)
            pred = output.data.max(1, keepdim=True)[1]
            correct_all += pred.eq(target.data.view_as(pred)).cpu().sum()
            loss_all += F.nll_loss(torch.log(output), target).data
            output = self.pred_with_one_leaf(softmax=softmax)
            pred = output.data.max(1, keepdim=True)[1]
            correct_one_leaf += pred.eq(target.data.view_as(pred)).cpu().sum()
            loss_one_leaf += F.nll_loss(torch.log(output), target).data
        print('Test: Loss: {:.6f}\tAcc: {:.6f}\tone_leaf_Loss: {:.6f}\tone_leaf_Acc: {:.6f}'.format(
            loss_all[0] / (l * len(data)),
            float(correct_all) / (l * len(data)),
            loss_one_leaf[0] / (l * len(data)),
            float(correct_one_leaf) / (l * len(data))
        ))
        return loss_all / l, loss_one_leaf / l
if __name__ == '__main__':
    # Smoke test: a depth-2 tree with a single-linear-unit split function
    # trained on a toy 3-class one-hot-plus-noise dataset.
    class Net(nn.Module):
        def __init__(self, gamma = 1):
            super(Net, self).__init__()
            self.fc = nn.Linear(3, 1)
            self.gamma = gamma  # split sharpness, scaled up each epoch below

        def forward(self, x):
            x = self.fc(x)
            return F.sigmoid(x * self.gamma)

    tree = nn_tree( 3, Net, tree = {0:[1, 2], 1:[3, 4]}, cuda=True)
    print(tree.path_to(2))
    print(tree.path_to(4))
    print(tree.path_to(0))
    # tmp_input = Variable(torch.Tensor([[0, 1, 2]]).cuda())
    # print(tree.pred_with_all_leaves(tmp_input))

    class mydata(Dataset):
        # 300 noisy one-hot samples: 100 per class along each axis.
        def __init__(self):
            data = np.zeros(900).reshape(300,3)
            data[:100, 0] = 1
            data[100:200, 1] = 1
            data[200:, 2] = 1
            data = data + np.random.normal(scale=0.1, size=900).reshape(300,3)
            target = np.zeros(300)
            target[100:200] = 1
            target[200:] = 2
            self.data = torch.from_numpy(data).float()
            self.target = torch.from_numpy(target).long()

        def __len__(self):
            return self.data.shape[0]

        def __getitem__(self, i):
            return self.data[i,:], self.target[i]

    tmp_data = mydata()
    train_loader = DataLoader(tmp_data, batch_size=10, shuffle=True)
    optimizer = optim.SGD(tree.parameters(), lr=0.01, momentum=0.5)
    # Leaf distributions before training.
    for leaf_id in tree.leaves:
        print(tree.leaves[leaf_id].weight)
    for i in range(10):
        # tree.fine_tune_sgd(i, train_loader, optimizer, gamma = 1*(1.5**i))
        tree.fine_tune_em(i, train_loader, optimizer, gamma=1*(1.5**i))
    # for node_id in tree.nodes:
    #     print(tree.nodes[node_id].split_fcn.fc.weight)
    # Leaf distributions after training.
    for leaf_id in tree.leaves:
        print(tree.leaves[leaf_id].weight)
|
999,291 | 563e832068069595fdd0c06175630725f864e176 | import argparse
from csat.acquisition import base
__version__ = '0.1.0'
class ListAction(argparse.Action):
    """argparse action that prints the available graphs and exits.

    `const` is a zero-argument callable yielding objects with `.key` and
    `.description` attributes.
    """

    def __init__(self, option_strings, dest, const, default=None,
                 required=False, help=None, metavar=None):
        super(ListAction, self).__init__(option_strings=option_strings,
                                         dest=dest, nargs=0, const=const,
                                         default=default, required=required,
                                         help=help)

    def __call__(self, parser, namespace, values, option_string):
        # Fix: converted Python-2-only `print x` statements to `print(x)`
        # calls, which behave identically on Python 2 and 3 here.
        for graph in self.const():
            print(' * {}'.format(graph.key))
            print('   ' + graph.description.strip())
            print('')
        parser.exit()
class ExampleGraphsCollector(base.CollectorBase):
    """Collector plugin serving a bundle of example graphs."""

    name = 'Example Graphs Collection'
    key = 'examples'
    version = __version__

    def get_form(self):
        # Lazy import — presumably to avoid heavy/circular imports at module
        # load; confirm before restructuring.
        from . import forms
        return forms.ConfigForm

    def get_model(self):
        # Lazy import, as above.
        from . import models
        return models.ExamplesConfig

    def get_command(self, model):
        """Command line used to launch this collector for the given config."""
        return ['csat-collect', self.key, model.example, ]

    def build_parser(self, base):
        """Extend the base parser with the graph name and a --list option."""
        from . import graphs
        parser = super(ExampleGraphsCollector, self).build_parser(base)
        parser.add_argument('graph_name')
        parser.add_argument('-l', '--list', action=ListAction,
                            const=graphs.get_graphs, help='List all available '
                            'graphs and exit.')
        return parser

    def build_collector(self, task_manager, logger, args):
        """Return the graph object selected on the command line."""
        from . import graphs
        return graphs.get_graph(args.graph_name)
# Shared module-level collector instance.
examples_collector = ExampleGraphsCollector()

if __name__ == '__main__':
    # Allow running this collector directly from the command line.
    from csat.acquisition.runner import get_runner
    get_runner(examples_collector).run()
|
class Solution:
    """Length of the longest substring without repeating characters.

    The original divide-and-conquer attempt was syntactically incomplete (its
    helper was cut off mid-expression, so the file did not even parse) and its
    recursion could not correctly handle substrings spanning the split point.
    Replaced with the standard O(n) sliding-window scan.
    """

    def lengthOfLongestSubstring(self, s):
        last_index = {}    # char -> most recent position seen
        best = 0
        window_start = 0   # left edge of the current repeat-free window
        for i, ch in enumerate(s):
            # A repeat inside the window moves the window past its first copy.
            if ch in last_index and last_index[ch] >= window_start:
                window_start = last_index[ch] + 1
            last_index[ch] = i
            best = max(best, i - window_start + 1)
        return best
# Quick demonstration on a sample string.
solver = Solution()
sample = "abcabcbb"
print(solver.lengthOfLongestSubstring(sample))
999,293 | 1280a594aa329becdad3fdef6d0f883f8f12db19 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
import time
from distutils.version import LooseVersion
from glob import glob
from os import path, makedirs, remove
from shutil import copyfileobj, rmtree
import psutil
from six import iteritems
from six.moves.configparser import NoSectionError
from six.moves.urllib.parse import urlparse, urlunparse
from twisted.internet.defer import DeferredQueue, maybeDeferred
from twisted.internet.defer import inlineCallbacks, returnValue
from w3lib.url import path_to_file_uri
from zope.interface import implementer
from slave.interfaces import IEggStorage
from slave.interfaces import IEnvironment
from slave.interfaces import IPoller, IPerformance
from slave.interfaces import ISpiderQueue
from slave.interfaces import ISpiderScheduler
from slave.sqlite import JsonSqlitePriorityQueue
def get_project_list(config):
    """Return the known project names.

    Combines the sub-directories of the eggs dir with the project names
    declared in the scrapyd.conf ``[settings]`` section.
    """
    eggs_dir = config.get('eggs_dir', 'eggs')
    projects = os.listdir(eggs_dir) if os.path.exists(eggs_dir) else []
    try:
        declared = [entry[0] for entry in config.cp.items('settings')]
    except NoSectionError:
        declared = []
    return projects + declared
def get_spider_queues(config):
    """Return a dict mapping project name -> SqliteSpiderQueue.

    Each project gets its own ``<dbs_dir>/<project>.db`` SQLite file; the
    dbs dir is created on first use.
    """
    dbs_dir = config.get('dbs_dir', 'dbs')
    if not os.path.exists(dbs_dir):
        os.makedirs(dbs_dir)
    return {
        project: SqliteSpiderQueue(os.path.join(dbs_dir, '%s.db' % project))
        for project in get_project_list(config)
    }
@implementer(IEggStorage)
class FilesystemEggStorage(object):
    """Stores project eggs on disk under ``<eggs_dir>/<project>/<version>.egg``."""

    def __init__(self, config):
        # Root directory for all stored eggs (scrapyd config key 'eggs_dir').
        self.basedir = config.get('eggs_dir', 'eggs')

    def put(self, egg_file, project, version):
        """Persist the open file-like *egg_file* for project/version."""
        egg_path = self._egg_path(project, version)
        egg_dir = path.dirname(egg_path)
        if not path.exists(egg_dir):
            makedirs(egg_dir)
        with open(egg_path, 'wb') as f:
            copyfileobj(egg_file, f)

    def get(self, project, version=None):
        """Return ``(version, open binary file)``; latest version by default.

        Returns ``(None, None)`` when the project has no eggs.
        NOTE(review): the handle is returned open -- callers must close it.
        """
        if version is None:
            try:
                version = self.list(project)[-1]
            except IndexError:
                return None, None
        return version, open(self._egg_path(project, version), 'rb')

    def list(self, project):
        """Return the project's stored versions sorted oldest-to-newest.

        Sorting uses distutils' LooseVersion, which is deprecated and gone
        with distutils in Python 3.12 -- migration candidate.
        """
        egg_dir = path.join(self.basedir, project)
        versions = [path.splitext(path.basename(x))[0] for x in
                    glob("%s/*.egg" % egg_dir)]
        return sorted(versions, key=LooseVersion)

    def delete(self, project, version=None):
        """Delete one version, or the whole project tree when version is None."""
        if version is None:
            rmtree(path.join(self.basedir, project))
        else:
            remove(self._egg_path(project, version))
            if not self.list(project):  # remove project if no versions left
                self.delete(project)

    def _egg_path(self, project, version):
        # Sanitise the version string so it is a safe filename component.
        sanitized_version = re.sub(r'[^a-zA-Z0-9_-]', '_', version)
        x = path.join(self.basedir, project, "%s.egg" % sanitized_version)
        return x
@implementer(IEnvironment)
class Environment(object):
    """Builds the environment variables handed to a spawned crawl process."""

    def __init__(self, config, init_env=os.environ):
        self.dbs_dir = config.get('dbs_dir', 'dbs')
        self.logs_dir = config.get('logs_dir', 'logs')
        # Empty string disables item feed export entirely.
        self.items_dir = config.get('items_dir', '')
        # How many previous log/feed files to keep per project/spider.
        self.jobs_to_keep = config.getint('jobs_to_keep', 5)
        if config.cp.has_section('settings'):
            # Optional per-project SCRAPY_SETTINGS_MODULE overrides.
            self.settings = dict(config.cp.items('settings'))
        else:
            self.settings = {}
        self.init_env = init_env

    def get_environment(self, message, slot):
        """Return a copy of the base env extended with SCRAPY_* variables.

        *message* is a job dict with keys '_project', '_spider', '_job' and
        optionally '_version'; *slot* is the process slot number.
        """
        project = message['_project']
        env = self.init_env.copy()
        env['SCRAPY_SLOT'] = str(slot)
        env['SCRAPY_PROJECT'] = project
        env['SCRAPY_SPIDER'] = message['_spider']
        env['SCRAPY_JOB'] = message['_job']
        if '_version' in message:
            env['SCRAPY_EGG_VERSION'] = message['_version']
        if project in self.settings:
            env['SCRAPY_SETTINGS_MODULE'] = self.settings[project]
        if self.logs_dir:
            env['SCRAPY_LOG_FILE'] = self._get_file(
                message, self.logs_dir, 'log')
        if self.items_dir:
            env['SCRAPY_FEED_URI'] = self._get_feed_uri(message, 'jl')
        return env

    def _get_feed_uri(self, message, ext):
        # Local paths become file:// URIs; other schemes keep their netloc
        # and get project/spider/job appended to the path.
        url = urlparse(self.items_dir)
        if url.scheme.lower() in ['', 'file']:
            return path_to_file_uri(self._get_file(message, url.path, ext))
        return urlunparse((url.scheme,
                           url.netloc,
                           '/'.join([
                               url.path,
                               message['_project'],
                               message['_spider'],
                               '%s.%s' % (message['_job'], ext)]),
                           url.params,
                           url.query,
                           url.fragment))

    def _get_file(self, message, file_dir, ext):
        # Path is <file_dir>/<project>/<spider>/<job>.<ext>; the oldest
        # files are pruned so at most jobs_to_keep previous ones remain.
        logs_dir = os.path.join(
            file_dir, message['_project'], message['_spider'])
        if not os.path.exists(logs_dir):
            os.makedirs(logs_dir)
        to_delete = sorted(
            (os.path.join(logs_dir, x) for x in os.listdir(logs_dir)),
            key=os.path.getmtime)[:-self.jobs_to_keep]
        for x in to_delete:
            os.remove(x)
        return os.path.join(logs_dir, "%s.%s" % (message['_job'], ext))
@implementer(IPoller)
class QueuePoller(object):
    """Polls the per-project spider queues and feeds at most one pending
    message at a time into a size-1 DeferredQueue consumed via next()."""

    def __init__(self, config):
        self.config = config
        # BUGFIX: the original called update_projects() first and then
        # rebound self.queues to an empty dict, discarding the freshly
        # built queue map -- the poller started with no queues until the
        # next update_projects() call. Initialise the placeholder first.
        self.queues = dict()
        self.update_projects()
        self.dq = DeferredQueue(size=1)

    @inlineCallbacks
    def poll(self):
        """Move one message (if any) from a project queue into self.dq."""
        if self.dq.pending:
            return
        for p, q in iteritems(self.queues):
            c = yield maybeDeferred(q.count)
            if c:
                msg = yield maybeDeferred(q.pop)
                if msg is not None:  # In case of a concurrently accessed queue
                    d = msg.copy()
                    d['_project'] = p
                    d['_spider'] = d.pop('name')
                    returnValue(self.dq.put(d))

    def next(self):
        """Return a Deferred firing with the next scheduled message."""
        return self.dq.get()

    def update_projects(self):
        # Rebuild the queue map, picking up newly deployed projects.
        self.queues = get_spider_queues(self.config)
@implementer(IPerformance)
class Performance(object):
    """Samples system-wide disk/network throughput and usage via psutil.

    Speeds are deltas between consecutive poll() calls; they stay ``None``
    until the second poll.
    """

    def __init__(self):
        # Disk speeds in KB/s, network speeds in Kb/s.
        self.disk_io_read_speed = None
        self.disk_io_write_speed = None
        self.net_io_sent_speed = None
        self.net_io_receive_speed = None
        # Snapshot of the previous poll, used to compute the deltas.
        self._last_time = None
        self._last_disk_io_read_bytes = None
        self._last_disk_io_write_bytes = None
        self._last_net_io_sent_bytes = None
        self._last_net_io_receive_bytes = None

    def poll(self):
        """Refresh the *_speed attributes from the counters' deltas."""
        now = time.time()
        disk = psutil.disk_io_counters()
        net = psutil.net_io_counters()
        elapsed = now - self._last_time if self._last_time else None
        if elapsed:
            # Disk: KB/s (1024-byte kilobytes).
            per_kb = 1024 * elapsed
            read_delta = disk.read_bytes - self._last_disk_io_read_bytes
            write_delta = disk.write_bytes - self._last_disk_io_write_bytes
            self.disk_io_read_speed = round(read_delta / per_kb, 2)
            self.disk_io_write_speed = round(write_delta / per_kb, 2)
            # Network: Kb/s (1000-byte kilo).
            per_kbit = 1000 * elapsed
            sent_delta = net.bytes_sent - self._last_net_io_sent_bytes
            recv_delta = net.bytes_recv - self._last_net_io_receive_bytes
            self.net_io_sent_speed = round(sent_delta / per_kbit, 2)
            self.net_io_receive_speed = round(recv_delta / per_kbit, 2)
        self._last_time = now
        self._last_disk_io_read_bytes = disk.read_bytes
        self._last_disk_io_write_bytes = disk.write_bytes
        self._last_net_io_sent_bytes = net.bytes_sent
        self._last_net_io_receive_bytes = net.bytes_recv

    @property
    def cpu_percent(self):
        """System-wide CPU utilisation percentage."""
        return psutil.cpu_percent()

    @property
    def virtual_memory_percent(self):
        """RAM usage percentage."""
        return psutil.virtual_memory().percent

    @property
    def swap_memory_percent(self):
        """Swap usage percentage."""
        return psutil.swap_memory().percent

    @property
    def disk_usage_percent(self):
        """Usage percentage of the root filesystem."""
        return psutil.disk_usage('/').percent
@implementer(ISpiderScheduler)
class SpiderScheduler(object):
    """Schedules spider runs by pushing messages onto per-project queues."""

    def __init__(self, config):
        self.config = config
        self.queues = dict()
        self.update_projects()

    def schedule(self, project, spider_name, **spider_args):
        """Enqueue *spider_name* with its arguments on the project's queue."""
        self.queues[project].add(spider_name, **spider_args)

    def list_projects(self):
        """Return the names of the projects currently known."""
        return self.queues.keys()

    def update_projects(self):
        # Rebuild the queue map, picking up newly deployed projects.
        self.queues = get_spider_queues(self.config)
@implementer(ISpiderQueue)
class SqliteSpiderQueue(object):
    """ISpiderQueue backed by a JSON-serialising SQLite priority queue."""

    def __init__(self, database=None, table='spider_queue'):
        self.q = JsonSqlitePriorityQueue(database, table)

    def add(self, name, **spider_args):
        """Queue a message; an optional 'priority' kwarg sets its priority."""
        msg = dict(spider_args, name=name)
        # Priority is popped out of the stored message itself.
        priority = float(msg.pop('priority', 0))
        self.q.put(msg, priority)

    def pop(self):
        """Remove and return the highest-priority message (or None)."""
        return self.q.pop()

    def count(self):
        """Number of queued messages."""
        return len(self.q)

    def list(self):
        """Return the queued messages without removing them."""
        return [entry[0] for entry in self.q]

    def remove(self, func):
        """Delete messages for which *func(message)* is true."""
        return self.q.remove(func)

    def clear(self):
        """Drop every queued message."""
        self.q.clear()
|
999,294 | 098e10680eacbe906cb1c83770f452aa847d29f5 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import pygame
from pygame.locals import *
import window
import system_notify
import character_view
import item
import shop
# Scene identifiers used by the main game state machine.
TITLE, CITY, BAR, INN, SHOP, TEMPLE, CASTLE, TOWER, STATUS_CHECK, GAMEOVER = (0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
CHARACTER_MAKE = 10
# Sub-scene identifiers for overlay windows layered on top of a scene.
NO_EXTRA, CHARACTER_VIEW, PARTY_REMOVE, CHARACTER_CHECK = 100, 101, 102, 103
# Screen geometry and common RGB colours ("GLAY" is the project's spelling of grey).
SCREEN_RECTANGLE = Rect(0,0,640,480)
COLOR_BLACK = (0,0,0)
COLOR_GLAY = (128,128,128)
COLOR_WHITE = (255,255,255)
class Shop_window(window.Window):
    """Category-selection window of the item shop.

    Rows are the 11 merchandise categories; confirming a row opens the
    Buy_window for that category.
    """

    SWORD, KATANA, BLUNT, GUN, THROW = 0, 1, 2, 3, 4
    SHIELD, ARMOR, HELMET, GAUNTLET, ACCESSORY = 5, 6, 7, 8, 9
    ITEM = 10
    MENU_MAX = 10

    def __init__(self, rectangle):
        window.Window.__init__(self, rectangle)
        self.is_visible = False
        self.menu = 0
        self.top = rectangle.top
        self.left = rectangle.left
        self.right = rectangle.right
        self.centerx = rectangle.centerx
        self.menu_font = pygame.font.Font("ipag.ttf", 20)
        self.top_font = self.menu_font.render( u"何が欲しいんだい?", True, COLOR_WHITE)
        # Pre-rendered category labels, indexed by menu position (SWORD..ITEM).
        labels = (u"剣", u"刀", u"鈍器", u"銃", u"投擲", u"盾", u"鎧",
                  u"兜", u"篭手", u"アクセサリー", u"アイテム")
        self.category_fonts = [self.menu_font.render(text, True, COLOR_WHITE)
                               for text in labels]
        self.buy_window = Buy_window(Rect(120, 50, 400, 360))

    def draw( self, screen, game_self):
        """draw the shop window on screen"""
        if not self.is_visible:
            return
        window.Window.draw(self, screen)
        screen.blit( self.top_font, ((self.centerx-self.top_font.get_width()/2), 60))
        # Highlight bar behind the selected row (rows start at y=95, 30px apart).
        pygame.draw.rect(screen, COLOR_GLAY, Rect(204, 95 + 30*self.menu, 232, 30), 0)
        # Category labels, centred horizontally, one row per category.
        for row, label in enumerate(self.category_fonts):
            screen.blit(label, ((self.centerx-label.get_width()/2), 100 + 30*row))
        #draw extra window
        self.buy_window.draw(screen, game_self)

    def shop_window_handler( self, event, game_self):
        """Keyboard handler; the buy window grabs input while it is open."""
        if self.buy_window.is_visible:
            self.buy_window.buy_window_handler( event, game_self)
            return
        if event.type != KEYDOWN:
            return
        if event.key == K_UP:
            #moves the cursor up (wraps to the bottom)
            game_self.cursor_se.play()
            self.menu = self.MENU_MAX if self.menu == 0 else self.menu - 1
        elif event.key == K_DOWN:
            #moves the cursor down (wraps to the top)
            game_self.cursor_se.play()
            self.menu = 0 if self.menu == self.MENU_MAX else self.menu + 1
        elif event.key == K_x:
            #moves back to shop
            game_self.cancel_se.play()
            self.menu = 0
            self.is_visible = False
        elif event.key in (K_z, K_SPACE, K_RETURN):
            #select category to buy
            self.buy_window.is_visible = True
class Buy_window(window.Window):
    """Item listing for one shop category, paged 10 items at a time.

    Confirming an item opens Character_select_window to pick the buyer.
    """

    SWORD, KATANA, BLUNT, GUN, THROW = 0, 1, 2, 3, 4
    SHIELD, ARMOR, HELMET, GAUNTLET, ACCESSORY = 5, 6, 7, 8, 9
    ITEM = 10
    # Highest row index on a page (10 rows: 0..9).
    MENU_MAX = 9

    def __init__(self, rectangle):
        window.Window.__init__(self, rectangle)
        self.is_visible = False
        # Row on the current page and the page number.
        self.menu = 0
        self.page = 0
        self.top = rectangle.top
        self.left = rectangle.left
        self.right = rectangle.right
        self.centerx = rectangle.centerx
        self.menu_font = pygame.font.Font("ipag.ttf", 20)
        self.top_font = self.menu_font.render( u"商品一覧:", True, COLOR_WHITE)
        # Stock entries of the currently selected category (refreshed by draw()).
        self.category_item = []
        self.character_select = Character_select_window( Rect(100, 50, 440, 230))

    def draw( self, screen, game_self):
        """draw the shop window on screen"""
        if self.is_visible == False: return
        window.Window.draw(self, screen)
        screen.blit( self.top_font, (self.left + 20, self.top+20))
        # Category chosen in the parent Shop_window drives the listing.
        selected = game_self.shop.shop_window.menu
        #show what is the category
        if selected == self.SWORD:
            category_font = self.menu_font.render( u"剣", True, COLOR_WHITE)
        if selected == self.KATANA:
            category_font = self.menu_font.render( u"刀", True, COLOR_WHITE)
        if selected == self.BLUNT:
            category_font = self.menu_font.render( u"鈍器", True, COLOR_WHITE)
        if selected == self.GUN:
            category_font = self.menu_font.render( u"銃", True, COLOR_WHITE)
        if selected == self.THROW:
            category_font = self.menu_font.render( u"投擲", True, COLOR_WHITE)
        if selected == self.SHIELD:
            category_font = self.menu_font.render( u"盾", True, COLOR_WHITE)
        if selected == self.ARMOR:
            category_font = self.menu_font.render( u"鎧", True, COLOR_WHITE)
        if selected == self.HELMET:
            category_font = self.menu_font.render( u"兜", True, COLOR_WHITE)
        if selected == self.GAUNTLET:
            category_font = self.menu_font.render( u"篭手", True, COLOR_WHITE)
        if selected == self.ACCESSORY:
            category_font = self.menu_font.render( u"アクセサリー", True, COLOR_WHITE)
        if selected == self.ITEM:
            category_font = self.menu_font.render( u"アイテム", True, COLOR_WHITE)
        screen.blit( category_font, (self.left + 20 + self.top_font.get_width(), self.top+20))
        #store the item in the shop and quantity of it
        item_data = game_self.item_data
        #category item is the array of selected category items
        self.category_item = game_self.shop.stock[selected]
        #draw the box on item selected
        if self.category_item != []:
            #draws rectangle on the menu item size of rectangle has width of window rectangle - edge_length*2
            #the height depends on the size of font
            pygame.draw.rect(screen, COLOR_GLAY, Rect( self.left+4, self.top+55 + 30*self.menu,(self.right-self.left)-8,30), 0)
        #draws the item 10 at a time in the page
        i = 0
        for item in self.category_item[self.page*10:(self.page+1)*10]:
            # item_data row: [0]=quoted name (Shift-JIS bytes), [2]=price string.
            # NOTE: Python 2 only -- unicode()/sjis decode fails on Python 3.
            item_font = item_data[item.id][0].strip("\"")
            item_font = unicode(item_font, encoding="sjis")
            item_font = self.menu_font.render( item_font, True, COLOR_WHITE)
            screen.blit( item_font, (self.left + 20, self.top+60+i*30))
            cost_font = self.menu_font.render( item_data[item.id][2] + "TG", True, COLOR_WHITE)
            screen.blit( cost_font, (self.right - 20 - cost_font.get_width(), self.top+60+i*30))
            i+=1
        self.character_select.draw( screen, game_self)

    def buy_window_handler(self, event, game_self):
        """Keyboard handler; the character-select popup grabs input while open."""
        if self.character_select.is_visible == True:
            self.character_select.character_select_handler( event, game_self)
            return
        #moves back to shop
        if event.type == KEYDOWN and event.key == K_x:
            game_self.cancel_se.play()
            self.menu = 0
            self.page = 0
            self.is_visible =False
        #moves the cursor up
        elif event.type == KEYDOWN and event.key == K_UP:
            game_self.cursor_se.play()
            self.menu -= 1
            if self.menu < 0:
                self.menu = 0
        #moves the cursor down (only onto an existing entry)
        elif event.type == KEYDOWN and event.key == K_DOWN:
            game_self.cursor_se.play()
            if len(self.category_item) > self.menu+self.page*10+1:
                self.menu += 1
                if self.menu > self.MENU_MAX:
                    self.menu = self.MENU_MAX
        #previous page
        elif event.type == KEYDOWN and event.key == K_LEFT:
            game_self.cursor_se.play()
            if self.page > 0:
                self.page-= 1
                self.menu = 0
        #next page (only when it has at least one entry)
        elif event.type == KEYDOWN and event.key == K_RIGHT:
            game_self.cursor_se.play()
            if len(self.category_item) > (self.page+1)*10:
                self.page += 1
                self.menu = 0
        #select item
        elif event.type == KEYDOWN and (event.key == K_z or event.key == K_SPACE or event.key == K_RETURN):
            if len(self.category_item) > 0:
                game_self.select_se.play()
                self.character_select.is_visible = True
class Sell_window(window.Window):
    """Lists one party member's inventory and sells items back at half price."""

    # Highest visible item row index (10 rows: 0..9).
    MENU_MAX = 9

    def __init__(self, rectangle):
        window.Window.__init__(self, rectangle)
        self.is_visible = False
        # Index of the highlighted item row (NOT the character index).
        self.menu = 0
        self.top = rectangle.top
        self.left = rectangle.left
        self.right = rectangle.right
        self.centerx = rectangle.centerx
        self.menu_font = pygame.font.Font("ipag.ttf", 20)
        self.top_font = self.menu_font.render( u"の持ち物:", True, COLOR_WHITE)
        # Confirmation popup shown after a successful sale (mode 6).
        self.sold_item_window = system_notify.Donate_finish_window( Rect(150, 160 ,300, 50), 6)

    def draw( self, screen, character):
        """draw the shop window on screen"""
        if self.is_visible == False: return
        window.Window.draw(self, screen)
        # Header reads "<name>の持ち物:" (<name>'s belongings).
        name_font = self.menu_font.render( character.name, True, COLOR_WHITE)
        screen.blit( name_font, (self.left+20, self.top+20))
        screen.blit( self.top_font, (self.left+20+name_font.get_width(), self.top+20))
        #draw the box on item selected
        if character.items != []:
            #draws rectangle on the menu item size of rectangle has width of window rectangle - edge_length*2
            #the height depends on the size of font
            pygame.draw.rect(screen, COLOR_GLAY, Rect( self.left+4, self.top+55 + 30*self.menu,(self.right-self.left)-8,30), 0)
        i = 0
        for item in character.items:
            item_font = self.menu_font.render( item.name, True, COLOR_WHITE)
            screen.blit ( item_font, (self.left+20, self.top+60+i*30))
            # Sale price is half the list price (integer division under Python 2).
            cost_font = self.menu_font.render( str(item.price/2) + "TG", True, COLOR_WHITE)
            screen.blit( cost_font, (self.right-20 - cost_font.get_width(), self.top+60+i*30))
            i += 1
        self.sold_item_window.draw(screen)

    def character_sell_window_handler( self, event, game_self):
        """Keyboard handler; the confirmation popup grabs input while open."""
        if self.sold_item_window.is_visible == True:
            self.sold_item_window.donate_finish_window_handler( event, game_self)
            return
        # NOTE(review): game_self.shop.sell_window.menu appears to be the
        # selected *character* index (set by the enclosing shop scene) while
        # self.menu indexes that character's items -- confirm upstream.
        character = game_self.party.member[game_self.shop.sell_window.menu]
        #moves back to shop
        if event.type == KEYDOWN and event.key == K_x:
            game_self.cancel_se.play()
            self.menu = 0
            self.is_visible =False
        #moves the cursor up
        elif event.type == KEYDOWN and event.key == K_UP:
            game_self.cursor_se.play()
            self.menu -= 1
            if self.menu < 0:
                self.menu = 0
        #moves the cursor down (only onto an existing item)
        elif event.type == KEYDOWN and event.key == K_DOWN:
            game_self.cursor_se.play()
            if len(character.items) > self.menu+1:
                self.menu += 1
                if self.menu > self.MENU_MAX:
                    self.menu = self.MENU_MAX
        elif event.type == KEYDOWN and (event.key == K_z or event.key == K_SPACE or event.key == K_RETURN):
            if len(character.items) > 0:
                self.sold_item_window.is_visible = True
                money = character.items[self.menu].price
                #if not_found is 100, it means that there was no item with that id so add new one
                not_found = shop_item_change( character.items[self.menu].id , game_self, 0)
                if not_found == 100:
                    add_new_shop_item( character.items[self.menu].id, game_self )
                #delete character's items and adjust money (half price, py2 int division)
                del character.items[self.menu]
                character.money += (money/2)
                # Keep the cursor inside the shrunk list.
                if self.menu+1 > len(character.items):
                    self.menu -= 1
class Character_select_window(window.Window):
    """Chooses which party member pays for and receives the selected item.

    Owns several result popups (purchase done, not enough money, inventory
    full, character unable to act) plus a full status view.
    """

    def __init__(self, rectangle):
        window.Window.__init__(self, rectangle)
        self.is_visible = False
        # Index of the highlighted party member.
        self.menu = 0
        self.top = rectangle.top
        self.left = rectangle.left
        self.right = rectangle.right
        self.centerx = rectangle.centerx
        self.menu_font = pygame.font.Font("ipag.ttf", 20)
        #if there is no more item left in stock
        self.no_more = 0
        self.top_font = self.menu_font.render( u"誰が買いますか?", True, COLOR_WHITE)
        self.status = character_view.Status_view_window( Rect(20,20,600, 440))
        # Popups: purchase confirmation (4), not enough money (0),
        # inventory full (5), character has a blocking status ailment.
        self.buy_window = system_notify.Donate_finish_window( Rect(150, 160 ,300, 50), 4)
        self.not_enough_window = system_notify.Donate_finish_window( Rect(150, 160 ,300, 50), 0)
        self.too_much_item_window = system_notify.Donate_finish_window( Rect(150, 160 ,300, 50), 5)
        self.not_movable = system_notify.Donate_finish_window( Rect(150,160,300,50), system_notify.Donate_finish_window.TEMPLE_NOT_MOVABLE)

    def draw(self, screen, game_self):
        """Draw the member list with each character's money, plus any popups."""
        if self.is_visible == False: return
        window.Window.draw(self, screen)
        screen.blit( self.top_font, ( self.centerx - self.top_font.get_width()/2 , self.top+20))
        #draw the box on item selected
        if game_self.party.member != []:
            #draws rectangle on the menu item size of rectangle has width of window rectangle - edge_length*2
            #the height depends on the size of font
            pygame.draw.rect(screen, COLOR_GLAY, Rect( self.left+4, self.top+45 + 30*self.menu,(self.right-self.left)-8,30), 0)
        i = 0
        for character in game_self.party.member:
            character_font = self.menu_font.render( character.name, True, COLOR_WHITE)
            screen.blit(character_font, (self.left+20, self.top+50 + 30*i))
            money_font = self.menu_font.render( u"所持金:" + str(character.money) + "TG", True, COLOR_WHITE)
            screen.blit(money_font, (self.right - 20 - money_font.get_width(), self.top+50 + 30*i))
            i+=1
        self.status.draw( screen, game_self.party.member)
        self.buy_window.draw( screen)
        self.not_enough_window.draw(screen)
        self.too_much_item_window.draw(screen)
        self.not_movable.draw(screen)

    def character_select_handler(self, event, game_self):
        """Keyboard handler; any open popup or the status view grabs input."""
        if self.buy_window.is_visible == True:
            self.buy_window.donate_finish_window_handler( event, game_self)
            return
        elif self.not_enough_window.is_visible == True:
            self.not_enough_window.donate_finish_window_handler( event, game_self)
            return
        elif self.status.is_visible == True:
            self.status.status_view_window_handler( game_self, event, None)
            return
        elif self.too_much_item_window.is_visible == True:
            self.too_much_item_window.donate_finish_window_handler( event, game_self)
            return
        elif self.not_movable.is_visible == True:
            self.not_movable.donate_finish_window_handler( event, game_self)
            return
        length = len(game_self.party.member)-1
        #moves back to item window
        if event.type == KEYDOWN and event.key == K_x:
            game_self.cancel_se.play()
            self.menu = 0
            self.is_visible =False
        #moves the cursor up (wraps; keeps status view cursor in sync)
        elif event.type == KEYDOWN and event.key == K_UP:
            game_self.cursor_se.play()
            self.menu -= 1
            self.status.menu -= 1
            if self.menu < 0:
                self.menu = length
                self.status.menu = length
        #moves the cursor down (wraps; keeps status view cursor in sync)
        elif event.type == KEYDOWN and event.key == K_DOWN:
            game_self.cursor_se.play()
            self.menu += 1
            self.status.menu += 1
            if self.menu > length:
                self.menu = 0
                self.status.menu = 0
        #status view
        elif event.type == KEYDOWN and event.key == K_LSHIFT:
            game_self.cursor_se.play()
            self.status.is_visible = True
        #buy
        elif event.type == KEYDOWN and (event.key == K_z or event.key == K_SPACE or event.key == K_RETURN):
            game_self.select_se.play()
            #get the cost of the item
            category_item = game_self.shop.shop_window.buy_window.category_item
            item_menu = game_self.shop.shop_window.buy_window.menu
            # A non-zero status array means an ailment blocks the purchase.
            if game_self.party.member[self.menu].status != [0,0,0,0,0,0,0,0,0]:
                self.not_movable.is_visible = True
            elif game_self.party.member[self.menu].money >= int(game_self.item_data[category_item[item_menu].id][2]) and len(game_self.party.member[self.menu].items) < 10:
                # Charge the buyer, hand over the item, decrement shop stock.
                game_self.party.member[self.menu].money -= int(game_self.item_data[category_item[item_menu].id][2])
                game_self.party.member[self.menu].items.append( item.Item( game_self.item_data[category_item[item_menu].id] ))
                self.no_more = shop_item_change( category_item[item_menu].id, game_self, 1)
                self.buy_window.is_visible = True
            elif game_self.party.member[self.menu].money < int(game_self.item_data[category_item[item_menu].id][2]):
                self.not_enough_window.is_visible = True
            elif len(game_self.party.member[self.menu].items) == 10:
                self.too_much_item_window.is_visible = True
def shop_item_change( item_id, game_self, i ):
    """Adjust the shop's stock count for *item_id*.

    ``i == 1`` decrements stock (a purchase); any other value increments it
    (a sale back to the shop, capped at 99). Returns 100 when the id is not
    stocked at all, 101 when it was found -- except that a purchase which
    empties a slot deletes the slot, pulls the buy-window cursor back into
    range, and returns 1 (callers only ever test ``== 100``, so this is
    equivalent to "found"). A negative stock value means unlimited supply.
    """
    result = 100
    for row, item_array in enumerate(game_self.shop.stock):
        for col, entry in enumerate(item_array):
            if entry.id != item_id:
                continue
            if i == 1:
                result = 101
                if entry.stock < 0:
                    # Unlimited stock: nothing to decrement.
                    return result
                entry.stock -= 1
                if entry.stock == 0:
                    # Slot exhausted: drop it and keep the cursor in range.
                    del game_self.shop.stock[row][col]
                    buy = game_self.shop.shop_window.buy_window
                    buy.menu -= 1
                    if buy.menu < 0:
                        buy.menu = 0
                    return 1
            else:
                if entry.stock == -1:
                    return 101
                entry.stock += 1
                result = 101
                if entry.stock > 99:
                    entry.stock = 99
    return result
def add_new_shop_item( item_id, game_self ):
    """Insert a fresh stock entry (quantity 1) for *item_id* into the shop.

    The id range decides the category slot: ids <= 100 are consumables
    (slot 10); 101-699 map onto the weapon/armour slots 0-9 below. Ids of
    700 and above are silently ignored, matching the original ladder which
    had no final else branch.
    """
    item_id = int(item_id)
    new_item = shop.Shop_item( item_id, 1)
    if item_id <= 100:
        game_self.shop.stock[10].append(new_item)
        return
    # (exclusive upper bound of the id range, stock slot index)
    id_ranges = ((150, 0), (200, 1), (250, 2), (300, 3), (350, 4),
                 (400, 5), (500, 6), (550, 7), (600, 8), (700, 9))
    for upper, slot in id_ranges:
        if item_id < upper:
            game_self.shop.stock[slot].append(new_item)
            return
|
999,295 | cb09426509b18d52cad1f2764ae7525c95f1fa5e | from segmentation_models_pytorch.base import (SegmentationHead,
SegmentationModel)
from segmentation_models_pytorch.encoders import get_encoder
from torch import nn
from trainer.start import *
from utils.pretrain import *
from model.common import *
@dataclass
class PylonConfig:
    """Hyper-parameters for the Pylon weakly-supervised segmentation model."""
    n_in: int
    n_out: int
    backbone: str = 'resnet50'
    weights: str = 'imagenet'
    # number of decoding feature maps
    n_dec_ch: int = 128
    # number of UP modules
    n_up: int = 3
    # prediction head kernel size
    seg_kern_size: int = 1
    # whether to use pyramidal attention
    use_pa: bool = True
    # UP module's conv layers: '1layer' or '2layer' (default)
    up_type: str = '2layer'
    # UP module's conv kernel size
    up_kernel_size: int = 1
    # 'enc' freezes the encoder; None trains everything
    freeze: str = None
    # pretraining configs
    pretrain_conf: PretrainConfig = None

    @property
    def name(self):
        """Experiment identifier encoding every non-default option."""
        parts = [f'pylon-{self.backbone}']
        if not self.use_pa:
            parts.append('-nopa')
        parts.append(f'-uptype{self.up_type}')
        if self.up_kernel_size != 1:
            parts.append(f'-upkern{self.up_kernel_size}')
        if self.n_up != 3:
            parts.append(f'-up{self.n_up}')
        if self.weights is not None:
            parts.append(f'-{self.weights}')
        parts.append(f'-dec{self.n_dec_ch}')
        if self.seg_kern_size != 1:
            parts.append(f'-segkern{self.seg_kern_size}')
        if self.freeze is not None:
            parts.append(f'_freeze{self.freeze}')
        if self.pretrain_conf is not None:
            parts.append(f'_{self.pretrain_conf.name}')
        return ''.join(parts)

    def make_model(self):
        """Instantiate the Pylon module described by this config."""
        return Pylon(self)
class Pylon(nn.Module):
    """Wraps PylonCore with global max-pooling to produce per-class logits.

    The dense segmentation map is reduced with AdaptiveMaxPool2d(1), so the
    model can be trained from image-level labels only (weak supervision).
    """

    def __init__(self, conf: PylonConfig):
        super(Pylon, self).__init__()
        self.conf = conf
        self.net = PylonCore(backbone=conf.backbone,
                             n_in=conf.n_in,
                             n_out=conf.n_out,
                             weights=conf.weights,
                             n_dec_ch=conf.n_dec_ch,
                             use_pa=conf.use_pa,
                             up_type=conf.up_type,
                             up_kernel_size=conf.up_kernel_size,
                             n_up=conf.n_up,
                             seg_kern_size=conf.seg_kern_size)
        # Collapses the (B, C, H, W) segmentation map to (B, C, 1, 1).
        self.pool = nn.AdaptiveMaxPool2d(1)
        if conf.pretrain_conf is not None:
            load_pretrain(conf.pretrain_conf, self)
        if conf.freeze is not None:
            if conf.freeze == 'enc':
                # Freeze only the encoder weights.
                self.net.encoder.requires_grad_(False)
            else:
                raise NotImplementedError()

    def forward(self, img, classification=None, **kwargs):
        # enforce float32 is a good idea
        # because if the loss function involves a reduction operation
        # it would be harmful, this prevents the problem
        seg = self.net(img).float()
        pred = self.pool(seg)
        pred = torch.flatten(pred, start_dim=1)
        loss = None
        loss_pred = None
        # Never computed here; kept so the return shape is uniform.
        loss_bbox = None
        if classification is not None:
            # Multi-label BCE against the image-level targets.
            loss_pred = F.binary_cross_entropy_with_logits(
                pred, classification.float())
            loss = loss_pred
        return ModelReturn(
            pred=pred,
            pred_seg=seg,
            loss=loss,
            loss_pred=loss_pred,
            loss_bbox=loss_bbox,
        )
class PylonCore(SegmentationModel):
    """Encoder -> PylonDecoder -> segmentation head pipeline.

    Produces the dense per-class map that Pylon pools into logits.
    """

    def __init__(self,
                 backbone: str,
                 n_in: int,
                 n_out: int,
                 weights: str = 'imagenet',
                 n_dec_ch: int = 128,
                 use_pa: bool = True,
                 up_type: str = '2layer',
                 up_kernel_size: int = 1,
                 n_up: int = 3,
                 seg_kern_size: int = 1):
        super(PylonCore, self).__init__()
        # Backbone from segmentation_models_pytorch; depth=5 yields six
        # feature maps (input scale down to 1/32).
        self.encoder = get_encoder(
            backbone,
            in_channels=n_in,
            depth=5,
            weights=weights,
        )
        self.decoder = PylonDecoder(
            encoder_channels=self.encoder.out_channels,
            n_dec_ch=n_dec_ch,
            use_pa=use_pa,
            up_type=up_type,
            up_kernel_size=up_kernel_size,
            n_up=n_up,
        )
        # Raw logits (activation=None); Pylon applies the loss itself.
        self.segmentation_head = SegmentationHead(in_channels=n_dec_ch,
                                                  out_channels=n_out,
                                                  activation=None,
                                                  kernel_size=seg_kern_size,
                                                  upsampling=1)
        # just to comply with SegmentationModel
        self.classification_head = None
        self.initialize()
class PylonDecoder(nn.Module):
    """Fuses encoder features top-down: PA on the bottleneck, then up to
    three UP modules (up3, up2, up1) merging progressively shallower maps."""

    def __init__(
        self,
        encoder_channels,
        n_dec_ch: int,
        use_pa: bool = True,
        up_type: str = '2layer',
        up_kernel_size: int = 1,
        n_up: int = 3,
        upscale_mode: str = 'bilinear',
        align_corners=True,
    ):
        super(PylonDecoder, self).__init__()
        self.n_up = n_up
        # Pyramid attention on the deepest (1/32) feature map.
        self.pa = PA(
            in_channels=encoder_channels[-1],
            out_channels=n_dec_ch,
            align_corners=align_corners,
            use_pa=use_pa,
        )
        up_opts = dict(
            out_channels=n_dec_ch,
            upscale_mode=upscale_mode,
            align_corners=align_corners,
            up_type=up_type,
            kernel_size=up_kernel_size,
        )
        # Level 1 -> self.up3 (encoder_channels[-2]), level 2 -> self.up2,
        # level 3 -> self.up1, matching the original attribute names.
        for level in range(1, n_up + 1):
            setattr(self, 'up%d' % (4 - level),
                    UP(in_channels=encoder_channels[-(level + 1)], **up_opts))

    def forward(self, *features):
        x = self.pa(features[-1])  # bottleneck at 1/32 resolution
        # Walk up3 -> up2 -> up1, each fusing one shallower feature map
        # (1/16, 1/8, 1/4).
        for level in range(1, self.n_up + 1):
            up_module = getattr(self, 'up%d' % (4 - level))
            x = up_module(features[-(level + 1)], x)
        return x
class PA(nn.Module):
    """Pyramid Attention block.

    A 1x1 "mid" projection of the input is modulated (element-wise multiply)
    by a single-channel attention map built from a 3-level downsample /
    upsample pyramid. With ``use_pa=False`` it degrades to the plain 1x1
    projection.
    """

    def __init__(
        self,
        in_channels,
        out_channels,
        use_pa: bool = True,
        upscale_mode='bilinear',
        align_corners=True,
    ):
        super(PA, self).__init__()
        self.upscale_mode = upscale_mode
        # align_corners is only meaningful for bilinear interpolation.
        self.align_corners = align_corners if upscale_mode == 'bilinear' else None
        self.use_pa = use_pa
        # middle branch: channel projection that the attention map scales
        self.mid = nn.Sequential(
            ConvBnRelu(
                in_channels=in_channels,
                out_channels=out_channels,
                kernel_size=1,
                stride=1,
                padding=0,
            ))
        # pyramid attention branch: shrinking kernels (7/5/3) at growing depth
        if use_pa:
            self.down1 = nn.Sequential(
                nn.MaxPool2d(kernel_size=2, stride=2),
                ConvBnRelu(in_channels=in_channels,
                           out_channels=1,
                           kernel_size=7,
                           stride=1,
                           padding=3))
            self.down2 = nn.Sequential(
                nn.MaxPool2d(kernel_size=2, stride=2),
                ConvBnRelu(in_channels=1,
                           out_channels=1,
                           kernel_size=5,
                           stride=1,
                           padding=2))
            self.down3 = nn.Sequential(
                nn.MaxPool2d(kernel_size=2, stride=2),
                ConvBnRelu(in_channels=1,
                           out_channels=1,
                           kernel_size=3,
                           stride=1,
                           padding=1))
            # Matching convs applied on the way back up the pyramid.
            self.conv3 = ConvBnRelu(in_channels=1,
                                    out_channels=1,
                                    kernel_size=3,
                                    stride=1,
                                    padding=1)
            self.conv2 = ConvBnRelu(in_channels=1,
                                    out_channels=1,
                                    kernel_size=5,
                                    stride=1,
                                    padding=2)
            self.conv1 = ConvBnRelu(in_channels=1,
                                    out_channels=1,
                                    kernel_size=7,
                                    stride=1,
                                    padding=3)

    def forward(self, x):
        upscale_parameters = dict(mode=self.upscale_mode,
                                  align_corners=self.align_corners)
        mid = self.mid(x)
        if self.use_pa:
            # Downsample path: 1/2, 1/4, 1/8 of the input resolution.
            x1 = self.down1(x)
            x2 = self.down2(x1)
            x3 = self.down3(x2)
            # Upsample path with skip additions back to full resolution.
            x = F.interpolate(self.conv3(x3),
                              scale_factor=2,
                              **upscale_parameters)
            x = F.interpolate(self.conv2(x2) + x,
                              scale_factor=2,
                              **upscale_parameters)
            x = F.interpolate(self.conv1(x1) + x,
                              scale_factor=2,
                              **upscale_parameters)
            # Attention: scale the mid projection by the 1-channel map
            # (broadcast over channels).
            x = torch.mul(x, mid)
        else:
            x = mid
        return x
class UP(nn.Module):
    """Decoder fusion block: upsample the high-level feature to the low-level
    feature's spatial size and add a conv projection of the low-level one."""

    def __init__(
        self,
        in_channels: int,
        out_channels: int,
        up_type: str = '2layer',
        kernel_size: int = 1,
        upscale_mode: str = 'bilinear',
        align_corners=True,
    ):
        super(UP, self).__init__()
        self.upscale_mode = upscale_mode
        # align_corners only applies to bilinear interpolation.
        self.align_corners = align_corners if upscale_mode == 'bilinear' else None
        conv_opts = dict(kernel_size=kernel_size, padding=kernel_size // 2)
        if up_type == '1layer':
            self.conv1 = ConvBnRelu(in_channels=in_channels,
                                    out_channels=out_channels,
                                    **conv_opts)
        elif up_type == '2layer':
            # Two stacked convs; only the second changes the channel count.
            self.conv1 = nn.Sequential(
                ConvBnRelu(in_channels=in_channels,
                           out_channels=in_channels,
                           **conv_opts),
                ConvBnRelu(in_channels=in_channels,
                           out_channels=out_channels,
                           **conv_opts),
            )
        else:
            raise NotImplementedError()

    def forward(self, x, y):
        """
        Args:
            x: low level feature
            y: high level feature
        """
        target_size = (x.size(2), x.size(3))
        upsampled = F.interpolate(y,
                                  size=target_size,
                                  mode=self.upscale_mode,
                                  align_corners=self.align_corners)
        return upsampled + self.conv1(x)
class ConvBnRelu(nn.Module):
    """Conv2d followed by optional BatchNorm, ReLU, and 2x bilinear upsampling."""
    def __init__(self,
                 in_channels: int,
                 out_channels: int,
                 kernel_size: int,
                 stride: int = 1,
                 padding: int = 0,
                 dilation: int = 1,
                 groups: int = 1,
                 add_bn: bool = True,
                 add_relu: bool = True,
                 bias: bool = True,
                 interpolate: bool = False):
        super(ConvBnRelu, self).__init__()
        self.add_bn = add_bn
        self.add_relu = add_relu
        self.interpolate = interpolate
        self.conv = nn.Conv2d(in_channels,
                              out_channels,
                              kernel_size,
                              stride=stride,
                              padding=padding,
                              dilation=dilation,
                              groups=groups,
                              bias=bias)
        # BN/ReLU submodules are only created when requested.
        if self.add_bn:
            self.bn = nn.BatchNorm2d(out_channels)
        if self.add_relu:
            self.activation = nn.ReLU(inplace=True)
    def forward(self, x):
        out = self.conv(x)
        if self.add_bn:
            out = self.bn(out)
        if self.add_relu:
            out = self.activation(out)
        if self.interpolate:
            # Fixed 2x bilinear upsampling.
            out = F.interpolate(out,
                                scale_factor=2,
                                mode='bilinear',
                                align_corners=True)
        return out
|
999,296 | 3b1217ac713d10c9a98874de6c4aba20b1483dca | '''
Created on 8 avr. 2013
utilitaires sur les transformation.
on utilisera les matrices 4*4 de la géometrie projective pour définir les transformation
les utilitaires pour créer les transformations de bases sont déjà prédéfinis dans mathutils.Matrix
pour appliquer une transformation, il faut l'appliquer au champ matrix_world de l'objet
(la matrice locale dépend de la position du repère local de l'objet, matrix_world non)
!!!! NE FONCTIONNE PAS ENCORE !!!!
@author: francois
'''
from mathutils import Matrix
import math
from modP3D.utilsBlender.primitivesGeometriques import boiteXYZ
from modP3D.utilsBlender.selections import dupliqueCopie
def transform(obj, matrixTransform):
    """Apply a 4x4 projective transform to obj's world matrix.

    NOTE(review): mathutils in Blender >= 2.80 uses '@' for matrix
    multiplication; '*' only works on older versions -- confirm the target
    Blender version (the module docstring says this does not work yet).
    """
    new_world = matrixTransform * obj.matrix_world
    obj.matrix_world = new_world
def testTransformations():
    """Smoke test: build a box, duplicate it, and translate the copy by (10, 10, 0)."""
    original = boiteXYZ(-5, -5, 0, 10, 20, 8)
    duplicate = dupliqueCopie(original)
    translation = Matrix.Translation([10, 10, 0])
    transform(duplicate, translation)
    # Disabled rotation example, kept for reference:
    #rot = Matrix.Rotation(math.radians(45), 4, 'Z')
    #transform(duplicate, rot)
# Manual smoke test entry point (intended to be run inside Blender).
if __name__ == "__main__":
    testTransformations()
|
999,297 | b4bab6138e5ca06d35c15e3575ba14d0aaba6ff0 | #! /usr/bin/env python3
import pandas as pd
f_path = 'py/question.txt'
cols = ['序号','题目','A','B','C','D','正确选项']
# rows = [[1,'Title-1','Opt-A','Opt-B','Opt-C','Opt-D','C'],[2,'Title-2','Opt-A','Opt-B','Opt-C','Opt-D','D'],[3,'Title-3','Opt-A','Opt-B','Opt-C','Opt-D','A'],[4,'Title-4','Opt-A','Opt-B','Opt-C','Opt-D','B']]
# Parse the question file into rows of [序号, 题目, A, B, C, D, 正确选项]
# and export them to an Excel sheet.
with open(f_path, 'r') as f:
    msg = f.read()
linemsg = msg.split('\n')
lst = ['', '', '', '', '', '', '']
# Maps an option letter to its column index in `lst`.
rule = {'A': 2, 'B': 3, 'C': 4, 'D': 5, }
data = []
for raw_line in linemsg:  # iterate lines directly; avoid shadowing builtin `id`
    line = raw_line.strip()
    if line == '':
        continue
    if line[0] == '试':
        # A "试..." line closes the current question; its last character is
        # the correct-answer letter.
        lst[-1] = line[-1]
        data.append(lst)
        lst = ['', '', '', '', '', '', '']
        continue
    if line[0] in rule:
        # Option line such as "A. some text": store the text (after the
        # 2-char "A." prefix) in the mapped column.
        optidx = rule[line[0]]
        lst[optidx] = line[2:]
    else:
        # Question line: expected shape "<number>.<question text>".
        # FIX: the original `try:` body was empty (a syntax error) yet the
        # success path used `title` and `content`; reconstructed as splitting
        # the question number from the text -- TODO confirm against the
        # actual py/question.txt format.
        try:
            title, content = line.split('.', 1)
        except ValueError:
            # Unrecognized line: report it and keep parsing.
            print(line)
            continue
        lst[0] = title
        lst[1] = content
df = pd.DataFrame(data=data, columns=cols)
df.to_excel('mytest.xlsx')
|
999,298 | 899cf48607acf464717068627fb9f213f661942b | import bng
import pyproj
# Define coordinate systems
wgs84=pyproj.CRS("EPSG:4326") # LatLon with WGS84 datum used by GPS units and Google Earth
osgb36=pyproj.CRS("EPSG:27700") # UK Ordnance Survey, 1936 datum
# Transform
#x, y = bng.to_osgb36('NT2755072950')
# Hard-coded OSGB36 easting/northing pair to convert.
x , y = 538890, 177320
# NOTE(review): pyproj.transform() is deprecated since pyproj 2.x in favour of
# pyproj.Transformer.from_crs(osgb36, wgs84).transform(x, y), and EPSG:4326's
# lat/lon axis order can flip the result -- confirm against the pyproj version
# in use. The sample output below appears to correspond to the commented-out
# 'NT2755072950' grid reference, not to (538890, 177320) -- verify.
res = pyproj.transform(osgb36, wgs84, x, y)
# sample output: (55.94410954187127, -3.1615548049941133)
print(res)
|
999,299 | 6ac97fe12363e50b6cd640eecf901dbe49a9f0b9 | import numpy as np
from scipy import linalg
#===============================================================================
# s_av: mean value for each bin ; s1_av: possible values of s_av
#===============================================================================
def data_binning(s, nbin):
    """Discretize s into nbin equal-width bins over [min(s), max(s)].

    Args:
        s: 1-D sequence of values.
        nbin: number of bins.

    Returns:
        s_av: list, each sample replaced by the midpoint of its bin.
        s1_av: array of the nbin bin midpoints.
    """
    edges = np.linspace(np.min(s), np.max(s), nbin + 1)
    bin_idx = np.digitize(s, edges, right=True)
    # digitize(right=True) maps the minimum value to bin 0; fold it into
    # bin 1 so every sample lands in 1..nbin.
    bin_idx = np.maximum(bin_idx, 1)
    # Midpoint of each bin.
    s1_av = (edges[:-1] + edges[1:]) / 2
    # Replace every observation by its bin's midpoint.
    s_av = [s1_av[i - 1] for i in bin_idx]
    return s_av, s1_av
##==============================================================================
# 2018.07.26: model expectation value of s:
# s[:] processed-sequence, sk[:] possible values of s, w[:] coupling, h0 external local field
##==============================================================================
def model_expectation(s, sk, w, h0):
    """Model expectation of s(t+1) under local fields h = h0 + s.w.

    Args:
        s: (l, n) processed sequence.
        sk: possible values of s (bin midpoints).
        w: (n,) coupling vector.
        h0: scalar external local field.

    Returns:
        (l,) array: sum_k sk[k]*exp(sk[k]*h) / sum_k exp(sk[k]*h) per step.
    """
    h = h0 + np.matmul(s, w)           # local field per time step, shape (l,)
    boltz = np.exp(np.outer(h, sk))    # Boltzmann weights exp(sk*h), (l, nbin)
    return np.matmul(boltz, sk) / np.sum(boltz, axis=1)
##==============================================================================
## 2018.07.26: from s,sbin --> w, h0
##==============================================================================
def fit_interaction(s0,sbin,nloop):
    """Infer couplings W and external fields H0 from a binned time series.

    Args:
        s0: (l+1, n) binned sequence; predictors are s0[:-1], targets s0[1:].
        sbin: (nbin, n) possible values (bin midpoints) for each variable.
        nloop: maximum number of refinement iterations per variable.

    Returns:
        W: (n, n) coupling matrix (row i0 = couplings into variable i0).
        H0: (n,) external local fields.
    """
    s = s0[:-1]
    l,n = s.shape
    m = s.mean(axis=0)
    ds = s - m
    c = np.cov(ds,rowvar=False,bias=True)
    # NOTE(review): assumes the covariance matrix is invertible -- confirm
    # the data is not rank-deficient.
    c_inv = linalg.inv(c)
    dst = ds.T
    #--------------------------------
    # initial guess (random couplings; results depend on the RNG state)
    w_ini = np.random.normal(0.0,1./np.sqrt(n),size=(n,n))
    h_all = np.matmul(s,w_ini.T)
    W = np.empty((n,n)) ; H0 = np.empty(n)
    for i0 in range(n):
        s1 = s0[1:,i0]  # target: next-step values of variable i0
        cost = np.full(nloop,100.)
        h = h_all[:,i0]
        for iloop in range(1,nloop):
            # Solve <ds (h - <h>)> = w C for w, then recover the offset h0.
            h_av = np.mean(h)
            hs_av = np.matmul(dst,h-h_av)/l
            w = np.matmul(hs_av,c_inv)
            h0=h_av-np.sum(w*m)
            h = np.matmul(s[:,:],w[:]) + h0
            s_model = model_expectation(s,sbin[:,i0],w,h0)
            #s_model = np.tanh(h)
            #s_model = 0.5*np.tanh(0.5*h)
            cost[iloop]=np.mean((s1[:]-s_model[:])**2)
            #MSE = np.mean((w[:]-W0[i0,:])**2)
            #slope = np.sum(W0[i0,:]*w[:])/np.sum(W0[i0,:]**2)
            #print(i0,iloop,cost[iloop]) #,MSE,slope)
            # Stop once the fit no longer improves.
            if cost[iloop] >= cost[iloop-1]: break
            # Multiplicative update of the local field toward the observations,
            # guarded against division by zero where s_model == 0.
            #h = h*s1/s_model
            h *= np.divide(s1,s_model, out=np.zeros_like(s1), where=s_model!=0)
        W[i0,:] = w[:]
        H0[i0] = h0
    return W,H0
##==============================================================================
## 2017.07.26: generate sequence
# sk: possible values of s (bin values)
##==============================================================================
def generate_data(w,h0,sk,l):
    """Generate an l-step sequence from couplings w, fields h0 and bin values sk.

    Args:
        w: (n, n) coupling matrix.
        h0: (n,) external local fields.
        sk: (nbin, n) possible values per variable.
        l: number of time steps.

    Returns:
        (l, n) array; each s[t+1, i] is drawn with probability proportional to
        exp(sk[k, i] * (h0[i] + sum_j w[i, j] * s[t, j])).
    """
    n = w.shape[0]
    nbin = sk.shape[0]
    s = np.full((l,n),100.)  # 100. marks "not yet sampled" entries
    # ini config (at t = 0): pick a bin value uniformly for each variable
    for i in range(n):
        s[0,i]=sk[np.random.randint(0,nbin),i]
    p1=np.empty(nbin)
    for t in range(l-1):
        for i in range(n):
            # Unnormalized Boltzmann weight of each candidate value.
            for k in range(nbin):
                p1[k]=np.exp(sk[k,i]*(h0[i]+np.sum(w[i,:]*s[t,:])))
            p2=np.sum(p1)
            # Rejection sampling: propose a bin uniformly, accept with
            # probability p1[k0]/p2; loop until accepted.
            while s[t+1,i] == 100.:
                k0=np.random.randint(0,nbin)
                p=p1[k0]/p2
                if p>np.random.rand():
                    s[t+1,i]=sk[k0,i]
    return s
##==============================================================================
## cij = <delta_si(t+1) delta_sj(t)>
##==============================================================================
def cross_cov(a, b):
    """Cross-covariance <da_i(t) db_j(t)> of two (T, n) arrays.

    Columns of each input are centered independently; the result is
    da.T @ db averaged over the first (time) axis of a.
    """
    centered_a = a - a.mean(axis=0)
    centered_b = b - b.mean(axis=0)
    return np.matmul(centered_a.T, centered_b) / a.shape[0]
def convert_binary(s):
    """Binarize s in place: entries < 0 become -1.0, all others +1.0.

    Bug fix: the original assigned ``s[:, :] = 1.`` BEFORE testing
    ``s[t, i] < 0``, so the condition could never be true and every entry
    came back +1. The sign must be taken from the original values.

    Args:
        s: 2-D float array, mutated in place.

    Returns:
        The same (mutated) array.
    """
    s[:, :] = np.where(s < 0, -1.0, 1.0)
    return s
def rescale(s):
    """Center each column of s and scale it so max |value| is 1 (in place).

    Note: a constant column yields division by zero, as in the original.
    Returns the same (mutated) array.
    """
    n_cols = np.shape(s)[1]
    for col in range(n_cols):
        s[:, col] = s[:, col] - s[:, col].mean()
        s[:, col] = s[:, col] / np.abs(s[:, col]).max()
    return s
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.