index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
24,700 | 79d9c818aa632b66ef7d1e2792d45f2b1945d18a | from bot.server_models.ticket import Ticket
class TicketPagination:
    """Keeps a shared list of concert tickets plus a per-chat cursor.

    NOTE(review): ``tickets`` is shared by all chats while ``index`` is
    per-chat, so calling ``set`` for one chat replaces the list seen by
    every other chat -- confirm this single-list design is intended.
    """

    def __init__(self):
        self.tickets = []     # current Ticket objects (shared across chats)
        self.index = {}       # chat_id -> cursor position in self.tickets
        self.massage_id = {}  # chat_id -> id of the pagination message (sic: "massage")

    def set(self, concerts: list, chat_id, massage_id):
        """Replace the ticket list with ``concerts`` and reset this chat's cursor.

        The original annotated ``concerts`` with the literal ``[]`` (a mutable
        list object, not a type); ``list`` expresses the same intent correctly.
        """
        self.tickets.clear()
        for concert in concerts:
            self.tickets.append(Ticket(concert))
        self.index[chat_id] = 0
        self.massage_id[chat_id] = massage_id

    def current(self, chat_id):
        """Return the ticket at the chat's cursor, or None when the list is empty."""
        if self.tickets:
            return self.tickets[self.index[chat_id]]
        return None

    def next(self, chat_id):
        """Advance the chat's cursor (wrapping) and return the new ticket, or None."""
        if self.tickets:
            self.index[chat_id] = (self.index[chat_id] + 1) % len(self.tickets)
            return self.tickets[self.index[chat_id]]
        return None

    def prev(self, chat_id):
        """Step the chat's cursor back (wrapping) and return the new ticket, or None."""
        if self.tickets:
            self.index[chat_id] = (self.index[chat_id] - 1) % len(self.tickets)
            return self.tickets[self.index[chat_id]]
        return None
|
24,701 | e265406f8c558600b45def988b1b4b3e9f1072e8 | import asyncio
import sys
from exchanges.models import Exchange
from exchanges_crawler.crawlers.bitbay_crawler import BitBayCrawler
from exchanges_crawler.crawlers.cexio_crawler import CexioCrawler
from exchanges_crawler.crawlers.bitstamp_crawler import BitstampCrawler
from exchanges_crawler.crawlers.kraken_crawler import KrakenCrawler
from exchanges_crawler.crawlers.fiat_crawler import FiatCrawler
def init_crawlers():
    """Build one crawler instance per active exchange.

    The crawler class is resolved by naming convention: exchange ``Foo``
    maps to the module-level class ``FooCrawler``.  Exchanges without a
    matching class are silently skipped (same as the original NameError
    handling).
    """
    exchanges = Exchange.objects.filter(active=True).all()
    crawlers = []
    for exchange in exchanges:
        # Resolve the class by a dictionary lookup instead of eval():
        # same convention, but no arbitrary-code-execution risk should
        # exchange.name ever contain untrusted text.
        crawler_class = globals().get(exchange.name + 'Crawler')
        if crawler_class is None:
            continue
        crawlers.append(crawler_class(exchange))
    return crawlers
# Add to settings.py
# CRONJOBS = [
# ('0 */6 * * *', 'exchanges_crawler.runner.update_fiats')
# ]
def update_fiats_runner():
    """Cron entry point: refresh fiat tickers via the ``Fiat`` pseudo-exchange."""
    fiat_exchange = Exchange.objects.filter(name="Fiat").first()
    crawlers = [FiatCrawler(fiat_exchange)] if fiat_exchange else []
    _update_tickers(crawlers)
# Add to settings.py
# CRONJOBS = [
# ('*/3 * * * *', 'exchanges_crawler.runner.update_tickers_runner')
# ]
def update_tickers_runner():
    """Cron entry point: refresh tickers for every active exchange."""
    _update_tickers(init_crawlers())
# Add to settings.py
# CRONJOBS = [
# ('*/3 * * * *', 'exchanges_crawler.runner.update_orderbooks_runner')
# ]
def update_orderbooks_runner():
    """Cron entry point: refresh order books for every active exchange."""
    _update_orderbooks(init_crawlers())
# Add to settings.py
# CRONJOBS = [
# ('*/3 * * * *', 'exchanges_crawler.runner.update_all_runner')
# ]
def update_all_runner():
    """Cron entry point: refresh order books first, then tickers."""
    all_crawlers = init_crawlers()
    _update_orderbooks(all_crawlers)
    _update_tickers(all_crawlers)
def _update_orderbooks(crawlers):
    """Run every crawler's ``get_orderbooks`` coroutine on a fresh event loop.

    A new loop is created (Proactor on Windows, where this code predates it
    becoming the default) and always closed, even when a crawler raises.
    """
    if sys.platform == 'win32':
        loop = asyncio.ProactorEventLoop()
    else:
        loop = asyncio.new_event_loop()
    # Build the coroutine list directly instead of indexing by range(len(...)).
    tasks = [crawler.get_orderbooks() for crawler in crawlers]
    asyncio.set_event_loop(loop)
    try:
        loop.run_until_complete(asyncio.gather(*tasks))
    finally:
        loop.close()
def _update_tickers(crawlers):
    """Run every crawler's ``get_tickers`` coroutine on a fresh event loop.

    Mirrors ``_update_orderbooks``: Proactor loop on Windows, otherwise a new
    default loop; the loop is always closed, even when a crawler raises.
    """
    if sys.platform == 'win32':
        loop = asyncio.ProactorEventLoop()
    else:
        loop = asyncio.new_event_loop()
    # Build the coroutine list directly instead of indexing by range(len(...)).
    tasks = [crawler.get_tickers() for crawler in crawlers]
    asyncio.set_event_loop(loop)
    try:
        loop.run_until_complete(asyncio.gather(*tasks))
    finally:
        loop.close()
24,702 | 2749d2ae5f7b0bd1d98b2365918f0a14e93e7fe7 | import pytest
@pytest.mark.parametrize("input,expected", [
    ("3+5", 8),
    ("2+4", 6),
    # Bug fix: ``pytest.mark.xfail((...))`` inside the parametrize list is the
    # pre-pytest-4 spelling that was deprecated and then removed; the
    # supported form is pytest.param(..., marks=pytest.mark.xfail).
    pytest.param("6*9", 42, marks=pytest.mark.xfail),
])
def test_eval(input, expected):
    """eval() of a small arithmetic expression yields the expected value."""
    assert eval(input) == expected
24,703 | bc9728aee965c8ae9e3905181af9113c833f2ce9 | # /usr/bin/env python3
from pathlib import Path
import random
import shutil
def process_dir(input_dir, output_dir, percent=0.7):
    """Split an image tree into train/validation subtrees.

    Every file in ``input_dir/<category>/`` is copied to
    ``output_dir/train/<category>/`` with probability ``percent`` and to
    ``output_dir/validation/<category>/`` otherwise.  ``output_dir`` is
    deleted first, so reruns start from a clean slate.

    Args:
        input_dir: directory whose immediate subdirectories are categories.
        output_dir: destination root; recreated on every call.
        percent: probability that a given file lands in the train split.
    """
    ip = Path(input_dir)
    op = Path(output_dir)
    print("Deleting " + op.as_posix())
    shutil.rmtree(op.as_posix(), ignore_errors=True)
    # Create both split directories for every category up front.
    # (The original bound the None returned by mkdir() to a dead variable.)
    for category in ip.iterdir():
        (op / 'train' / category.name).mkdir(parents=True, exist_ok=True)
        (op / 'validation' / category.name).mkdir(parents=True, exist_ok=True)
    for category in ip.iterdir():
        print("Copying category: {}".format(category.as_posix()).ljust(50), end='')
        train_imgs = 0
        validation_imgs = 0
        for img in category.iterdir():
            if random.random() < percent:
                output_img = op / 'train' / category.name / img.name
                train_imgs += 1
            else:
                output_img = op / 'validation' / category.name / img.name
                validation_imgs += 1
            shutil.copy(img.as_posix(), output_img.as_posix())
        print("=> train({}), validation({})".format(train_imgs, validation_imgs))
if __name__ == '__main__':
    # Script entry point: split the processed image tree (default 70/30)
    # into train/validation under imgs/splitted.
    process_dir('imgs/processed', 'imgs/splitted')
|
24,704 | 6a10268d2d92e94806e1c1ee9429550e39bf264b | import cv2
from picamera.array import PiRGBArray
from picamera import PiCamera
import time
import RPi.GPIO as GPIO # for the sensors
# # SENSOR SETUP
# servo_x_pin =
# servo_y_pin =
# sonic_trigger_pin =
# sonic_echo_pin =
def setup():
    """Configure GPIO pins and start both servo PWM channels at 0% duty.

    NOTE(review): ``servo_x_pin``, ``servo_y_pin``, ``sonic_trigger_pin`` and
    ``sonic_echo_pin`` are only sketched in the commented block above and are
    never assigned -- this function raises NameError until they are defined.
    """
    global servo_X  # PWM handles are module-level so servo_move/destroy can reach them
    global servo_Y
    GPIO.setmode(GPIO.BOARD)  # number GPIOs by physical board location
    GPIO.setup(servo_x_pin, GPIO.OUT)
    GPIO.setup(servo_y_pin, GPIO.OUT)
    GPIO.setup(sonic_trigger_pin, GPIO.OUT)
    # Bug fix: the echo pin is named sonic_echo_pin (see the pin sketch
    # above and the *_pin suffix used everywhere else); ``sonic_echo`` was
    # a typo that would NameError even once the pins are defined.
    GPIO.setup(sonic_echo_pin, GPIO.IN)
    GPIO.output(servo_x_pin, GPIO.LOW)
    GPIO.output(servo_y_pin, GPIO.LOW)
    servo_X = GPIO.PWM(servo_x_pin, 50)  # 50 Hz: standard hobby-servo pulse rate
    servo_Y = GPIO.PWM(servo_y_pin, 50)
    servo_X.start(0)
    servo_Y.start(0)
def servo_signal_wavelength(angle,
                            servo_max_duty=12.5,
                            servo_min_duty=2.5,
                            max_angle=180,
                            min_angle=0):
    """Linearly map an angle in [min_angle, max_angle] to a PWM duty cycle."""
    duty_span = servo_max_duty - servo_min_duty
    angle_span = max_angle - min_angle
    return duty_span * (angle - min_angle) / angle_span + servo_min_duty
def servo_move(angle, servo):
    """Rotate ``servo`` to ``angle`` degrees, clamping to the 0-180 range.

    NOTE(review): this definition is shadowed by the one-argument
    ``servo_move`` further down the file.
    """
    clamped = min(max(angle, 0), 180)
    # Duty cycle drives the servo toward the requested angle.
    servo.ChangeDutyCycle(servo_signal_wavelength(clamped))
def destroy():
    """Release the camera stream, OpenCV windows, servo PWM channels and GPIO pins.

    NOTE(review): relies on module-level ``video_stream``, ``servo_X`` and
    ``servo_Y`` being set; also shadowed by the later ``destroy(video_stream)``.
    """
    video_stream.release()
    cv2.destroyAllWindows()
    servo_X.stop()
    servo_Y.stop()
    GPIO.cleanup()
def servo_angle_to_pulse_ms(angle, servo_max_duty=12.5, servo_min_duty=2.5, max_angle=180, min_angle=0):
    """Linearly map an angle to a servo duty cycle (duplicate of servo_signal_wavelength).

    Bug fix: ``min_angle`` previously defaulted to 180, making
    ``max_angle - min_angle`` zero and raising ZeroDivisionError on every
    call with default arguments.
    """
    return ((servo_max_duty - servo_min_duty) * (angle - min_angle)) / (max_angle - min_angle) + servo_min_duty
def locate_faces(frame):
    """Run the module-level Haar-cascade detector on a BGR frame.

    Returns the rectangles reported by ``face_classifier.detectMultiScale``.
    """
    # The cascade operates on intensity only, so drop the colour channels.
    grayscale = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    return face_classifier.detectMultiScale(
        grayscale,
        scaleFactor=1.1,
        minNeighbors=10,
        minSize=(30, 30)
    )
def draw_box(frame, faces):
    """Draw a red rectangle around each detected face, modifying ``frame`` in place.

    Bug fix: detectMultiScale rectangles are (x, y, w, h), so the
    bottom-right corner is (x + w, y + h); the original used (x + h, y + w),
    mis-drawing every non-square detection box.
    """
    for (x, y, w, h) in faces:
        cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 0, 255), 2)
    return '{} rectangles drawn on frame'.format(len(faces))
def count_people(faces):
    """Number of face detections in the current frame."""
    return len(faces)
def move_back_to_centre(faces, frame):
    """Offset (dx, dy) from the frame centre to the first face's centre.

    Rectangles come from detectMultiScale as (x, y, w, h).  The original
    unpacked them as ``x, y, h, w`` and then compensated by pairing mid_x
    with ``h`` -- numerically identical, just confusing; the names are now
    straightened out.
    """
    frame_h, frame_w, _ = frame.shape
    x, y, w, h = faces[0]
    face_cx = x + (w // 2)
    face_cy = y + (h // 2)
    return face_cx - (frame_w // 2), face_cy - (frame_h // 2)
def map(value, fromLow, fromHigh, toLow, toHigh):
    """Arduino-style linear interpolation of ``value`` from one range to another.

    NOTE(review): shadows the builtin ``map``; kept under this name because
    the second ``servo_move`` below calls it.
    """
    fraction = (value - fromLow) / (fromHigh - fromLow)
    return (toHigh - toLow) * fraction + toLow
def servo_setup():
    """Initialise the single-servo demo: physical pin 12, 50 Hz PWM, 0% duty.

    Publishes the PWM handle as module-level ``p`` for servo_move/destroy.
    """
    servoPin = 12
    global p
    GPIO.setmode(GPIO.BOARD)  # Numbers GPIOs by physical location
    GPIO.setup(servoPin, GPIO.OUT)  # Set servoPin's mode to output
    GPIO.output(servoPin, GPIO.LOW)  # Set servoPin to low
    p = GPIO.PWM(servoPin, 50)  # set frequency to 50Hz
    p.start(0)  # Duty Cycle = 0
def servo_move(
        angle):  # make the servo rotate to specific angle (0-180 degrees)
    """Clamp ``angle`` to [0, 180] and drive the module-level PWM channel ``p``.

    NOTE(review): ``SERVO_MIN_DUTY`` and ``SERVO_MAX_DUTY`` are never defined
    in this file, so this raises NameError when called -- presumably 2.5 and
    12.5 (the defaults used elsewhere) were intended; confirm.  Also shadows
    the earlier two-argument ``servo_move``.
    """
    if (angle < 0):
        angle = 0
    elif (angle > 180):
        angle = 180
    p.ChangeDutyCycle(map(angle, 0, 180, SERVO_MIN_DUTY,
                          SERVO_MAX_DUTY))  # map the angle to duty cycle and output it
def loop():
    """Sweep the servo 0 -> 180 -> 0 degrees forever in 1-degree steps.

    Bug fix: the original called ``servoWrite``, which is not defined
    anywhere in this file; ``servo_move`` (above) is the function that
    writes an angle to the servo, so every call raised NameError.
    """
    while True:
        for angle in range(0, 181, 1):  # sweep 0 -> 180 degrees
            servo_move(angle)
            time.sleep(0.001)
        time.sleep(0.5)
        for angle in range(180, -1, -1):  # sweep 180 -> 0 degrees
            servo_move(angle)
            time.sleep(0.001)
        time.sleep(0.5)
def destroy(video_stream):
    """Stop PWM, close OpenCV windows, release GPIO and close the camera stream."""
    p.stop()
    # STOP CAMERA AND CV2
    cv2.destroyAllWindows()
    GPIO.cleanup()
    video_stream.close()
def setup_cam():
    """Capture frames, detect faces, and display the annotated mirror feed.

    Bug fix: the original ``def setup_cam()`` line was missing its colon,
    which made the whole module a SyntaxError.

    NOTE(review): this mixes two capture APIs -- a picamera
    ``capture_continuous`` loop nested inside a ``cv2.VideoCapture`` read
    loop -- and the outer ``frame`` from ``video_stream.read()`` is
    immediately shadowed by the picamera frame.  The local
    ``face_classifier`` also shadows the global that ``locate_faces`` reads.
    Likely only one capture path is wanted; confirm before use.
    """
    # setting up the camera feed
    camera = PiCamera()
    # TODO: look up the cost payoffs for different resolutions and framerates.
    camera.resolution = (640, 480)
    camera.framerate = 32
    rawCapture = PiRGBArray(camera, size=(640, 480))
    # small delay so the camera can warm up before the first capture
    time.sleep(0.1)
    # TODO: find the correct locations of the cascade filters on the rpi
    face_cascade_path = 'cascade_filters/haarcascade_frontalface_default.xml'
    face_classifier = cv2.CascadeClassifier(face_cascade_path)
    video_stream = cv2.VideoCapture(0)
    while True:
        # return_code would signal end-of-stream for file input; unused for live feeds
        return_code, frame = video_stream.read()
        # run cascading algorithm and search for face positional co-ordinates
        for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
            # grab the raw NumPy array representing the image
            image = frame.array
            faces = locate_faces(image)
            # then we want to move the servos based on this TODO: how do servos move?
            draw_box(image, faces)
            # flip horizontally so the display behaves like a mirror
            cv2.imshow('LIVE FEED', cv2.flip(image, 1))
            key = cv2.waitKey(1)
            if len(faces) == 1:
                move_x, move_y = move_back_to_centre(faces, image)
                print(move_x, move_y)
            if key & 0xFF == 27:  # 27 == Esc key
                break
if __name__ == '__main__':  # Program start from here
    print('STARTING PROGRAM...')
    setup()
    print('SETUP COMPLETE...')
    try:
        loop()
    except KeyboardInterrupt:  # On Ctrl+C, release hardware cleanly.
        # NOTE(review): the later destroy() definition (which shadows the
        # zero-argument one) requires a video_stream argument, so this call
        # raises TypeError -- confirm which destroy() is intended.
        destroy()
|
24,705 | a4abddd1386a856eb477dc2411de55e6573e84c9 | """
A module for CI String Addressing
"""
from scipy.special import comb
import numpy as np
import re
def addressing_array(ne, no):
    """Generate and return the addressing array for a CI string graph.

    Bug fixes vs. the original: the docstring had the two argument
    descriptions swapped, and ``np.int`` (deprecated NumPy 1.20, removed in
    1.24) is replaced by the builtin ``int``, which selects the same default
    integer dtype.

    Args:
        ne: int, number of electrons.
        no: int, number of orbitals (no >= ne).
    Returns:
        Z: numpy 2d int array with shape (ne, no), the addressing array,
        or None when ne == 0.
    """
    assert no >= ne and ne >= 0
    if ne == 0:
        return None
    Z = np.zeros(shape=(ne, no), dtype=int)
    for k in range(1, ne):
        for l in range(k, no - ne + k + 1):
            for m in range(no - l + 1, no - k + 1):
                # equivalent to binom(m, ne-k) - binom(m-1, ne-k-1)
                Z[k - 1][l - 1] += comb(m - 1, ne - k)
    # Last electron row: vertex weights are simple offsets.
    for l in range(ne, no + 1):
        Z[ne - 1][l - 1] = l - ne
    return Z
def addressing_single_graph(config, Z):
    """Compute the 1-based address of occupation string ``config``.

    ``config`` is a '0'/'1' string over all orbitals of a single CAS-like
    graph (every combination of electrons over orbitals) and ``Z`` is that
    graph's addressing array.
    """
    assert isinstance(config, str)
    assert re.search(r'[^01]', config) is None
    assert len(config) == Z.shape[1]
    address = 1
    electrons_seen = 0
    for orbital, bit in enumerate(config):
        if bit == '1':
            electrons_seen += 1
            address += Z[electrons_seen - 1][orbital]
    return address
def de_addressing_array(Z):
    """Generate and return the de-addressing array for addressing array ``Z``.

    Row i accumulates a left-rolled copy of row i+1 plus row i of ``Z``;
    the result is used to decide orbital occupation during de-addressing.
    Bug fix: ``np.int`` (removed in NumPy 1.24) replaced by the builtin
    ``int``, which selects the same default integer dtype.
    """
    assert len(Z.shape) == 2
    if Z.shape[0] == 1:
        return Z.copy()
    Zd = np.zeros(Z.shape, dtype=int)
    Zd[-1] = Z[-1]
    # Accumulate from the bottom row upward.
    for i in range(-2, -Z.shape[0] - 1, -1):
        Zd[i] = np.roll(Zd[i + 1], -1) + Z[i]
    return Zd
def de_addressing_single_graph(addr, Z, Zd=None):
    """Invert addressing: recover the '0'/'1' occupation string for ``addr``.

    Walks electrons first to last; for each, scans orbital positions from
    high index to low and places a '1' at the first position whose
    de-addressing weight fits within the remaining address.

    Args:
        addr: 1-based address (as produced by addressing_single_graph).
        Z: addressing array, shape (ne, no).
        Zd: optional precomputed de-addressing array for Z.
    Returns:
        Occupation string of length Z.shape[1].
    """
    assert isinstance(addr, int) or isinstance(addr, np.int64)
    assert addr > 0
    if Zd is None:
        Zd = de_addressing_array(Z)
    else:
        assert Z.shape == Zd.shape
    config = ['0'] * Z.shape[1]
    # Candidate-orbital window for the current electron,
    # scanned from high index to low ("great to small").
    io_search = [Z.shape[1] - Z.shape[0], -1]
    addr_p = addr - 1  # remaining 0-based address still to be consumed
    for ie in range(Z.shape[0]):
        for io in range(*io_search, -1):
            if addr_p >= Zd[ie][io]:
                config[io] = '1'
                addr_p -= Z[ie][io]
                # Narrow the window: the next electron sits below this orbital.
                io_search[0] += 1
                io_search[1] = io
                break
    return "".join(config)
|
24,706 | b1fb0f41a43379e8c646ba57392eb4e0e2088320 | from setuptools import setup
# Packaging metadata for the dppPackage distribution; installs the
# ``package`` package.  Build/install with ``pip install .``.
setup(
    name='dppPackage',
    version='0.1',
    packages=['package'],
    license='MIT',
    description='Moja biblioteka',  # Polish: "My library"
    #long_description=open('README.txt').read(),
    url='https://git.e-science.pl/mharazin238652/ShoppingListPythonLibrary.git',
    author='Mateusz Harazin',
    author_email='mharazin238652@e-science.pl'
)
24,707 | f39083c815492b8f522254857e81ff6d4902b950 | import nltk
from nltk.stem.lancaster import LancasterStemmer
stemmer = LancasterStemmer()  # Lancaster: an aggressive English stemmer
import numpy
import tflearn
import tensorflow
import random
import json
# Load the chatbot intent definitions shipped next to this script and
# echo them as a quick sanity check.
with open("intents.json") as file:
    data = json.load(file)
print(data["intents"])
24,708 | e1c6993246d30b83a47a0c87792d996dd6e64cfc | import numpy as np
from base_type import FileName
from cpath import pjoin, output_path
from data_generator.tokenizer_wo_tf import get_tokenizer
from misc.show_checkpoint_vars import load_checkpoint_vars
def compare_before_after():
    """Diff BERT checkpoint variables before vs. after alt-embedding training.

    Loads model.ckpt-0 and model.ckpt-10000 from the alt_emb_heavy_metal_D
    run, prints every shared variable whose values moved, then reports how
    far the alternative word embeddings drifted for the target tokens
    ("heavy metal") versus a few arbitrary token ids.
    """
    tokenizer = get_tokenizer()
    # Token ids of the phrase whose alt embeddings were being trained.
    ids = tokenizer.convert_tokens_to_ids(tokenizer.tokenize("heavy metal"))
    dir_name = pjoin(pjoin(output_path, FileName("model")), FileName("alt_emb_heavy_metal_D"))
    before = pjoin(dir_name, FileName("model.ckpt-0"))
    after = pjoin(dir_name, FileName("model.ckpt-10000"))
    v1_d = load_checkpoint_vars(before)
    v2_d = load_checkpoint_vars(after)
    # Report every shared variable whose element sum changed noticeably.
    for key in v1_d :
        if key in v2_d:
            s = np.sum(v1_d[key] - v2_d[key])
            if np.abs(s) > 0.01:
                print(key, s)
    ori_emb = v2_d['bert/embeddings/word_embeddings']
    alt_emb_before = v1_d['bert/embeddings/word_embeddings_alt']
    alt_emb_after = v2_d['bert/embeddings/word_embeddings_alt']
    def show_diff_from_ori(token_id):
        # L1 distance between original and trained alternative embedding.
        diff = np.sum(np.abs(ori_emb[token_id] - alt_emb_after[token_id]))
        print(token_id, diff)
    def show_diff_from_step0(token_id):
        # L1 drift of the alternative embedding during training.
        # NOTE(review): defined but never called; the final loop inlines this.
        diff = np.sum(np.abs(alt_emb_before[token_id] - alt_emb_after[token_id]))
        print(token_id, diff)
    print("Diff against original embedding")
    print("Target words")
    for token_id in ids:
        show_diff_from_ori(token_id)
    print("Random words")
    for token_id in [321, 598, 5854]:
        show_diff_from_ori(token_id)
    print("Diff against step0 random init embedding")
    print("Target words")
    # Scan the whole vocab for any embedding that drifted from its init.
    for token_id in range(0, 30000):
        diff = np.sum(np.abs(alt_emb_before[token_id] - alt_emb_after[token_id]))
        if diff > 0.001:
            print(token_id, diff)
if __name__ == "__main__":
    # Standalone analysis: compare the two checkpoints and print the diffs.
    compare_before_after()
24,709 | 4f167aecc4c678119c6c413a49d421303a035b51 | flag = 1
# ``flag`` (defined above) doubles as the loop guard and the 1-based pizza
# counter used in the output messages.
while flag:
    try:
        r, w, l = map(int, input().split(' '))
        if r == 0:
            # A radius of 0 marks the end of input.
            flag = 0
            break
        # The pizza "fits" when its diameter squared (2r)^2 = 4r^2 is at
        # least the table's squared diagonal w^2 + l^2.
        if 4 * r ** 2 >= w ** 2 + l ** 2:
            print('Pizza {0} fits on the table.'.format(flag))
        else:
            print('Pizza {0} does not fit on the table.'.format(flag))
        flag += 1
    except EOFError:
        # Bug fix: the original bare ``except`` also caught EOFError, so once
        # input ran out the loop printed 'error' forever instead of stopping.
        break
    except ValueError:
        # Malformed line (non-integer or wrong field count).
        print('error')
|
24,710 | 04146cbef8c2cdd5457e55d879ab05d005fde6eb | import sys
# Shunting-yard evaluator state: both stacks push/pop at the back (top).
operator_stack = []
operand_stack = []

def push_operand(v):
    """Push a value onto the operand stack."""
    operand_stack.append(v)

def push_operator(v):
    """Push an operator token onto the operator stack."""
    operator_stack.append(v)

def peek_operator():
    """Return the top operator without removing it, or None when empty."""
    return operator_stack[-1] if operator_stack else None

def pop_operand():
    """Pop and return the top operand."""
    return operand_stack.pop()

def pop_operator():
    """Pop and return the top operator token."""
    return operator_stack.pop()

def op_plus():
    """Replace the top two operands with their sum."""
    push_operand(pop_operand() + pop_operand())

def op_sub():
    """Replace the top two operands with (second-from-top - top)."""
    subtrahend = pop_operand()
    minuend = pop_operand()
    push_operand(minuend - subtrahend)

def op_mul():
    """Replace the top two operands with their product."""
    push_operand(pop_operand() * pop_operand())

def op_div():
    """Replace the top two operands with (second-from-top / top)."""
    divisor = pop_operand()
    dividend = pop_operand()
    push_operand(dividend / divisor)

# operator token -> precedence and implementation
ops = {
    '+': {'prio': 1, 'fn': op_plus},
    '-': {'prio': 1, 'fn': op_sub},
    '*': {'prio': 2, 'fn': op_mul},
    '/': {'prio': 2, 'fn': op_div},
}

def apply_op(op):
    """Execute the named operator against the operand stack."""
    ops[op]['fn']()

def funky():
    """Placeholder 'function call' body: adds the top two operands."""
    push_operand(pop_operand() + pop_operand())
def shunting(input):
    """Shunting-yard evaluation of the token sequence ``input``.

    Tokens: integers (pushed as operands), '(' ')' ',', the operators in
    ``ops``, and anything else is treated as a function name -- marked on
    the operator stack with '[' while ``skip`` consumes the token that
    follows it.  Results accumulate on the module-level operand stack.
    """
    skip = 0
    for x in input:
        if skip > 0:
            skip -= 1
            continue
        try:
            operand = int(x)
            push_operand(operand)
        except ValueError:
            if x == '(':
                # push paren
                push_operator(x)
            elif x == ')':
                # right paren. apply ops until we hit left paren
                while True:
                    op = pop_operator()
                    if op == '(':
                        break
                    elif op == '[':
                        # start of function call: run the call body
                        funky()
                        break
                    apply_op(op)
            elif x in ops:
                # apply any operators with higher (or equal) prio first
                prio = ops[x]['prio']
                while True:
                    op = peek_operator()
                    if op in ops and ops[op]['prio'] >= prio:
                        apply_op(pop_operator())
                    else:
                        break
                push_operator(x)
            elif x == ',':
                # argument separator: evaluate back to the call marker
                while True:
                    op = pop_operator()
                    if op == '[':
                        break
                    apply_op(op)
            else:
                # function call
                # push_operator(x)
                push_operator('[')
                skip = 1
        # print 'x: %r, operand: %r, operators: %r' % (
        #     x, operand_stack, operator_stack)
# Evaluate the expression given as the first CLI argument, drain any
# remaining operators, and print the final result.  The parenthesized
# print form behaves identically under Python 2 (single argument) and
# Python 3, replacing the Python-2-only ``print x`` statement.
input = sys.argv[1]
shunting(input)
while peek_operator():
    apply_op(pop_operator())
print(pop_operand())
|
24,711 | 0af8fd88df99a1f876b221fece823a4214f42971 | '''
QUESTION -
-----------
Given an array of size n, find the second maximum element in the list.
Sample Input - [9,2,3,6]
Output - 6
APPROACH 1 - With Sorting
--------------------------
1. Find the length of the list.
2. Sort the given list.
3. If the list contains the max number occurring more than once, start iterating through the list from the end.
i. if the value at current index is equal to the value at previous index:
- decrement the index pointer.
ii. if both values are different, then return list[i-1]
TIME COMPLEXITY - O(NlogN)
APPROACH 2 - Without Sorting (Double Traversal)
------------------------------------------------
1. Initialize max_num and second_max_num to lowest number possible.
2. Iterate through the list to find the maximum number.
3. Iterate through the list 2nd time:
i. check whether the number is not equal to the max number.
ii. If not, then compare with the initialized second_max_number and update the value with each iteration.
4. Return second_max_number.
TIME COMPLEXITY - O(N)
APPROACH 3 - Without Sorting (Single Traversal)
------------------------------------------------
1. Initialize max_num and second_max_num to lowest number possible.
2. Iterate through the list:
 i. if the element is greater than the max_num:
 - first update second_max_num to the old max_num
 - then update max_num = element
 ii. if the element is smaller than the max_num:
 - check whether the element is greater than the second_max_num and not equal to the max_num.
 - If yes, then update the second_max_num.
TIME COMPLEXITY - O(N)
'''
class Solution:
    """Three ways to find the second-largest distinct value in a list.

    All methods return None for empty or single-element input.  The
    parameter name ``list`` (which shadows the builtin) is kept for
    backward compatibility with existing callers.
    """

    def secondMaxValueWithSorting(self, list):
        """Sort, then scan backwards past duplicates of the maximum.

        O(N log N).  Returns None when all elements are equal (no second
        distinct value).  Note: sorts the caller's list in place.
        Cleanup vs. original: the no-op ``i = i-1`` reassignment of the
        loop variable is removed; behaviour is unchanged.
        """
        length = len(list)
        if not list or length == 1:  # edge cases: empty / single element
            return None
        list.sort()
        # Walk backwards past any run of duplicated maximum values.
        for i in range(length - 1, 0, -1):
            if list[i] != list[i - 1]:
                return list[i - 1]
        return None

    def secondMaxDoubleTraversal(self, list):
        """Two passes: find the max, then the best value strictly below it.

        O(N).  Returns float('-inf') when no second distinct value exists
        (matching the original behaviour).  Cleanup vs. original: the dead
        try/except around an int increment is removed.
        """
        length = len(list)
        if not list or length == 1:  # edge cases
            return None
        max_num = float('-inf')
        second_max_num = float('-inf')
        for num in list:  # pass 1: the maximum
            if num > max_num:
                max_num = num
        for num in list:  # pass 2: best value not equal to the maximum
            if num != max_num and num > second_max_num:
                second_max_num = num
        return second_max_num

    def secondMaxSingleTraversal(self, list):
        """Single pass tracking the two largest distinct values.  O(N)."""
        max_num = float('-inf')
        second_max_num = float('-inf')
        if not list or len(list) == 1:  # edge cases
            return None
        for num in list:
            if num > max_num:
                second_max_num = max_num  # old max demotes to runner-up
                max_num = num
            elif num > second_max_num and num != max_num:
                second_max_num = num
        return second_max_num
if __name__ == '__main__':
    # Quick manual checks covering duplicated maxima and single-element input.
    s = Solution()
    print(s.secondMaxValueWithSorting([9,9,-2,0,6]))
    print(s.secondMaxDoubleTraversal([1,-2,0,6,10,10]))
    print(s.secondMaxDoubleTraversal([1]))
    print(s.secondMaxSingleTraversal([1,-2,0,6,10,10]))
    print(s.secondMaxSingleTraversal([1]))
24,712 | 60607c50ec3a1c6915d3a5d4732b40cd3fb68561 | from collections import defaultdict
from Tool import *
from ImagePack import *
import pygame
from pygame.locals import *
# status
#   GROUND
#   JUMPED: persists only 1 frame and turns into AIR status.
#   AIR
#   LADDER: on ladder, rope, etc. doesn't fall.
#   CLIMB: climbing terrains on sides.
#   ACT
# ``enum`` comes from Tool (star import above); builds a name -> value enum.
statusEnum = enum('GROUND', 'JUMPED', 'AIR', 'LADDER', 'CLIMB', 'ACT')
# sprite: superclass
# sprite: superclass
class Sprite(object):
    """Base class for game sprites: position, speed, status flags and images."""

    def __init__(self, x=0, y=0, name='__dummy'):
        # set member variables
        self.initGenerals(x, y, name)
        self.initProperties()
        self.initDependencies()

    def initGenerals(self, x, y, name):
        """Initialise state shared by every sprite, independent of subclass."""
        self.xspeed = 0  # 0: not moving, 1: to right, -1: to left
        self.yspeed = 0
        self.collideTerrain = [None]*4  # per-side terrain collision slots
        self.status = statusEnum.AIR
        self.step = 0  # frame counter
        self.xadd = 0  # additional coordinate offset values
        self.yadd = 0
        # coordinate and mask(vulnerable area) size
        self.x = float(x)
        self.y = float(y)
        self.rect = Rect((x, y), (32, 32))
        self.xflip = False  # False: leftward, True: rightward
        # variables that have default values
        # those can be set on initProperties functions
        self.gravity = 0  # accelerating vertical speed by gravity
        self.jumpPower = 0
        self.name = name  # common prefix of image names
        self.imgName = []
        self.imgName.append('')
        self.imgList = defaultdict(list)

    def initProperties(self):
        """Hook for subclasses to override defaults (gravity, image names, ...)."""
        pass

    def initDependencies(self, prefix=''):
        """Resolve each image-name key to a loaded image from ImagePack."""
        self.prevRect = self.rect
        for key in self.imgName:
            if key == '':
                self.imgList[key] = ImagePack.img[prefix + self.name]
            else:
                self.imgList[key] = ImagePack.img[prefix + self.name + '_' + key]

    def setCoord(self, x, y):
        """Move the sprite's rect so its top-left corner is (x, y)."""
        self.rect.topleft = (x, y)

    def draw(self):
        """Draw the default ('' key) image at the sprite's rect."""
        ImagePack.draw(DrawOption(self.rect, self.imgList['']))

    def update(self):
        """Advance position by current speed plus one-frame offsets."""
        # sprite horizontal moving
        self.rect.x += self.xspeed + self.xadd
        # sprite vertical moving by gravity
        self.rect.y += self.yspeed + self.yadd

    def updatePreorder(self):
        """Per-frame reset performed before collision/game logic runs."""
        self.prevRect = self.rect.copy()
        self.collideTerrain = [None]*4
        self.xadd = self.yadd = 0

    def updatePostorder(self):
        """Per-frame bookkeeping after game logic."""
        self.step += 1
        # renew rect's coordinate (float to int) (f*ck... why Rect doesn't apply float?)
        # NOTE(review): the code the comment above describes appears to be
        # missing here or lives outside this excerpt -- confirm.
24,713 | 0f99440ad73027c7608332d41d90091b932747af | import json
import sys
import pytest
if sys.version_info >= (3, 7):
from importlib.resources import contents, read_text
else:
from importlib_resources import contents, read_text
def test_analysis_jobs_are_valid_json():
    """Every packaged analysis job file must parse as JSON."""
    analysis_pkg = 'cartography.data.jobs.analysis'
    json_jobs = [name for name in contents(analysis_pkg) if name.endswith('.json')]
    for job_name in json_jobs:
        blob = read_text(analysis_pkg, job_name)
        try:
            json.loads(blob)
        except Exception as e:
            pytest.fail(f"json.loads failed for analysis job '{job_name}' with exception: {e}")
def test_cleanup_jobs_are_valid_json():
    """Every packaged cleanup job file must parse as JSON.

    Bug fix: the filter used ``endswith('json')``, which is inconsistent
    with the analysis test above and also matches names like 'not-a-json';
    it now requires the '.json' extension.
    """
    for job_name in contents('cartography.data.jobs.cleanup'):
        if not job_name.endswith('.json'):
            continue
        blob = read_text('cartography.data.jobs.cleanup', job_name)
        try:
            json.loads(blob)
        except Exception as e:
            pytest.fail(f"json.loads failed for cleanup job '{job_name}' with exception: {e}")
|
24,714 | f6545364720d14171e06d6938bf9f35bf9174011 | # -*- coding: utf-8 -*-
"""Specification Logging Configuration"""
import os
import pathlib
import re
import shutil
import sys
from ocrd_utils import pushd_popd
from ocrd_utils.logging import (
initLogging,
getLogger
)
import pytest
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + '/../ocrd')
TEST_ROOT = pathlib.Path(os.path.dirname(os.path.abspath(__file__))).parent
@pytest.fixture(name="logging_conf")
def _fixture_loggin_conf(tmpdir):
path_logging_conf_orig = os.path.join(
str(TEST_ROOT), 'ocrd_utils', 'ocrd_logging.conf')
path_logging_conf_dest = os.path.join(str(tmpdir), 'ocrd_logging.conf')
shutil.copy(path_logging_conf_orig, path_logging_conf_dest)
return str(tmpdir)
def test_configured_dateformat(logging_conf, capsys):
"""Ensure example ocrd_logging.conf is valid and produces desired record format"""
# arrange
with pushd_popd(logging_conf):
initLogging()
test_logger = getLogger('')
# act
test_logger.info("test logger initialized")
log_info_output = capsys.readouterr().out
must_not_match = r"^\d{4}-\d{2}-\d{2}.*"
assert not re.match(must_not_match, log_info_output)
match_pattern = r"^\d{2}:\d{2}:\d{2}.*"
assert re.match(match_pattern, log_info_output)
def test_configured_tensorflow_logger_present(logging_conf, capsys):
"""Ensure example ocrd_logging.conf is valid and contains logger tensorflow"""
# arrange
os.chdir(logging_conf)
initLogging()
logger_under_test = getLogger('tensorflow')
# act info
logger_under_test.info("tensorflow logger initialized")
log_info_output = capsys.readouterr().out
assert not log_info_output
# act error
logger_under_test.error("tensorflow has error")
log_error_output = capsys.readouterr().out
assert log_error_output
def test_configured_shapely_logger_present(logging_conf, capsys):
"""Ensure example ocrd_logging.conf is valid and contains logger shapely.geos"""
# arrange
os.chdir(logging_conf)
initLogging()
logger_under_test = getLogger('shapely.geos')
# act info
logger_under_test.info("shapely.geos logger initialized")
log_info_output = capsys.readouterr().out
assert not log_info_output
# act error
logger_under_test.error("shapely alert")
log_error_output = capsys.readouterr().out
assert log_error_output
|
24,715 | edd1d2e761454d33dbaa0c882d4cf58c815bb00d | #!/usr/bin/env python
import os
import sys
import tempfile
import shutil
import datetime
import h5py as H5
from numpy import *
from optparse import OptionParser
from dmtk.io.cmph5.CmpH5SortingTools import *
from dmtk.io.cmph5 import CmpH5Factory
__VERSION__ = ".64"
def __pathExists(h5, path):
    """Return True when ``path`` resolves inside the open HDF5 handle ``h5``.

    Fix: the original used the Python-2-only ``except Exception, E`` form
    and never used the bound exception; ``except Exception:`` is valid on
    both Python 2 and 3 and behaves identically here.
    """
    try:
        h5[path]
        return True
    except Exception:
        return False
def __breakInBlocks(size, nProc):
    """Split ``size`` items into ``nProc`` contiguous (start, end) ranges.

    The first block absorbs the remainder so the ranges exactly cover
    [0, size).  Fix: uses floor division ``//`` so the arithmetic matches
    the original Python 2 integer ``/`` when run under Python 3 (where
    ``/`` would yield floats and corrupt the ranges).
    """
    bSize = [size // nProc] * nProc
    bSize[0] = bSize[0] + (size - sum(bSize))
    starts = concatenate(([0], cumsum(bSize)[:-1]))
    ends = cumsum(bSize)
    return zip(starts, ends)
def __repackDataArrays(cH5, format, log):
    """
    Flatten read groups according to an indexed cmp.h5 file.

    For every reference group a new 'Sorted' read group is created, the
    pulse datasets of all its reads are concatenated into it in sorted
    order, and the alignment-index offsets are rewritten to point into the
    new arrays.  The original read groups are deleted at the end.
    (Python 2 code: xrange, dict.values() indexing, H5.new_vlen.)
    """
    SORTED = "Sorted"
    alnGroups = [x for x in cH5[format.ALN_GROUP_PATH]]
    pulseDatasets = [cH5[x].keys() for x in alnGroups]
    uPulseDatasets = reduce(lambda x,y: set.union(set(x), set(y)), pulseDatasets)
    # All read groups must agree on their datasets, otherwise the
    # concatenation below would mix incompatible columns.
    if (not all(map(lambda x : set(x) == uPulseDatasets, pulseDatasets))):
        log.error("All alignment groups need to have the same datasets.")
        raise Exception("Can only repack cmp.h5 files with consistent datasets across alignment groups.")
    readGroupPaths = dict(zip(cH5[format.ALN_GROUP_ID], [ x for x in cH5[format.ALN_GROUP_PATH]]))
    refGroupPaths = dict(zip(cH5[format.REF_GROUP_ID], [ x for x in cH5[format.REF_GROUP_PATH]]))
    uPDAndType = dict(zip(uPulseDatasets, [ cH5[readGroupPaths.values()[0]][z].dtype for z in uPulseDatasets ]))
    def getDataset(read, ds):
        # Pulse dataset ``ds`` of the group this read currently lives in.
        return(cH5[readGroupPaths[read[format.ALN_ID]]][ds])
    def getRefGroup(gID):
        return(cH5[refGroupPaths[gID]])
    offsets = cH5[format.REF_OFFSET_TABLE].value
    sAI = cH5[format.ALN_INDEX]
    orderedRefPaths = [""] * offsets.shape[0]
    for row in xrange(0, offsets.shape[0]):
        log.msg("Processing reference group: %d of %d" % (row + 1, offsets.shape[0]))
        orderedRefPaths[row] = "/".join([getRefGroup(offsets[row, 0]).name, SORTED])
        fRow = int(offsets[row, 1])
        lRow = int(offsets[row, 2])
        ## Don't really have to do anything if there are no references
        ## which aligned.
        if (lRow == fRow):
            continue
        ## Make a new Group.
        newGroup = getRefGroup(offsets[row, 0]).create_group(SORTED)
        log.msg("Created new read group: %s" % SORTED)
        ## Go through each read and write it into the new vector.
        reads = sAI[fRow:lRow, ]
        totalSizes = reads[:, format.OFFSET_END] - reads[:, format.OFFSET_BEGIN]
        for pulseDataset in uPulseDatasets:
            log.msg("Processing dataset: %s" % pulseDataset)
            # One flat array holding all reads for this dataset, with a
            # one-element gap between consecutive reads.
            newDS = array([0]*sum(1 + totalSizes), dtype = uPDAndType[pulseDataset])
            currentStart = 0
            for readIdx in xrange(0, reads.shape[0]):
                read = reads[readIdx, ]
                gStart, gEnd = currentStart, currentStart + totalSizes[readIdx]
                newDS[gStart:gEnd] = getDataset(read, pulseDataset)[read[format.OFFSET_BEGIN]:read[format.OFFSET_END]]
                currentStart = gEnd + 1
            newGroup.create_dataset(pulseDataset, data = newDS, dtype = uPDAndType[pulseDataset], maxshape = None)
        ## After we've moved all of the data we can move the offsets.
        currentStart = 0
        for i in xrange(0, reads.shape[0]):
            reads[i, format.OFFSET_BEGIN] = currentStart
            reads[i, format.OFFSET_END] = currentStart + totalSizes[i]
            reads[i, format.ALN_ID] = row
            currentStart = reads[i, format.OFFSET_END] + 1
        sAI[fRow:lRow,] = reads
    ## Now remake the AlnGroup Dataset.
    log.msg("Writing new AlnGroupPath values.")
    del(cH5[format.ALN_GROUP_PATH])
    del(cH5[format.ALN_GROUP_ID])
    cH5.create_dataset(format.ALN_GROUP_PATH, data = orderedRefPaths,
                       dtype = H5.new_vlen(str), maxshape = None)
    cH5.create_dataset(format.ALN_GROUP_ID, data = range(0, offsets.shape[0]),
                       dtype = "int32", maxshape = None)
    # The old per-movie read groups are now redundant; drop them.
    for rg in readGroupPaths.values():
        del(cH5[rg])
def sortCmpH5(inFile, outFile, deep, jobs, log):
    """
    This routine takes a cmp.h5 file and sorts the AlignmentIndex
    table adding two additional columns for fast access. In addition,
    a new top-level attribute is added to the indicate that the file
    has been sorted, as well as a table to indicate the blocks of the
    alignment index associated with each reference group.

    Returns True on success, False otherwise.  ``jobs`` is accepted but
    never used in this function.  (Python 2 code: ``except Exception, E``,
    xrange, dataset ``.value`` access.)
    """
    success = False;
    if (outFile):
        log.msg("Copying: " + inFile + " to " + outFile)
        shutil.copyfile(inFile, outFile)
        inFile = outFile
    try:
        cH5 = H5.File(inFile, 'a')
        format = CmpH5Format(cH5)
        log.msg("Read cmp.h5 with version %s" % format.VERSION)
        aI = cH5[format.ALN_INDEX]
        originalAttrs = aI.attrs.items()
        ## empty is a special case. In general, h5py handles
        ## zero-length slices poorly and therefore I don't want to
        ## make them. Therefore, I maintain the 'empty' variable to
        ## indicate that. This makes some code less pleasing, e.g.,
        ## computing the reference index data structure.
        if (aI.shape[0] == 0):
            log.warn("Warning: %s empty!" % inFile)
            success = True;
            return True;
        # sort the AlignmentIndex (stable lexsort: REF_ID is the primary key,
        # then TARGET_START, then TARGET_END)
        aord = lexsort([aI[:,format.TARGET_END], aI[:,format.TARGET_START],
                        aI[:,format.REF_ID]])
        assert(len(aord) == aI.shape[0])
        sAI = aI.value[aord,:]
        del(aI)
        log.msg("Sorted AlignmentIndex.")
        # construct reference offset datastructure.
        refSeqIDs = cH5[format.REF_GROUP_ID]
        offsets = computeRefIndexTable(refSeqIDs.value, sAI[:,format.REF_ID])
        log.msg("Constructed offset datastructure.")
        # fill overlap and back columns.
        for row in range(0, offsets.shape[0]):
            fRow = int(offsets[row, 1])
            lRow = int(offsets[row, 2])
            if (lRow - fRow <= 0):
                continue
            sAI[fRow:lRow, (format.N_BACK, format.N_OVERLAP)] = \
                computeIndicesDP(sAI[fRow:lRow, format.TARGET_START],
                                 sAI[fRow:lRow, format.TARGET_END])
        log.msg("Constructed indices.")
        # modify the cmp.h5 file.
        # We want to keep the chunking info on the dataset.
        del(cH5[format.ALN_INDEX])
        cH5.create_dataset(format.ALN_INDEX, data = sAI, dtype = h5t.NATIVE_UINT32,
                           maxshape = (None, None))
        ## If the file is already sorted there's no harm in resorting.
        if (__pathExists(cH5, format.REF_OFFSET_TABLE)):
            log.msg(format.REF_OFFSET_TABLE + " already exists, deleting.")
            del(cH5[format.REF_OFFSET_TABLE])
        ## create the offset datastructure in the file.
        cH5.create_dataset(format.REF_OFFSET_TABLE, data = offsets,
                           dtype = h5t.NATIVE_UINT32, maxshape = (None, None))
        ## add the index attribute.
        cH5['/'].attrs.create("Index", ['REF_ID', 'TARGET_START', 'TARGET_END'])
        ## fixup attributes.
        for oA in originalAttrs:
            cH5[format.ALN_INDEX].attrs.create(oA[0], oA[1])
        ## deep repacking.
        if (deep):
            log.msg("Repacking alignment arrays.")
            __repackDataArrays(cH5, format, log)
        ## memory free.
        del sAI
        ## manage any extra datasets.
        for extraTable in format.extraTables:
            if (__pathExists(cH5, extraTable)):
                log.msg("Sorting table: %s" % extraTable)
                eTable = cH5[extraTable].value
                if (len(eTable.shape) == 1):
                    eTable = eTable[aord]
                else:
                    eTable = eTable[aord,:]
                ## save attributes, if any for re-writing below.
                originalAttrs = cH5[extraTable].attrs.items()
                del(cH5[extraTable])
                cH5.create_dataset(extraTable, data = eTable,
                                   maxshape = tuple([None for x in eTable.shape]))
                for oA in originalAttrs:
                    cH5[extraTable].attrs.create(oA[0], oA[1])
        ## if you make it this far, set the flag.
        success = True
    except Exception, E:
        log.error(E)
        # NOTE(review): on failure the (possibly partial) outFile is left on
        # disk untouched -- confirm whether it should be removed here.
        if (os.path.exists(outFile)):
            pass
    finally:
        try:
            cH5.close()
        except:
            pass
        finally:
            return(success)
class Loggy:
    """Tiny stderr logger with numeric verbosity.

    Level 0 shows only errors, 1 adds warnings, 2 adds info messages.
    """

    def __init__(self, level):
        self.level = level

    def write(self, msg, level):
        """Emit ``msg`` to stderr when the logger is at least this verbose."""
        if self.level >= level:
            sys.stderr.write(str(msg) + "\n")

    def error(self, msg):
        self.write(msg, 0)

    def warn(self, msg):
        self.write(msg, 1)

    def msg(self, msg):
        self.write(msg, 2)
def main():
    """Command-line entry point: parse options and sort a cmp.h5 file.

    When no output file is given the sorting happens into a temporary
    file (under --tmpDir) which is copied back over the input only on
    success, so the original is never left half-written.
    """
    usage = \
""" %prog [options] input-file [output-file]
Sort cmp.h5 files. If output-file is unspecified the input-file is
overwritten. If there are a number of reference groups then the
indexing processing can occur in parallel.
version: """ + __VERSION__
    parser = OptionParser(usage)
    # NOTE(review): -s uses store_false with default False, so passing -s
    # leaves 'silent' False and cannot actually silence output -- confirm.
    parser.add_option("-s", "--silent", dest = "silent", action = "store_false", \
        default = False, help = "print nothing.")
    parser.add_option("-v", "--verbose", dest = "verbose", action = "store_true", \
        default = False, help = "print debugging information")
    parser.add_option("-d", "--deep", dest = "deep", action = "store_true", default = False, \
        help = "whether a deep sorting should be conducted, i.e. sort the AlignmentArrays")
    parser.add_option("-j", "--jobs", dest = "jobs", default = 1, \
        help = "Number of child processes to launch. This only speeds up processing if there are multiple references groups. Not yet Implemented.")
    parser.add_option("--tmpDir", dest = "tmpdir", default = "/tmp", \
        help = "Temporary directory to use when sorting in-place.")
    (options, args) = parser.parse_args()
    if (not len(args)):
        parser.print_help()
        exit(1)
    infile = args[0]
    ## we do this in a temporary file because it is safer.
    if (len(args) < 2):
        ofile = tempfile.NamedTemporaryFile(dir=options.tmpdir)
        outfile = ofile.name
    else:
        outfile = args[1]
    # Verbosity: 2 = debug, 1 = normal, 0 = silent.
    log = Loggy(2 if options.verbose else 1 if not options.silent else 0)
    success = sortCmpH5(infile, outfile, deep = options.deep, jobs = options.jobs, log = log)
    if (not success):
        log.error("Error during sorting. Exiting! Original file %s should still be intact." % infile)
        exit(1)
    else:
        ## add to the file log.
        cmpH5 = CmpH5Factory.factory.create(outfile, 'a')
        cmpH5.log("cmpH5Sort.py", __VERSION__, str(datetime.datetime.now()), ' '.join(sys.argv), "Sorting")
        cmpH5.close()
        # In-place mode: copy the sorted temp file back over the input.
        if (len(args) < 2):
            shutil.copyfile(outfile, infile)
            ofile.close()
        exit(0)
if __name__ == "__main__":
main()
|
24,716 | 7aba1a748260c3e1afa1a746cb97d22e9ed17065 | from __future__ import print_function
import clodius.db_tiles as cdt
import click.testing as clt
import clodius.cli.aggregate as cca
import h5py
import negspy.coordinates as nc
import os.path as op
import sys
sys.path.append("scripts")
testdir = op.realpath(op.dirname(__file__))
def test_clodius_aggregate_bedpe():
    """Aggregate a sample bedpe file and fetch 2D tiles from the result.

    NOTE(review): there are no assertions -- this only checks that
    aggregation and tile retrieval run without raising.
    """
    input_file = op.join(testdir, 'sample_data', 'isidro.bedpe')
    output_file = '/tmp/isidro.bed2ddb'
    # Call the aggregation implementation directly rather than through the
    # click CLI (the disabled runner invocation is kept below for reference).
    cca._bedpe(input_file, output_file, 'b37',
    importance_column=None,
    chromosome=None,
    max_per_tile=100,
    tile_size=1024,
    has_header=True)
    """
    runner = clt.CliRunner()
    result = runner.invoke(
    cca.bedpe,
    [input_file,
    '--output-file', output_file,
    '--importance-column', 'random',
    '--has-header',
    '--assembly', 'b37'])
    #print('output:', result.output, result)
    assert(result.exit_code == 0)
    """
    # Smoke-check tile retrieval at zoom level 0...
    entries = cdt.get_2d_tiles(output_file, 0, 0, 0)
    #print("entries:", entries)
    tileset_info = cdt.get_tileset_info(output_file)
    #print('tileset_info', tileset_info)
    # ...and at zoom level 1 with a 2x2 block of tiles.
    entries = cdt.get_2d_tiles(output_file, 1, 0, 0, numx=2, numy=2)
    #print("entries:", entries)
    tileset_info = cdt.get_tileset_info(output_file)
    #print('tileset_info', tileset_info)
|
24,717 | 21bfb33a4e7784ef65e374e7818d11a2dd354644 | """ Definition of chess piece class and its behavioural properties. """
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals
)
from collections import OrderedDict
from itertools import chain
from operator import attrgetter
from chessboard import ForbiddenCoordinates
from . import PY2
if PY2:
from itertools import izip_longest
else:
from itertools import zip_longest as izip_longest
class Piece(object):
    """ A generic piece.

    x: horizontal position of the piece.
    y: vertical position of the piece.
    """

    # Simple ASCII string identifying the kind of piece.
    label = None

    # Single unicode character used to represent the piece on a board.
    symbol = None

    # Integer uniquely identifying the type/kind of the piece. Used as a
    # shortcut to the class itself. Also serves as a ranking weight of the
    # territory coverage (see #5).
    uid = None

    # Cache territory occupied by pieces at a given position for a fixed board.
    # NOTE: deliberately class-level, so the cache is shared by all instances.
    territory_cache = {}

    def __init__(self, board, index):
        """ Place the piece on a board at the provided linear position. """
        self.board = board
        self.index = index
        # 2D coordinates are computed lazily (see the x/y properties).
        self._x, self._y = None, None

    def __repr__(self):
        """ Display all relevant object internals. """
        return (
            '<{}: uid={}; label={}, symbol={}; x={}, y={}; index={}>'.format(
                self.__class__.__name__,
                self.uid, self.label, self.symbol,
                self.x, self.y, self.index))

    def compute_coordinates(self):
        """ Compute 2D coordinates of the piece. """
        self._x, self._y = self.board.index_to_coordinates(self.index)

    @property
    def x(self):
        """ Return the piece's horizontal position.

        Property is used here so we only compute position once when needed.
        """
        if self._x is None:
            self.compute_coordinates()
        return self._x

    @property
    def y(self):
        """ Return the piece's vertical position.

        Property is used here so we only compute position once when needed.
        """
        if self._y is None:
            self.compute_coordinates()
        return self._y

    @property
    def bottom_distance(self):
        """ Number of squares separating the piece from board's bottom edge.
        """
        return self.board.height - 1 - self.y

    @property
    def right_distance(self):
        """ Number of squares separating the piece from board's right edge. """
        return self.board.length - 1 - self.x

    @property
    def top_distance(self):
        """ Number of squares separating the piece from board's top edge. """
        return self.y

    @property
    def left_distance(self):
        """ Number of squares separating the piece from board's left edge. """
        return self.x

    @property
    def horizontals(self):
        """ All horizontal squares from the piece's point of view.

        Returns a list of relative movements up to the board's bound.
        """
        # Pair every possible x-shift with a 0 y-shift: izip_longest pads the
        # empty second iterable with the fillvalue.
        horizontal_shifts = set(izip_longest(map(
            lambda i: i - self.x, range(self.board.length)), [], fillvalue=0))
        # Never suggest staying in place.
        horizontal_shifts.discard((0, 0))
        return horizontal_shifts

    @property
    def verticals(self):
        """ All vertical squares from the piece's point of view.

        Returns a list of relative movements up to the board's bound.
        """
        # Same trick as `horizontals`, with the roles of x and y swapped.
        vertical_shifts = set(izip_longest([], map(
            lambda i: i - self.y, range(self.board.height)), fillvalue=0))
        vertical_shifts.discard((0, 0))
        return vertical_shifts

    @property
    def diagonals(self):
        """ All diagonal squares from the piece's point of view.

        Returns a list of relative movements up to the board's bound.
        """
        # One generator per diagonal direction; each is bounded by the
        # distance to the two board edges it runs toward.
        left_top_shifts = map(lambda i: (-(i + 1), -(i + 1)), range(min(
            self.left_distance, self.top_distance)))
        left_bottom_shifts = map(lambda i: (-(i + 1), +(i + 1)), range(min(
            self.left_distance, self.bottom_distance)))
        right_top_shifts = map(lambda i: (+(i + 1), -(i + 1)), range(min(
            self.right_distance, self.top_distance)))
        right_bottom_shifts = map(lambda i: (+(i + 1), +(i + 1)), range(min(
            self.right_distance, self.bottom_distance)))
        return set(chain(
            left_top_shifts, left_bottom_shifts,
            right_top_shifts, right_bottom_shifts))

    @property
    def movements(self):
        """ Return list of relative movements allowed. """
        raise NotImplementedError

    @property
    def territory(self):
        """ Return the cached territory occupied by the piece. """
        # Key on everything that can change the reachable squares.
        cache_key = (
            self.board.length, self.board.height, self.uid, self.index)
        if cache_key not in self.territory_cache:
            vector = self.compute_territory()
            self.territory_cache[cache_key] = vector
        else:
            vector = self.territory_cache[cache_key]
        return vector

    def compute_territory(self):
        """ Compute territory reachable by the piece from its current position.

        Returns a list of boolean flags of squares indexed linearly, for which
        a True means the square is reachable.
        """
        # Initialize the square occupancy vector of the board.
        vector = self.board.new_vector()
        # Mark current position as reachable.
        vector[self.index] = True
        # List all places reachable by the piece from its current position.
        for x_shift, y_shift in self.movements:
            # Mark side positions as reachable if in the limit of the board.
            try:
                reachable_index = self.board.coordinates_to_index(
                    self.x, self.y, x_shift, y_shift)
            except ForbiddenCoordinates:
                # Off-board shift: skip it.
                continue
            vector[reachable_index] = True
        return vector
|
24,718 | a2ebc802a3d3a95b105d5d90bd52c9db1288f10a | import numpy as np
import matplotlib.pyplot as plt
import cv2
import fingerprint_enhancer
import math
from fingerprints_kernels import *
def enhance_and_binarize(img):
    """Enhance a grayscale fingerprint image and Otsu-threshold it to binary."""
    enhanced = fingerprint_enhancer.enhance_Fingerprint(img)
    _, binary = cv2.threshold(enhanced, 0, 255, cv2.THRESH_BINARY+cv2.THRESH_OTSU)
    return binary
def thin(input_img, kernels = thinning_kernels_w):
    """Iteratively hit-or-miss thin the image until it stops changing."""
    current = input_img.copy()
    previous = input_img.copy()
    while True:
        # One full pass with every thinning kernel.
        for kernel in kernels:
            current = current - cv2.morphologyEx(current, cv2.MORPH_HITMISS, kernel)
        # Converged: a whole pass changed nothing.
        if (previous == current).all():
            return current
        previous = current.copy()
def prune(img):
    """Remove small spurs, then restore pixels around genuine endpoints."""
    thinned = thin(img, pruning_kernels)
    # Locate the endpoints of the pruned skeleton.
    endpoints = np.zeros(img.shape, dtype=np.uint8)
    for kernel in pruning_kernels:
        hit = cv2.morphologyEx(thinned, cv2.MORPH_HITMISS, kernel)
        endpoints = cv2.bitwise_or(endpoints, hit)
    # Grow each endpoint back into the original image.
    ellipse = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
    restored = cv2.bitwise_and(img, cv2.dilate(endpoints, ellipse))
    # Union of the pruned skeleton and the restored endpoint pixels.
    return cv2.bitwise_or(thinned, restored)
def skeleton(input_img):
    """Morphological skeleton via iterated erosion/opening differences."""
    working = input_img.copy()
    accum = np.zeros(working.shape, dtype=np.uint8)
    cross = cv2.getStructuringElement(cv2.MORPH_CROSS, (3, 3))
    # Keep eroding until nothing is left; each pass contributes the pixels
    # that an opening of the eroded image would remove.
    while cv2.countNonZero(working):
        eroded = cv2.erode(working, cross)
        opened = cv2.morphologyEx(eroded, cv2.MORPH_OPEN, cross)
        accum = cv2.bitwise_or(eroded - opened, accum)
        working = eroded.copy()
    return accum
def preprocess(img):
    """Prune spurs, close small gaps, then open away isolated specks."""
    ellipse3 = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
    ellipse2 = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (2, 2))
    closed = cv2.morphologyEx(prune(img), cv2.MORPH_CLOSE, ellipse3)
    return cv2.morphologyEx(closed, cv2.MORPH_OPEN, ellipse2)
def extract_ridge(input_img, kernel):
    """OR together the hit-or-miss response of every structuring element."""
    detected = np.zeros(input_img.shape, dtype=np.uint8)
    for element in kernel:
        response = cv2.morphologyEx(input_img, cv2.MORPH_HITMISS, element)
        detected = cv2.bitwise_or(detected, response)
    return detected
def manhattan_distance(x, y):
    """L1 (taxicab) distance between two 2-D points given as pairs."""
    ax, ay = x
    bx, by = y
    return abs(ax - bx) + abs(ay - by)
def euclidean_distance(x, y):
    """L2 (Euclidean) distance between two 2-D points given as pairs.

    Uses math.hypot, which is the idiomatic form and is more robust
    against intermediate overflow/underflow than sqrt(dx**2 + dy**2).
    """
    (x1, x2) = x
    (y1, y2) = y
    return math.hypot(x1 - y1, x2 - y2)
def get_minutiae_list(minutiae_img):
    """Return the (row, col) coordinates of every nonzero pixel.

    Vectorized with numpy.argwhere instead of a Python double loop; the
    indices come back in the same row-major order as the original scan
    and are converted to plain int tuples.

    :param minutiae_img: 2-D array where nonzero pixels mark minutiae
    :return: list of (row, col) tuples
    """
    return [(int(i), int(j)) for i, j in np.argwhere(minutiae_img)]
def postprocessing_minutiae_list_same_type(minutiae_list, tresh = 8, dist = manhattan_distance):
    """Drop minutiae of one type that lie closer than `tresh` to another.

    Both members of every close pair are removed from the returned list.
    """
    kept = minutiae_list.copy()
    for first in minutiae_list:
        for second in minutiae_list:
            if first == second or dist(first, second) >= tresh:
                continue
            if first in kept:
                kept.remove(first)
            if second in kept:
                kept.remove(second)
    return kept
def postprocessing_minutiae_list_diff_type(minutiaeList1, minutiaeList2, tresh = 8, dist = manhattan_distance):
    """Merge two minutiae lists, dropping cross-type pairs closer than `tresh`."""
    merged = minutiaeList1.copy() + minutiaeList2
    for a in minutiaeList1:
        for b in minutiaeList2:
            if dist(a, b) >= tresh:
                continue
            if a in merged:
                merged.remove(a)
            if b in merged:
                merged.remove(b)
    return merged
def postprocessing(terminations_img, bifurcations_img, tresh1 = 4, tresh2 = 4, tresh3 = 4, dist = manhattan_distance):
    """Filter spurious minutiae and merge the two types into one result.

    Same-type minutiae closer than tresh1 (terminations) / tresh2
    (bifurcations) are dropped, then termination/bifurcation pairs
    closer than tresh3 are dropped.

    :param terminations_img: binary image of ridge-ending minutiae
    :param bifurcations_img: binary image of ridge-bifurcation minutiae
    :return: (binary image with surviving minutiae set to 1, list of (row, col))
    """
    terminations_list = get_minutiae_list(terminations_img)
    bifurcations_list = get_minutiae_list(bifurcations_img)
    terminations_list = postprocessing_minutiae_list_same_type(terminations_list, tresh1, dist)
    bifurcations_list = postprocessing_minutiae_list_same_type(bifurcations_list, tresh2, dist)
    result_list = postprocessing_minutiae_list_diff_type(terminations_list, bifurcations_list, tresh3, dist)
    iMax, jMax = terminations_img.shape
    # Rasterize the surviving minutiae back into a binary image.
    result_img = np.zeros((iMax, jMax), dtype=np.uint8)
    for (i,j) in result_list:
        result_img[i][j] = 1
    return result_img, result_list
if __name__ == "__main__":
    # Demo pipeline on one sample fingerprint image.
    img = cv2.imread('DB1_B/103_2.tif', cv2.IMREAD_GRAYSCALE)
    enhanced_and_binarized_img = enhance_and_binarize(img)
    preprocessed_img = preprocess(enhanced_and_binarized_img)
    thinned_img = thin(preprocessed_img)
    # Detect the two minutiae types on the thinned skeleton.
    terminations = extract_ridge(thinned_img, ridge_terminations_kernel)
    bifurcations = extract_ridge(thinned_img, ridge_bifurcations_kernel)
    fin_img, fin_list = postprocessing(terminations, bifurcations)
    # Draw each surviving minutia as a circle; cv2 takes (x, y) = (col, row).
    show_minutiae = enhanced_and_binarized_img.copy()
    show_minutiae = cv2.cvtColor(show_minutiae, cv2.COLOR_GRAY2BGR)
    for (i,j) in fin_list:
        cv2.circle(show_minutiae,(j,i), 4, (255,0,0), cv2.LINE_4)
    # Show every intermediate stage until a key is pressed.
    cv2.imshow('raw image', img)
    cv2.imshow('enhanced_and_binarized_img', enhanced_and_binarized_img)
    cv2.imshow('preprocessed_img', preprocessed_img)
    cv2.imshow('thinned_img', thinned_img)
    cv2.imshow('result', fin_img)
    cv2.imshow('terminations', terminations)
    cv2.imshow('bifurcations', bifurcations)
    cv2.imshow('result_circled', show_minutiae)
    cv2.waitKey(0)
    cv2.destroyAllWindows()
|
24,719 | af73a4b0f3e9ce8f4b4eaf66b6b72bac718afcfc | #!/usr/bin/env python
# * coding: utf8 *
"""
enhance.py
A module that handles appending information to the geocoded csv files
"""
try:
import arcpy
except:
pass
import csv
from pathlib import Path
from timeit import default_timer
import pandas as pd
#: Well-known text for NAD 1983 UTM Zone 12N (plus resolution/tolerance
#: metadata), used as the spatial reference for all created point layers.
UTM = "PROJCS['NAD_1983_UTM_Zone_12N',GEOGCS['GCS_North_American_1983',DATUM['D_North_American_1983',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Transverse_Mercator'],PARAMETER['False_Easting',500000.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',-111.0],PARAMETER['Scale_Factor',0.9996],PARAMETER['Latitude_Of_Origin',0.0],UNIT['Meter',1.0]];-5120900 -9998100 10000;-100000 10000;-100000 10000;0.001;0.001;0.001;IsHighPrecision"

#: Name of the scratch file geodatabase that holds the copied layers and
#: the intermediate step tables.
GDB_NAME = 'enhance.gdb'

#: OpenSGID layers overlaid onto the geocoded points. 'fields' lists the
#: source columns to keep; 'rename' is the single output alias that the
#: downstream csv export reads.
enhancement_layers = [{
    'table': 'political.senate_districts_2022_to_2032',
    'fields': ['dist'],
    'rename': ['senate_district']
}, {
    'table': 'political.house_districts_2022_to_2032',
    'fields': ['dist'],
    'rename': ['house_district']
}, {
    'table': 'boundaries.county_boundaries',
    'fields': ['name'],
    'rename': ['county_name']
}, {
    'table': 'demographic.census_tracts_2020',
    'fields': ['geoid20'],
    'rename': ['census_id']
}]
def create_enhancement_gdb(parent_folder):
    """Creates the file geodatabase that will be used to store the enhanced layers

    Any existing geodatabase of the same name is deleted first, so the
    copied layers are always fresh.

    :param parent_folder: The parent path to the file geodatabase to create
    :type parent_folder: Path
    """
    parent_folder = Path(parent_folder)
    gdb_path = parent_folder / GDB_NAME
    if gdb_path.exists():
        print(f'{GDB_NAME} exists. deleting and recreating with fresh data')
        arcpy.management.Delete(str(gdb_path))
    print('creating file geodatabase')
    start = default_timer()
    arcpy.management.CreateFileGDB(str(parent_folder), GDB_NAME)
    print(f'file geodatabase created in {default_timer() - start} seconds')
    # Immediately populate it with the enhancement layers from OpenSGID.
    add_enhancement_layers(parent_folder / GDB_NAME)
def add_enhancement_layers(output_gdb):
    """Adds the enhancement layers to the file geodatabase

    Copies each layer in `enhancement_layers` from the OpenSGID sde
    connection, keeping only the configured fields.

    :param output_gdb: The path to the file geodatabase to add the enhancement layers to
    :type output_gdb: Path
    """
    print('adding enhancement layers')
    start = default_timer()
    maps = Path(__file__).parent.parent.parent / 'maps'
    workspace = (maps / 'opensgid.agrc.utah.gov.sde').resolve()
    with arcpy.EnvManager(workspace=str(workspace)):
        for layer in enhancement_layers:
            table_start = default_timer()
            print(f' adding {layer["table"]}')
            # Restrict the field mapping to just the fields this layer needs.
            mapping = arcpy.FieldMappings()
            mapping.addTable(layer['table'])
            fields = arcpy.ListFields(layer['table'])
            filter_mapping(mapping, fields, layer)
            arcpy.conversion.FeatureClassToFeatureClass(
                in_features=layer['table'],
                out_path=str(output_gdb),
                out_name=layer['table'].split('.')[1],
                field_mapping=mapping
            )
            print(f' {layer["table"]} finished in {default_timer() - table_start} seconds')
    print(f'enhancement layers added in {default_timer() - start} seconds')
def merge(parent_folder):
    """Creates a single csv file containing all the enhanced data

    Reads every pipe-delimited '*_step_*.csv' under `parent_folder` and
    concatenates them into 'all.csv' (no header, no index).

    :param parent_folder: The parent path to the results folder
    :type parent_folder: Path
    """
    parent_folder = Path(parent_folder)
    column_names = ['type', 'id', 'county', 'senate', 'house', 'census']
    #: read all csv's delimiter='|', quoting=csv.QUOTE_MINIMAL
    frames = [
        pd.read_csv(csv_path, sep='|', encoding='utf-8', names=column_names)
        for csv_path in sorted(parent_folder.glob('*_step_*.csv'))
    ]
    #: merge all csv's into a single output file
    merged = pd.concat(frames)
    merged.to_csv(parent_folder / 'all.csv', sep='|', header=False, index=False, encoding='utf-8')
def filter_mapping(mapping, fields, table_metadata):
    """Filters the field mapping to only include the fields that are needed

    The geometry ('shape') field is always kept; the single surviving
    attribute field is renamed to table_metadata['rename'][0].

    :param mapping: The field mapping to filter
    :type mapping: arcpy.FieldMappings
    :param fields: The fields on the table
    :type fields: list[arcpy.Field]
    :param table_metadata: The table metadata to use to filter the field mapping
    :type table_metadata: dict
    """
    # BUG FIX: the original appended 'shape' directly to
    # table_metadata['fields']. Those dicts are shared module-level config
    # (enhancement_layers), so every call grew the list with another
    # duplicate 'shape' entry. Build the keep-set locally instead.
    wanted = set(table_metadata['fields']) | {'shape'}
    for field in fields:
        index = mapping.findFieldMapIndex(field.name)
        if index == -1:
            continue
        name = field.name.lower()
        if name not in wanted:
            try:
                mapping.removeFieldMap(index)
            except Exception as ex:
                print(name)
                raise ex
        else:
            if name == 'shape':
                continue
            # Rename the surviving attribute field to the output alias.
            field_map = mapping.getFieldMap(index)
            output_field = field_map.outputField
            output_field.name = table_metadata['rename'][0]
            field_map.outputField = output_field
            mapping.replaceFieldMap(index, field_map)
def enhance(parent_folder):
    """enhances the csv table data from the identity tables

    Runs the full pipeline (points -> identity overlays -> type split ->
    csv export -> cleanup) for every csv found directly in `parent_folder`.

    :param parent_folder: The parent path to the csv files to enhance
    :type parent_folder: Path
    """
    parent_folder = Path(parent_folder).resolve()
    address_csv_files = sorted(parent_folder.glob('*.csv'))
    print(f'enhancing {len(address_csv_files)} csv files in {parent_folder}')
    data = Path(__file__).parent.parent.parent / 'data'
    workspace = (data / 'enhanced' / GDB_NAME).resolve()
    # All intermediate step tables are created relative to this geodatabase.
    arcpy.env.workspace = str(workspace)
    for address_csv in address_csv_files:
        job = enhance_data(address_csv)
        prepare_output(job)
        convert_to_csv(job)
        remove_temp_tables(job)
def enhance_data(address_csv):
    """enhance the data in the csv file

    Converts the geocoded csv to points, keeps matched rows, then runs
    one Identity overlay per enhancement layer. Each stage writes a
    `<name>_step_<n>` feature class and is skipped when that output
    already exists, so the pipeline can be resumed after a crash.

    :param address_csv: path to the geocoded csv (needs x, y and score columns)
    :type address_csv: Path
    :return: the name of the final step feature class
    :rtype: str
    """
    table_name = address_csv.stem
    print(f'1. creating points from csv as {table_name}')
    # Stage 1a: expose the csv rows as an in-memory XY event layer.
    if not arcpy.Exists(f'{table_name}_step_1'):
        arcpy.management.MakeXYEventLayer(
            table=str(address_csv),
            in_x_field='x',
            in_y_field='y',
            out_layer=f'{table_name}_temp',
            spatial_reference=UTM,
            in_z_field=None
        )
    else:
        print(' skipping')
    print(' creating feature class')
    # Stage 1b: materialize the event layer as a point feature class.
    if not arcpy.Exists(f'{table_name}_step_1'):
        arcpy.management.XYTableToPoint(
            in_table=f'{table_name}_temp',
            out_feature_class=f'{table_name}_step_1',
            x_field='x',
            y_field='y',
            z_field=None,
            coordinate_system=UTM
        )
    else:
        print(' skipping')
    print(' selecting match addresses')
    # Stage 2: keep only rows the geocoder actually matched (score > 0).
    # NOTE(review): the selection is made against the step_1 name while
    # CopyFeatures below is also handed the name -- confirm the selection
    # carries over (layer vs. dataset semantics).
    if not arcpy.Exists(f'{table_name}_step_2'):
        arcpy.management.SelectLayerByAttribute(
            in_layer_or_view=f'{table_name}_step_1', selection_type='NEW_SELECTION', where_clause='score>0'
        )
    else:
        print(' skipping')
    print(' separating matched addresses')
    if not arcpy.Exists(f'{table_name}_step_2'):
        arcpy.management.CopyFeatures(in_features=f'{table_name}_step_1', out_feature_class=f'{table_name}_step_2')
    else:
        print(' skipping')
    step = 2
    # Stages 3..n: overlay each enhancement layer, producing a new step table.
    for identity in enhancement_layers:
        start = default_timer()
        fields = "'".join(identity['fields'])
        print(f'{step}. enhancing data with {fields} from {identity["table"]}')
        enhance_table_name = identity['table'].split('.')[1]
        if not arcpy.Exists(f'{table_name}_step_{step + 1}'):
            arcpy.analysis.Identity(
                in_features=f'{table_name}_step_{step}',
                identity_features=enhance_table_name,
                out_feature_class=f'{table_name}_step_{step + 1}',
                join_attributes='NO_FID',
                cluster_tolerance=None,
                relationship='NO_RELATIONSHIPS'
            )
        else:
            # Output already present: advance to the next stage without timing.
            print(' skipping')
            step = step + 1
            continue
        step = step + 1
        print(f'completed: {default_timer() - start}')
    return f'{table_name}_step_{step}'
def prepare_output(table):
    """prepares the output by splitting the primary key and the other field

    Adds a 1-character `type` field holding the first character of
    primary_key and strips that character from primary_key itself.
    Tables that already have a `type` field are skipped, making reruns cheap.

    :param table: name of the feature class (relative to arcpy.env.workspace)
    :type table: str
    """
    print('adding type field')
    absolute_table = str(Path(arcpy.env.workspace) / table)
    fields = arcpy.ListFields(absolute_table)
    if 'type' in [field.name.lower() for field in fields]:
        print(' skipping')
        return
    arcpy.management.AddField(absolute_table, 'type', 'TEXT', '', '', '1')
    print('splitting type and id')
    # type = first character of the key; primary_key = the remainder.
    arcpy.management.CalculateField(
        in_table=table, field='type', expression='left($feature.primary_key, 1)', expression_type='ARCADE'
    )
    arcpy.management.CalculateField(
        in_table=table, field='primary_key', expression='mid($feature.primary_key, 1, 20)', expression_type='ARCADE'
    )
def convert_to_csv(table):
    """writes table to csv

    Exports the successfully enhanced rows (those with a null `message`)
    to a pipe-delimited csv under data/results.

    :param table: name of the feature class to export
    :type table: str
    """
    print(f'writing {table} to csv')
    destination = Path(__file__).parent.parent.parent / 'data' / 'results' / f'{table}.csv'
    with arcpy.da.SearchCursor(
        in_table=table,
        field_names=['type', 'primary_key', 'county_name', 'senate_district', 'house_district', 'census_id'],
        where_clause='message is null'
    ) as cursor, open(destination, 'w', encoding='utf-8', newline='') as result_file:
        writer = csv.writer(result_file, delimiter='|', quoting=csv.QUOTE_MINIMAL)
        for row in cursor:
            writer.writerow(row)
def remove_temp_tables(table):
    """clean up method

    Deletes every intermediate `<name>_step_*` feature class, keeping the
    final one. Falls back to one-at-a-time deletes for ArcGIS Pro < 2.9.

    :param table: name of the final step table, e.g. 'foo_step_5'
    :type table: str
    """
    temp_tables = sorted(arcpy.ListFeatureClasses(wild_card=f'{table[:-1]}*', feature_type='Point'))
    removed = False
    print('removing ', ', '.join(temp_tables[:-1]))
    try:
        arcpy.management.Delete(temp_tables[:-1])
        removed = True
    # BUG FIX: narrowed from a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit.
    except Exception:
        print('could not delete intermediate tables. trying one at a time')
    if not removed:  #: try pro < 2.9 style
        for item in temp_tables[:-1]:
            arcpy.management.Delete(item)
    print('intermediate tables removed')
|
24,720 | aa2b2b9bc4af8c11d30bc394de8ea89587df4bc2 | curso_data_science = [15, 23, 43, 56]
curso_machine_learning = [13, 23, 56, 42]

# Combine both course rosters, then de-duplicate with set().
enviar_email = curso_data_science.copy()
enviar_email.extend(curso_machine_learning)
print(sorted(set(enviar_email)))

print(set([1, 2, 3, 1]))  # a set ignores duplicated values!

criando_um_set = {1, 2, 3, 4, 1}  # another way to create a set: curly braces!
print(criando_um_set)  # sets have no index; element order is arbitrary

# BUG FIX: the set operators below are not defined for lists -- applying
# them to the raw lists raised TypeError. Convert to sets first.
set_ml = set(curso_machine_learning)
set_ds = set(curso_data_science)
set_ml | set_ds  # union (OR / pipe): took either course
set_ml & set_ds  # intersection (AND): took both courses
set_ml - set_ds  # difference: values exclusive to the first set
set_ml ^ set_ds  # symmetric difference: values exclusive to each group

criando_um_set.add(654654)  # sets use add() instead of append(): no defined end

frozenset(criando_um_set)  # freeze the set: makes it immutable (and hashable)

frase = "Ola meu nome e Julio e meu pai e jose e meu avo e jose"
frase.split()
set(frase) |
24,721 | a5e445b6b228340fc774130b5741c69c24a36087 | # Raad Barnett 1231583
print("Davy's auto shop services")

# Service menu with prices in dollars.
service = {"Oil change": 35, "Tire rotation": 19, "Car wash": 7, "Car wax": 12}

# Show the menu of services with prices.
for service_name, service_price in service.items():
    print("{0} -- ${1}".format(service_name, service_price))
print()

first_service = input("Select first service:\n")
second_service = input("Select second service:\n")

# '-' means the customer skipped that slot.
if first_service == '-':
    first_service = "No service"
    price_first = 0
else:
    price_first = service[first_service]

if second_service == '-':
    second_service = "No service"
    price_second = 0
else:
    price_second = service[second_service]

total = price_first + price_second

print()
print("Davy's auto shop invoice\n")

# Print each line item; skipped slots show no price.
if first_service == "No service":
    print("Service 1: {0}".format(first_service))
else:
    print("Service 1: {0}, ${1}".format(first_service, service[first_service]))

if second_service == "No service":
    print("Service 2: {0}".format(second_service))
else:
    print("Service 2: {0}, ${1}".format(second_service, service[second_service]))

print()
print("Total: ${0}".format(total))
|
24,722 | 2aab1ffd24523d6e98900f278fe80328a6453439 |
#calss header
class _TEAMMATE():
def __init__(self,):
self.name = "TEAMMATE"
self.definitions = [u'a player on the same team']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
|
24,723 | 346515a129624dccaad16c9470625899a5e846fc | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2015-07-25 13:16:46
# @Author : Jiank (http://jiankg.github.com)
# @doc
# 测试阶段达标奖励发送
# 1. 4.13 ios越狱删档测试 充值返现[下次测试开服前7日返还上次测试中的150%,剩余350%的钻石在剩下的53天分次返还。]
# @end
# @copyright (C) 2015, kimech
import math
from libs.rklib.core import app
from apps.configs import static_const
from apps.configs import rediskey_config
from apps.configs import game_config
from apps.services.mail import MailService
from torngas.settings_manager import settings
redis_client = app.get_storage_engine('redis').client.current
def payback_diamonds(sid, plat, account_id, uid, login_days):
    """Recharge cash-back for the 4.13 iOS jailbreak wipe test, sent via in-game mail.

    Pays a daily diamond installment over a 60-day window: a larger rate
    during the first 7 login days and a smaller rate over the remaining
    53 (per the event header: 150% up front, the remaining 350% spread out).

    :param sid: server id -- only servers 1 and 2 participate
    :param plat: platform id (combined with account_id as the redis hash field)
    :param login_days: cumulative login days since the payback started
    """
    # Outside the event servers or past the 60-day window: nothing to do.
    if int(sid) not in (1,2) or login_days > 60:
        return
    payback_amount = redis_client.hget(rediskey_config.ACT_PAYBACK_KEY % sid, "%s_%s" % (plat, account_id))
    if not payback_amount:
        return
    else:
        payback_amount = int(payback_amount)
    # NOTE(review): the multipliers (15/7, 50, 35/53) look like 10x-scaled
    # percentages of the recharge amount -- confirm against the design doc.
    if 0 < login_days <= 7:
        get = int(math.ceil(payback_amount * 15.0 / 7))
        left = payback_amount * 50 - get * login_days
    else:
        get = int(math.ceil(payback_amount * 35.0 / 53))
        left = payback_amount * 35 - get * (login_days - 7)
    left = left if left >= 0 else 0
    # On the first payout day, all VIP exp is refunded at once.
    awards = {static_const.DIAMOND: get}
    if login_days == 1:
        awards = {static_const.DIAMOND: get, static_const.VIP_EXP: payback_amount * 10}
    # Mail template 3006 receives [amount_today, amount_remaining, days_remaining].
    MailService.send_game(uid, 3006, [get, left, 60 - login_days], awards)
def uc_qihoo_test_award_4003(sid, plat, account_id):
    """Test-period award 4003: reward for the final character level reached.

    Picks the highest-indexed tier of activity 4003 whose level
    requirement (cond_a) the player meets.

    :return: (awards dict, level) -- ({}, 0) when no level was recorded
    """
    act_id = 4003
    level = redis_client.hget(rediskey_config.UC_QIHOO_TEST_AWARD_4003_KEY % sid, "%s_%s" % (plat, account_id))
    if not level:
        return {}, 0
    else:
        tmp,tmp1 = {},0
        # Scan all tiers, keeping the highest tier index whose threshold is met.
        for i in game_config.act_sample_detail_cfg[act_id]:
            cfg = game_config.act_detail_cfg.get("%s-%s" % (act_id, i))
            if i > tmp1 and int(level) >= cfg["cond_a"]:
                tmp,tmp1 = cfg["awards"],i
        return tmp, int(level)
def uc_qihoo_test_award_4004(sid, plat, account_id):
    """Test-period award 4004: reward for cumulative login days.

    Picks the highest-indexed tier of activity 4004 whose day requirement
    (cond_a) the player meets.

    :return: (awards dict, login_days) -- ({}, 0) when no days were recorded
    """
    act_id = 4004
    login_days = redis_client.hget(rediskey_config.UC_QIHOO_TEST_AWARD_4004_KEY % sid, "%s_%s" % (plat, account_id))
    if not login_days:
        return {},0
    else:
        tmp,tmp1 = {},0
        # Scan all tiers, keeping the highest tier index whose threshold is met.
        for i in game_config.act_sample_detail_cfg[act_id]:
            cfg = game_config.act_detail_cfg.get("%s-%s" % (act_id, i))
            if i > tmp1 and int(login_days) >= cfg["cond_a"]:
                tmp,tmp1 = cfg["awards"],i
        return tmp, int(login_days)
def uc_qihoo_test_award_4005(sid, plat, account_id):
    """Test-period award 4005: reward for the final arena ranking.

    Picks the lowest-indexed (best) tier whose rank threshold (cond_a)
    covers the player's final rank.

    :return: (awards dict, rank) -- ({}, 0) when no rank was recorded
    """
    act_id = 4005
    rank = redis_client.hget(rediskey_config.UC_QIHOO_TEST_AWARD_4005_KEY % sid, "%s_%s" % (plat, account_id))
    if not rank:
        return {},0
    else:
        # Start above the largest tier index so any qualifying tier replaces it.
        tmp,tmp1 = {},max(game_config.act_sample_detail_cfg[act_id])+1
        for i in game_config.act_sample_detail_cfg[act_id]:
            cfg = game_config.act_detail_cfg.get("%s-%s" % (act_id, i))
            if tmp1 > i and int(rank) <= cfg["cond_a"]:
                tmp,tmp1 = cfg["awards"],i
        return tmp,int(rank)
|
24,724 | 4f0422642527a1aece81974e9822549bbf621af2 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 18 11:50:06 2018
@author: danamulligan
"""
def acronym(phrase):
    """Return the acronym built from the first letter of each word.

    Words are separated by single spaces; case is preserved and empty
    words (from consecutive spaces) contribute nothing, matching the
    original nested-loop behaviour. Rewritten with the idiomatic
    str.join over a generator instead of index loops plus string
    concatenation.

    >>> acronym("as soon as possible")
    'asap'
    """
    return ''.join(word[0] for word in phrase.split(' ') if word)
24,725 | 80567fecac2c393ecb0b6b04b542136ea27f2053 | # Michal Wolas 117308883
from socket import *
import datetime

# create a UDP socket
s = socket(AF_INET, SOCK_DGRAM)
# get the domain name of this machine
name = gethostname()
# DNS lookup of the ip address based on the domain name
ip_address = gethostbyname(name)
port = 6789
print(name, " ", ip_address, " : ", port)
# address tuple
server_address = (ip_address, port)
# bind the socket to its address
print('Server running: %s : %s ' % server_address)
try:
    s.bind((ip_address,port))
# BUG FIX: narrowed from a bare `except:` -- only socket errors are expected.
except OSError:
    print("address already binded, please try again / free the address")

# open the log file for appending; keep None on failure so the writes
# below can be skipped instead of raising a NameError (original bug)
save_file = None
try:
    save_file = open("clientLog", "a+")
except OSError:
    print("couldn't open file")

while True:
    print("\n ----- waiting for a Message (UDP) ------ \n")
    # wait for a message from the client
    # address is the address of the client as tuple
    data, address = s.recvfrom(4096)
    time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    # print the decoded message
    print(data.decode())
    # if the client closes its socket it will inform the server
    if data.decode() == "STOP":
        break
    reply = data.decode()
    print("Logging client message...")
    # write to the log file (sender, timestamp, quoted message)
    if save_file is not None:
        try:
            save_file.write("message from : " + str(address) + "\n")
            save_file.write("at: " + str(time) + "\n'")
            save_file.write(reply + "'\n\n")
        except (OSError, ValueError):
            print("The file has been closed / is not responding")
    else:
        print("The file has been closed / is not responding")
    # change message to upper case and append the timestamp
    reply = reply.upper()
    reply += " at: " + str(time)
    # encode/change to bytes
    reply = reply.encode()
    print("sending reply to the client...")
    # send the reply to the address that the message was received from
    try:
        s.sendto(reply,(address))
    except OSError:
        print("couldn't send the reply, possibly wrong address")

# close the file and the socket
if save_file is not None:
    save_file.close()
s.close() |
24,726 | 9602e8e0500f66d1f630e4e290fc62863203d4c0 | from django import forms
from .models import Suggessions
class SuggessionForm(forms.ModelForm):
    """Model form collecting a visitor's name, email and suggestion."""

    class Meta:
        model = Suggessions
        fields = ['name', 'email', 'suggession']
        # BUG FIX: Django's ModelForm reads this option from 'labels'; the
        # original misspelling 'lables' was silently ignored, so the custom
        # labels never appeared.
        labels = {'name': 'Name', 'suggession': "Your Suggession"}
        widgets = {
            'suggession': forms.Textarea(attrs={'rows': 5, 'cols': 15}),
        }
|
24,727 | 39426010981d3e514926b0a146953918eb45e493 | a = [1,2,-5,-6,11]
# Print only the non-negative entries of `a`.
for value in a:
    if value < 0:
        continue
    print(value)
|
24,728 | 86711ad8f429c3abfbd7fa8d5771c71de8fc04eb | # inputs
n, k = int(input()), int(input())
# result inits with n in it
res = n
# loop k times and add to res
for i in range(k): res += n * 10 ** (i + 1)
# output
print(res) |
24,729 | d45ea7b30452b3f51f68c293dff8541cd6051b5a | from liblinearutil import *
import numpy as np
def X_tranformation(X):
    """Degree-2 polynomial feature transform with a leading bias column.

    For every sample the original features are kept and every pairwise
    product x_i * x_j (j >= i, squares included) is appended; a column of
    ones is prepended.

    BUG FIX: the original crashed immediately (`np.append()` called with
    no arguments), left debug prints in the loop, and hard-coded 200
    samples; this version works for any number of rows.

    :param X: array-like of shape (n_samples, n_features)
    :return: ndarray of shape (n_samples, 1 + d + d*(d+1)/2) for d features
    """
    transformed = []
    for row in X:
        features = list(row)
        d = len(row)
        # Append all pairwise products, including squares.
        for i in range(d):
            for j in range(i, d):
                features.append(row[i] * row[j])
        # Prepend the bias term.
        transformed.append([1.0] + features)
    return np.array(transformed)
# Load the space-delimited training data: 6 feature columns + 1 label column.
train3 = np.genfromtxt('hw4_train.txt', delimiter=' ')
X = np.hsplit(train3,[6])[0] ## saparate the array into two part on the base of the nth column
Y = np.hsplit(train3,[6])[1]
Y = Y.reshape((1,200))
print(X)
# Expand the features with the polynomial transform before training.
X = X_tranformation(X)
# Training with liblinear is currently disabled.
# prob = problem(Y[0], X)
# param = parameter('-s 0')
# m = train(prob, param)
|
24,730 | 5dc92a3a5b95838d28ddbaa3b0357926399ec7e7 | import RPi.GPIO as GPIO
import time
import MySQLdb
# Use Broadcom (BCM) pin numbering.
GPIO.setmode(GPIO.BCM)
#GPIO.setup(22, GPIO.IN, pull_up_down = GPIO.PUD_UP) ## GPIO 22 como entrada
GPIO.setup(22, GPIO.IN) ## GPIO 22 as input (flow-sensor pulse line)
# Pulse counter shared with contador_pulso().
global count
count = 0
def guardar_dato(nivel):
    """Persist the latest flow reading to the remote MySQL control table.

    :param nivel: flow level (L/min) to store in control.nivel_flujo
    """
    # NOTE(review): credentials are hard-coded in source -- move to config.
    db = MySQLdb.connect(host="34.223.215.43", user="root", passwd="385402292Mica_02", db="control_raspi")
    cur = db.cursor()
    # SECURITY FIX: parameterized query instead of string concatenation,
    # which was vulnerable to SQL injection via the interpolated value.
    cur.execute("UPDATE control SET nivel_flujo=%s", (str(nivel),))
    db.commit()
def contador_pulso():
    # Count low pulses on GPIO22 and convert the count to a flow rate.
    global count
    count = 0
    # Sample the pin 60 times; every low reading counts as one pulse.
    # NOTE(review): busy-polls with no delay, so the effective sample
    # window depends on CPU speed -- confirm the flow calibration.
    for x in range(0, 60):
        if GPIO.input(22) == 0:
            count = count + 1
            #time.sleep(0.001)
    print(count)
    # Sensor formula: pulses * 60 * 2.25 mL / 1000 -> litres per minute.
    flow = (count * 60 * 2.25 / 1000)
    print "Flujo: %.3f L/m" % (flow)
    # Push the reading to the remote database.
    guardar_dato(flow)
# Interrupt-driven counting is disabled; polling is used instead.
#GPIO.add_event_detect(22, GPIO.FALLING, callback=contador_pulso)
try:
    # Main loop: take one sensor reading roughly every 2 seconds.
    while True:
        time.sleep(1)
        contador_pulso()
        time.sleep(1)
except KeyboardInterrupt:
    # Ctrl-C: release the GPIO pins before exiting.
    print "Script finalizado."
    GPIO.cleanup()
24,731 | 616abdbaa06ea3fc1835f2e74e7db7d67bec6c6e | import random
import string
from datetime import datetime
def random_string(length=10):
    """Return a random string of ASCII letters and digits of the given length."""
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))
def utcnow():
    """Current UTC time as a naive datetime, truncated to whole seconds."""
    now = datetime.utcnow()
    return now.replace(microsecond=0)
|
24,732 | e27ec09a08f751029e2f54ad9f3eb7402c48be0f | # -*- coding: utf-8 -*-
"""
Created on Dec 22 2016
"""
# This file will contain a set of functions that use classes in dataFormat (Companies),
# get their URLs, and access them to verify their existence. |
24,733 | 7557272fe26a6224a190afce4de5aeb67814b5fb | #!/usr/bin/env python
#coding=utf-8
import tornado.web
import tornado.readdb as mrd
class userHandler(tornado.web.RequestHandler):
    """Render user.html with the rows matching the ?user= query argument."""

    def get(self):
        """Look up the requested username and render the user page."""
        username = self.get_argument("user")
        # BUG FIX: the original read `mrd.select_table=(table=...)` — the
        # stray `=` made keyword arguments appear inside a bare parenthesized
        # expression, which is a SyntaxError. The intent is clearly a call.
        user_infos = mrd.select_table(table="users", column="*",
                                      condition="username", value=username)
        self.render("user.html", users=user_infos)
|
24,734 | 778f8ca89a5063a6191632598b279c5a22b70749 | from django.http import HttpResponseRedirect
from django.contrib.auth import authenticate, login
from django.contrib.auth.forms import UserCreationForm
from django.template.response import TemplateResponse
def signup(request):
    """Handle account signup.

    GET renders an empty UserCreationForm; a valid POST creates the user,
    logs them in, and redirects to the site root. An invalid POST re-renders
    the bound form with its errors.
    """
    if request.method == "POST":
        form = UserCreationForm(request.POST)
        if form.is_valid():
            # BUG FIX: read validated values from cleaned_data.
            # clean_username()/clean_password2() are internal validation
            # hooks run by is_valid(), not accessors to call afterwards.
            username = form.cleaned_data["username"]
            password = form.cleaned_data["password1"]
            form.save()
            user = authenticate(username=username, password=password)
            if user is not None:  # authenticate can return None
                login(request, user)
            return HttpResponseRedirect('/')
    else:
        form = UserCreationForm()
    context = {
        'form': form,
    }
    return TemplateResponse(request, 'accounts/signup.html', context)
|
24,735 | f4c39bb83dbf7377a30cecfb9984759f8b5ed25c | try:
print('This is the text before the error')
print('5'+5)
except NameError as n:
print('This is the text after the name error ')
except TypeError as t:
print('Type error')
print(str(t)) |
24,736 | 59110306c7e66c2cce19e940a7e94567c9c21a8b | from robolink import * # API to communicate with RoboDK
from robodk import * # robodk robotics toolbox
import sys
import os
import re
PIXELS_AS_OBJECTS = False # Set to True to generate PDF or HTML simulations that include the drawn path
TCP_KEEP_TANGENCY = False # Set to True to keep the tangency along the path
SIZE_BOARD = [50, 100] # Size of the image. The image will be scaled keeping its aspect ratio
MM_X_PIXEL = 1 # in mm. The path will be cut depending on the pixel size. If this value is changed it is recommended to scale the pixel object
IMAGE_FILE = 'World map.svg' # Path of the SVG image, it can be relative to the current RDK station
#--------------------------------------------------------------------------------
# function definitions:
def point2D_2_pose(point, tangent):
    """Convert a 2D path point to a 3D pose in the XY plane.

    The pose is a translation to (x, y, 0) combined with a rotation about Z
    by the tangent's angle, so the tool stays tangent to the path.
    """
    # transl/rotz come from the robodk toolbox (4x4 homogeneous matrices).
    return transl(point.x, point.y, 0)*rotz(tangent.angle())
def svg_draw_quick(svg_img, board, pix_ref):
"""Quickly shows the image result without checking the robot movements."""
RDK.Render(False)
count = 0
for path in svg_img:
count = count + 1
# use the pixel reference to set the path color, set pixel width and copy as a reference
pix_ref.Recolor(path.fill_color)
if PIXELS_AS_OBJECTS:
pix_ref.Copy()
np = path.nPoints()
print('drawing path %i/%i' % (count, len(svg_img)))
for i in range(np):
p_i = path.getPoint(i)
v_i = path.getVector(i)
# Reorient the pixel object along the path
pt_pose = point2D_2_pose(p_i, v_i)
# add the pixel geometry to the drawing board object, at the calculated pixel pose
if PIXELS_AS_OBJECTS:
board.Paste().setPose(pt_pose)
else:
board.AddGeometry(pix_ref, pt_pose)
RDK.Render(True)
def svg_draw_robot(svg_img, board, pix_ref, item_frame, item_tool, robot):
"""Draws the image with the robot. It is slower that svg_draw_quick but it makes sure that the image can be drawn with the robot."""
APPROACH = 2 # approach distance in MM for each path
home_joints = [-144.700431, -60.204293, 103.198570, -99.897231, 53.177811, 38.494197]
robot.setPoseFrame(item_frame)
robot.setPoseTool(item_tool)
# get the target orientation depending on the tool orientation at home position
#orient_frame2tool = invH(item_frame.Pose())*robot.SolveFK(home_joints)*item_tool.Pose()
orient_frame2tool = roty(pi) #alternative:
orient_frame2tool[0:3,3] = Mat([0,0,0])
for path in svg_img:
# use the pixel reference to set the path color, set pixel width and copy as a reference
if PIXELS_AS_OBJECTS:
pix_ref.Copy()
np = path.nPoints()
# robot movement: approach to the first target
p_0 = path.getPoint(0)
target0 = transl(p_0.x, p_0.y, 0)*orient_frame2tool
target0_app = target0*transl(0,0,-APPROACH)
robot.MoveL(target0_app)
if TCP_KEEP_TANGENCY:
joints_now = robot.Joints().tolist()
joints_now[5] = -180
robot.MoveJ(joints_now)
for i in range(np):
p_i = path.getPoint(i)
v_i = path.getVector(i)
pt_pose = point2D_2_pose(p_i, v_i)
if TCP_KEEP_TANGENCY:
#moving the tool along the path (axis 6 may reach its limits)
target = pt_pose*orient_frame2tool
else:
#keep the tool orientation constant
target = transl(p_i.x, p_i.y, 0)*orient_frame2tool
# Move the robot to the next target
robot.MoveJ(target)
# create a new pixel object with the calculated pixel pose
if PIXELS_AS_OBJECTS:
board.Paste().setPose(pt_pose)
else:
board.AddGeometry(pix_ref, pt_pose)
target_app = target*transl(0,0,-APPROACH)
robot.MoveL(target_app)
robot.MoveL(home_joints)
#--------------------------------------------------------------------------------
# Program start
RDK = Robolink()
path_stationfile = RDK.getParam('PATH_OPENSTATION')
from svgpy.svg import *
# select the file to draw
svgfile = path_stationfile + '/Camillas tegning.svg'
# import the SVG file
svgdata = svg_load(svgfile)
IMAGE_SIZE = Point(SIZE_BOARD[0],SIZE_BOARD[1]) # size of the image in MM
svgdata.calc_polygon_fit(IMAGE_SIZE, MM_X_PIXEL)
size_img = svgdata.size_poly() # returns the size of the current polygon
# get the robot, frame and tool objects
robot = RDK.ItemUserPick('', ITEM_TYPE_ROBOT)
framedraw = RDK.Item('Frame draw')
tooldraw = RDK.Item('Gripper')
# get the pixel reference to draw
pixel_ref = RDK.Item('Ellipse')
# delete previous image if any
image = RDK.Item('Board & image')
if image.Valid() and image.Type() == ITEM_TYPE_OBJECT: image.Delete()
# make a drawing board base on the object reference "Blackboard 250mm"
board_draw = RDK.Item('Top')
pixel_ref.Copy()
# quickly show the final result without checking the robot movements:
#svg_draw_quick(svgdata, board_draw, pixel_ref)
# draw the image with the robot:
svg_draw_robot(svgdata, board_draw, pixel_ref, framedraw, tooldraw, robot)
|
24,737 | 68d1d84bd20c5e53e8781c9605d40ffbee0c0cab | import numpy as np
from cov_matern import cov_matern
import unittest
from scipy.special import gamma
class test_cov_matern(unittest.TestCase):
def test(self):
p = 2.0
nu = p+0.5
k = cov_matern(p=p)
ell = 1.7
sigma = 1.5
F, q = k.get_F_q(sigma = 1.5, ell = ell)
lmbda = np.sqrt(2.0*nu)/ell
print lmbda
F_expected = np.array([
[0., 1., 0. ],
[0., 0., 1. ],
[-lmbda**3, -3*lmbda**2, -3*lmbda]])
np.testing.assert_allclose(F, F_expected, rtol=1e-7, atol=0)
q_expected = 2.0*sigma*np.sqrt(np.pi)*lmbda**(2.0*nu)*gamma(nu+0.5)/gamma(nu)
np.testing.assert_almost_equal(q, q_expected, decimal=9)
#######################################################################
nu = 9.5
k = cov_matern(nu=nu)
ell = 2.123
sigma = 3.74
lmbda = np.sqrt(2.0*nu)/ell
for omega in np.linspace(0, 100, 100):
F, q, P_plus_coefs, P_minus_coefs = k.get_F_q(sigma, ell, return_P=True)
P_plus_coefs = np.flip(P_plus_coefs, axis=0)
P_minus_coefs = np.flip(P_minus_coefs, axis=0)
S = q/np.polynomial.polynomial.polyval(1.j*omega, P_plus_coefs)/np.polynomial.polynomial.polyval(1.j*omega, P_minus_coefs)
np.testing.assert_almost_equal(S.imag, 0, decimal=12)
S_expected = 2.0*sigma*np.sqrt(np.pi)*lmbda**(2.0*nu)*gamma(nu+0.5)/gamma(nu)*(lmbda**2+omega**2)**(-nu-0.5)
np.testing.assert_almost_equal(S.real, S_expected, decimal=12)
if __name__ == '__main__':
unittest.main() |
24,738 | cd3abcc24c047e0c1a0c74f14bae254ca31d7f19 | import bs4
import logging
import json
import requests
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('wb')
main_url = 'https://www.wildberries.ru'
class Client:
    """Scraper for the Wildberries women's outerwear catalogue page.

    Each scraped card is appended to self.result as
    [name, category, price, photo_url, product_url]; run() merges the
    results into clothes.json.
    """

    def __init__(self):
        self.session = requests.Session()
        self.session.headers = {
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36',
            'Accept-Language': 'ru',
        }
        self.result = []

    def load_page(self):
        """Download the catalogue HTML and return it as text."""
        url = 'https://www.wildberries.ru/catalog/zhenshchinam/odezhda/verhnyaya-odezhda'
        res = self.session.get(url=url)
        res.raise_for_status()
        return res.text

    def parse_page(self, text: str):
        """Parse every product-card block found in the page HTML."""
        soup = bs4.BeautifulSoup(text, 'lxml')
        container = soup.select('div.dtList.i-dtList.j-card-item')
        for block in container:
            self.parse_block(block=block)

    def parse_block(self, block):
        """Extract name, price, photo and product URL from one card."""
        url_block = block.select_one('a.ref_goods_n_p.j-open-full-product-card')
        if not url_block:
            logger.error('no url_block')
            return
        url = url_block.get('href')
        if not url:
            logger.error('not url')
            return
        url = main_url + url
        name = block.select_one('strong.brand-name')
        if not name:
            logger.error('not name')
            return
        name = name.text.replace('/', '').strip()
        price = block.select_one('ins.lower-price')
        if not price:
            logger.error('not price')
            return
        price = price.text[:-1].replace(' ', '')
        link_photo = url_block.select('img.thumbnail')
        # BUG FIX: the original indexed link_photo[1] unconditionally and
        # raised IndexError whenever a card carried fewer than two
        # thumbnail <img> tags.
        if len(link_photo) < 2:
            logger.error('not link_photo')
            return
        link_photo = 'https:' + link_photo[1].get('src').strip()
        self.result.append([name, 'outerwearwoman', int(price), link_photo, url])

    def run(self):
        """Scrape the page and merge new rows into clothes.json."""
        text = self.load_page()
        self.parse_page(text=text)
        # BUG FIX: tolerate a missing clothes.json on the first run instead
        # of crashing with FileNotFoundError.
        try:
            with open('clothes.json', 'r', encoding="utf-8") as file:
                info = json.load(file)
        except FileNotFoundError:
            info = []
        info.extend(self.result)
        with open('clothes.json', 'w', encoding="utf-8") as file:
            json.dump(info, file, indent=2, ensure_ascii=False)
if __name__ == '__main__':
parser = Client()
parser.run() |
24,739 | abb9070eb4bdb6d6bd9ce8f9103137c83162bf83 | import numpy as np
import tensorflow as tf
from collections import OrderedDict
import nltk
from pycocoevalcap.bleu.bleu import Bleu
from pycocoevalcap.rouge.rouge import Rouge
from tensorflow.python import pywrap_tensorflow
from pdb import set_trace as bp
import data_utils as dp
import data_utils
import sys, os
from tensorflow.python.ops import clip_ops
from tensorflow.python.framework import ops
from collections import defaultdict
import codecs
import cPickle
from tensorflow.python.ops import math_ops, variable_scope
from embedding_metrics import greedy_match, extrema_score, average_score
def lrelu(x, leak=0.2, name="lrelu"):
    """Leaky ReLU written as 0.5*(1+leak)*x + 0.5*(1-leak)*|x|.

    Equals x for x >= 0 and leak*x for x < 0.
    """
    with tf.variable_scope(name):
        pos_coef = 0.5 * (1 + leak)
        neg_coef = 0.5 * (1 - leak)
        return pos_coef * x + neg_coef * tf.abs(x)
def sent2idx(text, wordtoix, opt, is_cnn = True):
    """Map a whitespace-tokenized sentence to ids and tile it into a CNN-padded batch.

    NOTE(review): raises KeyError on out-of-vocabulary tokens; the is_cnn
    flag is currently unused.
    """
    sent = [wordtoix[x] for x in text.split()]
    # Repeat the same sentence batch_size times and pad for the conv filters.
    return prepare_data_for_cnn([sent for i in range(opt.batch_size)], opt)
def prepare_data_for_cnn(seqs_x, opt):
maxlen=opt.maxlen
filter_h=opt.filter_shape
lengths_x = [len(s) for s in seqs_x]
# print lengths_x
if maxlen != None:
new_seqs_x = []
new_lengths_x = []
for l_x, s_x in zip(lengths_x, seqs_x):
if l_x < maxlen:
new_seqs_x.append(s_x)
new_lengths_x.append(l_x)
else:
new_seqs_x.append(s_x[l_x-maxlen+1:])
new_lengths_x.append(maxlen-1)
lengths_x = new_lengths_x
seqs_x = new_seqs_x
if len(lengths_x) < 1 :
return None, None
pad = filter_h -1
x = []
for rev in seqs_x:
xx = []
for i in xrange(pad):
xx.append(0)
for idx in rev:
xx.append(idx)
while len(xx) < maxlen + 2*pad:
xx.append(0)
x.append(xx)
x = np.array(x,dtype='int32')
return x
def prepare_data_for_rnn(seqs_x, opt, is_add_GO = True):
maxlen=opt.sent_len -2 #+ opt.filter_shape - 1 # 49
lengths_x = [len(s) for s in seqs_x]
# print lengths_x
if maxlen != None:
new_seqs_x = []
for l_x, s_x in zip(lengths_x, seqs_x):
if l_x < maxlen-2:
new_seqs_x.append(s_x)
else:
#new_seqs_x.append(s_x[l_x-maxlen+1:])
new_seqs_x.append(s_x[:maxlen-2]+[2])
seqs_x = new_seqs_x
lengths_x = [len(s) for s in seqs_x]
if len(lengths_x) < 1 :
return None, None
n_samples = len(seqs_x)
maxlen_x = np.max(lengths_x)
x = np.zeros(( n_samples, opt.sent_len)).astype('int32')
for idx, s_x in enumerate(seqs_x):
if is_add_GO:
x[idx, 0] = 1 # GO symbol
x[idx, 1:lengths_x[idx]+1] = s_x
else:
x[idx, :lengths_x[idx]] = s_x
return x
def restore_from_save(t_vars, sess, opt, prefix = 'd_', load_path = None):
if not load_path:
load_path = opt.load_path
if opt.load_from_pretrain:
save_keys = tensors_key_in_file(load_path)
#print(save_keys.keys())
ss = set([var.name[2:][:-2] for var in t_vars])&set([s[2:] for s in save_keys.keys()])
cc = {var.name[2:][:-2]:var for var in t_vars}
ss_right_shape = set([s for s in ss if cc[s].get_shape() == save_keys[prefix+s]]) # only restore variables with correct shape
ss_wrong_shape = ss - ss_right_shape
cc2 = {prefix+ var.name[2:][:-2]:var for var in t_vars if var.name[2:][:-2] in ss_right_shape} # name in file -> var
loader = tf.train.Saver(var_list=cc2)
loader.restore(sess, load_path)
print("Loading variables from '%s'." % load_path)
print("Loaded variables:"+str(ss_right_shape))
print("Mis-shaped variables:"+str(ss_wrong_shape))
else:
save_keys = tensors_key_in_file(load_path)
ss = [var for var in t_vars if var.name[:-2] in save_keys.keys()]
ss_right_shape = [var.name for var in ss if var.get_shape() == save_keys[var.name[:-2]]]
ss_wrong_shape = set([v.name for v in ss]) - set(ss_right_shape)
#ss = [var for var in ss if 'OptimizeLoss' not in var]
loader = tf.train.Saver(var_list= [var for var in t_vars if var.name in ss_right_shape])
loader.restore(sess, load_path)
print("Loading variables from '%s'." % load_path)
print("Loaded variables:"+str(ss_right_shape))
print("Mis-shaped variables:"+str(ss_wrong_shape))
_buckets = [(60,60)]
def read_data(source_path, target_path, opt):
"""
From tensorflow tutorial translate.py
Read data from source and target files and put into buckets.
Args:
source_path: path to the files with token-ids for the source language.
target_path: path to the file with token-ids for the target language;
it must be aligned with the source file: n-th line contains the desired
output for n-th line from the source_path.
max_size: maximum number of lines to read, all other will be ignored;
if 0 or None, data files will be read completely (no limit).
Returns:
data_set: a list of length len(_buckets); data_set[n] contains a list of
(source, target) pairs read from the provided data files that fit
into the n-th bucket, i.e., such that len(source) < _buckets[n][0] and
len(target) < _buckets[n][1]; source and target are lists of token-ids.
"""
data_set = [[] for _ in _buckets]
with tf.gfile.GFile(source_path, mode="r") as source_file:
with tf.gfile.GFile(target_path, mode="r") as target_file:
source, target = source_file.readline(), target_file.readline()
counter = 0
while source and target and (not opt.max_train_data_size or counter < opt.max_train_data_size):
counter += 1
if counter % 100000 == 0:
print(" reading data line %d" % counter)
sys.stdout.flush()
source_ids = [int(x) for x in source.split()]
target_ids = [int(x) for x in target.split()]
target_ids.append(data_utils.EOS_ID)
for bucket_id, (source_size, target_size) in enumerate(_buckets):
if opt.minlen <len(source_ids) < min(source_size, opt.maxlen) and opt.minlen <len(target_ids) < min(target_size, opt.maxlen):
data_set[bucket_id].append([source_ids, target_ids])
break
source, target = source_file.readline(), target_file.readline()
return data_set
def read_pair_data_full(src_f, tgt_f, dic_f, train_prop = 0.9, max_num=None, rev_src=False, rev_tgt = False, is_text_src = False, is_text_tgt = False, p_f = '../data/', from_p = True):
#train, val = [], []
if from_p:
p_f = src_f[:-3] + str(max_num) + '.p'
if os.path.exists(p_f):
with open(p_f, 'rb') as pfile:
train, val, test, wordtoix, ixtoword = cPickle.load(pfile)
return train, val, test, wordtoix, ixtoword
wordtoix, ixtoword = {}, {}
print "Start reading dic file . . ."
if os.path.exists(dic_f):
print("loading Dictionary")
counter=0
with codecs.open(dic_f,"r",'utf-8') as f:
s=f.readline()
while s:
s=s.rstrip('\n').rstrip("\r")
#print("s==",s)
wordtoix[s]=counter
ixtoword[counter]=s
counter+=1
s=f.readline()
def shift_id(x):
return x
src, tgt = [], []
print "Start reading src file . . ."
with codecs.open(src_f,"r",'utf-8') as f:
line = f.readline().rstrip("\n").rstrip("\r")
count, max_l = 0, 0
#max_length_fact=0
while line and (not max_num or count<max_num):
count+=1
if is_text_src:
tokens=[wordtoix[x] if x in wordtoix else dp.UNK_ID for x in line.split()]
else:
tokens=[shift_id(int(x)) for x in line.split()]
max_l = max(max_l, len(tokens))
if not rev_src: # reverse source
src.append(tokens)
else :
src.append(tokens[::-1])
#pdb.set_trace()
line = f.readline().rstrip("\n").rstrip("\r")
if np.mod(count,100000)==0:
print count
print "Source cnt: " + str(count) + " maxLen: " + str(max_l)
print "Start reading tgt file . . ."
with codecs.open(tgt_f,"r",'utf-8') as f:
line = f.readline().rstrip("\n").rstrip("\r")
count = 0
#max_length_fact=0
while line and (not max_num or count<max_num):
count+=1
if is_text_tgt:
tokens=[wordtoix[x] if x in wordtoix else dp.UNK_ID for x in line.split()]
else:
tokens=[shift_id(int(x)) for x in line.split()]
if not rev_tgt: # reverse source
tgt.append(tokens)
else :
tgt.append(tokens[::-1])
line = f.readline().rstrip("\n").rstrip("\r")
if np.mod(count,100000)==0:
print count
print "Target cnt: " + str(count) + " maxLen: " + str(max_l)
assert(len(src)==len(tgt))
all_pairs = np.array(zip(*[tgt, src]))
if not train_prop:
train , val, test = all_pairs, [], []
else:
idx = np.random.choice(len(all_pairs), int(np.floor(train_prop*len(all_pairs))))
rem_idx = np.array(list(set(range(len(all_pairs)))-set(idx)))
#v_idx = np.random.choice(rem_idx, int(np.floor(0.5*len(rem_idx))))
v_idx = np.random.choice(rem_idx, len(rem_idx)-2000)
t_idx = np.array(list(set(rem_idx)-set(v_idx)))
#pdb.set_trace()
train, val, test = all_pairs[idx], all_pairs[v_idx], all_pairs[t_idx]
if from_p:
with open(p_f, 'wb') as pfile:
cPickle.dump([train, val, test, wordtoix, ixtoword], pfile)
#print(counter)
#pdb.set_trace()
return train, val, test, wordtoix, ixtoword
def read_test(test_file, wordtoix):
print "Start reading test file . . ."
test = []
with codecs.open(test_file,"r",'utf-8') as f:
lines = f.readlines()
for line in lines:
line = line.rstrip("\n").rstrip("\r").split('\t')
conv = []
for l in line:
sent=[wordtoix[x] if x in wordtoix else dp.UNK_ID for x in l.split()] + [2]
conv.append(sent)
# bp()
test.append(conv)
return test
def tensors_key_in_file(file_name):
"""Return tensors key in a checkpoint file.
Args:
file_name: Name of the checkpoint file.
"""
try:
reader = pywrap_tensorflow.NewCheckpointReader(file_name)
return reader.get_variable_to_shape_map()
except Exception as e: # pylint: disable=broad-except
print(str(e))
return None
def get_minibatches_idx(n, minibatch_size, shuffle=False):
    """Split indices 0..n-1 into full minibatches.

    Returns a zip of (minibatch_number, index_array) pairs; any trailing
    partial batch is dropped.
    """
    idx_list = np.arange(n, dtype="int32")
    if shuffle:
        np.random.shuffle(idx_list)
    num_full = n // minibatch_size
    minibatches = [idx_list[k * minibatch_size:(k + 1) * minibatch_size]
                   for k in range(num_full)]
    return zip(range(len(minibatches)), minibatches)
# def normalizing_L1(x, axis):
# norm = tf.sqrt(tf.reduce_sum(tf.square(x), axis=axis, keep_dims=True))
# normalized = x / (norm)
# return normalized
def normalizing(x, axis):
    """L2-normalize tensor x along the given axis.

    NOTE(review): no epsilon is added, so an all-zero slice produces
    NaN/Inf — confirm callers never pass zero vectors.
    """
    norm = tf.sqrt(tf.reduce_sum(tf.square(x), axis=axis, keep_dims=True))
    normalized = x / (norm)
    return normalized
def normalizing_sum(x, axis):
    """Scale x along axis so each slice sums to 1 (no epsilon guard)."""
    # sum(x) == 1
    sum_prob = tf.reduce_sum(x, axis=axis, keep_dims=True)
    normalized = x / sum_prob
    return normalized
def _p(pp, name):
return '%s_%s' % (pp, name)
def dropout(X, trng, p=0.):
    """Inverted dropout: keep each unit with probability 1-p and rescale.

    NOTE(review): `theano` is never imported in this module, so this
    function raises NameError if called — confirm whether it is dead code
    carried over from a Theano version of this file.
    """
    if p != 0:
        retain_prob = 1 - p
        X = X / retain_prob * trng.binomial(X.shape, p=retain_prob, dtype=theano.config.floatX)
    return X
""" used for initialization of the parameters. """
def ortho_weight(ndim):
    """Square orthogonal init: U factor of the SVD of a random Gaussian.

    NOTE(review): `config` (Theano's) is never imported here, so the final
    cast raises NameError if called — likely dead code from a Theano port.
    """
    W = np.random.randn(ndim, ndim)
    u, s, v = np.linalg.svd(W)
    return u.astype(config.floatX)
def uniform_weight(nin,nout=None, scale=0.05):
    """Uniform(-scale, scale) init of shape (nin, nout); nout defaults to nin.

    NOTE(review): depends on the never-imported Theano `config` — see
    ortho_weight.
    """
    if nout == None:
        nout = nin
    W = np.random.uniform(low=-scale, high=scale, size=(nin, nout))
    return W.astype(config.floatX)
def normal_weight(nin,nout=None, scale=0.05):
    """Gaussian N(0, scale^2) init of shape (nin, nout); nout defaults to nin.

    NOTE(review): depends on the never-imported Theano `config` — see
    ortho_weight.
    """
    if nout == None:
        nout = nin
    W = np.random.randn(nin, nout) * scale
    return W.astype(config.floatX)
def zero_bias(ndim):
    """All-zeros bias vector of length ndim.

    NOTE(review): depends on the never-imported Theano `config` — see
    ortho_weight.
    """
    b = np.zeros((ndim,))
    return b.astype(config.floatX)
"""auxiliary function for KDE"""
def log_mean_exp(A,b,sigma):
a=-0.5*((A-theano.tensor.tile(b,[A.shape[0],1]))**2).sum(1)/(sigma**2)
max_=a.max()
return max_+theano.tensor.log(theano.tensor.exp(a-theano.tensor.tile(max_,a.shape[0])).mean())
'''calculate KDE'''
def cal_nkde(X,mu,sigma):
s1,updates=theano.scan(lambda i,s: s+log_mean_exp(mu,X[i,:],sigma), sequences=[theano.tensor.arange(X.shape[0])],outputs_info=[np.asarray(0.,dtype="float32")])
E=s1[-1]
Z=mu.shape[0]*theano.tensor.log(sigma*np.sqrt(np.pi*2))
return (Z-E)/mu.shape[0]
def cal_relevance(generated, reference, embedding): # embedding V* E
generated = [[g] for g in generated]
reference = [[s] for s in reference]
#bp()
relevance_score = [0.0,0.0,0.0]
relevance_score[0] = greedy_match(reference, generated, embedding)
relevance_score[1] = average_score(reference, generated, embedding)
relevance_score[2] = extrema_score(reference, generated, embedding)
return relevance_score
def cal_BLEU(generated, reference, is_corpus = False):
#print 'in BLEU score calculation'
#the maximum is bigram, so assign the weight into 2 half.
BLEUscore = [0.0,0.0,0.0]
for idx, g in enumerate(generated):
if is_corpus:
score, scores = Bleu(4).compute_score(reference, {0: [g]})
else:
score, scores = Bleu(4).compute_score({0: [reference[0][idx]]} , {0: [g]})
#print g, score
for i, s in zip([0,1,2],score[1:]):
BLEUscore[i]+=s
#BLEUscore += nltk.translate.bleu_score.sentence_bleu(reference, g, weight)
BLEUscore[0] = BLEUscore[0]/len(generated)
BLEUscore[1] = BLEUscore[1]/len(generated)
BLEUscore[2] = BLEUscore[2]/len(generated)
return BLEUscore
def cal_BLEU_4(generated, reference, is_corpus = False):
#print 'in BLEU score calculation'
#the maximum is bigram, so assign the weight into 2 half.
BLEUscore = [0.0,0.0,0.0,0.0]
for idx, g in enumerate(generated):
if is_corpus:
score, scores = Bleu(4).compute_score(reference, {0: [g]})
else:
score, scores = Bleu(4).compute_score({0: [reference[0][idx]]} , {0: [g]})
#print g, score
for i, s in zip([0,1,2,3],score):
BLEUscore[i]+=s
#BLEUscore += nltk.translate.bleu_score.sentence_bleu(reference, g, weight)
BLEUscore[0] = BLEUscore[0]/len(generated)
BLEUscore[1] = BLEUscore[1]/len(generated)
BLEUscore[2] = BLEUscore[2]/len(generated)
BLEUscore[3] = BLEUscore[3]/len(generated)
return BLEUscore
def cal_BLEU_4_nltk(generated, reference, is_corpus = False):
    """BLEU via NLTK with chencherry method2 smoothing.

    Args:
        generated: list of hypothesis token sequences.
        reference: list of reference sentences; each is wrapped as a
            single-reference list.
        is_corpus: if True compute corpus BLEU, otherwise the mean
            sentence BLEU.
    """
    reference = [[s] for s in reference]
    # BUG FIX: SmoothingFunction was referenced unqualified but never
    # imported anywhere in this module (NameError at call time); qualify it
    # through the nltk package that *is* imported.
    chencherry = nltk.translate.bleu_score.SmoothingFunction()
    # Note: please keep smoothing turned on, because there is a bug in NLTK without smoothing (see below).
    if is_corpus:
        return nltk.translate.bleu_score.corpus_bleu(reference, generated, smoothing_function=chencherry.method2) # smoothing options: 0-7
    else:
        return np.mean([nltk.translate.bleu_score.sentence_bleu(r, g, smoothing_function=chencherry.method2) for r,g in zip(reference, generated)]) # smoothing options: 0-7
def cal_entropy(generated):
    """Per-n (n = 1..4) ngram entropy and distinct-ngram ratio.

    Returns (etp_score, div_score), each a list of four floats. Trailing
    '2' characters (the EOS id) are stripped from every sentence first.
    """
    counter = [defaultdict(int) for _ in range(4)]
    for sentence in generated:
        tokens = sentence.rstrip('2').split()
        for n in range(4):
            for start in range(len(tokens) - n):
                gram = ' '.join(tokens[start:start + n + 1])
                counter[n][gram] += 1
    etp_score = [0.0, 0.0, 0.0, 0.0]
    div_score = [0.0, 0.0, 0.0, 0.0]
    for n in range(4):
        total = sum(counter[n].values()) + 1e-10
        for cnt in counter[n].values():
            etp_score[n] += -(cnt + 0.0) / total * (np.log(cnt + 0.0) - np.log(total))
        div_score[n] = (len(counter[n].values()) + 0.0) / total
    return etp_score, div_score
def prepare_for_bleu(sentence):
    """Drop padding zeros, right-pad to at least four ids with zeros,
    and return the ids space-joined as a string."""
    kept = [tok for tok in sentence if tok != 0]
    if len(kept) < 4:
        kept = kept + [0] * (4 - len(kept))
    return ' '.join(str(tok) for tok in kept)
def _clip_gradients_seperate_norm(grads_and_vars, clip_gradients):
"""Clips gradients by global norm."""
gradients, variables = zip(*grads_and_vars)
clipped_gradients = [clip_ops.clip_by_norm(grad, clip_gradients) for grad in gradients]
return list(zip(clipped_gradients, variables))
def binary_round(x):
"""
Rounds a tensor whose values are in [0,1] to a tensor with values in {0, 1},
using the straight through estimator for the gradient.
"""
g = tf.get_default_graph()
with ops.name_scope("BinaryRound") as name:
with g.gradient_override_map({"Round": "Identity"}):
return tf.round(x, name=name)
@tf.RegisterGradient("CustomGrad")
def _const_mul_grad(unused_op, grad):
return grad/1e4
def one_hot_round(x):
"""
Rounds a tensor whose values are in [0,1] to a tensor with values in {0, 1},
using the straight through estimator for the gradient.
"""
g = tf.get_default_graph()
with g.gradient_override_map({"Log": "Identity"}):
x = tf.log(x)
x = 1e4 * x
with g.gradient_override_map({"Identity": "CustomGrad"}):
x = tf.identity(x, name="Identity")
with g.gradient_override_map({"Softmax": "Identity"}):
x = tf.nn.softmax(x)
with g.gradient_override_map({"Round": "Identity"}):
return tf.round(x) # B L V
def merge_two_dicts(x, y):
    """Return a new dict with x's entries, overridden by y's on key collisions."""
    merged = x.copy()
    for key, value in y.items():
        merged[key] = value
    return merged
def reshaping(x, opt, gen_turn=None):
    """Regroup rows from (chunk, turn, batch) order to (chunk, batch, turn) order.

    Args:
        x: array-like whose first dimension is a multiple of
            opt.batch_size * gen_turn.
        opt: options object; reads batch_size, num_turn and n_context.
        gen_turn: number of generated turns; defaults to
            opt.num_turn - opt.n_context.
    """
    if gen_turn is None:  # `is None` instead of `== None`
        gen_turn = opt.num_turn - opt.n_context
    x = np.array(x)
    dim = x.shape
    # BUG FIX: floor division keeps the reshape dimension an int — under
    # Python 3 the original true division produced a float and np.reshape
    # raised a TypeError.
    n_chunks = dim[0] // opt.batch_size // gen_turn
    x = np.reshape(x, [n_chunks, gen_turn, opt.batch_size, -1])
    x = np.transpose(x, (0, 2, 1, 3))
    return np.squeeze(x.reshape([dim[0], -1]))
|
24,740 | 3e0a156bcdca960f4cf21f575fe7e7907fa3d2c5 | from fastapi import FastAPI
import pandas as pd
import boto3
import json
import io
from datetime import datetime, timedelta
app = FastAPI()
AWS_ACCESS_KEY_ID = 'AWS_ACCESS_KEY_ID'
AWS_SECRET_ACCESS_KEY = 'AWS_SECRET_ACCESS_KEY'
BUCKET = 'factored-eafit-bucket'
FILE = 'semaforo/sird_constantes_dia.csv'
FILE_2 = 'semaforo/data_base/'
@app.get("/data_sird")
async def root():
    """Return the SIRD constants CSV from S3 as a JSON array of row objects."""
    s3_resource = boto3.client('s3', aws_access_key_id=AWS_ACCESS_KEY_ID, aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
    csv_obj = s3_resource.get_object(Bucket=BUCKET, Key=FILE)
    body = csv_obj['Body'].read().decode('utf-8')
    df = pd.read_csv(io.StringIO(body))
    result = df.to_json(orient="records")
    parsed = json.loads(result)
    # BUG FIX: return the parsed structure and let FastAPI serialize it.
    # Returning json.dumps(parsed) double-encoded the payload, so clients
    # received a JSON *string* instead of a JSON array.
    return parsed
@app.get("/get_clusters")
async def root():
    """Return yesterday's (UTC) department-clusters JSON document from S3.

    NOTE(review): this redefines the module-level name `root` used by the
    /data_sird handler above; the routes are registered via the decorators,
    but only this function remains bound to the name — consider distinct
    names.
    """
    # Key is FILE_2 + ddmmYYYY for the previous UTC day.
    timestamp = (datetime.utcnow() - timedelta(days=1)).strftime("%d%m%Y")
    s3_resource = boto3.client('s3', aws_access_key_id=AWS_ACCESS_KEY_ID, aws_secret_access_key=AWS_SECRET_ACCESS_KEY)
    content_object = s3_resource.get_object(Bucket=BUCKET, Key=f"{FILE_2}{timestamp}.json")
    file_content = content_object['Body'].read().decode('utf-8')
    departments = json.loads(file_content)
    return departments
# uvicorn get_data_sird:app --reload |
24,741 | 39857160b7652790f521f4a6a313f4811ee1ec3a | """
.. currentmodule:: neet.boolean.network
.. testsetup:: boolean_network
from neet.automata import ECA
from neet.boolean.network import *
from neet.statespace import BooleanSpace
API Documentation
-----------------
"""
from neet.network import UniformNetwork
from neet.python import long
from .sensitivity import SensitivityMixin
import copy
class BooleanNetwork(SensitivityMixin, UniformNetwork):
    """A uniform network whose nodes each take values in {0, 1}."""

    def __init__(self, size, names=None, metadata=None):
        """Create a Boolean network of `size` nodes (base fixed to 2)."""
        super(BooleanNetwork, self).__init__(size, 2, names, metadata)

    def __iter__(self):
        """Yield every state of the space as a fresh list.

        Enumeration is a binary odometer with the least-significant bit
        first, so states come out in encoded (integer) order.
        """
        size = self.size
        state = [0] * size
        yield state[:]
        i = 0
        while i != size:
            if state[i] == 0:
                # Increment position i and clear everything below it.
                state[i] = 1
                for j in range(i):
                    state[j] = 0
                i = 0
                yield state[:]
            else:
                i += 1

    def __contains__(self, state):
        """True iff `state` is a length-`size` sequence of 0s and 1s."""
        try:
            if len(state) != self.size:
                return False
            for x in state:
                if x != 0 and x != 1:
                    return False
            return True
        except TypeError:
            # Non-sequence inputs (no len()) are simply not in the space.
            return False

    def _unsafe_encode(self, state):
        """Encode a state as an integer, little-endian (state[0] is bit 0).

        "Unsafe" because the state is not validated against the space.
        """
        encoded, place = long(0), long(1)
        for x in state:
            encoded += place * long(x)
            place <<= 1
        return encoded

    def decode(self, encoded):
        """Inverse of _unsafe_encode: integer -> list of bits, LSB first."""
        size = self.size
        state = [0] * size
        for i in range(size):
            state[i] = encoded & 1
            encoded >>= 1
        return state

    def subspace(self, indices, state=None):
        """Yield all states reachable from `state` by flipping only `indices`.

        With no indices the base state alone is yielded; with all indices
        this is the whole space. Raises ValueError for an invalid base
        state and IndexError for out-of-range indices.
        """
        size = self.size
        if state is not None and state not in self:
            raise ValueError('provided state is not in the state space')
        elif state is None:
            state = [0] * size
        # Deduplicate and order the indices before bounds-checking them.
        indices = list(set(indices))
        indices.sort()
        nindices = len(indices)
        if nindices == 0:
            yield copy.copy(state)
        elif indices[0] < 0 or indices[-1] >= size:
            raise IndexError('index out of range')
        elif nindices == size:
            for state in self:
                yield state
        else:
            # Odometer over the chosen indices, relative to the base state.
            initial = copy.copy(state)
            yield copy.copy(state)
            i = 0
            while i != nindices:
                if state[indices[i]] == initial[indices[i]]:
                    state[indices[i]] ^= 1
                    for j in range(i):
                        state[indices[j]] = initial[indices[j]]
                    i = 0
                    yield copy.copy(state)
                else:
                    i += 1

    def hamming_neighbors(self, state):
        """Return the `size` states at Hamming distance 1 from `state`."""
        if state not in self:
            raise ValueError('state is not in state space')

        neighbors = [None] * self.size
        for i in range(self.size):
            neighbors[i] = copy.copy(state)
            neighbors[i][i] ^= 1
        return neighbors

    def distance(self, a, b):
        """Hamming distance between two states of the space."""
        if a not in self:
            raise ValueError('first state is not in state space')
        if b not in self:
            raise ValueError('second state is not in state space')
        out = 0
        for i in range(self.size):
            out += a[i] ^ b[i]
        return out
UniformNetwork.register(BooleanNetwork)
|
24,742 | 9455fc62723bfbc4e7f40a0147a4c03336d9daa3 | import pyglet
import rabbyt
import game
class Game(pyglet.window.Window):
    """Top-level pyglet window that owns and runs the active game scene."""

    WINDOW_WIDTH = 900
    WINDOW_HEIGHT = 480
    WINDOW_DIM = (WINDOW_WIDTH, WINDOW_HEIGHT)
    WINDOW_FULLSCREEN = False

    KEY_BINDINGS = {
        'reset': pyglet.window.key.R,
        'fullscreen': pyglet.window.key.F11
    }

    def __init__(self, *args, **kwargs):
        # set window size
        if Game.WINDOW_FULLSCREEN:
            kwargs['fullscreen'] = True
        else:
            kwargs['width'] = Game.WINDOW_WIDTH
            kwargs['height'] = Game.WINDOW_HEIGHT
            # BUG FIX: the original set `self.resizeable = False` after
            # construction, which is a no-op typo (and too late anyway);
            # resizability is a constructor argument.
            kwargs['resizable'] = False
        super(Game, self).__init__(*args, **kwargs)

        # prevent resize below the intended dimensions
        self.set_minimum_size(self.width, self.height)
        # BUG FIX: `self.set_mouse_visible = False` overwrote the bound
        # method with a bool instead of calling it.
        self.set_mouse_visible(False)

        # rabbyt set up
        rabbyt.set_default_attribs()

        # game scenes, these include the main game and the supporting menu scenes
        self._current_scene = None

    def static_projection(self):
        """Screen-space projection rectangle (origin top-left)."""
        return (0, self.height, self.width, 0)

    def start(self):
        """Create the first scene and enter the pyglet run loop."""
        # track fps
        self.fps_display = pyglet.clock.ClockDisplay()
        # Create and start the first scene
        self.current_scene = game.scene.GameScene(self)
        self.current_scene.start()
        # start the game loop
        pyglet.app.run()

    def stop(self):
        """Stop the active scene (if any) and exit the run loop."""
        if self.current_scene is not None:
            self.current_scene.stop()
        # stop the game loop
        pyglet.app.exit()

    def reset(self):
        """Restart the game from scratch."""
        self.stop()
        self.start()

    def update(self, dt):
        """Tick the active scene by dt seconds."""
        if self.current_scene is not None:
            self.current_scene.update(dt)

    def on_draw(self):
        """Clear, draw the scene, then overlay the FPS counter."""
        rabbyt.clear()
        # Draw the current scene
        if self.current_scene is not None:
            self.current_scene.on_draw()
        # Draw fps in screen space regardless of the scene's camera
        rabbyt.set_viewport((self.width, self.height), projection=self.static_projection())
        self.fps_display.draw()

    def on_key_press(self, symbol, modifiers):
        """Handle global bindings; delegate everything else to the scene."""
        if symbol == Game.KEY_BINDINGS['reset']:
            self.reset()
        elif symbol == Game.KEY_BINDINGS['fullscreen']:
            self.toggle_fullscreen()
        elif self.current_scene is not None:
            self.current_scene.on_key_press(symbol, modifiers)

    def toggle_fullscreen(self):
        """Flip fullscreen; restore the fixed size when windowed again."""
        self.set_fullscreen(not self.fullscreen)
        if not self.fullscreen:
            self.set_size(Game.WINDOW_WIDTH, Game.WINDOW_HEIGHT)

    def on_key_release(self, symbol, modifiers):
        if self.current_scene is not None:
            self.current_scene.on_key_release(symbol, modifiers)

    def on_resize(self, width, height):
        if self.current_scene is not None:
            self.current_scene.on_resize(width, height)

    @property
    def current_scene(self):
        return self._current_scene

    @current_scene.setter
    def current_scene(self, scene):
        # stop the old scene before swapping in the new one
        if self.current_scene is not None:
            self.current_scene.stop()
        self._current_scene = scene
PROFILE = False
def main():
    """Create the game window and enter the pyglet run loop."""
    Game().start()
# Start game when running this file
if __name__ == '__main__':
if PROFILE:
import cProfile
cProfile.run('main()')
else:
main()
|
24,743 | 47de2d23ff5b39aba5f4005240b72b287df2a570 | """TO-DO: Write a description of what this XBlock is."""
import pkg_resources
import logging
import json
import requests
import datetime
import pytz
import time
import re
from django.template import Context, Template
from django.contrib.auth.models import User
from xblock.core import XBlock
from xblock.fields import Scope, String, DateTime, Float, Integer
from xblock.fragment import Fragment
from xblock.exceptions import JsonHandlerError
from courseware.models import StudentModule
# Module-level logger for this XBlock.
log = logging.getLogger(__name__)
class KVXBlock(XBlock):
    """
    This xblock records a user's profile video to Kulu Valley.
    """
    # Graded component: completing a recording awards `points` (see mark_as_done).
    has_score = True

    # ---- Studio-configurable settings (Scope.settings) ----
    display_name = String(
        display_name="Display Name",
        default="Profile Videos",
        scope=Scope.settings,
        help="This name appears in the horizontal navigation at the top of the page."
    )
    weight = Float(
        display_name="Problem Weight",
        help=("Defines the number of points each problem is worth. "
              "If the value is not set, the problem is worth the sum of the "
              "option point values."),
        values={"min": 0, "step": .1},
        scope=Scope.settings
    )
    points = Integer(
        display_name="Score",
        help=("Grade score given to assignment by staff."),
        default=1,
        scope=Scope.settings
    )
    users_excluded_email = String(
        scope=Scope.settings,
        display_name="Users to exclude by email",
        default='',
        help="A list of email addresses of users that should not be shown. Wildcards are allowed, e.g. *.imd.org"
    )

    # ---- Per-student state (Scope.user_state) ----
    video_mp4_url = String(
        display_name="Video mp4 URL",
        default='',
        scope=Scope.user_state,
        help="Video mp4 URL"
    )
    ########################################################
    video_kulu_id = String(
        display_name="Video kulu id",
        default='',
        scope=Scope.user_state,
        help="Video kulu id"
    )
    video_hls_url = String(
        display_name="Video HLS URL",
        default='',
        scope=Scope.user_state,
        help="Video HLS URL"
    )
    video_thumbnail_url = String(
        display_name="Video Thumbnail URL",
        default='',
        scope=Scope.user_state,
        help="Video Thumbnail URL"
    )
    video_date_created = DateTime(
        display_name="Video Creation Date",
        default=None,
        scope=Scope.user_state,
        help="Video Creation Date"
    )

    def is_course_staff(self):
        # pylint: disable=no-member
        """
        Check if user is course staff.
        """
        return getattr(self.xmodule_runtime, 'user_is_staff', False)

    def is_instructor(self):
        # pylint: disable=no-member
        """
        Check if user role is instructor.
        """
        return self.xmodule_runtime.get_user_role() == 'instructor'

    def is_in_studio(self):
        """
        Return true if in studio.
        """
        return getattr(self.xmodule_runtime, 'is_author_mode', False)

    def student_view(self, context=None):
        """
        The primary view of the KVXBlock, shown to students
        when viewing courses.
        """
        # Backfill any missing mp4/hls URLs before rendering.
        self.updateStudentVideoUrls()
        context = {
            'not_in_studio': not self.is_in_studio(),
            'display_name': self.display_name,
            'video_kulu_id': none_to_empty(self.video_kulu_id),
            'video_mp4_url': none_to_empty(self.video_mp4_url),
            'video_hls_url': none_to_empty(self.video_hls_url),
            'video_thumbnail_url': none_to_empty(self.video_thumbnail_url),
            'video_date_created': date_handler(self.video_date_created),
            'video_icon_url': self.runtime.local_resource_url(self, 'public/images/video.png'),
            'ascending_sort_icon_url': self.runtime.local_resource_url(self, 'public/images/down.png'),
            'descending_sort_icon_url': self.runtime.local_resource_url(self, 'public/images/up.png')
        }
        frag = Fragment()
        frag.add_content(
            render_template(
                'static/html/kvxblock.html',
                context
            )
        )
        frag.add_css(load_resource("static/css/kvxblock.css"))
        frag.add_javascript(load_resource("static/js/vendor/easyxdm/easyXDM.debug.js"))
        frag.add_javascript(load_resource("static/js/vendor/URI.js"))
        frag.add_javascript(load_resource("static/js/vendor/jquery.ui.touch-punch.min.js"))  # required for shapeshift on mobile
        frag.add_javascript(load_resource("static/js/vendor/jquery.shapeshift.min.js"))
        frag.add_javascript(load_resource("static/js/vendor/CryptoJS/core-min.js"))
        frag.add_javascript(load_resource("static/js/vendor/CryptoJS/enc-utf16-min.js"))
        frag.add_javascript(load_resource("static/js/vendor/CryptoJS/enc-base64-min.js"))
        frag.add_javascript(load_resource("static/js/vendor/CryptoJS/md5.js"))
        frag.add_javascript(load_resource("static/js/vendor/CryptoJS/tripledes.js"))
        # videojs
        frag.add_css_url("https://vjs.zencdn.net/5.8.0/video-js.css")
        frag.add_javascript_url("https://vjs.zencdn.net/ie8/1.1.2/videojs-ie8.min.js")
        frag.add_javascript_url("https://vjs.zencdn.net/5.8.0/video.js")
        frag.add_javascript(load_resource("static/js/src/kvcreator.js"))
        frag.add_javascript(load_resource("static/js/src/kvxblock.js"))
        frag.initialize_js('KVXBlock')
        return frag

    def studio_view(self, context=None):
        """
        Return fragment for editing block in studio.
        """
        try:
            cls = type(self)
            # Tuples are (field, widget-type-name, current-value, js-validator).
            edit_fields = (
                (field, type, none_to_empty(getattr(self, field.name)), validator)
                for field, type, validator in (
                    (cls.display_name, 'String', 'string'),
                    (cls.points, 'Integer', 'number'),
                    (cls.weight, 'Float', 'number'),
                    (cls.users_excluded_email, 'TextArea', 'string'),
                )
            )
            context = {
                'fields': edit_fields
            }
            html = render_template('static/html/kvxblockedit.html', context)
            fragment = Fragment(html)
            fragment.add_javascript(load_resource("static/js/src/kvxblockedit.js"))
            fragment.initialize_js('kvXBlockInitStudio')
            return fragment
        except:  # pragma: NO COVER
            log.error("Don't swallow my exceptions", exc_info=True)
            raise

    @XBlock.json_handler
    def save_kvxblock(self, data, suffix=''):
        # pylint: disable=unused-argument
        """
        Persist xblock data when updating settings in studio.

        Raises JsonHandlerError (HTTP 400) on invalid points/weight/exclusion
        patterns so studio shows the message to the author.
        """
        self.display_name = data.get('display_name', self.display_name)
        # Validate points before saving
        points = data.get('points', self.points)
        # Check that we are an int
        try:
            points = int(points)
        except ValueError:
            raise JsonHandlerError(400, 'Points must be an integer')
        # Check that we are positive
        if points < 0:
            raise JsonHandlerError(400, 'Points must be a positive integer')
        self.points = points
        # Validate weight before saving
        weight = data.get('weight', self.weight)
        # Check that weight is a float.
        if weight:
            try:
                weight = float(weight)
            except ValueError:
                raise JsonHandlerError(400, 'Weight must be a decimal number')
            # Check that we are positive
            if weight < 0:
                raise JsonHandlerError(
                    400, 'Weight must be a positive decimal number'
                )
        self.weight = weight
        users_excluded_email = data.get('users_excluded_email', self.users_excluded_email)
        # Reject patterns that would not compile into a usable regexp.
        try:
            regexp_string = self.regexp_from_users_excluded_email(users_excluded_email)
            re.compile(regexp_string)
        except:
            raise JsonHandlerError(400, 'Users to exclude by email is causing an error, please edit.')
        self.users_excluded_email = users_excluded_email

    ########################################################
    # Video handlers
    ########################################################
    def regexp_from_users_excluded_email(self, users_excluded_email):
        """
        Build one '|'-joined regexp from newline-separated email patterns.

        Each non-empty line is anchored with ^…$; '.' is escaped and '*'
        becomes '.*' so shell-style wildcards (e.g. *.imd.org) work.
        """
        regexp_string = ''
        emails = users_excluded_email.split('\n')
        for email in emails:
            if len(email) > 0:
                regexp = '^' + email + '$'
                regexp = regexp.replace('.', '\\.')
                regexp = regexp.replace('*', '.*')
                if len(regexp_string) > 0:
                    regexp_string += '|' + regexp
                else:
                    regexp_string += regexp
        log.info('regexp: %s', regexp_string)
        return regexp_string

    def get_videos(self):
        """
        Collect every student's stored video state for this block.

        Skips students whose email matches the exclusion patterns, and
        best-effort backfills missing mp4/hls URLs from the Kulu Valley API
        (persisting them back into the StudentModule state).  Per-student
        failures are swallowed so one bad record cannot hide the rest.
        """
        regexp = None
        try:
            regexp_string = self.regexp_from_users_excluded_email(self.users_excluded_email)
            if (len(regexp_string)):
                regexp = re.compile(regexp_string)
        except:
            log.info("regexp is invalid: '%s', showing all students instead", regexp_string)
        modules = StudentModule.objects.filter(module_state_key=self.location)
        userVideos = []
        for module in modules:
            moduleState = json.loads(module.state)
            # log.info('moduleState = {}'.format(moduleState))
            user = User.objects.get(id=module.student_id)
            if regexp and regexp.match(user.email):
                continue
            userName = user.username
            try:
                if ('video_kulu_id' in moduleState):
                    video_kulu_id = moduleState['video_kulu_id']
                    if (len(video_kulu_id) > 0):
                        dateCreated = ''
                        dateCreatedIso = ''
                        try:
                            if (moduleState['video_date_created']):
                                dateCreatedObj = datetime.datetime.strptime(moduleState['video_date_created'], DateTime.DATETIME_FORMAT)
                                dateCreated = date_handler(dateCreatedObj)
                                dateCreatedIso = dateCreatedObj.isoformat()
                        except:
                            pass
                        video_mp4_url = moduleState['video_mp4_url']
                        video_hls_url = moduleState['video_hls_url']
                        if (video_mp4_url is None or video_hls_url is None):
                            # Missing URL(s): try one non-retrying fetch and
                            # persist whatever we learned.
                            try:
                                mp4_url, hls_url = self.get_video_urls(video_kulu_id, retry=False)
                                if (mp4_url and video_mp4_url is None) or (hls_url and video_hls_url is None):
                                    video_mp4_url = moduleState['video_mp4_url'] = mp4_url
                                    video_hls_url = moduleState['video_hls_url'] = hls_url
                                    module.state = json.dumps(moduleState)
                                    module.save()
                            except:
                                pass
                        userVideos.append({
                            'name': userName,
                            'video_mp4_url': video_mp4_url,
                            'video_hls_url': video_hls_url,
                            'video_thumbnail_url': moduleState['video_thumbnail_url'],
                            'video_date_created': dateCreated,
                            'video_date_created_iso': dateCreatedIso,
                            'video_kulu_id': video_kulu_id,
                        })
            except:
                pass
        return userVideos

    def updateStudentVideoUrls(self):
        """
        Best-effort refresh of this student's mp4/hls URLs when a kulu id
        exists but one of the URLs is missing; failures are only logged.
        """
        if (self.video_kulu_id and
                (self.video_mp4_url is None or self.video_hls_url is None)):
            log.warning("student video is missing urls: kulu_id=%s mp4_url=%s, hls_url=%s",
                        self.video_kulu_id, self.video_mp4_url, self.video_hls_url)
            log.info(
                "updating student video for course:%s module:%s student:%s",
                self.course_id,
                self.location,
                self.logged_in_username()
            )
            try:
                mp4_url, hls_url = self.get_video_urls(self.video_kulu_id, retry=False)
                self.video_mp4_url = mp4_url
                self.video_hls_url = hls_url
            except:
                log.info('failed to update urls for kulu id %s', self.video_kulu_id)

    @XBlock.json_handler
    def get_all_videos(self, data, suffix=''):
        """Return all students' videos for this block as a JSON string."""
        log.info(
            "get_all_videos for course:%s module:%s student:%s",
            self.course_id,
            self.location,
            self.logged_in_username()
        )
        return {
            'all_videos': json.dumps(self.get_videos())
        }

    @XBlock.json_handler
    def set_video_id(self, data, suffix=''):
        """
        Set the video url and thumbnail url from Kulu Valley.
        We first call a KV api to get the mp4/hls url.
        Calling with 'kulu_id' = '' deletes existing video.
        """
        kulu_id = data['kulu_id']

        def studentVideoData():
            # Snapshot of this student's stored video fields for the client.
            return {
                'video_mp4_url': none_to_empty(self.video_mp4_url),
                'video_hls_url': none_to_empty(self.video_hls_url),
                'video_thumbnail_url': self.video_thumbnail_url,
                'video_date_created': date_handler(self.video_date_created),
                'video_kulu_id': self.video_kulu_id,
            }
        if (kulu_id and kulu_id != ''):
            template_kulu_valley_preview_url = "https://imd.kuluvalley.com/kulu/{}/thumbnail?v=18"
            thumbnail_url = template_kulu_valley_preview_url.format(kulu_id)
            self.video_kulu_id = kulu_id
            self.video_mp4_url = None
            self.video_hls_url = None
            self.video_thumbnail_url = thumbnail_url
            self.video_date_created = nowUTC()
            # Recording counts as completing the assignment.
            self.mark_as_done()
            log.info(
                "set_video_id for course:%s module:%s student:%s",
                self.course_id,
                self.location,
                self.logged_in_username()
            )
            try:
                mp4_url, hls_url = self.get_video_urls(kulu_id)  # may not be available at this point
                self.video_mp4_url = mp4_url
                self.video_hls_url = hls_url
                return studentVideoData()
            except requests.exceptions.HTTPError as e:
                if (e.response.status_code == 404):
                    raise JsonHandlerError(404, 'video not found')
                else:
                    return studentVideoData()
            except:
                return studentVideoData()
        else:
            # Delete the video
            self.video_mp4_url = None
            self.video_hls_url = None
            self.video_thumbnail_url = None
            self.video_date_created = None
            self.video_kulu_id = None
            self.mark_as_not_done()
            return studentVideoData()

    def get_video_urls(self, kulu_id, retry=True):
        """
        Call KV api to get mp4/hls urls.

        With retry=True, retries with exponential backoff (1, 2, 4.. s)
        while either URL is still missing, up to max_retries attempts.
        Either returned URL may be None if the variant is not yet encoded.
        """
        mp4_url = None
        hls_url = None
        if (kulu_id):
            def fetch_kulu_urls(kulu_id):
                # Single API call; picks the hls_default and mobile_mp4 variants.
                mp4_url = None
                hls_url = None
                kulu_valley_kulus_url = "https://imd.kuluvalley.com/api/2.1/rest/kulus/"
                r = requests.get(kulu_valley_kulus_url + kulu_id)
                if (r.status_code == requests.codes.ok):
                    o = r.json()
                    variants = o["kulu"]["media"]["variants"]
                    for variant in variants:
                        if (variant["formatCode"] == "hls_default"):
                            hls_url = variant["url"]
                        if (variant["formatCode"] == "mobile_mp4"):
                            mp4_url = variant["url"]
                    log.info("hls_url = %s", hls_url)
                    log.info("mp4_url = %s", mp4_url)
                r.raise_for_status()
                return mp4_url, hls_url
            log.info("getting kulu valley urls")
            mp4_url, hls_url = fetch_kulu_urls(kulu_id)
            if retry:
                retry_count = 1
                max_retries = 1
                while (retry_count <= max_retries and (mp4_url is None or hls_url is None)):
                    log.info("getting kulu valley urls: retry %d", retry_count)
                    sleep_for = 2 ** (retry_count-1)  # 1, 2, 4.. seconds
                    log.info("sleeping for %.1f seconds", sleep_for)
                    time.sleep(sleep_for)
                    mp4_url, hls_url = fetch_kulu_urls(kulu_id)
                    retry_count += 1
        return mp4_url, hls_url

    ########################################################
    def mark_as_done(self):
        """
        Mark the assignment as done for this student (full points).
        """
        grade_event = {'value': self.points, 'max_value': self.points}
        self.runtime.publish(self, 'grade', grade_event)

    def mark_as_not_done(self):
        """
        Mark the assignment as not done for this student (zero points).
        """
        grade_event = {'value': 0, 'max_value': self.points}
        self.runtime.publish(self, 'grade', grade_event)

    def logged_in_username(self):
        """Return the username of the user who owns this block's state."""
        loggedInUser = User.objects.get(id=self.scope_ids.user_id)
        return loggedInUser.username

    @staticmethod
    def workbench_scenarios():
        """A canned scenario for display in the workbench."""
        return [
            ("KVXBlock",
             """<vertical_demo>
                <kvxblock/>
                </vertical_demo>
             """),
        ]
def render_template(template_path, context=None):  # pragma: NO COVER
    """
    Render the Django template found at *template_path* with *context*.
    """
    ctx = {} if context is None else context
    source = load_resource(template_path)
    return Template(source).render(Context(ctx))
def load_resource(resource_path):  # pragma: NO COVER
    """
    Read a packaged resource file and return its contents as text (UTF-8).
    """
    raw = pkg_resources.resource_string(__name__, resource_path)
    return raw.decode("utf8")
def nowUTC():
    """
    Get current date and time in UTC as a timezone-aware datetime.

    Uses the stdlib ``datetime.timezone.utc`` rather than ``pytz.utc``:
    the two yield interchangeable aware datetimes for formatting,
    comparison and arithmetic, and this drops the pytz dependency here.
    """
    return datetime.datetime.now(datetime.timezone.utc)
def date_handler(obj):
    """
    Format a date or datetime as e.g. '30 Nov 2009 16:52:30'.

    Returns None for any other type.  Replaces the original lambda
    assignment (discouraged by PEP 8 E731) with an equivalent ``def``;
    behavior is unchanged.
    """
    if isinstance(obj, (datetime.datetime, datetime.date)):
        return obj.strftime('%d %b %Y %H:%M:%S')
    return None
def none_to_empty(data):
    """Return the empty string in place of None; pass any other value through."""
    if data is None:
        return ''
    return data
|
24,744 | 9e5e369820dad064e09d512d2e3dce942f1f04f9 |
import tornado.httpserver
import tornado.web
try: import simplejson as json
except ImportError: import json
import logging
import logging.config
from models.application import *
from server.decorators import authenticated_request
from server.handlers import BaseHandler, LoginHandler
from models.base import Base
from models.packages import *
from models.node import *
from models.ssl import *
from models.scheduler import *
from db.client import *
from scheduler.jobManager import job_lister, remove_job
from scheduler.timeBlocker import *
from tagging.tagManager import *
from search.search import *
from utils.common import *
from packages.pkgManager import *
from node.nodeManager import *
from transactions.transactions_manager import *
from logger.rvlogger import RvLogger
from sqlalchemy import distinct, func
from sqlalchemy.orm import sessionmaker, class_mapper
from jsonpickle import encode
# Load the shared TopPatch logging configuration and get this API's logger.
logging.config.fileConfig('/opt/TopPatch/conf/logging.config')
logger = logging.getLogger('rvapi')
class TagListerByTagHandler(BaseHandler):

    @authenticated_request
    def get(self):
        """Return the full tag listing as pretty-printed JSON."""
        self.session = validate_session(self.application.session)
        result = tag_lister(self.session)
        self.session.close()
        self.set_header('Content-Type', 'application/json')
        self.write(json.dumps(result, indent=4))
class TagListerByNodeHandler(BaseHandler):

    @authenticated_request
    def get(self):
        """Return the tag listing grouped by node as pretty-printed JSON."""
        self.session = validate_session(self.application.session)
        result = tag_list_by_nodes(self.session)
        self.session.close()
        self.set_header('Content-Type', 'application/json')
        self.write(json.dumps(result, indent=4))
class TagAddHandler(BaseHandler):

    @authenticated_request
    def post(self):
        """Create a new tag named by the required 'operation' argument.

        Bug fix: when the argument was missing the handler previously wrote
        the error message but then FELL THROUGH and still called tag_adder()
        with tag=None, leaking the session as well; it now reports the error,
        closes the session and returns early.
        """
        username = self.get_current_user()
        self.session = self.application.session
        self.session = validate_session(self.session)
        try:
            tag = self.get_argument('operation')
        except Exception as e:
            self.write("Wrong argument passed %s, the argument needed is operation" % (e))
            self.session.close()
            return
        result = tag_adder(self.session, tag, username=username)
        self.session.close()
        self.set_header('Content-Type', 'application/json')
        self.write(json.dumps(result, indent=4))
class TagAddPerNodeHandler(BaseHandler):

    @authenticated_request
    def post(self):
        """Attach a tag to node(s) per the required 'operation' argument.

        Bug fixes: on a missing argument the handler previously fell through
        and called tag_add_per_node() with self.msg unset (AttributeError) and
        leaked the session; it now reports the error, closes the session and
        returns.  Also fixes the 'arguement' typo in the error message.
        """
        username = self.get_current_user()
        self.session = self.application.session
        self.session = validate_session(self.session)
        try:
            self.msg = self.get_argument('operation')
        except Exception as e:
            self.write("Wrong argument passed %s, the argument needed is tag" % (e))
            self.session.close()
            return
        result = tag_add_per_node(self.session, self.msg, username=username)
        self.session.close()
        self.set_header('Content-Type', 'application/json')
        self.write(json.dumps(result, indent=4))
class TagRemovePerNodeHandler(BaseHandler):

    @authenticated_request
    def post(self):
        """Detach a tag from node(s) per the required 'operation' argument.

        Bug fixes: on a missing argument the handler previously fell through
        and called tag_remove_per_node() with self.msg unset (AttributeError)
        and leaked the session; it now reports the error, closes the session
        and returns.  Also fixes the 'arguement' typo in the error message.
        """
        username = self.get_current_user()
        self.session = self.application.session
        self.session = validate_session(self.session)
        try:
            self.msg = self.get_argument('operation')
        except Exception as e:
            self.write("Wrong argument passed %s, the argument needed is tag" % (e))
            self.session.close()
            return
        result = tag_remove_per_node(self.session, self.msg,
                                     username=username)
        self.session.close()
        self.set_header('Content-Type', 'application/json')
        self.write(json.dumps(result, indent=4))
class TagRemoveHandler(BaseHandler):

    @authenticated_request
    def post(self):
        """Delete the tag named by the required 'operation' argument.

        Bug fix: on a missing argument the handler previously fell through
        and still called tag_remove() with tag=None, leaking the session; it
        now reports the error, closes the session and returns.  Also fixes
        the 'arguement' typo in the error message.
        """
        username = self.get_current_user()
        self.session = self.application.session
        self.session = validate_session(self.session)
        try:
            tag = self.get_argument('operation')
        except Exception as e:
            self.write("Wrong argument passed %s, the argument needed is tag" % (e))
            self.session.close()
            return
        result = tag_remove(self.session, tag, username=username)
        self.session.close()
        self.set_header('Content-Type', 'application/json')
        self.write(json.dumps(result, indent=4))
class TagsHandler(BaseHandler):

    @authenticated_request
    def get(self):
        """Return all data for a tag identified by 'tag_name' or 'tag_id'."""
        username = self.get_current_user()  # kept for parity with the other handlers
        session = validate_session(self.application.session)
        tag_name = self.get_argument('tag_name', None)
        tag_id = self.get_argument('tag_id', None)
        if tag_name:
            result = get_all_data_for_tag(session, tag_name=tag_name)
        elif tag_id:
            result = get_all_data_for_tag(session, tag_id=tag_id)
        else:
            # Neither identifier supplied.
            result = {
                'pass': False,
                'message': 'Invalid Arguments'
            }
        self.set_header('Content-Type', 'application/json')
        session.close()
        self.write(json.dumps(result, indent=4, encoding='utf8'))
|
24,745 | 73c1d9db01684d0d9f52bbb7416fd63c0b9dd74a | #!/usr/bin/env python
import datetime
import os
import types
import unittest2 as unittest
import awstats_reader
# Shorthand for os.path.dirname, applied twice below to climb two levels.
opd = os.path.dirname
# Directory holding the AWStats fixture files: <two levels up>/test_files
test_file_dir = os.path.join(opd(opd(os.path.abspath(__file__))), 'test_files')
class TestAwstatsHelpers(unittest.TestCase):
    """Tests various helper functions in the awstats_reader package"""

    def test_14_digit_datetime(self):
        """Ensure date/time strings are being evaluated correctly"""
        obj = awstats_reader.awstats_datetime('20091130165230')
        dt = datetime.datetime(2009, 11, 30, 16, 52, 30)
        self.assertEqual(obj, dt)

    def test_8_digit_date(self):
        """Ensure date strings are being evaluated correctly"""
        obj = awstats_reader.awstats_datetime('20091130')
        dt = datetime.date(2009, 11, 30)
        self.assertEqual(obj, dt)

    def test_14_digit_datetime_detection(self):
        """Ensure the AwstatsDateTime object is returned"""
        obj = awstats_reader.awstats_datetime('20091130165230')
        self.assertTrue(isinstance(obj, awstats_reader.AwstatsDateTime))

    def test_8_digit_date_detection(self):
        """Ensure the AwstatsDate object is returned"""
        obj = awstats_reader.awstats_datetime('20091130')
        self.assertTrue(isinstance(obj, awstats_reader.AwstatsDate))

    def test_year_year_zero_datetime_parse(self):
        """Ensure the Awstats 'year zero' is parsed correctly"""
        # AWStats uses the literal string '0' as a null timestamp.
        obj = awstats_reader.awstats_datetime('0')
        self.assertEqual(obj,datetime.datetime(1,1,1))

    def test_year_zero_datetime_output(self):
        """Ensure the Awstats 'year zero' is printed correctly"""
        obj = awstats_reader.awstats_datetime('0')
        self.assertEqual(obj.strftime('%Y%m%d%H%M%S'), '0')

    def test_good_year_datetime_output(self):
        """Ensure a good date is printed correctly)"""
        obj = awstats_reader.awstats_datetime('20110430184200')
        self.assertEqual(obj.strftime('%Y%m%d%H%M%S'), '20110430184200')

    def test_datetime_invalid_string(self):
        """Ensure an invalid date/time string raises an exception"""
        self.assertRaises(RuntimeError, awstats_reader.awstats_datetime, '2009')

    def test_attr_dict(self):
        """Ensure AttrDict behaves correctly"""
        # AttrDict exposes mapping entries as attributes.
        obj = awstats_reader.AttrDict([('this','that'), ('thus','those')])
        self.assertEqual(obj.thus, 'those')
class TestAwstatsReader(unittest.TestCase):
    """Tests the AwstatsReader main object"""

    def test_init(self):
        """Ensure we can initialize the class"""
        ar = awstats_reader.AwstatsReader('/tmp', 'example.com')
        self.assertTrue(isinstance(ar, awstats_reader.AwstatsReader))

    def test_invalid_year_fail(self):
        """Ensure getting an invalid year raises an exception"""
        ar = awstats_reader.AwstatsReader('/tmp', 'example.com')
        self.assertRaises(KeyError, ar.__getitem__, 9999)

    def test_valid_year(self):
        """Ensure getting a valid year returns an AwstatsYear object"""
        ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')
        obj = ar[2009]
        self.assertTrue(isinstance(obj, awstats_reader.AwstatsYear))

    def test_invalid_dir(self):
        """Ensure passing an invalid directory raises an exception"""
        self.assertRaises(OSError, awstats_reader.AwstatsReader, '/tmp/XYZ', 'example.com')

    def test_iter_operation(self):
        """Ensure AwstatsYear's __iter__ function is working"""
        ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')
        year_iter = ar.__iter__()
        self.assertTrue(isinstance(year_iter, types.GeneratorType))

    def test_found_all_years(self):
        """Ensure all years were found"""
        ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')
        self.assertEqual(ar.years, [2008,2009])

    def test_iter_all_years(self):
        """Ensure year iterator is working"""
        ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')
        self.assertEqual([ary.year for ary in ar], [2008,2009])

    def test_contains_year_true(self):
        """Ensure __contains__ is working positively"""
        # The chained assignment also stores the reader on self.ar; only the
        # local name is used by the assertion.
        ary = self.ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')
        self.assertTrue(2008 in ary)

    def test_contains_year_false(self):
        """Ensure __contains__ is working negatively"""
        ary = self.ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')
        self.assertFalse(2111 in ary)

    def test_str(self):
        """Ensure AwstatsReader __str__ is operating correctly"""
        ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')
        self.assertEqual(str(ar), '<AwstatsReader: 2008, 2009>')
class TestAwstatsYear(unittest.TestCase):
    """Tests the AwstatsYear object (month lookup, iteration, containment)."""

    def setUp(self):
        self.ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')

    def test_invalid_month(self):
        """Ensure getting an invalid month raises an exception"""
        ar_year = self.ar[2009]
        self.assertRaises(KeyError, ar_year.__getitem__, 1)

    def test_valid_month(self):
        """Ensure getting a valid month returns an AwstatsMonth object"""
        ar_month = self.ar[2009][11]
        self.assertTrue(isinstance(ar_month, awstats_reader.AwstatsMonth))

    def test_found_all_months(self):
        """Ensure all months were found"""
        ary = self.ar[2009]
        self.assertEqual(ary.months, [11,12])

    def test_iter_all_months(self):
        """Ensure month is working"""
        ary = self.ar[2009]
        self.assertEqual([arm.month for arm in ary], [11,12])

    def test_contains_month_true(self):
        """Ensure __contains__ is working positively"""
        # Builds a fresh reader (also rebinding self.ar) and indexes the year.
        ary = self.ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')[2009]
        self.assertTrue(11 in ary)

    def test_contains_month_false(self):
        """Ensure __contains__ is working negatively"""
        ary = self.ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')[2009]
        self.assertFalse(10 in ary)

    def test_str(self):
        """Ensure AwstatsYear __str__ is operating correctly"""
        ary = self.ar[2009]
        self.assertEqual(str(ary), '<AwstatsYear 2009: 11, 12>')
class TestAwstatsMonth(unittest.TestCase):
    """Tests the AwstatsMonth object (section lookup, iteration, length)."""

    # Every section expected in the fixture's month files, in file order.
    wanted_sections = ['general', 'time', 'visitor', 'day', 'domain',
                       'login', 'robot', 'worms', 'emailsender',
                       'emailreceiver', 'session', 'sider', 'filetypes', 'os',
                       'browser', 'screensize', 'unknownreferer',
                       'unknownrefererbrowser', 'origin', 'sereferrals',
                       'pagerefs', 'searchwords', 'keywords', 'misc', 'errors',
                       'cluster', 'sider_404', 'plugin_geoip_city_maxmind']

    def setUp(self):
        self.ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')

    def test_month_iterator(self):
        """Ensure iterating a month yields all sections"""
        arm = self.ar[2009][11]
        self.assertEqual(list(arm), self.__class__.wanted_sections)

    def test_month_keys(self):
        """Ensure keys() lists all sections"""
        arm = self.ar[2009][11]
        self.assertEqual(arm.keys(), self.__class__.wanted_sections)

    def test_get_invalid_section(self):
        """Ensure getting an invalid sections raises an exception"""
        arm = self.ar[2009][11]
        self.assertRaises(KeyError, arm.__getitem__, 'invalid_section')

    def test_get_valid_section(self):
        """Ensure getting an valid section resturns an AwstatsSection object"""
        arm = self.ar[2009][11]
        ars = arm['general']
        self.assertTrue(isinstance(ars, awstats_reader.AwstatsSection))

    def test_len(self):
        """Ensure the len() function returns the correct value"""
        arm = self.ar[2009][11]
        self.assertEqual(len(arm.keys()), len(self.__class__.wanted_sections))

    def test_str(self):
        """Ensure AwstatsMonth __str__ is operating correctly"""
        arm = self.ar[2009][11]
        self.assertEqual(str(arm), '<AwstatsMonth 2009-11>')
class TestAwstatsSection(unittest.TestCase):
    """Tests the AwstatsSection object (line access, merging, formatting)."""

    # Every line expected in the fixture's 'general' section, in file order.
    wanted_lines = ['LastLine', 'FirstTime', 'LastTime', 'LastUpdate',
                    'TotalVisits', 'TotalUnique', 'MonthHostsKnown',
                    'MonthHostsUnknown']

    def setUp(self):
        self.ar = awstats_reader.AwstatsReader(test_file_dir, 'jjncj.com')

    def test_get_valid_line(self):
        """Ensure getting a valid line returns an ordered dict"""
        ars = self.ar[2009][11]['general']
        self.assertTrue(isinstance(ars['TotalVisits'], awstats_reader.AttrDict))

    def test_get_invalid_line(self):
        """Ensure getting an invalid line raises an exception"""
        ars = self.ar[2009][11]['general']
        self.assertRaises(KeyError, ars.__getitem__, 'invalid_section')

    def test_section_iterator(self):
        """Ensure the section iterator works"""
        ars = self.ar[2009][11]['general']
        self.assertEqual(list(ars), self.__class__.wanted_lines)

    def test_section_keys(self):
        """Ensure the section keys works"""
        ars = self.ar[2009][11]['general']
        self.assertEqual(ars.keys(), self.__class__.wanted_lines)

    def test_get_return_default(self):
        """Ensure get() returns the default default value"""
        ars = self.ar[2009][11]['general']
        self.assertTrue(ars.get('invalid_row') is None)

    def test_get_return_default_provided(self):
        """Ensure get() returns the provided default value"""
        ars = self.ar[2009][11]['general']
        self.assertEqual(ars.get('invalid_row', 'ZZZZZZ'), 'ZZZZZZ')

    def test_get_return_desired_value(self):
        """Ensure get() returns the desired value"""
        ars = self.ar[2009][11]['general']
        self.assertEqual(ars.get('TotalVisits'),
                         awstats_reader.AttrDict([('value', 1475)]))

    def test_get_sort_info(self):
        """Ensure sort_info() returns the correct value"""
        ars = self.ar[2009][11]['day']
        self.assertEqual(ars.get_sort_info(), (31, 'key', True))

    # The merge tests each combine the same section from two different
    # domains and check one merge strategy (max/repl/min/sum/latest).
    def test_merge_max(self):
        """Test max merge operation"""
        ars = self.ar[2009][11]['general']
        ars2 = awstats_reader.AwstatsReader(test_file_dir,
                                            'joshuakugler.com')[2009][11]['general']
        self.assertEqual(ars.merge(ars2, 'LastLine', 'line'), 1011585)

    def test_merge_repl(self):
        """Test repl merge operation"""
        ars = self.ar[2009][11]['general']
        ars2 = awstats_reader.AwstatsReader(test_file_dir,
                                            'joshuakugler.com')[2009][11]['general']
        self.assertEqual(ars.merge(ars2, 'LastLine', 'signature'), '')

    def test_merge_min(self):
        """Test min merge operation"""
        ars = self.ar[2009][11]['general']
        ars2 = awstats_reader.AwstatsReader(test_file_dir,
                                            'joshuakugler.com')[2009][11]['general']
        self.assertEqual(ars.merge(ars2, 'FirstTime', 'first_time'),
                         awstats_reader.AwstatsDateTime(2009, 11, 1, 0, 2, 37))

    def test_merge_sum(self):
        """Test sum merge operation"""
        ars = self.ar[2009][11]['general']
        ars2 = awstats_reader.AwstatsReader(test_file_dir,
                                            'joshuakugler.com')[2009][11]['general']
        self.assertEqual(ars.merge(ars2, 'LastUpdate', 'parsed'), 1262637)

    def test_merge_latest(self):
        """Test 'latest' merge operation"""
        ars = self.ar[2009][11]['sider_404']
        ars2 = awstats_reader.AwstatsReader(test_file_dir,
                                            'joshuakugler.com')[2009][11]['sider_404']
        self.assertEqual(ars.merge(ars2, '/styles/widgets/blog-widget.css',
                                   'last_url_referer'), 'http://joshuakugler.com/')

    def test_str_function(self):
        """Test the 'str' function"""
        ars = self.ar[2009][11]['general']
        self.assertEqual(str(ars), "<AwstatsSection general, OrderedDict([('LastLine', ['20091202000343', '1011585', '206082338', '54716901457']), ('FirstTime', ['20091101000237']), ('LastTime', ['20091130234113']), ('LastUpdate', ['20091201094510', '1011585', '0', '886950', '70062', '54572']), ('TotalVisits', ['1475']), ('TotalUnique', ['547']), ('MonthHostsKnown', ['397']), ('MonthHostsUnknown', ['196'])])>")

    def test_items_function(self):
        """Test the 'items' function"""
        ars = self.ar[2009][11]['general']
        self.assertEqual(list(ars.items()), [('LastLine', ['20091202000343', '1011585', '206082338', '54716901457']), ('FirstTime', ['20091101000237']), ('LastTime', ['20091130234113']), ('LastUpdate', ['20091201094510', '1011585', '0', '886950', '70062', '54572']), ('TotalVisits', ['1475']), ('TotalUnique', ['547']), ('MonthHostsKnown', ['397']), ('MonthHostsUnknown', ['196'])])
class TestAwstatsMerge(unittest.TestCase):
    """Test functions and procedures in awstats_cache_merge"""

    def test_make_get_field(self):
        """Ensure make_get_field constructs a function which returns the proper value"""
        import odict
        od = odict.OrderedDict([('pages', 4), ('hits', 15), ('bandwidth', 386873)])
        # make_get_field returns an extractor applied to (key, row-dict) pairs.
        f = awstats_reader.make_get_field('bandwidth')
        self.assertEqual(f(('dz', od)), 386873)
|
24,746 | 7ff13cba482185e4e3b6af79cdca07c874f2dad6 | # Author: Maria Cristoforo
# Date: November 10, 2020
# Purpose: the vertex class
from cs1lib import *
# Drawing constants: vertex circle radius and edge stroke width (pixels).
RADIUS = 9
EDGE_WIDTH = 4


class Vertex:
    """A named graph vertex at an (x, y) screen position with an adjacency list."""

    def __init__(self, name, x, y):
        self.name = name
        # Coordinates may arrive as strings (e.g. parsed from a data file);
        # they are converted with int() wherever pixel positions are needed.
        self.x = x
        self.y = y
        self.adj = []  # adjacent Vertex objects

    def __str__(self):
        # Bug fix: the original concatenated self.x / self.y directly, which
        # raises TypeError when the coordinates are numbers; str() makes the
        # method work for both string and numeric coordinates.
        name_list = [v.name for v in self.adj]
        return (self.name + "; " + "Location: " + str(self.x) + "," + str(self.y)
                + "; " + "Adjacent Vertices: " + ", ".join(name_list))

    def draw_vertex(self, r, g, b):
        """Draw this vertex as a filled circle in the given RGB color."""
        set_fill_color(r, g, b)
        disable_stroke()
        draw_circle(int(self.x), int(self.y), RADIUS)

    def draw_edge(self, v, r, g, b):
        """Draw the edge from this vertex to vertex v in the given RGB color."""
        enable_stroke()
        set_stroke_width(EDGE_WIDTH)
        set_stroke_color(r, g, b)
        draw_line(int(self.x), int(self.y), int(v.x), int(v.y))

    def draw_edges_adjacent(self, r, g, b):
        """Draw edges from this vertex to every adjacent vertex."""
        for neighbor in self.adj:
            self.draw_edge(neighbor, r, g, b)

    # function to determine whether the mouse is in the smallest square
    # surrounding a vertex
    def within_square(self, x, y):
        """Return True if (x, y) lies strictly inside the bounding square
        of side 2*RADIUS centered on this vertex."""
        if x > (int(self.x) - RADIUS) and x < (int(self.x) + RADIUS):
            if y > (int(self.y) - RADIUS) and y < (int(self.y) + RADIUS):
                return True
        return False
|
24,747 | c330005b6b1832386069aa4064e682d8ff9b4fe5 | n = int(input())
# Count the frequency of each distinct word in first-occurrence order.
# Fix: the original re-scanned the whole list with list.count() for every
# distinct word (O(n^2)); a dict keyed by word counts everything in a single
# O(n) pass.  Dicts preserve insertion order (Python 3.7+), so the output
# order of first occurrence — and the exact output format — is unchanged.
# (collections.Counter would work equally well here.)
counts = {}
for _ in range(n):
    word = input()
    counts[word] = counts.get(word, 0) + 1
print(len(counts))
for c in counts.values():
    print(c, end=" ")
24,748 | a2de76bbe88c44bee38d929d98b2421b0bc1ccb1 | # -*- coding: utf-8 -*-
# CORS headers applied to API responses.
headers = {
    'Access-Control-Allow-Origin': '*',
    'Access-Control-Allow-Methods': 'POST, GET, PUT, DELETE,OPTIONS',
    'Access-Control-Allow-Headers': 'content-type, accept, x-custom-header, Authorization',
    'Access-Control-Max-Age': '3600'
}
# token SIE API BM (Banco de Mexico SIE API access token)
toekn = 'c94ad844a798f3fc61abda40814a83949c4fa2a588a278ff2d96dca127149a32'
# Correctly-spelled alias; `toekn` is kept for backward compatibility with
# existing importers of this module.
token = toekn
# SIE series endpoint template; substitute :idSerie/:fechaIni/:fechaFin.
url = "https://www.banxico.org.mx/SieAPIRest/service/v1/series/:idSerie/datos/:fechaIni/:fechaFin"
# Banxico series ids: UDIS value and the USD/MXN exchange rate.
udis = 'SP68257'
dollar = 'SF63528'
24,749 | 8a6779b17ccd78cacdc65de41b1b21ad3c92442a | import numpy as np
import smf.mc_val as mc
def test_is_discounted_price_process_martingale():
    """Discounted simulated prices must be a martingale: E[S_T]/exp(r*T)
    should recover the initial prices s0 (within 1%)."""
    s0 = np.array([2203.7, 84.2])
    t_expr = 184
    days_a_year = 247
    r = 0.013 / days_a_year  # daily risk-free rate
    alpha = np.array([0.00947784, 0.01790058])
    beta = np.array([-0.00018925, -0.0012299])
    corr_mx = np.array([[1., 0.50664951], [0.50664951, 1.]])
    paths = mc.generate_scenarios(s0,
                                  r,
                                  t_expr,
                                  alpha,
                                  beta,
                                  corr_mx)
    # np.float_ was removed in NumPy 2.0; np.float64 is the identical dtype.
    num = np.exp(r * np.arange(0, t_expr + 1),
                 dtype=np.float64)
    # Monte Carlo estimate, so a 1% relative tolerance.
    assert np.allclose(np.mean(paths[:, -1, :], axis=0) / num[-1], s0, rtol=.01)
def test_is_univariate_radon_nikodym_expectation_equal_1():
    """The univariate Radon-Nikodym derivative must average to 1 under P."""
    s0 = np.array([2203.7, 84.2])
    t_expr = 184
    days_a_year = 247
    r = 0.013 / days_a_year  # daily risk-free rate
    alpha = np.array([0.00947784, 0.01790058])
    beta = np.array([-0.00018925, -0.0012299])
    corr_mx = np.array([[1., 0.50664951], [0.50664951, 1.]])
    # Only the increments are needed here; the paths themselves are unused.
    _, inc = mc.generate_scenarios(s0,
                                   r,
                                   t_expr,
                                   alpha,
                                   beta,
                                   corr_mx,
                                   with_p=True,
                                   return_inc=True)
    rn = mc.calculate_rn_derivative(np.ones(2), alpha, beta, corr_mx[0, 1], r, inc, dim=1)
    # Monte Carlo estimate, so a 1% relative tolerance.
    assert np.allclose(np.mean(rn), 1, rtol=.01)
def test_is_multivariate_radon_nikodym_expectation_equal_1():
    """The multivariate Radon-Nikodym derivative must average to 1 under P."""
    s0 = np.array([2203.7, 84.2])
    t_expr = 184
    days_a_year = 247
    r = 0.013 / days_a_year  # daily risk-free rate
    alpha = np.array([0.00947784, 0.01790058])
    beta = np.array([-0.00018925, -0.0012299])
    corr_mx = np.array([[1., 0.50664951], [0.50664951, 1.]])
    # Only the increments are needed here; the paths themselves are unused.
    _, inc = mc.generate_scenarios(s0,
                                   r,
                                   t_expr,
                                   alpha,
                                   beta,
                                   corr_mx,
                                   with_p=True,
                                   return_inc=True)
    rn = mc.calculate_rn_derivative(np.ones(2), alpha, beta, corr_mx[0, 1], r, inc, dim=2)
    # Monte Carlo estimate, so a 1% relative tolerance.
    assert np.allclose(np.mean(rn), 1, rtol=.01)
|
24,750 | 1fc25294ab80fd45abf801ce0fd8622ce766d054 | from django.conf import settings
from django.contrib.postgres.search import TrigramSimilarity
from django.db.models import Avg, Count
from notifications.models import Notification
from rest_framework import generics, mixins, status, viewsets
from rest_framework.exceptions import NotFound, PermissionDenied
from rest_framework.generics import (CreateAPIView, ListAPIView,
RetrieveUpdateDestroyAPIView)
from rest_framework.pagination import PageNumberPagination
from rest_framework.permissions import (AllowAny, IsAuthenticated,
IsAuthenticatedOrReadOnly)
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Article, Comment, CommentEditHistory, Ratings, Tag, Bookmarks
from .renderers import (ArticleJSONRenderer, CommentEditHistoryJSONRenderer,
CommentJSONRenderer, CommentLikeJSONRenderer,
FavoriteJSONRenderer, NotificationJSONRenderer,
RatingJSONRenderer, BookmarkJSONRenderer)
from .serializers import (ArticleSerializer, CommentEditHistorySerializer,
CommentSerializer, NotificationSerializer,
RatingSerializer, TagSerializer,
UpdateCommentSerializer)
class LargeResultsSetPagination(PageNumberPagination):
    """
    Set pagination results settings
    """
    # Default and maximum page size are both 10; clients may request a
    # size via ?page_size=, capped at max_page_size.
    page_size = 10
    page_size_query_param = 'page_size'
    max_page_size = 10
class ArticleViewSet(mixins.CreateModelMixin,
                     mixins.ListModelMixin,
                     mixins.RetrieveModelMixin,
                     viewsets.GenericViewSet):
    """
    Viewset providing `create`, `list`, `retrieve`, `update` and `destroy`
    actions for articles, looked up by slug.  Write actions require the
    requesting user to be the article's author.
    """
    lookup_field = 'slug'
    # Annotated so serializers can expose the average star rating.
    queryset = Article.objects.annotate(
        average_rating=Avg("rating__stars")
    )
    permission_classes = (IsAuthenticatedOrReadOnly, )
    renderer_classes = (ArticleJSONRenderer, )
    serializer_class = ArticleSerializer
    pagination_class = LargeResultsSetPagination

    def create(self, request):
        """Create an article authored by the requesting user's profile."""
        article = request.data.get('article', {})
        serializer = self.serializer_class(data=article)
        serializer.is_valid(raise_exception=True)
        serializer.save(author=request.user.profile)
        return Response(serializer.data, status=status.HTTP_201_CREATED)

    def list(self, request):
        """
        List all articles, paginated.

        Uses get_queryset() so the optional ?tag= filter (and the
        average_rating annotation) apply to the listing — previously the
        filter was defined but bypassed by Article.objects.all().
        """
        queryset = self.get_queryset()
        serializer_context = {'request': request}
        page = self.paginate_queryset(queryset)
        serializer = self.serializer_class(
            page,
            context=serializer_context,
            many=True
        )
        return self.get_paginated_response(serializer.data)

    def retrieve(self, request, slug):
        """Return a single article by slug, or 404."""
        serializer_context = {'request': request}
        try:
            serializer_instance = self.queryset.get(slug=slug)
        except Article.DoesNotExist:
            raise NotFound("An article with this slug doesn't exist")
        serializer = self.serializer_class(
            serializer_instance,
            context=serializer_context
        )
        return Response(serializer.data, status=status.HTTP_200_OK)

    def update(self, request, slug):
        """
        Partially update an article; only its author may edit it.
        """
        serializer_context = {'request': request}
        try:
            serializer_instance = self.queryset.get(slug=slug)
        except Article.DoesNotExist:
            raise NotFound("An article with this slug doesn't exist.")
        if not serializer_instance.author_id == request.user.profile.id:
            raise PermissionDenied(
                "You are not authorized to edit this article.")
        serializer_data = request.data.get('article', )
        serializer = self.serializer_class(
            serializer_instance,
            context=serializer_context,
            data=serializer_data,
            partial=True
        )
        serializer.is_valid(raise_exception=True)
        # Bug fix: the validated changes were previously never persisted.
        serializer.save()
        return Response(serializer.data, status=status.HTTP_200_OK)

    def destroy(self, request, slug):
        """Delete an article; only its author may delete it."""
        try:
            article = self.queryset.get(slug=slug)
        except Article.DoesNotExist:
            raise NotFound("An article with this slug doesn't exist")
        if article.author_id == request.user.profile.id:
            article.delete()
        else:
            raise PermissionDenied(
                "You are not authorized to delete this article.")
        return Response(None, status=status.HTTP_204_NO_CONTENT)

    def get_queryset(self):
        """Optionally filter the base queryset by ?tag=<tag>."""
        queryset = self.queryset
        tag = self.request.query_params.get('tag', None)
        if tag is not None:
            queryset = queryset.filter(tags__tag=tag)
        return queryset
class RateAPIView(APIView):
    """Post a 1-5 star rating for an article; each user may re-rate an
    article at most 5 times.  Responds with the new average rating."""
    permission_classes = (IsAuthenticatedOrReadOnly,)
    renderer_classes = (RatingJSONRenderer,)
    serializer_class = RatingSerializer

    def post(self, request, slug):
        """
        Method that posts users article ratings
        """
        rating = request.data.get("rate", {})
        serializer = self.serializer_class(data=rating)
        serializer.is_valid(raise_exception=True)
        rating = serializer.data.get('rating')
        try:
            article = Article.objects.get(slug=slug)
        except Article.DoesNotExist:
            raise NotFound("An article with this slug does not exist")
        # A user has at most one Ratings row per article; re-rating
        # updates that row and bumps its counter.
        ratings = Ratings.objects.filter(rater=request.user.profile,
                                         article=article).first()
        if not ratings:
            # First rating by this user for this article.
            ratings = Ratings(
                article=article,
                rater=request.user.profile,
                stars=rating)
            ratings.save()
            avg = Ratings.objects.filter(
                article=article).aggregate(Avg('stars'))
            # `avg` is the aggregate dict, e.g. {'stars__avg': ...}.
            return Response({
                "avg": avg
            }, status=status.HTTP_201_CREATED)
        if ratings.counter >= 5:
            raise PermissionDenied(
                "You are not allowed to rate this article more than 5 times."
            )
        ratings.counter += 1
        ratings.stars = rating
        ratings.save()
        avg = Ratings.objects.filter(article=article).aggregate(Avg('stars'))
        return Response({"avg": avg}, status=status.HTTP_201_CREATED)
class FavoriteAPIView(APIView):
    """Favorite (POST) or unfavorite (DELETE) an article by slug."""
    lookup_field = 'slug'
    permission_classes = (IsAuthenticatedOrReadOnly,)
    renderer_classes = (FavoriteJSONRenderer,)
    serializer_class = ArticleSerializer
    queryset = Article.objects.all()

    def _load_article(self, slug):
        # Shared slug lookup; 404s when the article is unknown.
        try:
            return Article.objects.get(slug=slug)
        except Article.DoesNotExist:
            raise NotFound("An article with this slug does not exist")

    def post(self, request, slug):
        """Mark the article as a favorite of the requesting profile."""
        article = self._load_article(slug)
        request.user.profile.favorite(article)
        serializer = self.serializer_class(
            article, context={'request': request})
        return Response(serializer.data, status=status.HTTP_201_CREATED)

    def delete(self, request, slug):
        """Remove the article from the requesting profile's favorites."""
        article = self._load_article(slug)
        request.user.profile.unfavorite(article)
        serializer = self.serializer_class(
            article, context={'request': request})
        return Response(serializer.data, status=status.HTTP_200_OK)
class CommentsListCreateAPIView(generics.ListCreateAPIView):
    """List the root (top-level) comments of an article, or create a new
    root comment on it."""
    lookup_field = 'article__slug'
    lookup_url_kwarg = 'article_slug'
    permission_classes = (IsAuthenticatedOrReadOnly,)
    # root_nodes(): only top-level comments; select_related avoids N+1
    # queries on the article/author joins the serializer needs.
    queryset = Comment.objects.root_nodes().select_related(
        'article', 'article__author', 'article__author__user',
        'author', 'author__user'
    )
    renderer_classes = (CommentJSONRenderer,)
    serializer_class = CommentSerializer

    def filter_queryset(self, queryset):
        # The built-in list function calls `filter_queryset`. Since we only
        # want comments for a specific article, this is a good place to do
        # that filtering.
        filters = {self.lookup_field: self.kwargs[self.lookup_url_kwarg]}
        return queryset.filter(**filters)

    def create(self, request, article_slug=None):
        """Create a top-level comment on the article identified by slug."""
        data = request.data.get('comment', {})
        context = {'author': request.user.profile}
        try:
            context['article'] = Article.objects.get(slug=article_slug)
        except Article.DoesNotExist:
            raise NotFound('An article with this slug does not exist.')
        serializer = self.serializer_class(data=data, context=context)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)
class CommentsDestroyGetCreateAPIView(
        RetrieveUpdateDestroyAPIView,
        CreateAPIView
):
    """Retrieve/update/delete a single comment, or create a reply to it
    (POST creates a child comment with this comment as parent)."""
    lookup_url_kwarg = 'comment_pk'
    permission_classes = (IsAuthenticatedOrReadOnly,)
    queryset = Comment.objects.all()
    serializer_class = CommentSerializer

    def destroy(self, request, article_slug=None, comment_pk=None):
        """Delete the comment identified by comment_pk.

        NOTE(review): no ownership check here — any authenticated user can
        delete any comment.  Confirm whether that is intended.
        """
        try:
            comment = Comment.objects.get(pk=comment_pk,)
        except Comment.DoesNotExist:
            raise NotFound('A comment with this ID does not exist.')
        comment.delete()
        return Response(None, status=status.HTTP_204_NO_CONTENT)

    def create(self, request, article_slug=None, comment_pk=None):
        """Create a reply: a child comment whose parent is comment_pk."""
        data = request.data.get('comment', None)
        context = {'author': request.user.profile}
        try:
            context['article'] = Article.objects.get(slug=article_slug)
        except Article.DoesNotExist:
            raise NotFound('An article with this slug does not exist.')
        try:
            context['parent'] = Comment.objects.get(pk=comment_pk)
        except Comment.DoesNotExist:
            raise NotFound('A comment with this id does not exists')
        serializer = self.serializer_class(data=data, context=context)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)

    def update(self, request, article_slug=None,
               comment_pk=None, *args, **kwargs):
        """Edit one's own comment; the previous body is archived in
        CommentEditHistory before the update is applied."""
        serializer_class = UpdateCommentSerializer
        data = request.data.get('comment', None)
        try:
            # Only the author may edit: the lookup is scoped to the
            # requesting user's profile.
            comment = Comment.objects.get(pk=comment_pk,
                                          author=request.user.profile)
        except Comment.DoesNotExist:
            raise NotFound(
                'This comment does not exist for authenticated user.'
            )
        if comment.body != data.get('body'):
            # Snapshot the old body so the edit history endpoint can show it.
            CommentEditHistory.objects.create(
                body=comment.body,
                comment_id=comment.pk,
                updated_at=comment.updated_at
            )
            updated_comment = serializer_class.update(
                data=data,
                instance=comment
            )
            return Response(
                self.serializer_class(updated_comment).data,
                status=status.HTTP_200_OK
            )
        # Unchanged body: no history entry, return the comment as-is.
        return Response(
            self.serializer_class(comment).data,
            status=status.HTTP_200_OK
        )
class CommentEditHistoryAPIView(ListAPIView):
    """List the edit history of one of the requesting user's own comments."""
    permission_classes = [IsAuthenticatedOrReadOnly, ]
    renderer_classes = [CommentEditHistoryJSONRenderer, ]
    serializer_class = CommentEditHistorySerializer
    queryset = CommentEditHistory.objects.all()

    def list(self, request, slug, comment_pk, *args, **kwargs):
        try:
            # The get() only verifies the comment exists and belongs to the
            # requester; its return value is intentionally unused.
            Comment.objects.get(pk=comment_pk, author=request.user.profile)
            serializer_instance = self.queryset.filter(comment_id=comment_pk)
        except Comment.DoesNotExist:
            raise NotFound
        serializer = self.serializer_class(serializer_instance, many=True)
        return Response(serializer.data, status.HTTP_200_OK)
class LikesAPIView(APIView):
    """Toggle the requesting user's like on an article.  A pre-existing
    dislike is removed first, so like/dislike are mutually exclusive."""
    permission_classes = (IsAuthenticatedOrReadOnly, )
    renderer_classes = (ArticleJSONRenderer, )
    serializer_class = ArticleSerializer

    def put(self, request, slug):
        serializer_context = {'request': request}
        try:
            serializer_instance = Article.objects.get(slug=slug)
        except Article.DoesNotExist:
            raise NotFound("An article with this slug does not exist")
        # Clear any existing dislike by this user.
        if serializer_instance in Article.objects.filter(
                dislikes=request.user):
            serializer_instance.dislikes.remove(request.user)
        # Toggle: a second like removes it, otherwise add it.
        if serializer_instance in Article.objects.filter(
                likes=request.user):
            serializer_instance.likes.remove(request.user)
        else:
            serializer_instance.likes.add(request.user)
        serializer = self.serializer_class(serializer_instance,
                                           context=serializer_context,
                                           partial=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
class DislikesAPIView(APIView):
    """Toggle the requesting user's dislike on an article.  A pre-existing
    like is removed first (mirror image of LikesAPIView)."""
    permission_classes = (IsAuthenticatedOrReadOnly, )
    renderer_classes = (ArticleJSONRenderer, )
    serializer_class = ArticleSerializer

    def put(self, request, slug):
        serializer_context = {'request': request}
        try:
            serializer_instance = Article.objects.get(slug=slug)
        except Article.DoesNotExist:
            raise NotFound("An article with this slug does not exist")
        # Clear any existing like by this user.
        if serializer_instance in Article.objects.filter(likes=request.user):
            serializer_instance.likes.remove(request.user)
        # Toggle: a second dislike removes it, otherwise add it.
        if serializer_instance in Article.objects.filter(
                dislikes=request.user):
            serializer_instance.dislikes.remove(request.user)
        else:
            serializer_instance.dislikes.add(request.user)
        serializer = self.serializer_class(serializer_instance,
                                           context=serializer_context,
                                           partial=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
class TagListAPIView(generics.ListAPIView):
    """Public endpoint returning every tag wrapped as {'tags': [...]}."""
    queryset = Tag.objects.all()
    permission_classes = (AllowAny,)
    serializer_class = TagSerializer

    def list(self, request):
        tags = self.serializer_class(self.get_queryset(), many=True).data
        return Response({'tags': tags}, status=status.HTTP_200_OK)
class NotificationViewset(mixins.ListModelMixin,
                          mixins.UpdateModelMixin,
                          mixins.DestroyModelMixin,
                          viewsets.GenericViewSet):
    """List the requesting user's notifications (split into unread/read),
    mark a single one as read (update), or delete one."""
    permission_classes = (IsAuthenticated, )
    serializer_class = NotificationSerializer
    renderer_classes = (NotificationJSONRenderer, )

    def list(self, request):
        unread_count = request.user.notifications.unread().count()
        read_count = request.user.notifications.read().count()
        unread_serializer = self.serializer_class(
            data=request.user.notifications.unread(), many=True)
        unread_serializer.is_valid()
        read_serializer = self.serializer_class(
            data=request.user.notifications.read(), many=True)
        read_serializer.is_valid()
        # Flag everything as delivered once it has been listed.
        request.user.notifications.mark_as_sent()
        return Response({'unread_count': unread_count, 'read_count': read_count,
                         'unread_list': unread_serializer.data, 'read_list': read_serializer.data},
                        status=status.HTTP_200_OK)

    def update(self, request, id):
        """Mark the notification with the given id as read."""
        try:
            instance_data = Notification.objects.get(pk=id)
        except Notification.DoesNotExist:
            raise NotFound("The notification with the given id doesn't exist")
        instance_data.mark_as_read()
        return Response("Notification marked as read", status=status.HTTP_200_OK)

    def delete(self, request, id):
        """Delete the notification with the given id."""
        try:
            notification = Notification.objects.get(pk=id)
        except Notification.DoesNotExist:
            raise NotFound("The notification with the given id doesn't exist")
        notification.delete()
        return Response({"Message": "Notification has been deleted"}, status=status.HTTP_200_OK)
class ReadAllNotificationViewset(mixins.ListModelMixin, viewsets.GenericViewSet):
    """Mark every unread notification of the requesting user as read."""

    def update(self, request):
        # Bug fix: scope the queryset to the requesting user.  The previous
        # Notification.objects.filter(unread=True) marked *every* user's
        # notifications as read (consistent now with NotificationViewset,
        # which uses request.user.notifications throughout).
        unread = request.user.notifications.unread()
        if not unread.exists():
            raise NotFound("You have no unread notifications")
        unread.mark_all_as_read()
        return Response({"Message": "You have marked all notifications as read"},
                        status=status.HTTP_200_OK)
class FilterAPIView(generics.ListAPIView):
    """Fuzzy article search by ?title=, ?author= or ?tag= using trigram
    similarity (requires PostgreSQL's pg_trgm extension)."""
    model = Article
    queryset = Article.objects.all()
    permission_classes = (AllowAny,)
    serializer_class = ArticleSerializer
    context_object_name = 'articles'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Optional search configuration from settings.
        # NOTE(review): config_kwargs is populated but never read in this
        # class — confirm whether it is consumed elsewhere or dead.
        self.config_kwargs = {}
        search_settings = getattr(settings, 'ARTICLE_SEARCH_SETTINGS', {})
        if 'config' in search_settings:
            self.config_kwargs['config'] = search_settings['config']

    def get_queryset(self):
        queryset = self.queryset
        # Each filter keeps rows with similarity > 0.3, best match first.
        title = self.request.query_params.get('title', None)
        if title is not None:
            queryset = queryset.annotate(
                similarity=TrigramSimilarity('title', title),
            ).filter(similarity__gt=0.3).order_by('-similarity')
        author = self.request.query_params.get('author', None)
        if author is not None:
            queryset = queryset.annotate(
                similarity=TrigramSimilarity(
                    'author__user__username', author),
            ).filter(similarity__gt=0.3).order_by('-similarity')
        tag = self.request.query_params.get('tag', None)
        if tag is not None:
            queryset = queryset.annotate(
                similarity=TrigramSimilarity(
                    'tags__tag', tag),
            ).filter(similarity__gt=0.3).order_by('-similarity')
        # Final ordering by creation date overrides the similarity ordering.
        return queryset.order_by('created_at')
class LikeCommentLikesAPIView(APIView):
    """Toggle the requesting user's like on a comment; an existing dislike
    is converted into a like."""
    permission_classes = (IsAuthenticatedOrReadOnly, )
    renderer_classes = (CommentLikeJSONRenderer, )
    serializer_class = CommentSerializer

    def post(self, request, article_slug=None, comment_pk=None):
        serializer_context = {'request': request}
        context = {'author': request.user.profile}
        try:
            context['article'] = Article.objects.get(slug=article_slug)
        except Article.DoesNotExist:
            raise NotFound('An article with this slug does not exist.')
        try:
            serializer_instance = Comment.objects.get(pk=comment_pk)
        except Comment.DoesNotExist:
            raise NotFound('A comment with this id does not exist')
        # Toggle the like.
        if serializer_instance in Comment.objects.filter(
                comment_likes=request.user):
            serializer_instance.comment_likes.remove(request.user)
        else:
            serializer_instance.comment_likes.add(request.user)
        # A previous dislike becomes a like.  NOTE(review): the extra add()
        # below is a no-op when the like was just added, but it re-adds the
        # like after the toggle branch removed it when both flags were set.
        if serializer_instance in Comment.objects.filter(
                comment_dislikes=request.user):
            serializer_instance.comment_dislikes.remove(request.user)
            serializer_instance.comment_likes.add(request.user)
        serializer = self.serializer_class(serializer_instance,
                                           context=serializer_context,
                                           partial=True)
        return Response(serializer.data, status=status.HTTP_201_CREATED)
class DislikeCommentLikesAPIView(APIView):
    """Toggle the requesting user's dislike on a comment; an existing like
    is converted into a dislike (mirror of LikeCommentLikesAPIView)."""
    permission_classes = (IsAuthenticatedOrReadOnly, )
    renderer_classes = (CommentLikeJSONRenderer, )
    serializer_class = CommentSerializer

    def post(self, request, article_slug=None, comment_pk=None):
        serializer_context = {'request': request}
        context = {'author': request.user.profile}
        try:
            context['article'] = Article.objects.get(slug=article_slug)
        except Article.DoesNotExist:
            raise NotFound('An article with this slug does not exist.')
        try:
            serializer_instance = Comment.objects.get(pk=comment_pk)
        except Comment.DoesNotExist:
            raise NotFound('A comment with this id does not exists')
        # Toggle the dislike.
        if serializer_instance in Comment.objects.filter(
                comment_dislikes=request.user):
            serializer_instance.comment_dislikes.remove(request.user)
        else:
            serializer_instance.comment_dislikes.add(request.user)
        # A previous like becomes a dislike.
        if serializer_instance in Comment.objects.filter(
                comment_likes=request.user):
            serializer_instance.comment_likes.remove(request.user)
            serializer_instance.comment_dislikes.add(request.user)
        serializer = self.serializer_class(
            serializer_instance,
            context=serializer_context,
            partial=True
        )
        return Response(serializer.data, status=status.HTTP_201_CREATED)
class BookmarkAPIView(APIView):
    """Add (POST) or remove (DELETE) an article bookmark for the
    requesting user's profile."""
    lookup_field = 'slug'
    permission_classes = (IsAuthenticatedOrReadOnly,)
    renderer_classes = (BookmarkJSONRenderer,)
    serializer_class = ArticleSerializer

    def post(self, request, slug):
        """
        Bookmark the article for the requesting user.

        Returns 201 with the serialized article on success, or 202 when
        this user already bookmarked it.
        """
        serializer_context = {'request': request}
        try:
            article = Article.objects.get(slug=slug)
        except Article.DoesNotExist:
            raise NotFound("An article with this slug does not exist")
        bookmark = Bookmarks.objects.filter(
            user=request.user.profile, article=article).first()
        if not bookmark:
            Bookmarks(article=article, user=request.user.profile).save()
            serializer = self.serializer_class(
                article,
                context=serializer_context
            )
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response({
            "msg": "Article with the slug '{}' is already in bookmarks".format(slug)
        }, status=status.HTTP_202_ACCEPTED)

    def delete(self, request, slug):
        """
        Remove the requesting user's bookmark of the article.

        Bug fix: the lookup is now scoped to the requesting user's profile.
        Previously Bookmarks.objects.get(article=...) could raise
        MultipleObjectsReturned when several users bookmarked the article,
        or delete another user's bookmark.
        """
        try:
            article = Article.objects.get(slug=slug)
        except Article.DoesNotExist:
            raise NotFound("An article with this slug does not exist")
        try:
            bookmarked_article = Bookmarks.objects.get(
                article=article, user=request.user.profile)
        except Bookmarks.DoesNotExist:
            raise NotFound("This article has not been bookmarked")
        bookmarked_article.delete()
        return Response({
            "msg": "Article with the slug '{}' has been removed from bookmarks".format(slug)
        }, status=status.HTTP_200_OK)
|
24,751 | eaf295d4ae67329376c82f8fc095b4ad682bfa2e | #!/usr/bin/env python3
# 664C_olymp.py - Codeforces.com/problemset/problem/664/C by Sergey 2016
import unittest
import sys
import re
###############################################################################
# Olymp Class (Main Program)
###############################################################################
class Olymp:
    """Codeforces 664C: translate abbreviated olympiad years ("IAO'NN")
    back to full years starting from 1989."""

    def __init__(self, test_inputs=None):
        """Parse input (or the test_inputs string) and precompute the
        abbreviation <-> year tables used by translate()."""
        it = iter(test_inputs.split("\n")) if test_inputs else None

        def uinput():
            # Read from the test iterator when present, else from stdin.
            return next(it) if it else sys.stdin.readline().rstrip()

        # Number of abbreviations to translate.
        [self.n] = map(int, uinput().split())

        # Extract the digit suffix from each "IAO'NN" line.
        # Fix: raw string for the regex ("(\d+)" is an invalid escape
        # sequence and a SyntaxWarning on modern Python).
        self.nums = []
        for i in range(self.n):
            strs = uinput()
            m = re.search(r"(\d+)", strs)
            if m:
                self.nums.append(m.group(1))

        # For every year from 1989 on, the shortest digit suffix not yet
        # taken by an earlier year, plus the reverse lookup table.
        used = set([])
        self.years = dict()
        self.revyears = dict()
        for i in range(1989, 10000):
            cur = i
            yr = str(cur % 10)
            while yr in used:
                cur //= 10
                yr = str(cur % 10) + yr
            used.add(yr)
            self.years[i] = yr
            self.revyears[yr] = i

    def translate(self, s):
        """Translate one digit suffix to its full year."""
        if len(s) <= 3:
            # Suffixes of up to 3 digits are unique within 1989..9999.
            return self.revyears[s]
        # Longer suffix: if its leading 4 digits fall inside the covered
        # range, the real year must be larger — prepend a '1'.
        if 1989 < int(s[0:4]) < 9999:
            s = "1" + s
        return int(s)

    def calculate(self):
        """Return the translated years, one per line."""
        outs = map(str, map(self.translate, self.nums))
        return "\n".join(outs)
###############################################################################
# Unit Tests
###############################################################################
class unitTests(unittest.TestCase):
    """Unit tests for Olymp, run when the script is invoked with -ut."""

    def test_single_test(self):
        """ Olymp class testing """
        # Constructor test
        test = "5\nIAO'15\nIAO'2015\nIAO'1\nIAO'9\nIAO'0"
        d = Olymp(test)
        self.assertEqual(d.n, 5)
        self.assertEqual(d.nums[0:2], ["15", "2015"])

        # Sample test
        self.assertEqual(
            Olymp(test).calculate(), "2015\n12015\n1991\n1989\n1990")

        # Sample test
        test = "4\nIAO'9\nIAO'99\nIAO'999\nIAO'9999"
        self.assertEqual(Olymp(test).calculate(), "1989\n1999\n2999\n9999")

        # Sample test
        test = ""
        # self.assertEqual(Olymp(test).calculate(), "0")

        # My tests
        test = ""
        # self.assertEqual(Olymp(test).calculate(), "0")

        # Time limit test
        # self.time_limit_test(5000)

    def time_limit_test(self, nmax):
        """ Timelimit testing """
        import random
        import timeit

        # Random inputs
        test = str(nmax) + " " + str(nmax) + "\n"
        numnums = [str(i) + " " + str(i+1) for i in range(nmax)]
        test += "\n".join(numnums) + "\n"
        nums = [random.randint(1, 10000) for i in range(nmax)]
        test += " ".join(map(str, nums)) + "\n"

        # Run the test, timing construction and calculation separately.
        start = timeit.default_timer()
        d = Olymp(test)
        calc = timeit.default_timer()
        d.calculate()
        stop = timeit.default_timer()
        print("\nTimelimit Test: " +
              "{0:.3f}s (init {1:.3f}s calc {2:.3f}s)".
              format(stop-start, calc-start, stop-calc))
if __name__ == "__main__":
    # Raise the recursion limit to avoid hitting it on deep call chains.
    sys.setrecursionlimit(100000)
    # "-ut" as the last argument runs the unit tests instead of solving.
    if sys.argv[-1] == "-ut":
        unittest.main(argv=[" "])

    # Print the result string
    sys.stdout.write(Olymp().calculate())
|
24,752 | 9122d23dcd820a0618f729b7f062662c371686ab | # Need threading for multiple clients and a way to terminate gracefully
import threading
# Process command line arguments
import argparse
# Needed for network communication
import socket
# Cryptography (signature)
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives.serialization import PublicFormat, Encoding, load_pem_public_key
from cryptography.exceptions import InvalidSignature
# Globals for handling the frames
lock = threading.Lock()
publicKeyMapping = {}
def new_client(client_socket, caddr):
    """Handle one client connection in its own thread.

    Message codes: b'0' = REGISTER (store the client's signature-verified
    public key -> IP mapping), b'1' = READ (look up an IP by public key).
    The socket is always closed when the handler finishes.

    NOTE(review): each recv() assumes the whole field arrives in a single
    call; TCP does not guarantee that — confirm field sizes stay within a
    single segment or add an exact-length read loop.
    """
    global lock, publicKeyMapping
    if client_socket:
        try:
            # msgCode 0 = REGISTER, 1 = READ, more later if needed...
            msgCode = client_socket.recv(1)
            # Register
            if msgCode == b'0':
                # print("IN REGISTER")
                # Receive client's public key and signed public key.
                # Each field is preceded by a 2-byte big-endian length.
                clientPublicKeySize = client_socket.recv(2)
                clientPublicKey = client_socket.recv(int.from_bytes(clientPublicKeySize, "big")) # Serialized
                clientPublicKeyDeserialized = load_pem_public_key(clientPublicKey, default_backend())
                clientPublicKeySignedSize = client_socket.recv(2)
                clientPublicKeySigned = client_socket.recv(int.from_bytes(clientPublicKeySignedSize, "big"))
                # Verify the self-signature: proves the sender holds the
                # private key matching the submitted public key.
                clientPublicKeyDeserialized.verify(
                    clientPublicKeySigned,
                    clientPublicKey,
                    padding.PSS(
                        mgf=padding.MGF1(hashes.SHA256()),
                        salt_length=padding.PSS.MAX_LENGTH
                    ),
                    hashes.SHA256()
                )
                with lock:
                    # Add mapping into dict (PEM bytes -> client IP string).
                    publicKeyMapping[clientPublicKey] = caddr
                # Send Complete message
                client_socket.send(b'REGISTER COMPLETE')
                print("New public key is registered")
                # Print publicKeyMapping
                print("=== Current Mapping ====")
                with lock:
                    for k,v in publicKeyMapping.items():
                        print(v, '\n', k)
                print("========================")
            # Read
            elif msgCode == b'1':
                # print("IN READ")
                # Receive client's public key to be looked up
                clientPublicKeySize = client_socket.recv(2)
                clientPublicKey = client_socket.recv(int.from_bytes(clientPublicKeySize, "big"))
                # print("clientPublicKeySize:", clientPublicKeySize)
                # print("Looking up:\n", clientPublicKey)
                with lock:
                    # Lookup IP
                    if clientPublicKey in publicKeyMapping:
                        ip = publicKeyMapping[clientPublicKey]
                        # Send IP
                        client_socket.send(ip.encode())
                    else:
                        client_socket.send(b'UNREGISTERED')
            else:
                client_socket.send(b'INVALID MESSAGE CODE')
        except InvalidSignature as e:
            # Re-raised after the socket is closed; this only terminates
            # the handler thread.
            print("Verification failed. Closing connection.")
            raise e
        finally:
            # print("A connection has closed")
            client_socket.close()
if __name__ == '__main__':
    # Handle arguments
    ap = argparse.ArgumentParser()
    ap.add_argument("-i", "--host-ip", type=str, required=False,
                    help="ip address of the device", default='127.0.0.1')
    ap.add_argument("-p", "--port", type=int, required=False,
                    help="ephemeral port number of the server (1024 to 65535)", default=7777)
    args = vars(ap.parse_args())

    print("Setting up server...")
    # Socket Create
    server_socket = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
    host_ip = args["host_ip"]
    port = args["port"]
    socket_address = (host_ip,port)

    # Socket Bind
    server_socket.bind(socket_address)

    # Socket Listen (backlog of 5 pending connections)
    server_socket.listen(5)
    print("LISTENING AT:",socket_address)

    # One handler thread per accepted connection, until Ctrl-C.
    threads = []
    try:
        while True:
            print("waiting for connection - before accept")
            client_socket,(caddr, cport) = server_socket.accept()
            if client_socket:
                print("waiting for connection - after accept")
                print('GOT CONNECTION FROM: %s:%s' % (caddr, cport))
                connThread = threading.Thread(target=new_client, args=(client_socket,caddr,))
                threads.append(connThread)
                connThread.start()
    except KeyboardInterrupt as e:
        print("Shutdown may leave some temp files in local directory")
    finally:
        print("Shutting Down Server")
        try:
            server_socket.shutdown(socket.SHUT_RDWR)
        except OSError as e:
            # shutdown() fails harmlessly when no client ever connected.
            if e.strerror == "Socket is not connected":
                print("No connections found, proceeding to close socket.")
            else:
                raise e
        finally:
            print("Closing Server Socket")
            server_socket.close()
|
24,753 | 368ab621a14699d377364eeb5e4e2031ea0cce5e | from django.contrib import admin
from .models import *
# Register your models here.
class InlineImage(admin.TabularInline):
    """Tabular inline so product images can be edited on the product page."""
    model = Images

class ProductAdmin(admin.ModelAdmin):
    """Product admin with its images editable inline."""
    inlines = [InlineImage]

admin.site.register(Products, ProductAdmin)
admin.site.register(Images)
admin.site.register(TypeProducts)
|
24,754 | 96dc85fe5543b33f74a24be6ff819cde9f8c1afa | #!/usr/bin/env python
import sys
sys.path.append('/home/ubuntu/RedBoard')
import redboard
# Switch every servo output used by the robot off so it powers down safely.
redboard.servo8_off()
redboard.servo9_off()
redboard.servo10_off()
redboard.servo11_off()
redboard.servo5_off()
redboard.servo6_off()
redboard.servo13_off()
|
24,755 | f1ea922329d9f133564193d7c00f4d93b48caba6 | #!/usr/bin/python
# if string contains emergency, then ask if email is urgent
# If string contains joke, then ask if email is non-urgent
#email_1="This is an urgent message"
email_1="This is a good joke"
if email_1.find('urgent') != -1:
print "Do you want to send as urgent message?"
elif email_1.find('joke') != -1:
print "Do you want to send as non-urgent message?"
|
24,756 | abc844a5416d9b30abcc3f436a848b9ae1009aa0 | from load import mnist
import numpy as np
from math import sqrt
# Euclidean distance between two sample points (expected output: 5.0).
A=[0,0]
B=[3,4]
print sqrt(sum( (a - b)**2 for a, b in zip(A, B)))
|
24,757 | 06ddfa782e7d6f84b0cadecb9ae1547eb48b9e91 | from django.shortcuts import render
from rest_framework import viewsets
from django_rest_ml.sklearn.models import SklearnModel
from django_rest_ml.sklearn.serializers import SklearnModelSerializer
# Create your views here.
class SklearnModelViewSet(viewsets.ModelViewSet):
    """
    API endpoint that allows sklearn models to be viewed or edited.
    """
    # Newest models first.
    queryset = SklearnModel.objects.all().order_by('-created')
    serializer_class = SklearnModelSerializer
|
24,758 | c1ac0f602c5b6ae744308a65f85c72253e6624ec | import math
# Accumulate the binomial tail P(X >= 11) for X ~ Binomial(N=52, p=0.1),
# printing each term, and stop early once the running sum reaches 0.01.
N=52
n=11
P=0
while 1 :
    result = math.factorial(N)/(math.factorial(n)*math.factorial(N-n)) # nCr binomial coefficient
    one = result * (0.1**n) * (0.9**(N-n)) # probability of exactly n successes
    P = P + one
    print('n이 %d일 때, one은 %0.10e, P는 %0.10e입니다.' % (n, one, P))
    n = n + 1
    # Stop when all terms are consumed...
    if(n>N):
        print("연산을 종료합니다.")
        break
    # ...or as soon as the cumulative probability reaches the threshold.
    if(P >= 0.01):
        print("P=%0.10lf Happy Happy" % P)
        break
24,759 | 92780297565a897c674e0e93900b4f559ed4ef90 | """MC2-P1: Market simulator."""
import pandas as pd
import datetime as dt
import math
from util import get_data
# Leverage-function: borrowing money property
# How much invested in market / liquidation value of account
def check_leverage(new_pos):
    """Return True when the position's leverage is within the 2x limit.

    Leverage = gross market exposure (absolute sum of all rows except the
    trailing 'Cash' row) divided by the account's liquidation value (sum
    of every row including cash).
    """
    exposure = new_pos[:-1].abs().sum()
    net_value = new_pos.sum()
    ratio = exposure / net_value
    if abs(ratio.values) > 2:
        return False
    return True
def compute_portvals(orders_file="./orders/orders.csv", start_val=1000000):
    # this is the function the autograder will call to test your code
    # Returns a one-column DataFrame of daily total portfolio value
    # (positions + cash), indexed by market date.
    # NOTE(review): uses DataFrame.ix throughout, which was removed in
    # pandas 1.0 — this function only runs on legacy pandas.
    # TODO: Your code here

    # Get data
    # FORMAT: Date, Symbol, Order(Buy/Sell), # of shares
    orders = pd.read_csv(orders_file, index_col=[0])
    orders = orders.sort_index()

    # Determine stock symbols, start and end date from the 'orders'
    ls_symbols = list(set(orders['Symbol'].values))
    start_date = orders.index[0]
    end_date = orders.index[-1]
    dates = pd.date_range(start_date, end_date)

    # Read prices to 'prices' dataframe, use adjusted close price
    # Automatically adds SPY to ensure correct market dates
    prices = get_data(ls_symbols, dates)

    # Create dataframe to follow daily positions
    # At first no trades, thus every day only cash
    position = pd.DataFrame(index=prices.index, columns=prices.columns)
    position = position.fillna(0)
    position['Cash'] = start_val

    for order in orders.iterrows():
        # SELL negates the trade value.
        multiplier = 1
        if order[1]['Order'] == 'SELL':
            multiplier = -1
        # Parsing
        date = order[0]
        stock = order[1]['Symbol']
        amount = order[1]['Shares']
        # Series of the trade's value for every day from `date` onward
        # (marked to each day's price).
        trade_val = multiplier * amount * prices.ix[date:, stock]

        # Create new position (single day's holdings as a one-column frame)
        new_pos = position.ix[date].copy().to_frame()
        new_pos.ix[stock] += trade_val
        new_pos.ix['Cash'] -= trade_val

        # Constraint: Leverage cannot exceed 2, if so reject the trade
        # in 2016 limit 2, in 2017 limit 1.5
        under_leverage_limit = check_leverage(new_pos)

        if under_leverage_limit:
            position.ix[date:, stock] += trade_val
            # Update cash column (by the trade's value on the trade date)
            position.ix[date:, 'Cash'] -= trade_val[date]

    # Calculate daily portfolio value by summing 'position' dataframe's columns
    port_val = position.sum(axis=1).to_frame()

    # Secret: on June 15th, 2011 ignore all orders
    # This has not been implemented
    return port_val
def compute_portfolio_stats(port_val,
                            rfr=0.0, sf=252.0):
    """Compute summary statistics from a daily portfolio-value series.

    port_val : pandas Series (or one-column DataFrame) of daily values.
    rfr      : daily risk-free rate.
    sf       : sampling frequency per year (252 trading days).

    Returns (cumulative return, average daily return, std of daily
    returns, annualized Sharpe ratio).
    """
    # Calculate daily returns; drop the first (undefined) observation.
    daily_returns = port_val / port_val.shift(1) - 1
    daily_returns = daily_returns[1:]

    # Get portfolio statistics (note: std_daily_ret = volatility).
    # Fix: .iloc replaces .ix, which was removed in pandas 1.0.
    cr = port_val.iloc[-1] / port_val.iloc[0] - 1
    adr = daily_returns.mean()
    sddr = daily_returns.std()
    sr = math.sqrt(sf) * (adr - rfr) / sddr
    return cr, adr, sddr, sr
def test_code():
    """Manual smoke test: run compute_portvals on a sample order file and
    print summary statistics.  Not called by the autograder."""
    # Define input parameters
    of = "./testcases2016/orders-12-modified.csv"
    sv = 1000000

    # Process orders
    portvals = compute_portvals(orders_file=of, start_val=sv)
    if isinstance(portvals, pd.DataFrame):
        portvals = portvals[portvals.columns[0]]  # just get the first column
    else:
        # BUG FIX: the original bare string expression was a no-op;
        # actually emit the warning.
        print("warning, code did not return a DataFrame")

    # Get portfolio stats
    start_date = dt.datetime.date(portvals.index[0])
    end_date = dt.datetime.date(portvals.index[-1])
    cr, adr, stddr, sr = compute_portfolio_stats(portvals)

    # Portfolio statistics.  Single-argument print() parses identically on
    # Python 2 and 3, unlike the original print statements.
    print("Date Range: {} to {}".format(start_date, end_date))
    print("Sharpe Ratio of Fund: {}".format(sr))
    print("Cumulative Return of Fund: {}".format(cr))
    print("Standard Deviation of Fund: {}".format(stddr))
    print("Average Daily Return of Fund: {}".format(adr))
    print("Final Portfolio Value: {}".format(portvals[-1]))
# Run the manual smoke test only when executed as a script (not on import).
if __name__ == "__main__":
    test_code()
|
24,760 | 85c970e637c3feb6749047dfa2f59f2556c8959e | # Uses python3
import sys
def get_fibonacci_huge_naive(n, m):
    """Return F(n) mod m for very large n.

    Fast matrix exponentiation of [[1, 1], [1, 0]]: square-and-multiply over
    the bits of n.  All arithmetic is reduced mod m at every step so the
    intermediate values stay below m**2 instead of growing to F(n)'s full
    size (the original reduced only once at the end).
    """
    if n == 0:
        # BUG FIX: the loop below starts from the matrix for F(1), so n == 0
        # previously returned 1 instead of F(0) = 0.
        return 0 % m
    v1, v2, v3 = 1, 1, 0  # initialise the matrix [[1, 1], [1, 0]]
    for rec in bin(n)[3:]:  # leading bit handled by the initial matrix
        calc = v2 * v2
        # Square the matrix (it is symmetric, so three values suffice).
        v1, v2, v3 = (v1 * v1 + calc) % m, ((v1 + v3) * v2) % m, (calc + v3 * v3) % m
        if rec == '1':
            # Multiply by the base matrix for a set bit.
            v1, v2, v3 = (v1 + v2) % m, v1, v2
    return v2 % m
if __name__ == '__main__':
    # Read "n m" from stdin.  The original bound the name `input`, shadowing
    # the builtin, and carried a stray semicolon.
    data = sys.stdin.read()
    n, m = map(int, data.split())
    print(get_fibonacci_huge_naive(n, m))
|
24,761 | e673785b8ac74851f5cb9f26d2fbea041fadc206 | #Password Generator Mini Project
from PyQt5 import QtWidgets
from PyQt5.QtWidgets import QApplication, QMainWindow
import sys
from random import randint
def passGen():
    """Return a random 15-character password.

    Each character: pick one of the three pools (specials, digits, lowercase
    letters) uniformly, then a character uniformly within it -- the same
    per-character distribution as the original implementation.

    Uses the `secrets` module: `random` is not cryptographically secure and
    must not be used for password generation.
    """
    import secrets  # local import keeps the module-level surface unchanged

    spec_chars = "!@#$%^&*()_+{}:<>?"
    digits = "1234567890"
    letters = "qwertyuiopasdfghjklzxcvbnm"
    pools = (spec_chars, digits, letters)

    return "".join(secrets.choice(secrets.choice(pools)) for _ in range(15))
def window():
    """Build and run the one-window GUI that displays a freshly
    generated password, then exit with Qt's return code."""
    app = QApplication(sys.argv)

    main_win = QMainWindow()
    main_win.setGeometry(200, 200, 300, 200)
    main_win.setWindowTitle("Password Generator")

    pw_label = QtWidgets.QLabel(main_win)
    pw_label.setText("Password:\n" + passGen())
    pw_label.resize(100, 50)

    main_win.show()
    sys.exit(app.exec_())
# NOTE(review): launches the GUI immediately at import time; consider an
# `if __name__ == "__main__":` guard if this module is ever imported.
window()
# Add storage of passwords with username and website name
|
24,762 | d921dd14c0a96f71049a6598638069de1895d802 | # si_formatter
# Mapping from power-of-1000 exponent to its SI prefix symbol.
si_prefix = {
    0: '',
    1: 'k',  # kilo
    2: 'M',  # Mega
    3: 'G',  # Giga
    4: 'T',  # Tera
    5: 'P',  # Peta
    6: 'E',  # Exa
    7: 'Z',  # Zeta
    8: 'Y'   # Yotta
}
def return_with_si(value, unit):
    """Format *value* with an SI magnitude prefix, e.g. (1500, 'm') -> '1.50 km'.

    Grams at the mega scale or above are reported as ton(s) instead.
    Returns a string with two decimal places.
    """
    # Local copy of the prefix symbols (index == power of 1000), kept in the
    # same order as the module-level `si_prefix` table.
    prefixes = ('', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    prefix = 0
    # BUG FIX: the original tested the table size once *before* the loop
    # (always true, since prefix starts at 0), so values >= 1e27 walked past
    # 'Y' and raised KeyError.  Bound the loop itself instead.
    while value >= 1e3 and prefix < len(prefixes) - 1:
        value /= 1e3
        prefix += 1
    # if >= one million grams convert units to tons
    if unit == 'g' and prefix >= 2:
        unit = 'ton(s)'
        prefix -= 2
    return "%.2f %s%s" % (value, prefixes[prefix], unit)
|
24,763 | 200da1e1778b8af1efd35cf658f4441e73c47922 | # Generated by Django 3.2.5 on 2021-08-04 18:58
from django.db import migrations, models
# Auto-generated migration: adds the `test2` column to the `test1` model.
class Migration(migrations.Migration):

    dependencies = [
        ('test1', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='test1',
            name='test2',
            # NOTE(review): `default=True` (a boolean) on a CharField looks
            # like a mistake -- a string default or `blank=True` was probably
            # intended.  Confirm before relying on this migration; editing a
            # migration that has already been applied requires a follow-up
            # migration instead.
            field=models.CharField(default=True, max_length=250),
        ),
    ]
|
24,764 | 2f9b3d6fba6535af913c2b9de1464507dee6afb9 |
import smtplib
from email.mime.text import MIMEText
def send_mail(email, name, password):
    """Send an HTML "login OTP" email via Gmail SMTP (STARTTLS on port 587).

    email    -- recipient address
    name     -- recipient display name interpolated into the HTML body
    password -- the one-time password to embed in the message
    """
    # SECURITY(review): credentials are hard-coded in source.  Move them to
    # environment variables / a secrets store and rotate the exposed password.
    from_email = "hackedraw159@gmail.com"
    from_password = "6263420714"
    to_email = email
    img = "https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcTntY3A8wM9MtyPqd13CaOdNriuNFh1rmHGOA&usqp=CAU"
    subject = "Attendance System : Login - OTP"
    message = "<!doctype html><html lang='en'><head><meta charset='utf-8'><meta name='viewport' " \
              "content='width=device-width, initial-scale=1'><style>.box{margin: 1rem;width: auto;background-color: " \
              "black;padding: 1rem;border: aqua 5px solid;border-radius: 20px;color:white;box-shadow: 0 19px 38px " \
              "rgba(0,0,0,0.50), 0 15px 12px rgba(0,0,0,0.22);}body{color: white;background-color: white;}h2{" \
              "color: aqua;}</style></head><body><div class='container box'><div class='row'>" \
              "<div class='col'><h1 style='text-align: left'>Attendance System Using Face Recognition</h1></div>" \
              "<div class='col'><img src=%s class='img-fluid rounded'></div>" \
              "</div></div><section><div class='container'><div " \
              "class='box'><h1>Hi</h1><div><h2>%s ,</h2></div><p>Please use the OTP below to login to your " \
              "account.</p><h2>%s</h2><p>Please don't share your OTP with anyone, If this wasn't you, contact your " \
              "google support team.</p></div></div></section></body></html>" % (img, name, password)
    msg = MIMEText(message, 'html')
    msg['Subject'] = subject
    msg['To'] = to_email
    msg['From'] = from_email
    # FIX: the original never closed the SMTP connection.  The context
    # manager sends QUIT and closes the socket even if login/send raises.
    with smtplib.SMTP('smtp.gmail.com', 587) as gmail:
        gmail.ehlo()
        gmail.starttls()
        gmail.login(from_email, from_password)
        gmail.send_message(msg)
send_mail('abhishekparmarjnv@gmail.com', 'Abhishek Parmar', 4571) |
24,765 | e3cd7e17f56ac76aeb1d9de12f7ee7fe504bc5a9 | from django import forms
# Django forms: declarative classes that render input fields on the client
# side and validate/clean the data users submit on the server side.
class AddProductForm(forms.Form):
    """Cart form: a quantity plus a hidden flag distinguishing an update of
    an existing line item from a fresh add."""

    # Number of units requested.
    quantity = forms.IntegerField()
    # Hidden marker -- presumably set by the cart page when editing an
    # existing row rather than adding a new one; confirm against the template.
    is_update = forms.BooleanField(required=False, initial=False, widget=forms.HiddenInput)
|
24,766 | f96685b4ef299c7cdf74ad8a043e6b1bffd65cd5 | import pandas as pd
import matplotlib.pyplot as plt

# Histogram of the Genre column from top.csv (pandas imported above as pd).
df = pd.read_csv('top.csv')
# BUG FIX: the original called plt.title() *before* plt.subplots(), so the
# title was attached to an implicit first figure while the histogram was
# drawn on a second, untitled one.  Create the axes first, then title them.
fig, ax = plt.subplots(figsize=(10, 3), tight_layout=True)
ax.set_title("Popular Genres Of 2019")
ax.tick_params(axis='x', rotation=70)
ax.hist(df.Genre)
plt.show() |
24,767 | beead6000718603d65c96c6986cf316aa79b578e | def check_entity_continue(tag):
    # True only for continuation tags ("I-..."); "B-..." and "O" start a
    # new segment in the caller.
    label = False
    if tag.startswith("I"):
        label = True
    return label
def ner_format_result(char_list, tag_list):
    """Convert aligned characters + BIO tags into entity dicts.

    char_list -- sequence of characters/tokens
    tag_list  -- same-length sequence of BIO tags ("B-TYPE"/"I-TYPE"/"O")
    Returns a list of {"begin", "end", "words", "ner_type"} dicts
    (begin/end are 1-based indices of the first/last token).
    """
    entities, entity = [], []
    for idx, tag in enumerate(tag_list):
        # A tag continues the current entity only when it is an "I-" tag
        # and we are past the first position.
        check_label = tag.startswith("I") if idx > 0 else False
        if not check_label and entity:
            entities.append(entity)
            entity = []
        entity.append([idx, char_list[idx], tag])
    # BUG FIX: only flush a non-empty tail; the original appended an empty
    # list for empty input and crashed on entity[0] below.
    if entity:
        entities.append(entity)

    results = []
    for entity in entities:
        # Only spans that start with a "B-" tag are real entities;
        # "O" runs are dropped here.
        if entity[0][2].startswith("B"):
            results.append(
                {
                    "begin": entity[0][0] + 1,
                    "end": entity[-1][0] + 1,
                    "words": "".join([i[1] for i in entity]),
                    "ner_type": entity[0][2].split("-")[1]
                }
            )
    return results
return results |
24,768 | e68ac2867ce0efde1cfce51f5ae6b15257c85d85 | # pylint: disable=missing-docstring
from copy import deepcopy
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from rest_framework import exceptions, status
from resolwe.flow.models import Collection, DescriptorSchema, Process
from resolwe.flow.views import CollectionViewSet, DescriptorSchemaViewSet
from resolwe.permissions.models import Permission, PermissionModel, get_anonymous_user
from resolwe.permissions.utils import (
assign_contributor_permissions,
check_owner_permission,
check_public_permissions,
check_user_permissions,
set_permission,
)
from resolwe.test import ResolweAPITestCase, TestCase
class CollectionPermissionsTest(ResolweAPITestCase):
    """Endpoint-level tests for sharing/permission changes on Collections."""

    def setUp(self):
        User = get_user_model()

        self.user1 = User.objects.create(username="test_user1", email="user1@test.com")
        self.user2 = User.objects.create(username="test_user2", email="user2@test.com")
        # Deliberate duplicate of user1's e-mail: test_share_by_email relies
        # on it to exercise the "cannot uniquely determine user" error path.
        self.user3 = User.objects.create(username="test_user3", email="user1@test.com")
        self.owner = User.objects.create(username="owner")
        self.public = get_anonymous_user()

        self.group = Group.objects.create(name="Test group")

        self.collection = Collection.objects.create(
            contributor=self.owner, name="Test collection 1"
        )
        self.collection.set_permission(Permission.OWNER, self.owner)

        self.process = Process.objects.create(
            name="Test process",
            contributor=self.owner,
        )

        self.resource_name = "collection"
        self.viewset = CollectionViewSet

        super().setUp()

    def test_set_permission(self):
        """Test set permission on permission queryset."""
        collection = Collection.objects.create(
            contributor=self.owner, name="Test collection 2"
        )
        self.assertEqual(collection.get_permission(self.user1), Permission.NONE)
        self.assertEqual(self.collection.get_permission(self.user1), Permission.NONE)

        # Bulk-set VIEW on every matching collection in one queryset call.
        Collection.objects.filter(name__startswith="Test collection").set_permission(
            Permission.VIEW, self.user1
        )
        self.assertEqual(collection.get_permission(self.user1), Permission.VIEW)
        self.assertEqual(self.collection.get_permission(self.user1), Permission.VIEW)

    def test_public_user(self):
        """Public user cannot create/edit anything"""
        set_permission(Permission.SHARE, self.user1, self.collection)

        # A sharer may grant/revoke public VIEW ...
        data = {"public": "view"}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)

        data = {"public": "none"}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)

        # ... but anything above VIEW for the public user is forbidden.
        data = {"public": "edit"}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)

        data = {"public": "share"}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)

        data = {"public": "owner"}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)

    def test_user_without_share(self):
        """User without ``SHARE`` permission cannot do anything"""
        set_permission(Permission.EDIT, self.user1, self.collection)

        # Can not add permissions to users.
        data = {"users": {self.user2.pk: "view"}}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)

        # Can not add permissions to groups.
        # NOTE(review): the payload key is "users" although the comment says
        # groups -- probably meant {"groups": ...}; the request is rejected
        # with 403 either way, so the assertion still passes.
        data = {"users": {self.group.pk: "view"}}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)

    def test_user_with_share(self):
        self.collection.set_permission(Permission.SHARE, self.user1)

        # Can set permissions to users.
        data = {"users": {self.user2.pk: "view"}}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(self.collection.get_permissions(self.user2), [Permission.VIEW])

        # Can set permissions to groups.
        data = {"groups": {self.group.pk: "view"}}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(self.collection.get_permissions(self.group), [Permission.VIEW])

    def test_protect_owner(self):
        """Only owners can modify `owner` permission"""
        self.collection.set_permission(Permission.SHARE, self.user1)

        # User with share permission cannot grant ``owner`` permission
        data = {"users": {self.user2.pk: "owner"}}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
        self.assertNotIn("owner", self.collection.get_permissions(self.user2))
        self.assertFalse(PermissionModel.objects.filter(user=self.user2).exists())

        # User with share permission cannot revoke ``owner`` permission
        self.collection.set_permission(Permission.OWNER, self.user2)
        # NOTE(review): "editor" here, vs "edit" elsewhere -- the request is
        # rejected with 403 before the value matters; confirm intent.
        data = {"users": {self.user2.pk: "editor"}}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
        self.assertEqual(
            self.collection.get_permissions(self.user2),
            [Permission.VIEW, Permission.EDIT, Permission.SHARE, Permission.OWNER],
        )
        self.collection.set_permission(Permission.NONE, self.user2)

        # Now let user1 be owner on collection.
        set_permission(Permission.OWNER, self.user1, self.collection)

        # ``owner`` permission cannot be assigned to a group
        data = {"groups": {self.group.pk: "owner"}}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(PermissionModel.objects.filter(group=self.group).exists())

        # User with owner permission can grant ``owner`` permission
        data = {"users": {self.user2.pk: "owner"}}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(
            self.collection.get_permissions(self.user2),
            [Permission.VIEW, Permission.EDIT, Permission.SHARE, Permission.OWNER],
        )

        # User with owner permission can revoke ``owner`` permission
        data = {"users": {self.user2.pk: "edit"}}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertFalse(
            PermissionModel.objects.filter(
                user=self.user2, value=Permission.OWNER.value
            ).exists()
        )

        # User with owner permission cannot remove all owners
        data = {"users": {self.user1.pk: "edit", self.owner.pk: "edit"}}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(resp.data["detail"], "Object must have at least one owner.")

        owner_permissions = self.collection.permission_group.permissions.filter(
            value=Permission.OWNER.value
        )
        owner_count = owner_permissions.count()
        self.assertEqual(owner_count, 2)

        # User can delete his owner permission if there is at least one other owner
        self.assertTrue(owner_permissions.filter(user=self.user1).exists())
        data = {"users": {self.user1.pk: "view"}}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertFalse(owner_permissions.filter(user=self.user1.pk).exists())

    def test_nonexisting_permission(self):
        self.collection.set_permission(Permission.SHARE, self.owner)

        # Add one valid permission to make sure that no permission is applied if any of them is unknown.
        data = {"users": {self.user1.pk: "view", self.user2.pk: "foo"}}
        resp = self._detail_permissions(self.collection.pk, data, self.owner)
        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(resp.data["detail"], "Unknown permission: foo")
        self.assertEqual(self.collection.get_permissions(self.user1), [])
        self.assertEqual(self.collection.get_permissions(self.user2), [])

    def test_nonexisting_user_group(self):
        self.collection.set_permission(Permission.SHARE, self.owner)

        # Whole request should fail, so `user1` shouldn't have any permission assigned.
        data = {"users": {"999": "view", self.user1.pk: "view"}}
        resp = self._detail_permissions(self.collection.pk, data, self.owner)
        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(resp.data["detail"], "Unknown user: 999")
        # Maybe different?
        self.assertEqual(self.collection.get_permissions(self.user1), [])

        # Whole request should fail, so `group` shouldn't have any permission assigned.
        data = {"groups": {"999": "view", self.group.pk: "view"}}
        resp = self._detail_permissions(self.collection.pk, data, self.owner)
        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(resp.data["detail"], "Unknown group: 999")
        self.assertEqual(self.collection.get_permissions(self.group), [])

    def test_share_by_email(self):
        set_permission(Permission.SHARE, self.user1, self.collection)

        # Sharing by a unique e-mail address resolves to that user.
        data = {"users": {self.user2.email: "view"}}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(self.collection.get_permissions(self.user2), [Permission.VIEW])

        # Check if error is raised when trying to share with duplicated email.
        data = {"users": {self.user3.email: "view"}}
        resp = self._detail_permissions(self.collection.pk, data, self.user1)
        self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(
            resp.data["detail"], "Cannot uniquely determine user: user1@test.com"
        )
class DescriptorSchemaPermissionsTest(ResolweAPITestCase):
    """Permission-endpoint tests for DescriptorSchema objects."""

    def setUp(self):
        self.group = Group.objects.create(name="Test group")

        # NOTE(review): resource_name is "collection" inside a
        # DescriptorSchema test -- confirm this is intentional.
        self.resource_name = "collection"
        self.viewset = DescriptorSchemaViewSet

        super().setUp()

        # `self.contributor` (and `self.user` used below) are presumably
        # provided by ResolweAPITestCase -- confirm in the base class.
        self.descriptor_schema = DescriptorSchema.objects.create(
            contributor=self.contributor
        )
        assign_contributor_permissions(self.descriptor_schema)

    def test_set_permissions(self):
        # Can add permissions to users.
        data = {"users": {self.user.pk: "view"}}
        resp = self._detail_permissions(
            self.descriptor_schema.pk, data, self.contributor
        )
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(
            self.descriptor_schema.get_permissions(self.user), [Permission.VIEW]
        )

        # Can add permissions to groups.
        data = {"groups": {self.group.pk: "view"}}
        resp = self._detail_permissions(
            self.descriptor_schema.pk, data, self.contributor
        )
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(
            self.descriptor_schema.get_permissions(self.group), [Permission.VIEW]
        )

        # Can remove permissions from users.
        data = {"users": {self.user.pk: None}}
        resp = self._detail_permissions(
            self.descriptor_schema.pk, data, self.contributor
        )
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(self.descriptor_schema.get_permissions(self.user), [])

        # Can remove permissions from groups.
        data = {"groups": {self.group.pk: None}}
        resp = self._detail_permissions(
            self.descriptor_schema.pk, data, self.contributor
        )
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
        self.assertEqual(self.descriptor_schema.get_permissions(self.group), [])
class PermissionsUtilitiesTest(TestCase):
    """Unit tests for the permission-validation helpers imported from
    resolwe.permissions.utils."""

    def test_filter_owner_permission(self):
        """Check that ``owner`` permission is caught everywhere"""
        User = get_user_model()
        user1 = User.objects.create(username="test_user1", email="user1@test.com")
        obj = DescriptorSchema.objects.create(contributor=user1)
        obj.set_permission(Permission.VIEW, user1)

        data_template = {
            "users": {user1.id: "view"},
            "groups": {1: "edit", 2: "NONE"},
        }

        # No "owner" anywhere -> passes regardless of caller's ownership.
        check_owner_permission(data_template, False, obj)

        # Check that only owner can set owner permission.
        data = deepcopy(data_template)
        data["users"][1] = "owner"
        with self.assertRaises(exceptions.PermissionDenied):
            check_owner_permission(data, False, obj)
        check_owner_permission(data, True, obj)

        # Check that only owner can revoke owner permission.
        obj.set_permission(Permission.OWNER, user1)
        data = deepcopy(data_template)
        data["users"][1] = "edit"
        with self.assertRaises(exceptions.PermissionDenied):
            check_owner_permission(data, False, obj)
        check_owner_permission(data, True, obj)

        # Check that group can not be owner.
        obj.set_permission(Permission.VIEW, user1)
        data = deepcopy(data_template)
        data["groups"][1] = "owner"
        with self.assertRaises(exceptions.ParseError):
            check_owner_permission(data, False, obj)
        with self.assertRaises(exceptions.ParseError):
            check_owner_permission(data, True, obj)

    def test_filter_user_permissions(self):
        """Check that user cannot change his own permissions"""
        data = {
            "users": {
                1: "view",
                2: "NONE",
            }
        }

        with self.assertRaises(exceptions.PermissionDenied):
            check_user_permissions(data, 1)
        with self.assertRaises(exceptions.PermissionDenied):
            check_user_permissions(data, 2)
        # A user not named in the payload may submit it.
        check_user_permissions(data, 3)

    def test_filter_public_permissions(self):
        """Check that public user cannot get to open permissions"""
        # VIEW is the only level the public user may receive.
        data = {"public": "view"}
        check_public_permissions(data)

        data = {"public": "edit"}
        with self.assertRaises(exceptions.PermissionDenied):
            check_public_permissions(data)

        data = {"public": "share"}
        with self.assertRaises(exceptions.PermissionDenied):
            check_public_permissions(data)

        data = {"public": "owner"}
        with self.assertRaises(exceptions.PermissionDenied):
            check_public_permissions(data)
|
24,769 | 3d69cfdb2192a8866e94f00a126e1287e7ad8b6c | #coding=utf-8
import random
import csv
from functools import reduce
import os
import sys
# Unweighted, undirected graph generator.
def unweight_graph(nums, path):
    """Generate a random unweighted, undirected graph and write it to *path*
    as one adjacency row per node ("node neighbour neighbour ...").

    nums -- node count (anything int() accepts)
    path -- output file; if it already exists, '_1' is appended once
    Returns the path actually written.
    """
    nodes = [i for i in range(0, int(nums))]
    l = [[i] for i in nodes]
    for item in l:
        # Each node proposes up to 5 random neighbours (fewer in tiny graphs).
        max_num = 5 if len(nodes) > 5 else len(nodes) - 1
        node = random.sample(nodes, random.randint(0, max_num))
        # Keep only higher-numbered neighbours so each edge is created once,
        # then mirror it onto the neighbour's row for the undirected view.
        node_clean = [i for i in node if i > item[0]]
        for j in node_clean:
            if item[0] not in l[j]:
                l[j].append(item[0])
        item += node_clean
    # NOTE(review): a single '_1' suffix does not guarantee a fresh filename
    # if that name exists too -- confirm the intended collision behaviour.
    if os.path.exists(path):
        path += '_1'
    with open(path, 'w') as f:
        writer = csv.writer(f)
        for i in l:
            i = [str(j) for j in i]
            # Whole row is pre-joined with spaces, so each CSV line is a
            # single space-separated field.
            writer.writerow([' '.join(i)])
    return path
if __name__ == '__main__':
try:
print(unweight_graph(sys.argv[1], sys.argv[2]))
except Exception as e:
print(e) |
24,770 | c91c91e5933cdb0b346063d1004151c4aa297080 | import socket
def server():
    """Run a minimal blocking HTTP server on port 8000 forever.

    Each connection gets a fixed 5-byte "hello" plain-text response.
    """
    # FIX: renamed the local so it no longer shadows the function name.
    listener = socket.socket()
    # Allow quick restarts without waiting out TIME_WAIT.
    listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listener.bind(('', 8000))
    listener.listen(100)
    while True:
        con, _ = listener.accept()
        try:
            # Best-effort read of the request; the body is ignored.
            con.recv(32 * 1024)
            con.sendall(b"""HTTP/1.1 200 OK \r
Content-Type: text/plain\r
Content-Length: 5\r
\r
hello""")
        finally:
            # FIX: the original leaked the connection if recv/sendall raised.
            con.close()
if __name__ == '__main__':
server() |
24,771 | b9e431dd5c1b03e2a289c922a5401cef9433eb89 | '''
Given an array of integers nums and a positive integer k, find whether it's possible to divide this array into k non-empty subsets whose sums are all equal.
Example 1:
Input: nums = [4, 3, 2, 3, 5, 2, 1], k = 4
Output: True
Explanation: It's possible to divide it into 4 subsets (5), (1, 4), (2,3), (2,3) with equal sums.
Note:
1 <= k <= len(nums) <= 16.
0 < nums[i] < 10000.
'''
class Solution:
    """LeetCode 698: partition ``nums`` into ``k`` equal-sum subsets."""

    def canPartitionKSubsets(self, nums: List[int], k: int) -> bool:
        """Return True if nums can be split into k non-empty subsets whose
        sums are all equal.

        Backtracking over elements.  Two cheap prunes (divisibility and the
        max-element bound) plus descending order cut the search space without
        changing any result.
        """
        sum_nums = sum(nums)
        if sum_nums % k != 0:
            return False
        target = sum_nums // k
        # Prune: an element larger than the target can never be placed.
        if nums and max(nums) > target:
            return False
        # Try large elements first so dead branches fail fast
        # (sorted() returns a new list; the caller's list is untouched).
        nums = sorted(nums, reverse=True)
        visited = [False for i in range(len(nums))]
        return self.valid(nums=nums,
                          visited=visited,
                          start=0,
                          group=k,
                          tmp_sum=0,
                          target_sum=target)

    def valid(self, nums: List[int], visited: List[bool], start: int, group: int, tmp_sum: int, target_sum: int):
        """Backtracking helper: try to complete `group` remaining subsets,
        with `tmp_sum` accumulated so far in the current one."""
        if group == 1:
            # Everything left necessarily sums to target_sum.
            return True
        if tmp_sum == target_sum:
            # Current subset done -- start filling the next one from index 0.
            return self.valid(nums=nums,
                              visited=visited,
                              start=0,
                              group=group - 1,
                              tmp_sum=0,
                              target_sum=target_sum)
        for i in range(start, len(nums)):
            if visited[i]:
                continue
            # Prune: overshooting the target can never be repaired.
            if tmp_sum + nums[i] > target_sum:
                continue
            visited[i] = True
            if self.valid(nums=nums,
                          visited=visited,
                          start=i + 1,
                          group=group,
                          tmp_sum=tmp_sum + nums[i],
                          target_sum=target_sum):
                return True
            visited[i] = False
        return False
|
24,772 | 0a36375444d2dbef36f37bac1fcc6eeba7cfce07 | """
This file contains test cases to verify the correct implementation of the
functions required for this project including minimax, alphabeta, and iterative
deepening. The heuristic function is tested for conformance to the expected
interface, but cannot be automatically assessed for correctness.
"""
import unittest
import timeit
import signal
import isolation
import game_agent
from collections import Counter
from copy import deepcopy
from copy import copy
from functools import wraps
# Failure-message templates used by the assertions below, plus the timer
# safety margin for the iterative-deepening test.
WRONG_MOVE = "Your {} search returned an invalid move at search depth {}." + \
             "\nValid choices: {}\nYour selection: {}"

WRONG_NUM_EXPLORED = "Your {} search visited the wrong nodes at search " + \
                     "depth {}. If the number of visits is too large, " + \
                     "make sure that iterative deepening is only running " + \
                     "when the `iterative` flag is set in the agent " + \
                     "constructor.\nMax explored size: {}\nNumber you " + \
                     "explored: {}"

UNEXPECTED_VISIT = "Your {} search did not visit the number of expected " + \
                   "unique nodes at search depth {}.\nMax explored size: " + \
                   "{}\nNumber you explored: {}"

ID_ERROR = "Your ID search returned the wrong move at a depth of {} with " + \
           "a {}ms time limit. {} {} {}"

ID_FAIL = "Your agent did not explore enough nodes during the search; it " + \
          "did not finish the first layer of available moves."

TIMER_MARGIN = 15  # time (in ms) to leave on the timer to avoid timeout
def curr_time_millis():
    """Current reading of timeit's default timer, converted to milliseconds."""
    return timeit.default_timer() * 1000
def timeout(time_limit):
    """
    Function decorator for unittest test cases to specify test case timeout.

    time_limit -- whole seconds passed to signal.alarm().
    NOTE(review): relies on signal.SIGALRM/alarm, which are presumably
    POSIX-only -- confirm before running these tests on Windows.
    """

    class TimeoutException(Exception):
        """ Subclass Exception to catch timer expiration during search """
        pass

    def handler(*args, **kwargs):
        """ Generic handler to raise an exception when a timer expires """
        raise TimeoutException("Test aborted due to timeout. Test was " +
                               "expected to finish in less than {} second(s).".format(time_limit))

    def wrapUnitTest(testcase):

        @wraps(testcase)
        def testWrapper(self, *args, **kwargs):
            # Arm the alarm before the test and always disarm it after,
            # even when the test raises.
            signal.signal(signal.SIGALRM, handler)
            signal.alarm(time_limit)
            try:
                return testcase(self, *args, **kwargs)
            finally:
                signal.alarm(0)

        return testWrapper

    return wrapUnitTest
class EvalTable():
    """Heuristic stub whose score is a fixed per-cell lookup table,
    letting the tests force deterministic search outcomes."""

    def __init__(self, table):
        self.table = table

    def score(self, game, player):
        """Table value at the player's current (row, col) board location."""
        r, c = game.get_player_location(player)
        return self.table[r][c]
class CounterBoard(isolation.Board):
    """isolation.Board that tallies every move explored via forecast_move().

    `counter` and `visited` are deliberately *aliased* (not copied) into
    every board produced by copy()/forecast_move(), so the whole search tree
    accumulates into a single tally on the root board.
    """

    def __init__(self, *args, **kwargs):
        super(CounterBoard, self).__init__(*args, **kwargs)
        self.counter = Counter()   # move -> number of times forecast
        self.visited = set()       # distinct moves forecast at least once

    def copy(self):
        # Attributes such as __player_1__ are presumably defined by
        # isolation.Board (trailing underscores, so no name mangling) --
        # confirm against that module.
        new_board = CounterBoard(self.__player_1__, self.__player_2__,
                                 width=self.width, height=self.height)
        new_board.move_count = self.move_count
        new_board.__active_player__ = self.__active_player__
        new_board.__inactive_player__ = self.__inactive_player__
        new_board.__last_player_move__ = copy(self.__last_player_move__)
        new_board.__player_symbols__ = copy(self.__player_symbols__)
        new_board.__board_state__ = deepcopy(self.__board_state__)
        # Shared on purpose: all copies report into the same counters.
        new_board.counter = self.counter
        new_board.visited = self.visited
        return new_board

    def forecast_move(self, move):
        # Record the move, then return the successor board as usual.
        self.counter[move] += 1
        self.visited.add(move)
        new_board = self.copy()
        new_board.apply_move(move)
        return new_board

    @property
    def counts(self):
        """ Return counts of (total, unique) nodes visited """
        return sum(self.counter.values()), len(self.visited)
class Project1Test(unittest.TestCase):
    """Exercises CustomPlayer's minimax / alphabeta / iterative-deepening
    searches against known node counts and acceptable move sets."""

    def initAUT(self, depth, eval_fn, iterative=False, method="minimax", loc1=(3, 3), loc2=(0, 0), w=7, h=7):
        """Build the agent under test plus a counting board with the two
        opening moves already applied."""
        # NOTE(review): bare reload() is a Python 2 builtin (Python 3 needs
        # importlib.reload) -- consistent with the Python 2 style print
        # statement commented out in test_id below.
        reload(game_agent)
        agentUT = game_agent.CustomPlayer(depth, eval_fn, iterative, method)
        board = CounterBoard(agentUT, 'null_agent', w, h)
        board.apply_move(loc1)
        board.apply_move(loc2)
        return agentUT, board

    @timeout(1)
    # @unittest.skip("Skip minimax test.")  # Uncomment this line to skip test
    def test_minimax(self):
        """ Test CustomPlayer.minimax """
        h, w = 7, 7
        method = "minimax"
        value_table = [[0] * w for _ in range(h)]
        value_table[1][5] = 1
        value_table[4][3] = 2
        value_table[6][6] = 3
        eval_fn = EvalTable(value_table)

        expected_moves = [set([(1, 5)]),
                          set([(3, 1), (3, 5)]),
                          set([(3, 5), (4, 2)])]

        # Exact (total, unique) node counts expected at each depth.
        counts = [(8, 8), (92, 27), (1650, 43)]

        for idx, depth in enumerate([1, 3, 5]):
            agentUT, board = self.initAUT(depth, eval_fn, False, method, loc1=(2, 3), loc2=(0, 0))

            move = agentUT.get_move(board, board.get_legal_moves(), lambda: 1e3)

            num_explored_valid = board.counts[0] == counts[idx][0]
            num_unique_valid = board.counts[1] == counts[idx][1]

            self.assertTrue(num_explored_valid,
                            WRONG_NUM_EXPLORED.format(method, depth, counts[idx][0], board.counts[0]))

            self.assertTrue(num_unique_valid,
                            UNEXPECTED_VISIT.format(method, depth, counts[idx][1], board.counts[1]))

            self.assertIn(move, expected_moves[idx],
                          WRONG_MOVE.format(method, depth, expected_moves[idx], move))

    @timeout(1)
    # @unittest.skip("Skip alpha-beta test.")  # Uncomment this line to skip test
    def test_alphabeta(self):
        """ Test CustomPlayer.alphabeta """
        h, w = 7, 7
        method = "alphabeta"
        value_table = [[0] * w for _ in range(h)]
        value_table[2][5] = 1
        value_table[0][4] = 2
        value_table[1][0] = 3
        value_table[5][5] = 4
        eval_fn = EvalTable(value_table)

        expected_moves = [set([(2, 5)]),
                          set([(2, 5)]),
                          set([(1, 4)]),
                          set([(1, 4), (2, 5)])]

        # Upper bounds here (<=), since pruning order may vary.
        counts = [(2, 2), (26, 13), (552, 36), (10564, 47)]

        for idx, depth in enumerate([1, 3, 5, 7]):
            agentUT, board = self.initAUT(depth, eval_fn, False, method, loc1=(0, 6), loc2=(0, 0))

            move = agentUT.get_move(board, board.get_legal_moves(), lambda: 1e4)

            num_explored_valid = board.counts[0] <= counts[idx][0]
            num_unique_valid = board.counts[1] <= counts[idx][1]

            self.assertTrue(num_explored_valid,
                            WRONG_NUM_EXPLORED.format(method, depth, counts[idx][0], board.counts[0]))

            self.assertTrue(num_unique_valid,
                            UNEXPECTED_VISIT.format(method, depth, counts[idx][1], board.counts[1]))

            self.assertIn(move, expected_moves[idx],
                          WRONG_MOVE.format(method, depth, expected_moves[idx], move))

    @timeout(1)
    # @unittest.skip("Skip alpha-beta pruning test.")  # Uncomment this line to skip test
    def test_alphabeta_pruning(self):
        """ Test pruning in CustomPlayer.alphabeta """
        h, w = 15, 15
        depth = 6
        method = "alphabeta"
        value_table = [[0] * w for _ in range(h)]
        value_table[3][14] = 1
        eval_fn = EvalTable(value_table)

        # Pre-block cells so only a narrow corridor of moves remains.
        blocked_cells = [(0, 9), (0, 13), (0, 14), (1, 8), (1, 9), (1, 14),
                         (2, 9), (2, 11), (3, 8), (3, 10), (3, 11), (3, 12),
                         (4, 9), (4, 11), (4, 13), (5, 10), (5, 12), (5, 13),
                         (5, 14), (6, 11), (6, 13), (9, 0), (9, 2), (10, 3),
                         (11, 3), (12, 0), (12, 1), (12, 3), (12, 4), (12, 5)]
        agentUT, board = self.initAUT(depth, eval_fn, False, method, (0, 14), (14, 0), w, h)
        for r, c in blocked_cells:
            board.__board_state__[r][c] = "X"

        move = agentUT.get_move(board, board.get_legal_moves(), lambda: 1e4)

        expected_move = (2, 13)
        max_visits = (40, 18)

        num_explored_valid = board.counts[0] < max_visits[0]
        num_unique_valid = board.counts[1] <= max_visits[1]

        self.assertTrue(num_explored_valid,
                        WRONG_NUM_EXPLORED.format(method, depth, max_visits[0], board.counts[0]))

        self.assertTrue(num_unique_valid,
                        UNEXPECTED_VISIT.format(method, depth, max_visits[1], board.counts[1]))

        self.assertEqual(move, expected_move,
                         WRONG_MOVE.format(method, depth, expected_move, move))

    @timeout(10)
    # @unittest.skip("Skip iterative deepening test.")  # Uncomment this line to skip test
    def test_id(self):
        """ Test iterative deepening for CustomPlayer.minimax """
        w, h = 11, 11
        method = "minimax"
        value_table = [[0] * w for _ in range(h)]
        value_table[3][0] = 1
        value_table[2][3] = 1
        value_table[4][4] = 2
        value_table[7][2] = 3
        eval_fn = EvalTable(value_table)

        # Node counts and acceptable moves for each completed search horizon.
        depths = ["7+", "6", "5", "4", "3", "2", "1"]
        exact_counts = [((4, 4), set([(2, 3), (3, 0)])),
                        ((16, 6), set([(2, 3), (3, 0)])),
                        ((68, 20), set([(2, 3), (3, 2)])),
                        ((310, 21), set([(2, 3), (3, 2)])),
                        ((1582, 45), set([(3, 0), (3, 2)])),
                        ((7534, 45), set([(3, 0), (3, 2)])),
                        ((38366, 74), set([(0, 3), (2, 3), (3, 0), (3, 2)]))]

        # Halve the budget each round down to the safety margin.
        time_limit = 3200
        while time_limit >= TIMER_MARGIN:
            agentUT, board = self.initAUT(-1, eval_fn, True, method, (1, 1), (0, 0), w, h)
            legal_moves = board.get_legal_moves()
            timer_start = curr_time_millis()
            time_left = lambda : time_limit - (curr_time_millis() - timer_start)
            move = agentUT.get_move(board, legal_moves, time_left)
            finish_time = time_left()

            self.assertTrue(len(board.visited) > 4, ID_FAIL)

            self.assertTrue(finish_time > 0,
                            "Your search failed iterative deepening due to timeout.")

            # print time_limit, board.counts, move

            # NOTE(review): under Python 3 this becomes float division; the
            # commented print above suggests the file targets Python 2.
            time_limit /= 2

            # Skip testing if the search exceeded 7 move horizon
            if (board.counts[0] > exact_counts[-1][0][0] or
                    board.counts[1] > exact_counts[-1][0][1] or
                    finish_time < 5):
                continue

            # Find the deepest horizon the search completed and check the move.
            for idx, ((n, m), c) in enumerate(exact_counts[::-1]):
                if n > board.counts[0]:
                    continue
                self.assertIn(move, c, ID_ERROR.format(depths[idx], 2 * time_limit, move, *board.counts))
                break

    @timeout(1)
    # @unittest.skip("Skip eval function test.")  # Uncomment this line to skip test
    def test_custom_eval(self):
        """ Test output interface of CustomEval """
        player1 = "Player1"
        player2 = "Player2"
        game = isolation.Board(player1, player2)

        heuristic = game_agent.CustomEval()

        self.assertIsInstance(heuristic.score(game, player1), float,
                              "The heuristic function should return a floating point")
# Run the whole suite when executed directly.
if __name__ == '__main__':
    unittest.main()
|
24,773 | ad7da633ae0ef65c81500ae739e1bc79a94b4eee | import datetime
import math
import json

# Convert NOAA-style drifter records into a GeoJSON FeatureCollection,
# keeping only points within one week either side of the sampling window.
features = []
# Close the data file deterministically (the original leaked the handle).
with open('../datasets/drifer_data.dat') as data_file:
    raw = data_file.read()

for counter, line in enumerate(raw.split("\n")):
    if counter < 3:
        # Skip the three header lines.
        continue
    fields = line.split()
    if len(fields) < 7:
        # Skip blank/short lines.
        continue
    # fields[0] is the drifter id (unused here).
    month = int(fields[1])
    day_dec = float(fields[2])  # fractional day of month
    year = int(fields[3])
    lat = fields[4]
    lon = fields[5]

    # Split the fractional day into day / hour / minute components.
    day = int(math.floor(day_dec))
    hour = (day_dec * 24) % 24
    minute = (day_dec * 24 * 60) % 60
    # NOTE(review): strftime('%s') is a platform-specific extension and uses
    # the local timezone -- kept as-is to preserve the original cut-offs.
    timestamp = int(datetime.datetime(year, month, day, int(hour), int(minute)).strftime('%s'))

    # Keep points within +/- one week of the fixed window.
    if timestamp < 1505574000 - 60 * 60 * 24 * 7 or timestamp > 1506805200 + 60 * 60 * 24 * 7:
        continue

    features.append({
        'type': 'Feature',
        'geometry': {
            'type': 'Point',
            # BUG FIX: GeoJSON coordinates must be numbers; the original
            # emitted the raw strings from the data file.
            'coordinates': [float(lon), float(lat)]
        },
        'properties': {
            'timestamp': timestamp * 1000  # already an int; milliseconds
        }
    })

# print() with a single argument parses on both Python 2 and 3.
print(json.dumps({'type': 'FeatureCollection', 'features': features}))
|
24,774 | f53f1ce081a7aa2f8fd019ba46d89b8af8b41953 | def chocolateDistribution(n):
    # NOTE(review): despite the name, this simply returns 2**(n + 1)
    # (e.g. n=5 -> 64).  `sum` also shadows the builtin -- rename if this
    # function grows.
    sum = 2
    for i in range(0, n):
        sum = sum * 2
    return sum
# Smoke check when the module runs.
print(chocolateDistribution(5))
|
24,775 | 04a031ac0a2bbae7f1c0400373b04800366fdbda | normal_tags = (["c-button","c-buttonGroup","c-calendar","c-divider","c-iframe","c-label","c-reveal","c-subView","c-breadcrumb","c-cardBook","c-carousel","c-menu","c-titlebar","c-stack","c-steps","c-checkbox","c-toggle","c-datepicker","c-dropdown","c-customDropdown","c-form","c-input","c-progress","c-radio-group","c-rating","c-select-button","c-select","c-tag-editor","c-textarea","c-dialog","c-layer","c-panel","c-segment","c-sidebar","c-split-pane","c-tab","c-listView","c-nestedList","c-pager","c-table","c-timeLine","c-tree"]) |
24,776 | 33998a9ddbe71ef4445b971cffbee79057bea2b3 | from flask import Flask, render_template, session, request, flash, redirect, url_for
from flask_sqlalchemy import SQLAlchemy
from flask_wtf.csrf import CSRFProtect
from userForm import userForm, gameForm, acceptForm, playTurnForm, updatePassword
from socket import gethostname
import random
app = Flask(__name__)
app.config.from_object('config')
CSRFProtect(app)
db = SQLAlchemy(app)
# Register blueprints
from gameDB import gameDB
app.register_blueprint(gameDB)
from gameDB import GameFunctions
from gameDB import PlayersInGame, Game, GameInstance, Card, CardInstance, CardsInGame, Pile, GameLog
from userDB import userDB
app.register_blueprint(userDB)
from userDB import User
from uno import uno
app.register_blueprint(uno)
from uno.uno import Uno
passedUserName = None
@app.route("/")
def main():
if('username' in session):
passedUserName = session['username']
else:
passedUserName = None
return render_template('index.html', passedUserName=passedUserName)
@app.route("/newgame", methods = ['GET', 'POST'])
def newGame():
passedUserName = session['username']
users = User.query.filter(User.username != passedUserName).all()
existingUser = User.query.filter_by(username=passedUserName).first()
newGameForm = gameForm()
newGameForm.player2.choices = [(u.id, u.username) for u in User.query.filter(User.username != passedUserName)]
newGameForm.player3.choices = [(u.id, u.username) for u in User.query.filter(User.username != passedUserName)]
newGameForm.player4.choices = [(u.id, u.username) for u in User.query.filter(User.username != passedUserName)]
newGameForm.player2.choices.insert(0, ('0', 'Select'))
newGameForm.player3.choices.insert(0, ('0', 'Select'))
newGameForm.player4.choices.insert(0, ('0', 'Select'))
if request.method == 'POST':
gameName = newGameForm.game.data
#THIS IS WHERE WE KEEP TRACK OF WHICH GAME IS STARTING MODIFY FOR FUTURE GAMES
baseGameID = 2
if gameName == 'uno':
baseGameID = 2
gamePlayers = newGameForm.numberOfPlayers.data
playerNum2 = newGameForm.player2.data
playerNum3 = newGameForm.player3.data
playerNum4 = newGameForm.player4.data
if gamePlayers == '03':
if playerNum2 == 0 or playerNum3 == 0 or playerNum2 == playerNum3:
return render_template('error.html')
else:
inviteList = [playerNum2, playerNum3]
GameFunctions.initGameInstance(baseGameID, existingUser.id, inviteList)
return redirect(url_for('login'))
if gamePlayers == '04':
if playerNum2 == 0 or playerNum3 == 0 or playerNum4 == 0 or playerNum2 == playerNum3 or playerNum2 == playerNum4 or playerNum4 == playerNum3:
return render_template('error.html')
else:
inviteList = [playerNum2, playerNum3, playerNum4]
GameFunctions.initGameInstance(baseGameID, existingUser.id, inviteList)
return redirect(url_for('login'))
else:
if playerNum2 == 0:
return render_template('error.html')
else:
inviteList = [playerNum2]
GameFunctions.initGameInstance(baseGameID, existingUser.id, inviteList)
return redirect(url_for('login'))
return render_template('newgame.html', passedUserName=passedUserName, users=users, newGameForm=newGameForm)
@app.route("/newuser", methods = ['GET', 'POST'])
def newUser():
logInForm = userForm()
if request.method == 'POST':
if logInForm.validate() == False:
return render_template('newuser.html', form = logInForm)
else:
existingUser = User.query.filter_by(username=logInForm.username.data).first()
if(existingUser):
flash('Sorry, username already exists.')
return render_template('newuser.html', form = logInForm)
else:
#check entered passwords and if they match add user to the database
if(logInForm.password.data != logInForm.password2.data):
flash('The passwords do not match.')
return render_template('newuser.html', form = logInForm)
else:
#addUserToDB(logInForm.username.data, logInForm.password.data)
newUser = User(logInForm.username.data, logInForm.password.data)
db.session.add(newUser)
db.session.commit()
session['username'] = logInForm.username.data
return redirect(url_for('login'))
else:
return render_template('newuser.html', form = logInForm)
@app.route("/profile", methods = ['GET', 'POST'])
def profile():
passedUserName = session['username']
updatePW = updatePassword()
if request.method == 'POST':
if updatePW.validate() == False:
return render_template('profile.html', updatePW=updatePW)
else:
existingUser = User.query.filter_by(username=passedUserName).one()
if(not existingUser.check_password(updatePW.PW.data)):
flash('There was a problem with the password you entered.')
return render_template('profile.html', updatePW=updatePW)
else:
#check entered passwords match
if(updatePW.NewPW.data != updatePW.NewPW2.data):
flash('The passwords do not match.')
return render_template('profile.html', updatePW=updatePW)
else:
existingUser.set_password(updatePW.NewPW.data)
db.session.commit()
flash('Password sucessfully updated.')
return render_template('profile.html', updatePW=updatePW)
return render_template('profile.html', passedUserName=passedUserName, updatePW=updatePW)
@app.route("/signin", methods = ['GET', 'POST'])
def signIn():
logInForm = userForm()
if request.method == 'POST':
if logInForm.validate() == False:
flash('There was a problem with data that was entered.')
return render_template('signin.html', form = logInForm)
else:
existingUser = User.query.filter_by(username=logInForm.username.data).first()
if(not existingUser.check_password(logInForm.password.data)):
flash('There was a problem with the password you entered.')
return render_template('signin.html', form = logInForm)
if(existingUser):
session['username'] = existingUser.username
return redirect(url_for('login'))
else:
flash('Username does not exist.')
return render_template('signin.html', form = logInForm)
else:
return render_template('signin.html', form = logInForm)
@app.route("/statistics")
def statistics():
if('username' in session):
passedUserName = session['username']
existingUser = User.query.filter_by(username=passedUserName).first()
userGames = existingUser.games_played
userWins = existingUser.wins
if (userGames > 0):
userRecord = 100 * (userWins/userGames)
else:
userRecord = 0
return render_template('statistics.html', existingUser=existingUser, userRecord=userRecord)
@app.route("/rules")
def rules():
if('username' in session):
passedUserName = session['username']
else:
passedUserName = None
return render_template('rules.html', passedUserName=passedUserName)
@app.route("/login")
def login():
if('username' in session):
passedUserName = session['username']
existingUser = User.query.filter_by(username=passedUserName).first()
gameids = PlayersInGame.query.filter(PlayersInGame.user_id == existingUser.id).all()
users = User.query.all()
players = PlayersInGame.query.all()
playableGame = []
gname = ''
for game in gameids:
thisGame = GameFunctions.gamePlay(game.game_instance)
gameInfo = GameFunctions.getGameInstance(game.game_instance)
gname = gameInfo.Game.name
if(thisGame.isPendingInvites() == False and gameInfo.status != 'Ended'):
playableGame.append(game)
if(existingUser.role == 'Admin'):
return render_template('adminLogin.html', passedUserName=passedUserName, gname=gname, gameids=gameids, playableGame=playableGame, users=users, players=players)
else:
return render_template('login.html', passedUserName=passedUserName, gname=gname, gameids=gameids, playableGame=playableGame, users=users, players=players)
else:
return render_template('index.html')
@app.route("/delete", methods = ['DELETE'])
def adminDelete():
passedUserName = session['username']
users = User.query.all()
games = GameFunctions.getGameInstance(game_id)
return render_template('delete.html', passedUserName=passedUserName, users=users)
@app.route("/logout")
def logout():
session['username'] = None
return redirect(url_for('main'))
@app.route("/acceptgame/<game_id>", methods = ['GET', 'POST'])
def acceptgame(game_id):
acceptGameForm = acceptForm()
passedUserName = session['username']
existingUser = User.query.filter_by(username=passedUserName).first()
game = GameInstance.query.filter_by(id=game_id).first()
gname = Game.query.filter_by(id=game.base_game).first()
if request.method == 'POST':
inviteStatus = acceptGameForm.status.data
thisGame = GameFunctions.gamePlay(game_id)
if inviteStatus == 'Accept':
#thisGame = PlayersInGame.query.filter(game_id).first()
thisGame.acceptInvite(existingUser.id)
return redirect(url_for('login'))
elif inviteStatus == 'Decline':
thisGame.declineInvite(existingUser.id)
return redirect(url_for('login'))
else:
return redirect(url_for('login'))
else:
return render_template('acceptgame.html', passedUserName=passedUserName, gname=gname, acceptGameForm=acceptGameForm, game_id=game_id)
@app.route("/winner", methods = ['GET', 'POST'])
def winner():
passedUserName = session['username']
return render_template('winner.html', passedUserName=passedUserName)
@app.route("/playturn/<game_id>", methods = ['GET', 'POST'])
def playturn(game_id):
dump = ''
passedUserName = session['username']
existingUser = User.query.filter_by(username=passedUserName).first()
playersThisGame = PlayersInGame.query.filter_by(game_instance=game_id).order_by(PlayersInGame.turn_order).all()
users = User.query.all()
usersPlaying = []
for p in playersThisGame:
for u in users:
if p.user_id == u.id:
usersPlaying.append(u)
# GameInstance/Game object
gameInfo = GameFunctions.getGameInstance(game_id)
thisGame = GameFunctions.gamePlay(game_id)
turnForm = playTurnForm()
# Invalid Game ID
if not gameInfo:
dump = "Game Instance not found."
return redirect(url_for('login'))
# GameInstance is Uno
if gameInfo.Game.name == 'Uno':
game = Uno(game_id)
active_player = game.getCurrentPlayerID()
if request.method == 'POST':
if 'drawCard' in request.form:
game.draw(active_player)
active_player = game.getCurrentPlayerID()
elif 'playCard' in request.form:
wc = None
# Wild card was played, get color
if 'wildColor' in request.form:
wc = request.form["wildColor"]
if wc == 'Select':
#Mif the player doesn't pick the computer picks.
colors = ['Red', 'Yellow', 'Green', 'Blue']
wc = random.choice(colors)
# playCard returned false; move is illegal
if game.playCard(active_player, request.form["cid"], wildColor=wc):
active_player = game.getCurrentPlayerID()
else:
flash('Can\'t play that card')
# Makes moves for AI until next human player's turn
while game.getCurrentPlayer().User.role == 'AI':
game.autoplay()
active_player = game.getCurrentPlayerID()
g = game.getThisGame(existingUser.id)
discard = g["Discard Top"]
players = g["Players"]
hand = g["Player Hand"]
turnForm.play.choices = [(c.id, c.Card.name, c.Card.img_front) for c in hand]
deck_count = g["Deck Count"]
discard_count = g["Discard Count"]
logs = game.getLogs()
if gameInfo.status == 'Ended':
return redirect(url_for('statistics'))
else:
dump = "That is not a game we have right now."
return redirect(url_for('login'))
return render_template('playturn.html', existingUser=existingUser, turnForm=turnForm, players=players, usersPlaying=usersPlaying, dump=dump, logs=logs, discard=discard, deck=True, deck_count=deck_count, discard_count=discard_count, hand=hand, active=active_player, endgame=True)
@app.route("/error", methods = ['GET', 'POST'])
def error():
if('username' in session):
passedUserName = session['username']
else:
passedUserName = None
return render_template('error.html', passedUserName=passedUserName)
@app.errorhandler(404)
def page_not_found(e):
    """Return a custom 404 error."""
    # Flask treats a (body, status) tuple as a complete response.
    body = 'Sorry, nothing at this URL.'
    return body, 404
if __name__ == "__main__":
#db.create_all()
if 'liveconsole' not in gethostname():
app.run(debug = True)
#app.run()
|
24,777 | a6cc08392dc96955df5680b32e3ca6e12f434e66 | # -*- coding: utf-8 -*-
import pandas as pd
import talib as tb
def SMA(x=pd.DataFrame(),k=30):
    """Column-wise simple moving average over a k-row window (via TA-Lib).

    NOTE(review): the mutable DataFrame default is shared across calls —
    callers appear to always pass x explicitly, but consider ``x=None``.
    """
    y=pd.DataFrame(columns = x.columns)
    for i in range(0,len(x.columns)):
        # tb.SMA operates on a 1-D ndarray; the first k-1 entries come back NaN.
        y.iloc[:,i]=pd.Series(tb.SMA((x.iloc[:,i]).values,timeperiod=k))
    y2=y.set_index(x.index)
    return y2
def EMA(x=pd.DataFrame(),k=30):
y=pd.DataFrame(columns = x.columns)
for i in range(0,len(x.columns)):
y.iloc[:,i]=pd.Series(tb.EMA((x.iloc[:,i]).values,timeperiod=k))
y2=y.set_index(x.index)
return y2
def csRank(x=pd.DataFrame()):
    """Cross-sectional percentile rank: rank each row's values across columns."""
    return x.rank(axis=1, pct=True)
def csMean(x=pd.DataFrame()):
    """Broadcast each row's cross-sectional mean into every column."""
    out = pd.DataFrame(columns=x.columns, index=x.index)
    row_mean = x.mean(axis=1)
    for col in x.columns:
        out[col] = row_mean
    return out
def tsRank(x=pd.DataFrame(),k=30):
    """Time-series percentile rank of the latest value within a trailing k-row window.

    Rows before the first complete window remain NaN.
    """
    y2=pd.DataFrame(index=x.index,columns=x.columns)
    for i in range(k,len(x)+1):
        # Rank all k rows in the window, then keep only the last row's rank.
        y=(x.iloc[i-k:i,:]).rank(axis=0,pct=True)
        y2.iloc[i-1,:]=y.iloc[-1,:]
    return y2
def Sum(x=pd.DataFrame(),k=30):
    """Rolling k-row sum; rows before the first full window stay NaN."""
    out = pd.DataFrame(index=x.index, columns=x.columns)
    for end in range(k, len(x) + 1):
        window = x.iloc[end - k:end, :]
        out.iloc[end - 1, :] = window.sum(axis=0)
    return out
def tsMax(x=pd.DataFrame(),k=30):
    """Rolling k-row maximum; rows before the first full window stay NaN."""
    out = pd.DataFrame(index=x.index, columns=x.columns)
    for end in range(k, len(x) + 1):
        window = x.iloc[end - k:end, :]
        out.iloc[end - 1, :] = window.max(axis=0)
    return out
def csMax(x=pd.DataFrame()):
    """Broadcast each row's cross-sectional maximum into every column."""
    out = pd.DataFrame(columns=x.columns, index=x.index)
    row_max = x.max(axis=1)
    for col in x.columns:
        out[col] = row_max
    return out
def tsMin(x=pd.DataFrame(),k=30):
    """Rolling k-row minimum; rows before the first full window stay NaN."""
    out = pd.DataFrame(index=x.index, columns=x.columns)
    for end in range(k, len(x) + 1):
        window = x.iloc[end - k:end, :]
        out.iloc[end - 1, :] = window.min(axis=0)
    return out
def csMin(x=pd.DataFrame()):
    """Broadcast each row's cross-sectional minimum into every column."""
    out = pd.DataFrame(columns=x.columns, index=x.index)
    row_min = x.min(axis=1)
    for col in x.columns:
        out[col] = row_min
    return out
def stdDev(x=pd.DataFrame(),k=30):
y=pd.DataFrame(columns = x.columns)
for i in range(0,len(x.columns)):
y.iloc[:,i]=pd.Series(tb.STDDEV((x.iloc[:,i]).values,timeperiod=k))
y2=y.set_index(x.index)
return y2
def tsZscore(x=pd.DataFrame(),k=30):
    """Rolling z-score: (x - SMA(x, k)) / stdDev(x, k) over a k-row window.

    Fix: removed a dead DataFrame allocation that was immediately
    overwritten by the computed result.
    """
    return (x - SMA(x, k)) / stdDev(x, k)
def delay(x=pd.DataFrame(),k=30):
    """Shift every column down by k rows (the first k rows become NaN)."""
    return x.shift(k)
def delta(x=pd.DataFrame(),k=30):
    """k-period difference: x minus x shifted down by k rows."""
    # Inlines the sibling delay() helper: delay(x, k) is x.shift(periods=k).
    return x - x.shift(periods=k)
def momentum(x=pd.DataFrame(),k=30):
y=pd.DataFrame(columns = x.columns)
for i in range(0,len(x.columns)):
y.iloc[:,i]=pd.Series(tb.MOM((x.iloc[:,i]).values,timeperiod=k))
y2=y.set_index(x.index)
return y2
def RSI(x=pd.DataFrame(),k=30):
y=pd.DataFrame(columns = x.columns)
for i in range(0,len(x.columns)):
y.iloc[:,i]=pd.Series(tb.RSI((x.iloc[:,i]).values,timeperiod=k))
y2=y.set_index(x.index)
return y2
|
24,778 | 0f5d659f50682dfd3b5b291ab419f8caf01a4e16 | from django.shortcuts import render, redirect
from .models import*
from django.http import JsonResponse
import json
from django.views.decorators.csrf import csrf_exempt
import datetime
from .authentication import valid,SendMailWithHtml,senddeliverymail
from random import randint
from django.contrib.auth import login, authenticate
from django.contrib.auth.forms import UserCreationForm
from django.template.loader import render_to_string
# Create your views here.
@csrf_exempt
def updateItem(request):
if request.user.is_authenticated:
customer, created = Customer.objects.get_or_create(user=request.user)
customer.save()
order, created = Order.objects.get_or_create(
customer=customer, complete=False)
order.save()
else:
if "orderId" in request.session:
try:
order = Order.objects.get(pk=request.session["orderId"])
print("order found")
except:
order = Order(complete=False)
order.save()
request.session["orderId"] = order.id
print("couln't find order")
else:
order = Order(complete=False)
order.save()
request.session["orderId"] = order.id
print("created new order")
try:
data = json.loads(request.body)
except:
return JsonResponse("couldn't add item error-code:1", safe=False)
productID = data['productID']
action = data["action"]
print(productID, action, request.user)
product = Product.objects.get(pk=productID)
orderItem, created = OrderItem.objects.get_or_create(
product=product, order=order)
if not created:
if action == "add":
orderItem.quantity += 1
msg = "item added"
elif action == "remove":
orderItem.quantity -= 1
msg = "item removed"
if orderItem.quantity == 0:
order.orderitem_set.remove(orderItem)
print("removed")
orderItem.delete()
order.save()
return JsonResponse("Item deleted", safe=False)
orderItem.save()
return JsonResponse(msg, safe=False)
def store(request):
if request.user.is_authenticated:
customer = request.user.customer
order, created = Order.objects.get_or_create(
customer=customer, complete=False)
else:
if "orderId" in request.session:
try:
order = Order.objects.get(pk=request.session["orderId"])
print("order found")
except:
order = Order(complete=False)
order.save()
request.session["orderId"] = order.id
print("couln't find order")
else:
order = Order(complete=False)
order.save()
request.session["orderId"] = order.id
print("created new order")
if "completed_orders" not in request.session:
request.session["completed_orders"] = []
products = Product.objects.all()
context = {"products": products, "order": order}
return render(request, "store/store.html", context)
def cart(request):
    """Render the shopping-cart page for the current (possibly anonymous) visitor."""
    if request.user.is_authenticated:
        customer = request.user.customer
        order, created = Order.objects.get_or_create(
            customer=customer, complete=False)
    else:
        # Anonymous visitors track their open order via the session.
        if "orderId" in request.session:
            try:
                order = Order.objects.get(pk=request.session["orderId"])
                print("order found")
            except:
                # Stale or invalid session id: start a fresh order.
                order = Order(complete=False)
                order.save()
                request.session["orderId"] = order.id
                print("couln't find order")
        else:
            order = Order(complete=False)
            order.save()
            request.session["orderId"] = order.id
            print("created new order")
    items = order.orderitem_set.all()
    context = {"items": items, "order": order}
    return render(request, "store/cart.html", context)
def checkout(request):
    """Render the checkout page for the current cart.

    Bug fix: the original dropped the result of ``redirect()`` (the response
    was never returned) and then crashed with ``NameError`` because ``order``
    was unbound on those paths; both anonymous fallbacks now return.
    """
    if request.user.is_authenticated:
        customer = request.user.customer
        order, created = Order.objects.get_or_create(
            customer=customer, complete=False)
    else:
        if "orderId" in request.session:
            try:
                order = Order.objects.get(pk=request.session["orderId"])
            except Exception:
                return redirect("/")
        else:
            return redirect("/")
    items = order.orderitem_set.all()
    # An empty cart falls back to the store page instead of checkout.
    if len(items) == 0:
        return store(request)
    context = {"items": items, "order": order}
    return render(request, "store/checkout.html", context)
def submitorder(request):
if request.method == "POST":
if request.user.is_authenticated:
customer = request.user.customer
order, created = Order.objects.get_or_create(
customer=customer, complete=False)
else:
customer, created = Customer.objects.get_or_create(
name=request.POST["name"], email=request.POST["email"])
customer.save()
order = Order.objects.get(pk=request.session["orderId"])
order.customer = customer
order.save()
print(order.orderitem_set.all())
order.total = order.total_amount
order.transaction_id = datetime.datetime.now().timestamp()
shippingAddress = ShippingAddress(
customer=customer,
order=order,
address=request.POST["address"],
city=request.POST["city"],
state=request.POST["state"],
zipcode=request.POST["zipcode"],
country=request.POST["country"]
)
order.complete = True
order.save()
shippingAddress.save()
html = render_to_string("email/order2.html",{"ship":shippingAddress})
SendMailWithHtml(customer.email,html,"Order Received")
# sendemail(customer.email,text,"Order received")
html = render_to_string("email/delivered.html",{"ship":shippingAddress})
senddeliverymail(customer.email,html)
if not request.user.is_authenticated:
request.session["completed_orders"].append(
request.session["orderId"])
print("completed order :", request.session["completed_orders"])
del request.session["orderId"]
return redirect("/")
def register(request, error="", success="", con={}):
if not request.user.is_authenticated:
if request.method == "GET":
context = {"error": error, "success": success}
context.update(con)
return render(request, "registration/register.html", context)
else:
form = UserCreationForm(request.POST)
if form.is_valid() and "email" in request.session and "name" in request.session:
form.save()
name = str(request.session["name"])
username = form.cleaned_data.get('username')
raw_password = form.cleaned_data.get('password1')
user = authenticate(username=username, password=raw_password)
login(request, user)
customer = Customer.objects.create(
user=user, name=name, email=request.session["email"])
return redirect("/")
request.method = "GET"
return register(request, error=form.errors)
redirect("/register")
return redirect("/")
def loginView(request, error=""):
if not request.user.is_authenticated:
if request.method == "GET":
return render(request, "registration/login.html", {"error": error})
else:
P = request.POST
if "username" in P and "password" in P:
username = P["username"]
password = P["password"]
user = authenticate(username=username, password=password)
if user is not None:
login(request, user)
return redirect("/")
else:
request.method = "GET"
return loginView(request, error="Incorrect Username or Password")
return redirect("/")
def logoutView(request):
return redirect("/accounts/logout")
def OTP(request, action):
if not request.user.is_authenticated and not "verified" in request.session:
if request.method == "GET":
if action == "send" and "email" in request.GET and "name" in request.GET:
print("block 1")
email = request.GET["email"]
if valid(email):
name = request.GET["name"]
otp = randint(100000, 999999)
text = render_to_string("email/otp.html",{"name":name,"otp":otp})
SendMailWithHtml(email, text,'one time password')
request.session["name"]= name
request.session["OTP"] = str(otp)
request.session["email"] = email
return register(request, success="OTP send", con={"email": email,"name":name})
elif action == "verify" and "OTP" in request.GET and "OTP" in request.session:
if request.session["OTP"] == request.GET["OTP"]:
request.session["verified"] = True
return register(request, success="OTP verified")
elif "email" in request.session:
return register(request, error="INVALID OTP ! please try resending!", con={"email": request.session["email"]})
return register(request, error="invalid information")
return redirect("/")
def product(request,productID):
if request.user.is_authenticated:
customer = request.user.customer
order, created = Order.objects.get_or_create(
customer=customer, complete=False)
else:
if "orderId" in request.session:
try:
order = Order.objects.get(pk=request.session["orderId"])
print("order found")
except:
order = Order(complete=False)
order.save()
request.session["orderId"] = order.id
print("couln't find order")
else:
order = Order(complete=False)
order.save()
request.session["orderId"] = order.id
print("created new order")
if "completed_orders" not in request.session:
request.session["completed_orders"] = []
try:
prod = Product.objects.get(pk=productID)
except:
return store(request)
return render(request,"store/product.html",{"product":prod,"order":order}) |
24,779 | 5bca74405e0da01a9bcda4e0612ac69fdfec9915 | import os
import pandas as pd
if __name__ == '__main__':
columns = [
'display_text_NL',
'display_text_DE',
'rwth',
'mumc',
'value_type',
'options',
'default_option',
'category_def',
]
dd_file = '/Volumes/USB_SECURE1/Data/ESPRESSO/DataDict_LapLiver_v1.1.xlsx'
dd_df = dfs = pd.read_excel(dd_file, 'Attributes', comment='#')
dd_df.drop(columns, axis=1, inplace=True)
print(dd_df.columns)
out_dir = '/Volumes/USB_SECURE1/Data/ESPRESSO/Patients_LapLiver'
id_file = '/Volumes/USB_SECURE1/Data/ESPRESSO/Biobank_Patiëntennummers_en_SIDs.xlsx'
id_df = pd.read_excel(id_file, 'IDs', comment='#', index_col='SID')
for idx, row in id_df.iterrows():
if row['lap_liver'] == 1:
f = '{}.xlsx'.format(idx)
f = os.path.join(out_dir, f)
dd_df.to_excel(f, index=False)
print(f)
|
24,780 | 186e9d11cc1f7e66ca4e761a0dbb47a22dee8357 |
def handler(request):
    """No-op request handler stub; accepts any request and returns None."""
    return None
24,781 | d1aed463114e298aebd544b454c9211f4b830cd9 | # Taking number from user and checking if it is even or odd
# Taking range from user and finding all the even and odd numbers present in that range
# Using While Loop
# Taking range from user and finding all the even and odd numbers present in that range
# Using For Loop
evenNumber = 4 |
24,782 | 04747b07d211077b486c1a386d7b7a6dc646b21a | from sys import argv
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.patches import Polygon
def get_data(fil) :
    """Read one float per line from the file at path *fil* and return a list.

    Fix: the original opened the file in "r+" mode and never closed it;
    a with-block now closes the handle, and plain "r" suffices since
    nothing is written.
    """
    with open(fil, "r") as handle:
        # Iterating the handle yields lines until EOF, matching the original
        # readline loop; float() tolerates the trailing newline.
        return [float(line) for line in handle]
numBoxes = 16
xaxis = []
Names = ["PCA IVAG, ","PCA, ","RP IVAG, ","RP, "]
for i in [4,8,16,32] :
for j in Names :
xaxis.append(j + str(i))
data = []
argv.pop(0)
for fil in argv :
data.append(get_data(fil))
print fil
##
## matplotlib.org/examples/pylab_examples/boxplot_demo2.html
##
fig, ax1 = plt.subplots(figsize=(10, 6))
fig.canvas.set_window_title('A Boxplot Example')
plt.subplots_adjust(left=0.075, right=0.95, top=0.9, bottom=0.25)
bp = plt.boxplot(data, notch=0, sym='+', vert=1, whis=1.5, widths=1)
plt.setp(bp['boxes'], color='black')
plt.setp(bp['whiskers'], color='black')
plt.setp(bp['fliers'], color='red', marker='+')
# Add a horizontal grid to the plot, but make it very light in color
# so we can use it for reading data values but not be distracting
ax1.yaxis.grid(True, linestyle='-', which='major', color='lightgrey',
alpha=0.5)
# Hide these grid behind plot objects
ax1.set_axisbelow(True)
ax1.set_title('Comparison of various methods ISI values')
ax1.set_xlabel('Preprocessing and Number of subjects')
ax1.set_ylabel('LOG(ISI)')
ax1.set_yscale('log')
## Now fill the boxes with desired colors
#boxColors = ['darkkhaki', 'royalblue']
#numBoxes = numDists*2
#medians = list(range(numBoxes))
#for i in range(numBoxes):
# box = bp['boxes'][i]
# boxX = []
# boxY = []
# for j in range(5):
# boxX.append(box.get_xdata()[j])
# boxY.append(box.get_ydata()[j])
# boxCoords = list(zip(boxX, boxY))
# # Alternate between Dark Khaki and Royal Blue
# k = i % 2
# boxPolygon = Polygon(boxCoords, facecolor=boxColors[k])
# ax1.add_patch(boxPolygon)
# # Now draw the median lines back over what we just filled in
# med = bp['medians'][i]
# medianX = []
# medianY = []
# for j in range(2):
# medianX.append(med.get_xdata()[j])
# medianY.append(med.get_ydata()[j])
# plt.plot(medianX, medianY, 'k')
# medians[i] = medianY[0]
# # Finally, overplot the sample averages, with horizontal alignment
# # in the center of each box
# plt.plot([np.average(med.get_xdata())], [np.average(data[i])],
# color='w', marker='*', markeredgecolor='k')
# Set the axes ranges and axes labels
ax1.set_xlim(0.5, numBoxes + 0.5)
top = 1.0
bottom = -1.0
ax1.set_ylim([0.01, 1])
xtickNames = plt.setp(ax1, xticklabels=xaxis)
#plt.setp(xtickNames, rotation=45, fontsize=8)
plt.setp(xtickNames, rotation=45, fontsize=8)
# Due to the Y-axis scale being different across samples, it can be
# hard to compare differences in medians across the samples. Add upper
# X-axis tick labels with the sample medians to aid in comparison
# (just use two decimal places of precision)
pos = np.arange(numBoxes) + 1
#upperLabels = [str(np.round(s, 2)) for s in medians]
#weights = ['bold', 'semibold']
#for tick, label in zip(range(numBoxes), ax1.get_xticklabels()):
# k = tick % 2
# ax1.text(pos[tick], top - (top*0.05), upperLabels[tick],
# horizontalalignment='center', size='x-small', weight=weights[k],
# color=boxColors[k])
# Finally, add a basic legend
plt.show()
fig.savefig('test.png')
|
24,783 | bf3dfe34ff60d6257846ef157bae9adf994c55fe | from django.db import models
class Courses(models.Model):
    """A single course section offering in a given semester."""
    # Human-readable semester label, e.g. "Fall 2020" — presumably; confirm.
    semester_text = models.CharField(max_length = 100)
    # Course reference number; default 100 looks like a placeholder — TODO confirm.
    crn = models.IntegerField(default = 100)
    subject = models.CharField(max_length = 100)
    course_Number = models.IntegerField(default = 100)
    section = models.IntegerField(default = 100)
    title = models.CharField(max_length = 100)
|
24,784 | 9ee3f48e418038a359163d1cfce21dafee727a3a | import abc
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
LOSSES = ["VAE", "betaB"]
RECON_DIST = ["bernoulli", "laplace", "gaussian"]
def get_loss_fn(loss_name, **kwargs_parse):
    """Build the loss callable named by *loss_name* ('VAE' or 'betaB').

    Fix: ``if loss_name = 'VAE'`` used assignment instead of comparison,
    which is a SyntaxError; it now uses ``==``.

    Raises ValueError for any name without a branch below.
    """
    kwargs_all = dict(rec_dist=kwargs_parse['rec_dist'], steps_anneal=kwargs_parse['reg_anneal'])
    if loss_name == 'VAE':
        # Plain VAE is the beta-VAE objective with beta fixed to 1.
        return BetaHLoss(beta=1, **kwargs_all)
    elif loss_name == 'betaB':
        return BetaBLoss(C_init=kwargs_parse['betaB_initC'],
                         C_fin=kwargs_parse['betaB_finC'],
                         gamma=kwargs_parse['betaB_G'],
                         **kwargs_all)
    else:
        # Sanity check: anything listed in LOSSES should have a branch above.
        assert loss_name not in LOSSES
        raise ValueError("Unknown loss: {}".format(loss_name))
class BaseLoss(abc.ABC):
    """Abstract base class for VAE losses.

    Tracks the number of training steps and decides on which calls the
    loss components should be logged into a storer dict.

    Fix: the abstract ``__call__`` had no body at all (a SyntaxError);
    it now carries a docstring as its body.
    """

    def __init__(self, record_loss_every=50, rec_dist="bernoulli", steps_anneal=0):
        self.n_train_steps = 0
        self.record_loss_every = record_loss_every
        self.rec_dist = rec_dist
        self.steps_anneal = steps_anneal

    @abc.abstractmethod
    def __call__(self, data, recon_data, latent_dist, is_train, storer, **kwargs):
        """Compute the loss; implemented by concrete subclasses."""

    def _pre_call(self, is_train, storer):
        # Count optimisation steps and decide whether this call should log:
        # log on the first step of every record_loss_every window, and
        # always during evaluation; otherwise suppress by returning None.
        if is_train:
            self.n_train_steps += 1
        if not is_train or self.n_train_steps % self.record_loss_every == 1:
            storer = storer
        else:
            storer = None
        return storer
class BetaHLoss(BaseLoss):
    r"""
    Compute the Beta-VAE loss as in [1]

    Parameters
    ----------
    beta : float, optional
        Weight of the kl divergence.
    kwargs:
        Additional arguments for `BaseLoss`, e.g. rec_dist`.

    References
    ----------
    [1] Higgins, Irina, et al. "beta-vae: Learning basic visual concepts with
    a constrained variational framework." (2016).
    """

    def __init__(self, beta=4, **kwargs):
        super().__init__(**kwargs)
        self.beta = beta

    def __call__(self, data, recon_data, latent_dist, is_train, storer, **kwargs):
        """Return reconstruction + beta-weighted (annealed) KL loss.

        Fix: ``_pre_call`` is an instance method and was called without
        ``self.`` (NameError at runtime); this now matches BetaBLoss.
        """
        storer = self._pre_call(is_train, storer)
        reconst_loss = _reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist)
        kl_loss = _kl_normal_loss(*latent_dist, storer)
        # Ramp the KL weight from 0 to 1 over steps_anneal training steps.
        anneal_reg = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train else 1)
        loss = reconst_loss + anneal_reg * (self.beta * kl_loss)
        if storer is not None:
            storer['loss'].append(loss.item())
        return loss
class BetaBLoss(BaseLoss):
    r"""
    Compute the Beta-VAE loss with annealed capacity as in [1]

    Parameters
    ----------
    C_init : float, optional
        Starting annealed capacity C.
    C_fin : float, optional
        Final annealed capacity C.
    gamma : float, optional
        Weight of the KL divergence term.
    kwargs:
        Additional arguments for `BaseLoss`, e.g. rec_dist`.

    References
    ----------
    [1] Burgess, Christopher P., et al. "Understanding disentangling in
    $\beta$-VAE." arXiv preprint arXiv:1804.03599 (2018).
    """

    def __init__(self, C_init=0., C_fin=20., gamma=100., **kwargs):
        super().__init__(**kwargs)
        self.C_init = C_init
        self.C_fin = C_fin
        self.gamma = gamma

    def __call__(self, data, recon_data, latent_dist, is_train, storer, **kwargs):
        storer = self._pre_call(is_train, storer)
        rec_loss = _reconstruction_loss(data, recon_data, storer=storer,
                                        distribution=self.rec_dist)
        kl = _kl_normal_loss(*latent_dist, storer)
        # Anneal the capacity only while training; evaluation uses the final C.
        if is_train:
            capacity = linear_annealing(self.C_init, self.C_fin,
                                        self.n_train_steps, self.steps_anneal)
        else:
            capacity = self.C_fin
        loss = rec_loss + self.gamma * (kl - capacity).abs()
        if storer is not None:
            storer['loss'].append(loss.item())
        return loss
def _reconstruction_loss(data, recon_data, distribution="bernoulli", storer=None):
batch_size, n_channels, height, width = recon_data.size()
is_colored = n_channels == 3
if distribution == "bernoulli":
loss = F.binary_cross_entropy(recon_data, data, reduction="sum")
elif distribution == "gaussian":
loss = F.mse_loss(recon_data * 255, data * 255, reduction="sum") / 255
elif distribution == "laplace":
loss = F.l1_loss(recon_data, data, reduction="sum")
loss = loss * 3
loss = loss * (loss != 0)
else:
assert distribution not in RECON_DIST
raise ValueError("Unknown distribution: {}".format(distribution))
loss = loss / batch_size
if storer is not None:
storer['reconst_loss'].append(loss.item())
return loss
def _kl_normal_loss(mean, logvar, storer=None):
latent_dim = mean.size(1)
latent_kl = 0.5 * (-1 - logvar + mean.pow(2) + logvar.exp()).mean(dim=0)
total_kl = latent_kl.sum()
if storer is not None:
storer['kl_loss'].append(total_kl.item())
for i in range(latent_dim):
storer['kl_loss_' + str(i)].append(latent_kl[i].item())
return total_kl
def linear_annealing(init, fin, step, annealing_steps):
    """Linear annealing of a parameter from *init* to *fin*, capped at *fin*."""
    # No schedule configured: jump straight to the final value.
    if annealing_steps == 0:
        return fin
    assert fin > init
    fraction = step / annealing_steps
    return min(init + (fin - init) * fraction, fin)
24,785 | cb0edb62c023dc29f85e9dad5209beb2c2275621 | # Generated by Django 3.0.4 on 2020-05-29 05:31
from django.db import migrations
class Migration(migrations.Migration):
    """Set a human-readable plural name on the `comments` model's options.

    NOTE(review): verbose_name_plural is 'Meetings' for a model named
    `comments` — possibly copy-pasted from another app; confirm intent.
    """
    dependencies = [
        ('blog', '0002_auto_20200529_1057'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='comments',
            options={'verbose_name_plural': 'Meetings'},
        ),
    ]
|
24,786 | 99d357f27fc6f2dc3cc421e9b50b3d9352d8fb03 | import numpy as np
import matplotlib.pyplot as plt
# Load sampled data and compare polynomial fits of increasing degree.
x = np.load('x.npy')
y = np.load('y.npy')
# Dense grid (numpy default of 50 points) spanning the sampled x-range,
# used to draw the smooth degree-2 and degree-10 curves.
dense_x = np.linspace(x[0], x[-1])
# Fit polynomials of degree 1, 2 and 10 to the same data.
linear_fit = np.poly1d(np.polyfit(x, y, 1))
quadratic_fit = np.poly1d(np.polyfit(x, y, 2))
degree10_fit = np.poly1d(np.polyfit(x, y, 10))
fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(x, y, color='red')
ax.plot(x, linear_fit(x), color='black', linestyle='dashed')
ax.plot(dense_x, quadratic_fit(dense_x), color='blue')
ax.plot(dense_x, degree10_fit(dense_x), linestyle='dashed', color='green')
plt.show()
|
24,787 | e7affc8c925b9fc3019cb1a3bf8128ec93daae8b | from ._data import download_file
# Public API of this package: re-export the data-download helper.
__all__ = [
    'download_file'
]
|
24,788 | 98ff5f4dfa52ec79049c68d74e8c41157bead805 | from classes.om import Density
class Seismic(Density):
    """Object-model class for seismic data (a ``Density`` specialisation)."""
    # Type id used by the object-model registry.
    tid = 'seismic'
    _TID_FRIENDLY_NAME = 'Seismic'
    # Attributes exposed in UI property views: (attribute name, label).
    _SHOWN_ATTRIBUTES = [
        ('_oid', 'Object Id')
    ]
    def __init__(self, data, **attributes):
        super().__init__(data, **attributes)
    def _get_max_dimensions(self):
        # Seismic volumes can have up to 5 dimensions.
        return 5
|
24,789 | 6f78404aecfd6927ecf28fa5ea1f00edc1b9ca32 | #!/usr/bin/env python3
"""Initialize Bayesian Optimization"""
import numpy as np
from scipy.stats import norm
GP = __import__('2-gp').GaussianProcess
class BayesianOptimization:
    """Bayesian optimization of a black-box function using a Gaussian
    process surrogate and the Expected Improvement acquisition."""
    def __init__(self, f, X_init, Y_init, bounds, ac_samples,
                 l=1, sigma_f=1, xsi=0.01, minimize=True):
        """Set up the optimizer.

        Args:
            f: black-box function to optimize.
            X_init, Y_init: initial sample inputs/outputs for the GP.
            bounds: (min, max) bounds of the search space.
            ac_samples: number of acquisition sample points.
            l: GP length-scale.
            sigma_f: GP output scale.
            xsi: exploration-exploitation factor.
            minimize: True to minimize f, False to maximize.
        """
        self.f = f
        t_min, t_max = bounds
        # BUGFIX: ac_samples was previously ignored — np.linspace silently
        # fell back to its default of 50 acquisition points.
        self.X_s = np.linspace(t_min, t_max, num=ac_samples)[:, None]
        self.xsi = xsi
        self.minimize = minimize
        self.gp = GP(X_init, Y_init, l=l, sigma_f=sigma_f)
        self.Y_init = Y_init
    def acquisition(self):
        """Return (best next sample location, expected improvements)."""
        fs, _ = self.gp.predict(self.gp.X)
        next_fs, variances = self.gp.predict(self.X_s)
        # NOTE(review): EI here always assumes minimization and divides by
        # the predicted variance rather than the standard deviation —
        # confirm against GP.predict's contract before changing.
        opt = np.min(fs)
        improves = opt - next_fs - self.xsi
        Z = improves / variances
        eis = improves * norm.cdf(Z) + variances * norm.pdf(Z)
        return self.X_s[np.argmax(eis)], eis
    def optimize(self, iterations=1000):
        """Iteratively sample f at the best acquisition point.

        Stops early when the same point is proposed twice in a row.
        Returns (best x, best y) observed during the run.
        """
        prev = None
        finalx = None
        finaly = None
        while iterations:
            maxei, eis = self.acquisition()
            new_y = self.f(maxei)
            if maxei == prev:
                break
            self.gp.update(maxei, new_y)
            # Track the best (lowest/highest depending on self.minimize) value.
            improved = (finaly is None
                        or (self.minimize and finaly > new_y)
                        or (not self.minimize and finaly < new_y))
            if improved:
                finaly = new_y
                finalx = maxei
            prev = maxei
            iterations -= 1
        return finalx, finaly
|
24,790 | 57ba3b40009c5ae37afdc63b2586d88f3bbb85e5 | import pygame, os, random
pygame.init()

# --- Colors / sizes / icon --------------------------------------------------
DARKRED = pygame.color.THECOLORS['darkred']
LIGHTRED = pygame.color.THECOLORS['palevioletred']
DARKGREEN = pygame.color.THECOLORS['darkgreen']
LIGHTBLUE = pygame.color.THECOLORS['lightblue']
BLACK = pygame.color.THECOLORS['black']
WHITE = pygame.color.THECOLORS['white']
RED = pygame.color.THECOLORS['red']
GRAY = pygame.color.THECOLORS['gray']
LIGHTGREEN = pygame.color.THECOLORS['lightgreen']
WIDTH = 1470
HEIGHT = 700
# NOTE: all asset paths use raw strings — the previous plain '...' literals
# relied on Python preserving invalid escape sequences, which is deprecated.
ICON = pygame.image.load(r'png\logo.png')
FPS = 60

# --- Player character sprites ------------------------------------------------
STAND = pygame.image.load(r'png\stand.png')
WALK_R1 = pygame.image.load(r'png\walk_R1.png')
WALK_R2 = pygame.image.load(r'png\walk_R2.png')
WALK_L1 = pygame.image.load(r'png\walk_L1.png')
WALK_L2 = pygame.image.load(r'png\walk_L2.png')
UP_1 = pygame.image.load(r'png\gora1.png')
UP_2 = pygame.image.load(r'png\gora2.png')
FIGHT_L3 = pygame.image.load(r'png\walkal3.png')
FIGHT_R3 = pygame.image.load(r'png\walkar3.png')
UP = [UP_1, UP_2]
WALK_R = [WALK_R1, WALK_R2]
WALK_L = [WALK_L1, WALK_L2]
PRINCESS = pygame.image.load(r'png\princess.png')

# --- Environment sprites ------------------------------------------------------
LADDER = pygame.image.load(r'png\ladder.png')
GRASS = pygame.image.load(r'png\grass.png')
GRASS1 = pygame.image.load(r'png\grass1.png')
DOOR = pygame.image.load(r'png\castledoors.png')
CHEST_CLOSED = pygame.image.load(r'png\chestclosed.png')
CHEST_OPEN = pygame.image.load(r'png\chestopen.png')
# NOTE(review): filename looks truncated — probably meant to be
# 'background.png'; confirm against the assets on disk.
BACKGROUND = pygame.image.load(r'png\ckground.png')

# --- Enemy sprites ------------------------------------------------------------
DRAGON_R1 = pygame.image.load(r'png\dragon_r1.png')
DRAGON_R2 = pygame.image.load(r'png\dragon_r2.png')
DRAGON_L1 = pygame.image.load(r'png\dragon_l1.png')
DRAGON_L2 = pygame.image.load(r'png\dragon_l2.png')
DRAGON_L = [DRAGON_L1, DRAGON_L2]
DRAGON_R = [DRAGON_R1, DRAGON_R2]
DRAGON_DEAD_L = pygame.image.load(r'png\dragon_dead_L.png')
DRAGON_DEAD_R = pygame.image.load(r'png\dragon_dead_R.png')
SKELETON_L1 = pygame.image.load(r'png\skeleton_L1.png')
SKELETON_L2 = pygame.image.load(r'png\skeleton_L2.png')
SKELETON_R1 = pygame.image.load(r'png\skeleton_R1.png')
SKELETON_R2 = pygame.image.load(r'png\skeleton_R2.png')
SKELETON_L = [SKELETON_L1, SKELETON_L2]
SKELETON_R = [SKELETON_R1, SKELETON_R2]
RDRAGON_R1 = pygame.image.load(r'png\Rdragon_R1.png')
RDRAGON_R2 = pygame.image.load(r'png\Rdragon_R2.png')
RDRAGON_L1 = pygame.image.load(r'png\Rdragon_L1.png')
RDRAGON_L2 = pygame.image.load(r'png\Rdragon_L2.png')
RDRAGON_L = [RDRAGON_L1, RDRAGON_L2]
RDRAGON_R = [RDRAGON_R1, RDRAGON_R2]
KILL_ENEMY = pygame.image.load(r'png\kill_enemy.png')

# --- Item sprites -------------------------------------------------------------
SHURIKEN = pygame.image.load(r'png\weapon.png')
SHURIKEN_R1 = pygame.image.load(r'png\weapon_r1.png')
SHURIKEN_R2 = pygame.image.load(r'png\weapon_r2.png')
SHURIKEN_R = [SHURIKEN_R1, SHURIKEN_R2]
SHURIKEN_L1 = pygame.image.load(r'png\weapon_l1.png')
SHURIKEN_L2 = pygame.image.load(r'png\weapon_l2.png')
# BUGFIX: the left-facing shuriken animation previously reused the
# right-facing frames (SHURIKEN_R1/SHURIKEN_R2).
SHURIKEN_L = [SHURIKEN_L1, SHURIKEN_L2]
KEY = pygame.image.load(r'png\key.png')
HEARTH = pygame.image.load(r'png\hearth.png')
SPIKE = pygame.image.load(r'png\spike.png')
DIAMOND = pygame.image.load(r'png\diamond.png')
DIAMOND_RED = pygame.image.load(r'png\diamond_red.png')
DIAMOND_ENEMY = pygame.image.load(r'png\diamond_enemy.png')
hitbox = pygame.image.load(r'png\kwadrat.png')
hitbox_sprite = pygame.sprite.Sprite()
hitbox_sprite.image = hitbox
hitbox_sprite.rect = hitbox_sprite.image.get_rect()
# NOTE(review): filename looks garbled — probably 'fireball_1.png'; confirm.
FIREBALL = pygame.image.load(r'png\cireball_1.png')

# --- Sounds -------------------------------------------------------------------
music = pygame.mixer.music.load(r'sound\music.mp3')
throw_shuriken = pygame.mixer.Sound(r'sound\shuriken.wav')
jump = pygame.mixer.Sound(r'sound\jump.ogg')
hit_enemy = pygame.mixer.Sound(r'sound\hit_enemy.wav')
item_pick = pygame.mixer.Sound(r'sound\item_pick.wav')
door_lv_open = pygame.mixer.Sound(r'sound\door_lv_open.wav')
spikes = pygame.mixer.Sound(r'sound\spikes.wav')
take_diamond = pygame.mixer.Sound(r'sound\diamond.wav')
take_key = pygame.mixer.Sound(r'sound\key.ogg')
game_over = pygame.mixer.Sound(r'sound\game_over.ogg')
# NOTE(review): 'wictory.wav' looks garbled — probably 'victory.wav'; confirm.
victory = pygame.mixer.Sound(r'sound\wictory.wav')
|
24,791 | 649d486bea4b586ac3ee17dc5bfb1c25a662c121 | #define PY_SSIZE_T_CLEAN above the #include <Python.h>.
from confluent_kafka import Producer
import datetime
import socket
import json
def Els(ip, title):
    """Build the log record sent to Kafka.

    Contains the client ip, current date and time, and the user record
    payload holding the title.
    """
    record = {
        "ip": ip,
        "Date": datetime.datetime.now().strftime('%Y-%m-%d'),
        "Time": datetime.datetime.now().strftime('%H:%M:%S'),
        # Key spelling "UesrRecord" is kept verbatim — downstream consumers
        # match on it.
        "UesrRecord": {
            "title": title,
        },
    }
    return record
def producer(kafka_value):
    """JSON-encode ``kafka_value`` and publish it to the 'kafkatopic' topic
    on the kafka:9092 broker, flushing before returning."""
    ip_port ="kafka:9092"
    value = json.dumps(kafka_value, ensure_ascii=False)
    # Step 1. Connection settings for the Kafka cluster
    props = {
        'bootstrap.servers': ip_port  # <-- replace with the Kafka cluster to connect to
    }
    # Step 2. Create a Kafka Producer instance
    producer = Producer(props)
    # Step 3. Name of the topic to publish messages to
    topicName = 'kafkatopic'
    # produce(topic, [value], [key], [partition], [on_delivery], [timestamp], [headers])
    producer.produce(topicName, value=value)  # send the payload into the topic
    # Step 5. Make sure every buffered message has been delivered to Kafka
    producer.flush()
if __name__ == '__main__':
    # Demo: publish one sample record originating from localhost.
    producer(Els("127.0.0.1","5"))
24,792 | ba233d6b42706eb84c033974b13d13b83b165971 | from odoo import api, exceptions, fields, models, _
import logging
from datetime import date, timedelta
_logger = logging.getLogger(__name__)
class CommissionInvoices(models.Model):
    """Batches paid, not-yet-settled invoices per agent into
    sale.commission.settlement records."""
    _name = "sale.commission.invoice"
    # Agents to settle; when left empty every agent partner is processed.
    agent_id = fields.Many2many(
        comodel_name="res.partner",
        ondelete="restrict",
        string="Agent",
        domain="[('agent', '=', True)]"
    )
    # Look-back window, in days, before the first day of date_to's month.
    day_term = fields.Integer(
        string="Batas Jatuh Tempo",
        required=True,
        default=63
    )
    # NOTE(review): fields.Date.today() is evaluated once at class load, so
    # the default is frozen at server start — a callable is usually intended.
    date_to = fields.Date('Up to', required=True, default=fields.Date.today())
    @api.multi
    def create_commission(self):
        """Create one settlement per agent over the computed window and
        return an action showing the created settlements (or a close
        action when nothing was created)."""
        self.ensure_one()
        settlement_obj = self.env['sale.commission.settlement']
        settlement_line_obj = self.env['sale.commission.settlement.line']
        settlement_ids = []
        _date = fields.Date.from_string(self.date_to)
        # NOTE(review): basestring exists only on Python 2, and from_string
        # has already converted — this branch looks dead; confirm.
        if isinstance(_date, basestring):
            _date = fields.Date.from_string(_date)
        # Window: [first of month - day_term days, first of month).
        date_to = date(month=_date.month, year=_date.year, day=1)
        date_from = date_to - timedelta(days=self.day_term)
        if not self.agent_id:
            self.agent_id = self.env['res.partner'].search(
                [('agent', '=', True)])
        for agent in self.agent_id:
            # Paid, unsettled invoices sold by this agent inside the window.
            invoices = self.env['account.invoice'].search(
                [('date_invoice', '>=', date_from),
                 ('date_invoice', '<', date_to),
                 ('user_id.name', '=', agent.name),
                 ('state', '=', 'paid'),
                 ('settled', '=', False)], order="date_invoice")
            if invoices:
                settlement = settlement_obj.create({
                    'agent': agent.id,
                    'date_from': date_from,
                    'date_to': date_to,
                })
                settlement_ids.append(settlement.id)
                for invoice in invoices:
                    settlement_line_obj.create({
                        'settlement': settlement.id,
                        'invoice': invoice.id,
                        'date': invoice.date_invoice,
                        'total_invoice': invoice.amount_total})
        if len(settlement_ids):
            # Open a list/form view filtered to the newly created settlements.
            return {
                'name': _('Created Settlements'),
                'type': 'ir.actions.act_window',
                'views': [[False, 'list'], [False, 'form']],
                'res_model': 'sale.commission.settlement',
                'domain': [['id', 'in', settlement_ids]],
            }
        else:
            return {'type': 'ir.actions.act_window_close'}
24,793 | e3e4e7c4b5e924ec82e61805624044fb92ea78b3 | # *** Create Users for Chat ***
# Code based on https://www.twilio.com/docs/chat/rest/users
# Download Python 3 from https://www.python.org/downloads/
# Download the Twilio helper library from https://www.twilio.com/docs/python/install
import os
from twilio.rest import Client
#from datetime import datetime | not required for this examples
import logging
# Write Twilio requests & responses to a log file — useful for debugging.
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(levelname)s %(message)s',
                    filename='/usr/local/twilio/python/python3-twilio-sdkv6-examples/chat/logs/twilio_chat.log',
                    filemode='a')
# Account SID and Auth Token from twilio.com/console, exported from the
# shell environment (e.g. ~/.bash_profile).
# BUGFIX: os.environ keys are bare variable names; the previous
# '$TWILIO_ACCOUNT_SID' lookup (shell-style, with the '$' sigil) always
# returned None, so the client was created without credentials.
account_sid = os.environ.get('TWILIO_ACCOUNT_SID')
auth_token = os.environ.get('TWILIO_AUTH_TOKEN')
client = Client(account_sid, auth_token)
# Create a chat user inside the given Chat service.
user = client.chat.services('ISxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx') \
            .users \
            .create(
                 attributes='{ "doer.doerPath": "/usr/local/twilio/doer"}', # Optional, valid json
                 identity='user2', # Required, e.g. username or email
                 friendly_name='Bill', # Optional, free text
                 role_sid="RLxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" # Optional, valid role_sid
                   )
# Print all chat user properties to the console — useful to learn what
# information is available to work with.
print(user.account_sid)
print(user.attributes)
print(user.date_created)
print(user.date_updated)
print(user.friendly_name)
print(user.identity)
print(user.is_notifiable)
print(user.is_online)
print(user.joined_channels_count)
print(user.role_sid)
print(user.service_sid)
print(user.sid)
print(user.url)
# Use the new user's SID as the per-record log file name.
cdr = user.sid
# Context manager guarantees the file is closed even if a write fails.
with open("/usr/local/twilio/python/python3-twilio-sdkv6-examples/chat/logs/" + str(cdr) + ".log", "a") as f:
    f.write("Account SID : " + str(user.account_sid) + "\n")
    f.write("Attributes : " + str(user.attributes) + "\n")
    f.write("Date Created : " + str(user.date_created) + "\n")
    f.write("Date Updated : " + str(user.date_updated) + "\n")
    f.write("Friendly Name : " + str(user.friendly_name) + "\n")
    f.write("Identity : " + str(user.identity) + "\n")
    f.write("Is Notifiable : " + str(user.is_notifiable) + "\n")
    f.write("Is Online : " + str(user.is_online) + "\n")
    f.write("Joined Channels Count : " + str(user.joined_channels_count) + "\n")
    f.write("Role SID : " + str(user.role_sid) + "\n")
    f.write("Service SID : " + str(user.service_sid) + "\n")
    f.write("SID : " + str(user.sid) + "\n")
    f.write("URL : " + str(user.url) + "\n")
|
24,794 | 681af8f2964e43e5f6b57629f4d6fc852eaf083b | # 9.2 데코레이터 작성 시 함수 메타데이터 보존
import time
from functools import wraps
def timethis(func):
    """Decorator that prints the wall-clock run time of *func* after each
    call, preserving the wrapped function's metadata via functools.wraps."""
    @wraps(func)
    def timed(*args, **kwargs):
        started = time.time()
        result = func(*args, **kwargs)
        elapsed = time.time() - started
        print(func.__name__, elapsed)
        return result
    return timed
@timethis
def countdown(n: int):
    """Counts down (busy-loop demo workload for the timing decorator)."""
    for _ in range(n):
        pass
# Demo: with functools.wraps the decorated function keeps its metadata.
countdown(100000)
# countdown 0.007936954498291016
countdown.__name__
# 'countdown'
countdown.__doc__
# '\n\tCounts down\n\t'
countdown.__annotations__
# {'n': <class 'int'>}
# Discussion: without @wraps the wrapper's metadata would leak through:
countdown.__name__
# 'wrapper'
countdown.__doc__
countdown.__annotations__
# {}
# __wrapped__ gives direct access to the undecorated function.
countdown.__wrapped__(100000)
from inspect import signature
print(signature(countdown))
# (n:int)
|
24,795 | 71ffb29c4075bec42fab7d6e6ed3eb75bef9e10a | import logging
import paramiko
from binascii import hexlify
logger = logging.getLogger(__name__)
def test_server_connection(server):
    """Verify the local ssh-agent is reachable and exposes exactly one
    ssh-rsa key (requires a running agent with one key loaded)."""
    agent = paramiko.Agent()
    keys = agent.get_keys()
    assert len(keys) == 1
    assert keys[0].get_name() == 'ssh-rsa'
    for key in keys:
        print('Trying ssh-agent key %s' % hexlify(key.get_fingerprint()))
    agent.close()
def test_signature(server):
    """Placeholder for a signing round-trip test."""
    pass
|
24,796 | 141dde3f9944fa51dbb0fcc610fad7bc515f991c | # -*- coding: utf-8 -*-
INIT_FILE_TEMPLATE = """{encoding}{from_imports}{imports}"""
FILE_TEMPLATE = """\
{encoding}{from_imports}{imports}
class {class_name}{inheritence}:
{class_level_attributes}
{methods}
"""
METHOD_DOCSTRING = '''\
"""
Args:
{arguments_docstring}
"""
'''
METHOD_TEMPLATE = """\
def {method_name}(self{arguments}{kwarguments}):
{docstring}raise NotImplementedError('Please implement: {method_name}')
"""
TEST_METHOD_TEMPLATE = """\
def test_{method_name}(self):
raise NotImplementedError('Please implement: test_{method_name}')
"""
ALL_TEMPLATE = """{encoding}__all__ = [\n{components}\n]\n"""
|
24,797 | 70926d83d67223a5707813d5f98060ccc92ba711 | from PyQt4 import QtCore, QtGui
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from TelemetryLayer.lib.tlsettings import tlSettings as Settings
from TelemetryLayer.lib.tlsettings import tlSettings as Settings
from TelemetryLayer.lib.tlsettings import tlConstants as Constants
from TelemetryLayer.lib.tllogging import tlLogging as Log
from TelemetryLayer.tlbrokers import tlBrokers as Brokers
from TelemetryLayer.tlmqttclient import *
from TelemetryLayer.topicmanagers.agsense.agdevice import agDeviceList, agDevice, agParams, agParam
import os.path,sys,traceback,json
class tlTableParam(QObject):
    """Base class for one table row holding a labelled parameter control.

    Todo: refactor this - place Table Parameter handling into
    package of dialog widget utils
    """
    # Column indices inside the two-column table widget.
    kLabel = 0
    kControl = 1
    """
    Populates a table widget with a set of widgets (one one per row) defined by deviceType
    Todo: add isDirty method - currently dirty regardless
    """
    def __init__(self, tbl, row, param, default=None):
        """Insert a new row into ``tbl`` and place the parameter's label.

        Args:
            tbl: QTableWidget the row is added to.
            row: index at which the row is inserted.
            param: mapping with 'name', 'title', 'desc', 'type', 'readonly'.
            default: initial value for the control.
        """
        super(tlTableParam, self).__init__()
        tbl.insertRow(row)
        self.row = row
        self.tbl = tbl
        self.value = default
        self.control = QWidget()
        try:
            self.name = param['name']
            self.title = param['title']
            self.tooltip = param['desc']
            self.type = param['type']
            self.default = default
            self.readonly= param['readonly']
        except TypeError as e:
            # A malformed param leaves some attributes unset; the label code
            # below may then raise AttributeError — pre-existing behaviour.
            Log.warn('Type error creating paramater widget ' + str(e))
        # Word-wrapped label widget in the first column.
        item = QtGui.QLabel(self.title, self.tbl)
        item.setStyleSheet("padding: 4px")
        item.setWordWrap(True)
        item.setToolTip(self.tooltip)
        self.tbl.setCellWidget(row, self.kLabel, item)
        # Non-interactive placeholder item behind the label widget.
        item = QtGui.QTableWidgetItem(0)
        item.setFlags(QtCore.Qt.NoItemFlags)
        tbl.setItem(row, self.kLabel, item)
        pass
    def _setControl(self, height=None):
        # Place self.control (built by the subclass) into the control column.
        self.tbl.setCellWidget(self.row, self.kControl, self.control)
        self.control.setEnabled(not self.readonly)
        item = QtGui.QTableWidgetItem(0)
        item.setFlags(QtCore.Qt.NoItemFlags)
        self.tbl.setItem(self.row, self.kLabel, item)
        self.tbl.horizontalHeader().setStretchLastSection(True)
        if height is not None:
            self.tbl.setRowHeight(self.row, height)
    def getName(self):
        # Returns None when __init__ failed before self.name was set.
        try:
            return self.name
        except:
            return None
    def getTitle(self):
        return self.title
    def getValue(self):
        return self.value
    def getType(self):
        return self.type
# Create a spin box
class tlTableParamSpinBox(tlTableParam):
    """Table row rendering an integer parameter as a QSpinBox."""
    def __init__(self, tbl, row, param, default=None):
        super(tlTableParamSpinBox, self).__init__(tbl, row, param, default)
        try:
            self.min = param['min']
            self.max = param['max']
            self.int = param['interval']
            self.units = param['units']
            try:
                self.step = param['step']
            except:
                self.step = "1"
            self.control = QtGui.QSpinBox(self.tbl)
            self.control.setMinimum(int(self.min) - 1) # Qt Bug Min is actual > not >=
            self.control.setMaximum(int(self.max))
            self.control.setSingleStep(int(self.step)) # ???
            self.control.setToolTip(self.tooltip)
            # '\x0A' (newline) renders the units on their own line.
            self.control.setSuffix('\x0A' + self.units)
            self.control.setStyleSheet("padding: 4px")
            self.control.valueChanged.connect(self.setValue)
            self._setControl(40)
            self.control.setValue(int(self.default))
            # self.control.valueChanged.connect(self.setDirty)
        except Exception as e:
            Log.debug('Error loading parameter widget ' + str(e))
            return
        pass
    def setValue(self, value):
        # Slot: track the spin box's current value.
        self.value = value
class tlTableParamCheckBox(tlTableParam):
    """Table row rendering an On/Off parameter as a QCheckBox."""
    def __init__(self, tbl, row, param, default=None):
        super(tlTableParamCheckBox, self).__init__(tbl, row, param, default)
        try:
            self.control = QtGui.QCheckBox(self.tbl)
            self.control.setToolTip(self.tooltip)
            self.control.setStyleSheet("padding: 4px")
            self.control.stateChanged.connect(self.setValue)
            self.control.setTristate(False);
            self._setControl(40)
            # String protocol: the stored value is 'On' or 'Off'.
            self.control.setChecked(self.default == 'On')
        except Exception as e:
            Log.debug('Error loading parameter widget ' + str(e))
            return
        pass
    def setValue(self, value):
        self.value = value
    def getValue(self):
        # Normalise the Qt check state back to the 'On'/'Off' protocol.
        if self.control.isChecked():
            return 'On'
        else:
            return 'Off'
# Create a Slider
class tlTableParamSlider(tlTableParam):
    """Table row rendering an integer range parameter as a horizontal slider."""
    def __init__(self, tbl, row, param, default=None):
        super(tlTableParamSlider, self).__init__(tbl, row, param, default)
        try:
            self.min = param['min']
            self.max = param['max']
            self.int = param['interval']
            self.units = param['units']
            try:
                self.step = param['step']
            except:
                self.step = "1"
            # Only handles Integers currently!
            self.control = QtGui.QSlider(QtCore.Qt.Horizontal, self.tbl)
            self.control.setStyleSheet("padding: 4px")
            self.control.setFocusPolicy(QtCore.Qt.ClickFocus)
            # self.control.setTickPosition(QtGui.QSlider.TicksBelow)
            #self.control.setTickInterval(int(float(self.max)/50))
            self.control.setSingleStep(int(self.step))
            self.control.setMinimum(int(self.min))
            self.control.setMaximum(int(self.max))
            self.control.setToolTip(self.tooltip)
            self.control.valueChanged.connect(self.setValue)
            self._setControl(50)
            self.control.setValue(int(self.default))
        except Exception as e:
            Log.warn('Error creating widget parameter ' + str(e))
            return
    def setValue(self, value):
        # Slot: store the value and echo it (with units) into the row label.
        self.value = value
        item = self.tbl.cellWidget(self.row, self.kLabel)
        item.setText(self.title + ' ' + str(value) + ' ' + self.units)
        pass
# Create a Dropdown
class tlTableParamCombo(tlTableParam):
    """Table row rendering an enumerated parameter as a QComboBox."""
    def __init__(self, tbl, row, param, default=None):
        super(tlTableParamCombo, self).__init__(tbl, row, param, default)
        # Only handles Integers currently!
        self.control = QtGui.QComboBox(tbl)
        self.control.setToolTip(self.tooltip)
        idx = 0
        defidx = 0
        # NOTE(review): options are probed with hasattr() but indexed with
        # option['value'] — confirm the option type supports both accesses.
        for option in param['options']:
            self.control.insertItem(idx, option.text, option)
            if hasattr(option,'value') and option['value'] == self.default:
                defidx = idx
            idx += 1
        self.control.currentIndexChanged.connect(self.setValue)
        self.control.setToolTip(self.tooltip)
        self._setControl()
        # self.tbl.setRowHeight(row,100)
        self.control.setCurrentIndex(defidx)
    def setValue(self, idx):
        # Slot: resolve the selected option back to its 'value' field.
        self.value = self.control.itemData(idx).get('value')
        pass
"""
Populate a Combo Box with widgets to handle server
based parameters defined in device types, and return
values to be stored in the device map.
"""
class agParameterTable(QObject):
    """Builds one tlTableParam* row per parameter definition inside a
    QTableWidget and exposes the edited name->value mapping."""
    _params = []
    def __init__(self, tableWidget, params):
        super(agParameterTable, self).__init__()
        _params = params
        self._params =[]
        tblParam = tableWidget
        # Reset the table to a bare two-column grid with no headers.
        tblParam.horizontalHeader().setVisible(False)
        tblParam.verticalHeader().setVisible(False)
        tblParam.clearContents()
        tblParam.setRowCount(0)
        tblParam.setShowGrid(True)
        tblParam.setColumnCount(2)
        if _params is None or len(_params) == 0:
            return
        # Create a table of controls preset with existing values if required
        try:
            for param in _params:
                # Prefer the device's current value over the declared default.
                if 'value' in param:
                    default = param['value']
                else:
                    default = param['default']
                # Widget type dispatch: one row class per declared widget kind.
                if param['widget'] == 'slider':
                    self._params.append(tlTableParamSlider(tblParam, tblParam.rowCount(), param, default))
                if param['widget'] == 'select':
                    self._params.append(tlTableParamCombo(tblParam, tblParam.rowCount(), param, default))
                if param['widget'] == 'spinbox':
                    self._params.append(tlTableParamSpinBox(tblParam, tblParam.rowCount(), param, default))
                if param['widget'] == 'checkbox':
                    self._params.append(tlTableParamCheckBox(tblParam, tblParam.rowCount(), param, default))
        except KeyError as e:
            Log.warn("Error parsing configuration parameters " + str(e))
            exc_type, exc_value, exc_traceback = sys.exc_info()
            Log.debug(repr(traceback.format_exception(exc_type, exc_value,
                                                      exc_traceback)))
    def params(self):
        # Snapshot of the table as a {name: current value} dict.
        params = {}
        for param in self._params:
            params[param.getName()] = param.getValue()
        return params
    def __iter__(self):
        # Python 2 API: iterate (name, value) pairs.
        return self.params().iteritems()
class agConfigView(QObject):
    """Configuration-tab controller: shows a device's parameters and
    pushes edited values back to the device over MQTT."""
    def __init__(self, tabs, tLayer, feature): # change to broker?
        super(agConfigView, self).__init__()
        self._tabs = tabs
        self._feature = feature
        self._broker = tLayer.getBroker()
        self._topicManager = tLayer.topicManager()
        self._pTable = None
        self._rebuild()
        self._tabs.btnApply.clicked.connect(self._apply)
        self._tabs.btnReload.clicked.connect(self._reload)
    def _rebuild(self,mqtt =None, status = True, msg = None):
        # Rebuild the parameter table from the broker's stored topic config.
        if not status:
            Log.progress("There was an error reading the device configurations for this broker: " +str(msg));
            return
        try:
            topic = self._broker.topic(self._feature['topic'])
            self._params = topic['params']
            self._pTable = agParameterTable(self._tabs.tblParams, self._params)
            # self._tabs.btnApply.setEnabled(False)
        except Exception as e:
            Log.debug("Error loading Configuration tab " + str(e))
    def _reload(self):
        # Ask the device for a fresh parameter list, then rebuild the table.
        self._topicManager._requestDevices(self._rebuild)
    def _updateBroker(self,mqtt, status = True, msg = None):
        # Persist the refreshed device list and topics into the broker registry.
        Log.debug("_updateBroker! " + str(status))
        if not status:
            Log.warn(msg)
            return
        self._topicManager.setDevices(agDeviceList(msg.payload))
        self._broker.setTopics(self._topicManager.getTopics())
        self._broker.setDirty(True)
        Brokers.instance().update(self._broker)
        Brokers.instance().sync(True)
        Log.debug("Broker updated")
    def _applied(self, client, status = True, msg = None):
        # Callback for the single-shot "set" request: on failure restore the
        # table, on success refresh the device list.
        if status == False:
            Log.progress("Unable to update device settings - restoring")
            self._rebuild()
        else:
            Log.progress("Configuration updated")
            Log.debug("Updating Devices")
            self._topicManager._requestDevices(self._updateBroker)
        pass
    def _apply(self):
        # Serialise the edited parameters and publish them via MQTT.
        _client = None
        try:
            params = {"topic":self._feature['topic']}
            for key,val in self._pTable:
                params[key] = val
            payload = json.dumps(params)
            request = "agsense/request/set"
            Log.progress("Updating configuration")
            _client = tlMqttSingleShot(self,
                                       self._broker,
                                       request,
                                       ["agsense/response/set"],
                                       payload,
                                       0, #qos
                                       self._applied)
            _client.run()
        except Exception as e:
            Log.debug("Error setting parameter " + str(e))
            if _client:
                _client.kill()
    def show(self):
        pass
    def update(self, data):
        pass
    def _error(self, mqtt, msg=""):
        Log.progress(msg)
|
24,798 | 478668c38763c995fba765ca8088a455a40a4cb5 | import psycopg2
def get_var_do_psql(var1):
    """Look up rows in tmask_var whose ZMIENNA column equals ``var1`` and
    print the second column of each match.

    Connection errors are caught and reported; resources are always closed.
    """
    # BUGFIX: define before the try so the finally block cannot raise
    # NameError when connect() itself fails.
    connection = None
    cursor = None
    try:
        connection = psycopg2.connect(user="dniemczok",
                                      password="postgres",
                                      host="127.0.0.1",
                                      port="5432",
                                      database="dniemczok")
        cursor = connection.cursor()
        # SECURITY FIX: bind var1 as a query parameter instead of %-formatting
        # it into the SQL text (prevents SQL injection and quoting bugs).
        cursor.execute("select * from tmask_var where ZMIENNA = %s", (var1,))
        for row in cursor.fetchall():
            print(row[1])
    except (Exception, psycopg2.Error) as error:
        print("Error while connecting to PostgreSQL", error)
    finally:
        # Close whatever was successfully opened.
        if cursor is not None:
            cursor.close()
        if connection is not None:
            connection.close()
get_var_do_psql('PATH')
|
24,799 | f3d2b79286623ee45b70dedbfe791d9f0ec89bc6 | import sys
import os
import tox
from tox import hookimpl
from tox import reporter
from tox.venv import cleanup_for_venv
import contextlib
def _init_pipenv_environ():
os.environ["PIPENV_ACTIVE"] = "1"
# Ignore host virtual env
os.environ["PIPENV_IGNORE_VIRTUALENVS"] = "1"
# Answer yes on recreation of virtual env
os.environ["PIPENV_YES"] = "1"
# don't use pew
os.environ["PIPENV_VENV_IN_PROJECT"] = "1"
def _clone_pipfile(venv):
    """Copy the project-root Pipfile into *venv* and return its py.path.

    Creates an empty root Pipfile when the project has none, and never
    overwrites a Pipfile already present in the venv directory.
    """
    # tox moved the config off the session object in later versions.
    if hasattr(venv, 'session'):
        root_pipfile_path = venv.session.config.toxinidir.join("Pipfile")
    else:
        root_pipfile_path = venv.envconfig.config.toxinidir.join("Pipfile")
    # venv path may not have been created yet
    venv.path.ensure(dir=1)
    venv_pipfile_path = venv.path.join("Pipfile")
    if not os.path.exists(str(root_pipfile_path)):
        # "touch" an empty root Pipfile so the copy below always has a source.
        with open(str(root_pipfile_path), "a"):
            os.utime(str(root_pipfile_path), None)
    if not venv_pipfile_path.check():
        root_pipfile_path.copy(venv_pipfile_path)
    return venv_pipfile_path
@contextlib.contextmanager
def wrap_pipenv_environment(venv, pipfile_path):
    """Temporarily point pipenv at *venv*'s Pipfile and virtualenv.

    Sets PIPENV_PIPFILE, PIPENV_VIRTUALENV and VIRTUAL_ENV for the duration
    of the with-block and restores the previous state afterwards.
    """
    saved = {
        "PIPENV_PIPFILE": os.environ.get("PIPENV_PIPFILE"),
        "PIPENV_VIRTUALENV": os.environ.get("PIPENV_VIRTUALENV"),
        "VIRTUAL_ENV": os.environ.get("VIRTUAL_ENV"),
    }
    os.environ["PIPENV_PIPFILE"] = str(pipfile_path)
    os.environ["PIPENV_VIRTUALENV"] = os.path.join(str(venv.path))
    os.environ["VIRTUAL_ENV"] = os.path.join(str(venv.path))
    try:
        yield
    finally:
        # BUGFIX: previously the temporary values leaked when a variable was
        # unset before entry, and nothing at all was restored when the body
        # raised (the restore code ran after the bare yield).
        for name, old_value in saved.items():
            if old_value is None:
                os.environ.pop(name, None)
            else:
                os.environ[name] = old_value
@hookimpl
def tox_testenv_create(venv, action):
    """tox hook: create the test virtualenv with pipenv instead of virtualenv."""
    _init_pipenv_environ()
    config_interpreter = venv.getsupportedinterpreter()
    args = [sys.executable, "-m", "pipenv"]
    if venv.envconfig.sitepackages:
        args.append("--site-packages")
    args.extend(["--python", str(config_interpreter)])
    # Clear any stale venv directory; the API differs across tox versions.
    if hasattr(venv.envconfig, 'make_emptydir'):
        venv.envconfig.make_emptydir(venv.path)
    else:
        # tox 3.8.0 removed make_emptydir, See tox #1219
        cleanup_for_venv(venv)
    basepath = venv.path.dirpath()
    basepath.ensure(dir=1)
    pipfile_path = _clone_pipfile(venv)
    with wrap_pipenv_environment(venv, pipfile_path):
        venv._pcall(args, venv=False, action=action, cwd=basepath)
    # Return non-None to indicate the plugin has completed
    return True
@hookimpl
def tox_testenv_install_deps(venv, action):
    """tox hook: install the env's dependencies via ``pipenv install --dev``."""
    _init_pipenv_environ()
    # TODO: If skip_install set, check existence of venv Pipfile
    try:
        deps = venv._getresolvedeps()
    except AttributeError:
        # _getresolvedeps was deprecated on tox 3.7.0 in favor of get_resolved_dependencies
        deps = venv.get_resolved_dependencies()
    basepath = venv.path.dirpath()
    basepath.ensure(dir=1)
    pipfile_path = _clone_pipfile(venv)
    args = [sys.executable, "-m", "pipenv", "install", "--dev"]
    if venv.envconfig.pip_pre:
        args.append('--pre')
    with wrap_pipenv_environment(venv, pipfile_path):
        # Any tox-declared deps are appended to the pipenv install command.
        if deps:
            action.setactivity("installdeps", "%s" % ",".join(list(map(str, deps))))
            args += list(map(str, deps))
        else:
            action.setactivity("installdeps", "[]")
        venv._pcall(args, venv=False, action=action, cwd=basepath)
    # Return non-None to indicate the plugin has completed
    return True
@hookimpl
def tox_runtest(venv, redirect):
    """tox hook: run each configured command through ``pipenv run``."""
    _init_pipenv_environ()
    pipfile_path = _clone_pipfile(venv)
    action = venv.new_action("runtests")
    with wrap_pipenv_environment(venv, pipfile_path):
        action.setactivity(
            "runtests", "PYTHONHASHSEED=%r" % os.environ.get("PYTHONHASHSEED")
        )
        for i, argv in enumerate(venv.envconfig.commands):
            # have to make strings as _pcall changes argv[0] to a local()
            # happens if the same environment is invoked twice
            cwd = venv.envconfig.changedir
            message = "commands[%s] | %s" % (i, " ".join([str(x) for x in argv]))
            action.setactivity("runtests", message)
            # check to see if we need to ignore the return code
            # if so, we need to alter the command line arguments
            if argv[0].startswith("-"):
                ignore_ret = True
                if argv[0] == "-":
                    del argv[0]
                else:
                    argv[0] = argv[0].lstrip("-")
            else:
                ignore_ret = False
            args = [sys.executable, "-m", "pipenv", "run"] + argv
            try:
                venv._pcall(
                    args,
                    venv=False,
                    cwd=cwd,
                    action=action,
                    redirect=redirect,
                    ignore_ret=ignore_ret
                )
            except tox.exception.InvocationError as err:
                # A failed command may be ignored per-env configuration.
                if venv.envconfig.ignore_outcome:
                    reporter.warning(
                        "command failed but result from testenv is ignored\n"
                        " cmd: %s" % (str(err),)
                    )
                    venv.status = "ignored failed command"
                    continue # keep processing commands
                reporter.error(str(err))
                venv.status = "commands failed"
                if not venv.envconfig.ignore_errors:
                    break # Don't process remaining commands
            except KeyboardInterrupt:
                venv.status = "keyboardinterrupt"
                reporter.error(venv.status)
                raise
    return True
@hookimpl
def tox_runenvreport(venv, action):
    """tox hook: produce the env report from ``pipenv graph`` output lines."""
    _init_pipenv_environ()
    pipfile = _clone_pipfile(venv)
    base = venv.path.dirpath()
    base.ensure(dir=1)
    # Ask pipenv for the installed dependency graph.
    cmd = [sys.executable, "-m", "pipenv", "graph"]
    with wrap_pipenv_environment(venv, pipfile):
        action.setactivity("runenvreport", "")
        raw = venv._pcall(cmd, venv=False, action=action, cwd=base)
    return raw.split("\n")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.