text stringlengths 38 1.54M |
|---|
# For each of T test cases, reads a scrambled string of uppercase letters that
# spells English digit names, recovers how many of each digit 0-9 it contains,
# and prints the digits in ascending order after a "Case #i: " prefix.
T=int(input())
for i in range(1, T+1):
    num=str(input())
    # numbers[d] counts the marker letter of digit d:
    # Z(zero) O(one) W(two) R(three) U(four) F(five) X(six) S(seven) G(eight) I(nine)
    numbers=[0,0,0,0,0,0,0,0,0,0]
    for j in range(0, len(num)):
        if num[j] == 'Z':
            numbers[0] = numbers[0]+1
        if num[j] == 'O':
            numbers[1] = numbers[1]+1
        if num[j] == 'W':
            numbers[2] = numbers[2]+1
        if num[j] == 'R':
            numbers[3] = numbers[3]+1
        if num[j] == 'U':
            numbers[4] = numbers[4]+1
        if num[j] == 'F':
            numbers[5] = numbers[5]+1
        if num[j] == 'X':
            numbers[6] = numbers[6]+1
        if num[j] == 'S':
            numbers[7] = numbers[7]+1
        if num[j] == 'G':
            numbers[8] = numbers[8]+1
        if num[j] == 'I':
            numbers[9] = numbers[9]+1
    # Z/W/U/X/G occur in exactly one digit name; the other markers also occur
    # in names already counted, so subtract those overlaps (clamped at zero).
    # O also occurs in ZERO, TWO, FOUR
    if numbers[1] > (numbers[0]+numbers[2]+numbers[4]):
        numbers[1] = numbers[1] - (numbers[0]+numbers[2]+numbers[4])
    else:
        numbers[1] = 0
    # R also occurs in ZERO, FOUR
    if numbers[3] > (numbers[0]+numbers[4]):
        numbers[3] = numbers[3] - (numbers[0]+numbers[4])
    else:
        numbers[3] = 0
    # F also occurs in FOUR
    if numbers[5] > (numbers[4]):
        numbers[5] = numbers[5] - (numbers[4])
    else:
        numbers[5] = 0
    # S also occurs in SIX
    if numbers[7] > (numbers[6]):
        numbers[7] = numbers[7] - (numbers[6])
    else:
        numbers[7] = 0
    # I also occurs in FIVE, SIX, EIGHT
    if numbers[9] > (numbers[8]+numbers[6]+numbers[5]):
        numbers[9] = numbers[9] - (numbers[8]+numbers[6]+numbers[5])
    else:
        numbers[9] = 0
    print("Case #{}: ".format(i),end='')
    # emit each digit as many times as it was counted, in ascending order
    for j in range(0,10):
        for k in range(0,numbers[j]):
            print("{}".format(j),end='')
    print("")
print("") |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author:Alex Li
# Python中无块级作用域
# if 1 == 1:
# name = 'alex'
# for i in range(10):
# name = i
# print(name)
# Python中以函数为作用域
# def func():
# name = 'alex'
#
# print(name)
# def func():
# name = 'alex'
# func()
# print(name)
# name = 'alex'
#
# def f1():
# name = 'eirc'
# print(name)
# f1()
# Python作用域链,由内向外找,直到找不到报错
# name = 'alex'
# def f1():
# # name = 'a'
# def f2():
# # name = 'b'
# print(name)
# f2()
# f1()
# python的作用域在执行之前已经确定
"""
name = "alex"
def f1():
print(name)
def f2():
name = 'eric'
f1()
# f2()
name = 'alex'
def f1():
print(name)
def f2():
name = 'eric'
return f1
ret = f2()
ret()
"""
# li = [x for x in range(10)]
# li = [x+100 for x in range(10) if x > 6]
# print(li)
# def f1():
# return x
# li = [lambda :x for x in range(10)]
# li列表
# li列表中的元素:【函数,函数,函数...】
# 函数在没有执行前,内部代码不执行
# ?li[0],函数
# ?函数()
# 返回值是????
# r = li[0]()
# print(r)
# li = []
#
# for i in range(10):
# def f1(x=i):
# return x
# # li.append(i+1)
# # li是列表,内部元素是相同功能的函数
# # i
# print(li[0]())
# print(li[1]())
# print(li[2]())
|
import matplotlib.pyplot as plt
import pandas as pd
def scatter_array(arr, title=None, xlabel=None, ylabel=None, point_size=5):
    """
    Draw a scatter plot of the values in *arr* against their indices.
    :param arr: Array with data.
    :param title: Figure title.
    :param xlabel: X axis label.
    :param ylabel: Y axis label.
    :param point_size: Marker size used for every point.
    :return:
    """
    series = pd.Series(arr)
    marker_sizes = [point_size] * len(arr)
    plt.scatter(series.index, series, s=marker_sizes)
    plt.suptitle(title)
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    plt.show()
def plot_array(arr, title=None, xlabel=None, ylabel=None, xticks=None, figsize=None):
    """
    Draw *arr* as a line chart with a grid.
    :param arr: Array with data.
    :param title: Figure title.
    :param xlabel: X axis label.
    :param ylabel: Y axis label.
    :param xticks: Optional custom x tick labels.
    :param figsize: Figure size in cm.
    :return:
    """
    series = pd.Series(arr)
    series.plot(grid=True, figsize=figsize)
    plt.suptitle(title)
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    if xticks:
        tick_positions = range(len(xticks))
        plt.xticks(tick_positions, xticks)
    plt.show()
def hist_array(arr, bins=50, title=None, xlabel=None, ylabel=None, figsize=None):
    """
    Draw a histogram of the values in *arr*.
    :param arr: Array with data.
    :param bins: Number of histogram bins.
    :param title: Figure title.
    :param xlabel: X axis label.
    :param ylabel: Y axis label.
    :param figsize: Figure size in cm.
    :return:
    """
    series = pd.Series(arr)
    series.hist(bins=bins, grid=True, figsize=figsize)
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    plt.suptitle(title)
    plt.show()
|
from south.db import db
from django.db import models
from photos.models import *
class Migration:
    """South (pre-Django-1.7) schema migration that creates the photos.Photo table."""
    def forwards(self, orm):
        # Adding model 'Photo'
        db.create_table('photos_photo', (
            ('id', orm['photos.Photo:id']),
            ('photo', orm['photos.Photo:photo']),
            ('width', orm['photos.Photo:width']),
            ('height', orm['photos.Photo:height']),
        ))
        db.send_create_signal('photos', ['Photo'])
    def backwards(self, orm):
        # Deleting model 'Photo'
        db.delete_table('photos_photo')
    # Frozen model definitions consulted by South via the orm[...] lookups above.
    models = {
        'photos.photo': {
            'height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
        }
    }
    complete_apps = ['photos']
|
# Two-player movement demo in a 900x700 pygame window split by a white vertical
# divider at x=450.  Player 1 (arrow keys) lives in the left half, player 2
# (WASD) in the right half; crossing the divider or a window edge teleports the
# sprite back into its own half.
import pygame
import time
pygame.init()
screen = pygame.display.set_mode((900,700))
finished = False # 0 < 10 - > True/ 10<10 ->False
# player 1 (arrow keys) starts at the left edge, roughly vertically centred
x = 0
y = 350
# player 2 (WASD) starts near the right edge
x2 = 815
y2 = 350
#print pygame.K_SPACE
# NOTE(review): both players load the same sprite file "images/Player1.png"
playerImage = pygame.image.load("images/Player1.png")
playerImage = pygame.transform.scale(playerImage, (105,160))
playerImage = playerImage.convert_alpha()
playerImage2 = pygame.image.load("images/Player1.png")
playerImage2 = pygame.transform.scale(playerImage2, (105,160))
playerImage2 = playerImage2.convert_alpha()
frame = pygame.time.Clock()
while finished == False:
    # the window close button is the only way to leave the loop
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            finished = True
    # polling key state every frame gives continuous movement while held
    pressedKeys = pygame.key.get_pressed()
    if pressedKeys[pygame.K_UP] == 1:
        y -= 5
    if pressedKeys[pygame.K_DOWN] == 1:
        y += 5
    if pressedKeys[pygame.K_w] == 1:
        y2 -= 5
    if pressedKeys[pygame.K_s] == 1:
        y2 += 5
    if pressedKeys[pygame.K_RIGHT] == 1:
        x += 5
    if pressedKeys[pygame.K_LEFT] == 1:
        x -= 5
    if pressedKeys[pygame.K_d] == 1:
        x2 += 5
    if pressedKeys[pygame.K_a] == 1:
        x2 -= 5
    # redraw: black background, both sprites, then the divider on top
    dividor = pygame.Rect(450, 0, 20, 900)
    color = (255,255,255)
    black =(0,0,0)
    screen.fill(black)
    screen.blit(playerImage,(x,y))
    screen.blit(playerImage2,(x2, y2))
    pygame.draw.rect(screen, color, dividor)
    pygame.display.flip()
    frame.tick(90) #FPS Limit
    # keep player 1 left of the divider (sprite is 105 px wide)
    if x + 65 >= 450:
        x = x - 400
    # keep player 2 right of the divider
    if x2 <= 450:
        x2 = x2 + 400
    # teleport back when leaving through the outer/vertical edges
    if x <= -35:
        x = x + 200
    if x2 >= 845:
        x2 = x2 - 200
    if y >= 590:
        y = y - 295
    if y2 >= 590:
        y2 = y2 - 295
    if y <= -45:
        y = y + 295
    if y2 <= -45:
        y2 = y2 + 295
# Reads travel time (hours) and average speed (km/h) from stdin, then prints
# the distance covered and the fuel needed assuming 12 km per litre.
t = float(input("tempo gasto: "))
v = float(input("velociade media: "))
d = t * v   # distance = time * speed
q = d / 12  # litres of fuel at 12 km per litre
print(d)
print(q)
import settings
from flask import Flask, redirect, request, render_template
from validators import url as validate_url
app = Flask(__name__)
app.secret_key = settings.FLASK_SECRET_KEY
class endpoints():
    # route paths used by the decorators below
    HOME = '/'
    REDIRECT = '/redirect'
@app.route(endpoints.HOME)
def home():
    """Serve the landing page."""
    return render_template('index.html')
@app.route(endpoints.REDIRECT)
def do_redirect():
    """Redirect the client to the URL given in the ?url= query parameter.

    SECURITY NOTE(review): this is an open redirect -- any URL accepted by
    `validators.url` is redirected to.  Confirm the service is intentionally a
    generic redirector before exposing it publicly.
    """
    url = request.args.get('url', '')
    if not validate_url(url):
        return render_template('invalid-url.html', url=url)
    return redirect(url)
if __name__ == '__main__':
    app.run(host=settings.HOST, port=settings.PORT, debug=settings.DEBUG)
|
class linear_interpolation:
    """Fill in data between known [x, y] points using linear interpolation.

    interp_x       -- return the [x, y] point at a desired x between two points.
    interp_nSpaced -- return n evenly spaced [x, y] points between two points.
    """

    def interp_x(self, x_new, point1, point2):
        """Return [x_new, y_new] interpolated linearly between point1 and point2.

        The points must be [x, y] pairs given in ascending x order and x_new
        must lie between them.  On an out-of-range x_new a warning is printed
        and None is returned (original behaviour preserved).
        """
        # warn (but continue) when the given points are not in ascending order
        if point1[0] > point2[0]:
            print("coordinates not in ascending order")
        # reject an x that falls outside the segment
        if x_new < point1[0] or x_new > point2[0]:
            print("target x coordinate outside of scope")
        else:
            x_left = point1[0]
            x_right = point2[0]
            y_left = point1[1]
            y_right = point2[1]
            slope = (y_right - y_left) / (x_right - x_left)
            intercept = y_left - slope * x_left  # b in y = mx + b
            y_new = slope * x_new + intercept
            return [x_new, y_new]

    def interp_nSpaced(self, n, point1, point2):
        """Return n evenly spaced interpolated [x, y] points between point1 and point2.

        n must be a positive whole number greater than 0; otherwise a warning
        is printed and None is returned (original behaviour preserved).
        """
        # check that n is a whole number greater than 0
        if n < 1 or n - int(n) != 0:
            # typo fixed in the user-facing message: "interger" -> "integer"
            print("n must be a positive integer greater than 0")
        else:
            # x-axis spacing between consecutive new points
            spacing = (point2[0] - point1[0]) / (n + 1)
            x_new = [point1[0] + spacing]
            for _ in range(n - 1):
                x_new.append(x_new[-1] + spacing)
            return [self.interp_x(value, point1, point2) for value in x_new]
# creates instance of linear_interpolation class and names it data
data = linear_interpolation()
# returns [x,y] values at x = 5 between points [1,1] and [10,10]
print(data.interp_x(5,[1,1],[10,10]))
# returns 8 evenly spaced [x,y] points between [10,10] and [100,100]
print(data.interp_nSpaced(8,[10,10],[100,100]))
|
#
# mpirun -n 8 python 04-string.py
#
import qm3.mol
import qm3.problem
import qm3.elements
import qm3.engines.mol_mech
import qm3.engines.xtb
import qm3.engines.mmres
import qm3.actions.string
import qm3.actions.dynamics
import qm3.maths.matrix
import qm3.utils._mpi
import math
import os
import time
import pickle
# ---------------------------------------------------
# >> problem config
class my_problem( qm3.problem.template ):
    """QM/MM problem for a single string-method node.

    Geometry is seeded from the node's PDB; energies/gradients combine a
    simple MM force field with an xTB QM region plus the string collective
    variables.  Only the atoms in the pickled "sele" selection are exposed
    through self.coor/self.grad to the integrator.
    """
    def __init__( self, node ):
        qm3.problem.template.__init__( self )
        self.mol = qm3.mol.molecule( "../04.str_seed.%02d.pdb"%( node ) )
        self.mol.boxl = [ 40., 40., 40. ]
        # MM engine with switching/list cutoffs (Angstrom)
        self.emm = qm3.engines.mol_mech.simple_force_field( self.mol, 8 )
        self.emm.cut_on = 12.0
        self.emm.cut_off = 14.0
        self.emm.cut_list = 18.0
        self.emm.system_read( "../02.mm_data.pk", self.mol )
        self.mol.mass = [ qm3.elements.mass[i] for i in self.mol.anum ]
        # pickled atom selections prepared in earlier steps of the workflow
        f = open( "../01.sele_QM.pk", "rb" )
        sqm = pickle.load( f )
        f.close()
        f = open( "../02.sele_MM.pk", "rb" )
        smm = pickle.load( f )
        f.close()
        f = open( "../02.sele.pk", "rb" )
        self.sele = pickle.load( f )
        f.close()
        f = open( "../02.fixed.pk", "rb" )
        for i in pickle.load( f ):
            self.emm.free[i] = False
        f.close()
        self.emm.qm_atoms( sqm )
        self.eqm = qm3.engines.xtb.dl_xtb( self.mol, 0, 0, sqm, smm )
        # string-method collective variables for this node
        self.cvs = qm3.actions.string.string( node, "../04.string.def", 0.0 )
        # degrees of freedom exposed to the integrator: selected atoms only
        self.size = len( self.sele ) * 3
        self.coor = []
        self.mass = []
        for i in self.sele:
            i3 = i * 3
            self.coor += self.mol.coor[i3:i3+3]
            self.mass.append( self.mol.mass[i] )
        self.log = open( "log", "wt" )
    def logging( self, txt ):
        # per-node log file, flushed so progress is visible while running
        self.log.write( txt + "\n" )
        self.log.flush()
    def update_coor( self ):
        # copy the integrator's packed coordinates back into the full molecule
        for i in range( len( self.sele ) ):
            i3 = i * 3
            I3 = self.sele[i] * 3
            for j in [0, 1, 2]:
                self.mol.coor[I3+j] = self.coor[i3+j]
    def get_func( self ):
        """Energy-only evaluation.

        NOTE(review): this iterates self.umb, which is never assigned anywhere
        in this file -- calling get_func would raise AttributeError.  Looks
        like a leftover from an umbrella-sampling variant; the dynamics below
        only ever calls get_grad.  Confirm before use.
        """
        self.update_coor()
        self.mol.func = 0.0
        self.emm.get_func( self.mol )
        self.eqm.get_func( self.mol )
        self.dat = []
        for itm in self.umb:
            self.dat.append( itm.get_func( self.mol ) )
        self.func = self.mol.func
    def get_grad( self ):
        """Energy + gradient (MM + QM + string CV forces); gradients of the
        selected atoms are packed into self.grad."""
        self.update_coor()
        self.mol.func = 0.0
        self.mol.grad = [ 0.0 for i in range( self.mol.natm * 3 ) ]
        self.emm.get_grad( self.mol )
        self.eqm.get_grad( self.mol )
        self.cvs.get_grad( self.mol )
        self.func = self.mol.func
        self.grad = []
        for i in self.sele:
            i3 = i * 3
            self.grad += self.mol.grad[i3:i3+3]
# Driver: nwin string nodes distributed over ncpu MPI ranks, `size` nodes each.
ncrd = 2    # collective variables per node
nwin = 64   # string nodes (replicas)
node, ncpu = qm3.utils._mpi.init()
size = nwin // ncpu   # nodes handled by this MPI rank
cwd = os.getcwd()
obj = []
dyn = []
for j in range( size ):
    # one scratch directory + problem + Langevin integrator per local node
    os.mkdir( "%s/scratch_%02d"%( cwd, size*node+j ) )
    os.chdir( "%s/scratch_%02d"%( cwd, size*node+j ) )
    obj.append( my_problem( size*node+j ) )
    qm3.actions.dynamics.assign_velocities( obj[j], temperature = 300., project = True )
    dyn.append( qm3.actions.dynamics.langevin_verlet( obj[j], step_size = 0.001,
        temperature = 300., gamma_factor = 100., print_frequency = 10,
        project = True, log_function = obj[j].logging ) )
def take_step( distrib = True ):
    """Advance every local node one MD step, then synchronise the string.

    Rank 0 gathers each node's CV coordinates (rcrd) and metric tensors (cmet),
    optionally reparametrises/redistributes the string, broadcasts the updated
    reference coordinates back to all ranks, and appends the string geometry
    and a convergence measure to the node-0 output files.
    """
    for j in range( size ):
        os.chdir( "%s/scratch_%02d"%( cwd, size*node+j ) )
        dyn[j].integrate()
    qm3.utils._mpi.barrier()
    if( node == 0 ):
        ncrd2 = ncrd * ncrd
        # gather local nodes first, then one message pair per remote node
        tmp_c = []
        tmp_m = []
        for j in range( size ):
            tmp_c += obj[j].cvs.rcrd[:]
            tmp_m += obj[j].cvs.cmet[:]
        for i in range( 1, ncpu ):
            for j in range( size ):
                tmp_c += qm3.utils._mpi.recv_r8( i, ncrd )
                tmp_m += qm3.utils._mpi.recv_r8( i, ncrd2 )
        if( distrib ):
            # reparametrise so nodes stay evenly spaced along the string
            tmp_c = qm3.actions.string.string_distribute( ncrd, nwin, tmp_c, tmp_m )[0]
        for j in range( size ):
            obj[j].cvs.rcrd = tmp_c[j*ncrd:(j+1)*ncrd][:]
        for i in range( 1, ncpu ):
            for j in range( size ):
                qm3.utils._mpi.send_r8( i, tmp_c[(size*i+j)*ncrd:(size*i+j+1)*ncrd] )
        obj[0].cvs.fstr.write( "".join( [ "%20.10lf"%( tmp_c[j] ) for j in range( ncrd * nwin ) ] ) + "\n" )
        obj[0].cvs.fstr.flush()
        # convergence: metric-weighted RMS displacement from the initial string
        tmp_a = []
        tmp_b = []
        for i in range( nwin ):
            tmp_i = qm3.maths.matrix.inverse( [ tmp_m[i*ncrd2+j] for j in range( ncrd2 ) ], ncrd, ncrd )
            tmp_a += [ tmp_c[i*ncrd+j] - obj[0].cvs.icrd[i*ncrd+j] for j in range( ncrd ) ]
            tmp_b += qm3.maths.matrix.mult( tmp_i, ncrd, ncrd, tmp_a[i*ncrd:(i+1)*ncrd], ncrd, 1 )
        obj[0].cvs.fcnv.write( "%20.10lf\n"%( math.sqrt( sum( [ tmp_a[i] * tmp_b[i]
            for i in range( ncrd * nwin ) ] ) / float( nwin ) ) ) )
        obj[0].cvs.fcnv.flush()
    else:
        # worker ranks: ship data to rank 0, then receive the updated string
        for j in range( size ):
            qm3.utils._mpi.send_r8( 0, obj[j].cvs.rcrd )
            qm3.utils._mpi.send_r8( 0, obj[j].cvs.cmet )
        for j in range( size ):
            obj[j].cvs.rcrd = qm3.utils._mpi.recv_r8( 0, ncrd )
# equilibration: 500 steps without redistributing the string
for _ in range( 500 ):
    take_step( False )
# loosen the time-step parameter for interior nodes only.
# NOTE(review): the bound compares against ncpu - 1 rather than nwin - 1;
# with nwin=64 and fewer ranks this excludes more than just the two string
# endpoints -- confirm whether nwin - 1 was intended.
for j in range( size ):
    if( size*node+j > 0 and size*node+j < ncpu - 1 ):
        obj[j].cvs.tstp = 0.001
# production: 1000 steps with string redistribution enabled
for _ in range( 1000 ):
    take_step()
# final statistics, per-node structures, and clean MPI shutdown
for j in range( size ):
    dyn[j].stats()
    os.chdir( "%s/scratch_%02d"%( cwd, size*node+j ) )
    obj[j].mol.pdb_write( "../04.string.%02d.pdb"%( size*node+j ) )
    obj[j].cvs.stop()
qm3.utils._mpi.barrier()
qm3.utils._mpi.stop()
|
import os
from collections import OrderedDict
from typing import Dict, List, Optional
import ConfigSpace.hyperparameters as CSH
from ConfigSpace.configuration_space import ConfigurationSpace
from autoPyTorch.datasets.base_dataset import BaseDatasetPropertiesType
from autoPyTorch.pipeline.components.base_choice import autoPyTorchChoice
from autoPyTorch.pipeline.components.base_component import (
ThirdPartyComponents,
autoPyTorchComponent,
find_components,
)
from autoPyTorch.pipeline.components.preprocessing.image_preprocessing.normalise.base_normalizer import BaseNormalizer
# Directory containing this module; scanned for BaseNormalizer implementations.
normalise_directory = os.path.split(__file__)[0]
_normalizers = find_components(__package__,
                               normalise_directory,
                               BaseNormalizer)
# Registry for third-party normalizer components registered at runtime.
_addons = ThirdPartyComponents(BaseNormalizer)
def add_normalizer(normalizer: BaseNormalizer) -> None:
    """Register a third-party normalizer so NormalizerChoice can offer it."""
    _addons.add_component(normalizer)
class NormalizerChoice(autoPyTorchChoice):
    """
    Allows for dynamically choosing normalizer component at runtime
    """
    def get_components(self) -> Dict[str, autoPyTorchComponent]:
        """Returns the available normalizer components
        Args:
            None
        Returns:
            Dict[str, autoPyTorchComponent]: all BaseNormalizer components available
                as choices for encoding the categorical columns
        """
        components = OrderedDict()
        components.update(_normalizers)
        # third-party add-ons override/extend the built-in normalizers
        components.update(_addons.components)
        return components
    def get_hyperparameter_search_space(self,
                                        dataset_properties: Optional[Dict[str, BaseDatasetPropertiesType]] = None,
                                        default: Optional[str] = None,
                                        include: Optional[List[str]] = None,
                                        exclude: Optional[List[str]] = None) -> ConfigurationSpace:
        """Build the ConfigurationSpace with a '__choice__' hyperparameter over
        the available normalizers plus each normalizer's own sub-space.

        Raises:
            ValueError: if no normalizer is available after include/exclude
                filtering, or if a search-space update names an unknown choice.
        """
        cs = ConfigurationSpace()
        if dataset_properties is None:
            dataset_properties = dict()
        dataset_properties = {**self.dataset_properties, **dataset_properties}
        available_preprocessors = self.get_available_components(dataset_properties=dataset_properties,
                                                                include=include,
                                                                exclude=exclude)
        if len(available_preprocessors) == 0:
            raise ValueError("no image normalizers found, please add an image normalizer")
        # pick the first preferred default that is actually selectable
        if default is None:
            defaults = ['ImageNormalizer', 'NoNormalizer']
            for default_ in defaults:
                if default_ in available_preprocessors:
                    if include is not None and default_ not in include:
                        continue
                    if exclude is not None and default_ in exclude:
                        continue
                    default = default_
                    break
        # user-supplied search-space updates may pin the '__choice__' values
        updates = self._get_search_space_updates()
        if '__choice__' in updates.keys():
            choice_hyperparameter = updates['__choice__']
            if not set(choice_hyperparameter.value_range).issubset(available_preprocessors):
                raise ValueError("Expected given update for {} to have "
                                 "choices in {} got {}".format(self.__class__.__name__,
                                                               available_preprocessors,
                                                               choice_hyperparameter.value_range))
            preprocessor = CSH.CategoricalHyperparameter('__choice__',
                                                         choice_hyperparameter.value_range,
                                                         default_value=choice_hyperparameter.default_value)
        else:
            preprocessor = CSH.CategoricalHyperparameter('__choice__',
                                                         list(available_preprocessors.keys()),
                                                         default_value=default)
        cs.add_hyperparameter(preprocessor)
        # add only child hyperparameters of preprocessor choices
        for name in preprocessor.choices:
            preprocessor_configuration_space = available_preprocessors[name].\
                get_hyperparameter_search_space(dataset_properties)
            parent_hyperparameter = {'parent': preprocessor, 'value': name}
            cs.add_configuration_space(name, preprocessor_configuration_space,
                                       parent_hyperparameter=parent_hyperparameter)
        self.configuration_space = cs
        self.dataset_properties = dataset_properties
        return cs
|
# Uses python3
def calc_fib(n):
    """Return the n-th Fibonacci number iteratively (F(0)=0, F(1)=1).

    O(n) time and O(1) space -- replaces both the exponential recursive
    version and the previous list-building loop, and handles n == 0 without
    a special case.
    """
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a
# Read n from stdin and print the n-th Fibonacci number.
n = int(input())
print(calc_fib(n))
|
#! /usr/bin/python
# Python 2 contest solution.  For each test case: given N head-probabilities,
# choose K coins so the probability of exactly K/2 heads is maximal.  Tries
# every split "k smallest + (K-k) largest" of the sorted probabilities.
T = int(raw_input())
def iterateOneStep(probs, p):
    # convolve the heads-count distribution with one more coin of probability p
    probs = [0.0] + probs + [0.0]
    return [(1.0 - p)*probs[i] + p*probs[i - 1] for i in range(1, len(probs))]
for t in range(1, T+1):
    N, K = [int(inp) for inp in raw_input().split()]
    P = sorted([float(inp) for inp in raw_input().split()])
    maxP = 0.0
    for k in range(K + 1):
        distrib = [1.0]
        # take the k smallest probabilities...
        for i in range(k):
            distrib = iterateOneStep(distrib, P[i])
        # ...and the K-k largest ones
        for j in range(K - k):
            distrib = iterateOneStep(distrib, P[N - 1 - j])
        # distrib[K/2] = probability of exactly K/2 heads (integer division)
        if maxP < distrib[K/2]:
            maxP = distrib[K/2]
    print 'Case #' + str(t) + ': ' + str(maxP)
from typing import List
class Solution:
    def hanota(self, A: List[int], B: List[int], C: List[int]) -> None:
        """
        Do not return anything, modify C in-place instead.
        """
        # Classic Tower of Hanoi: move `count` disks from `source` to `target`
        # using `spare` as the auxiliary peg.
        def move(count, source, spare, target):
            if count == 0:
                return
            move(count - 1, source, target, spare)
            target.append(source.pop())
            move(count - 1, spare, source, target)
        move(len(A), A, B, C)
# Quick manual check: move a 3-disk tower from A to C.
A = [2, 1, 0]
B = []
C = []
s = Solution()
s.hanota(A, B, C)
print(A, B, C)  # all disks end on C in the original order: [] [] [2, 1, 0]
|
from ps2 import *
# Python 2 manual test for ps2's Robot: try 10 random positions in a 5x8 room
# and move the robot there only when the room reports the position as inside.
robot = Robot(RectangularRoom(5,8), 1.0)
print robot.getRobotPosition()
for l in range(10):
    x = random.randint(0,5)
    print "x is: " + str(x)
    y = random.randint(0,8)
    print "y is: " + str(y)
    if robot.room.isPositionInRoom(Position(x,y)):
        robot.setRobotPosition(Position(x,y))
        print robot.getRobotPosition()
    else:
        print "Robot not updated"
|
"""
Author: Rawley Collins
Program: Program.py
main gui file
"""
from tkinter import *
import tkinter.messagebox
import db_connection as db
"""
3 items: a button for each to add to cart, a button to remove from cart, a counter for how many you want. all disabled until sign in
a sign up button, and a login button
a user name box, a password box. Enabled when the login button is hit
a
"""
class dbmsGUI:
    """Tkinter GUI for the local-store customer database application."""

    def __init__(self, window):
        # one sqlite connection shared by all button callbacks
        self.conn = db.create_connection('customerDBMS.db')
        window.title("Local Store")
        window.geometry("800x600")
        #----
        self.startProgram = Button(text="Start Program", width=16, height=1, fg="white", bg="green", command=lambda: self.create_db_table())
        self.startProgram.grid()
        # data entry widgets
        # BUGFIX: these widgets were parented to the module-level global
        # `app_window`; they now use the `window` argument passed in, so the
        # class no longer depends on a global created after its definition.
        self.userNameLabel = Label(window, text="Username:")
        self.passwordLabel = Label(window, text="Password:")
        self.userNameEntry = Entry(window)
        self.passwordEntry = Entry(window)
        # data entry placement
        self.userNameLabel.grid(row=1, sticky=E)
        self.passwordLabel.grid(row=2, sticky=E)
        self.userNameEntry.grid(row=1, column=1)
        self.passwordEntry.grid(row=2, column=1)
        # log in buttons
        self.createAccount = Button(text="Create account", width=16, height=1, fg="black")
        self.createAccount.grid(row=3, column=0)
        self.logIn = Button(text="log in", fg="white", width=16, height=1, bg="blue")
        self.logIn.grid(row=3, column=1)
        self.logOut = Button(text="log out", fg="black", width=16, height=1, bg="light gray")
        self.logOut.grid(columnspan=2, sticky=N)
        # create user buttons

    def create_db_table(self):
        """Create the database tables, then disable the start button."""
        db.create_tables(self.conn)
        self.startProgram.configure(state=tkinter.DISABLED)

    def create_customer(self):
        # enable input buttons
        # TODO: implement account creation (wire to the "Create account" button)
        pass

    def log_in(self):
        # TODO: implement login; the original source had no body here
        pass


app_window = tkinter.Tk()
store_app = dbmsGUI(app_window)
app_window.mainloop()
|
# Generated by Django 3.2.4 on 2021-07-14 14:06
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds the optional date_time field to the My_Books Form model."""
    dependencies = [
        ('My_Books', '0002_auto_20210713_1531'),
    ]
    operations = [
        migrations.AddField(
            model_name='form',
            name='date_time',
            # null + blank so existing rows and empty form submissions remain valid
            field=models.DateTimeField(blank=True, null=True),
        ),
    ]
|
import os
import sys
import pytest
import allure
from allure_commons.types import AttachmentType
from random import randint
from selenium.webdriver import Chrome
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
print(os.getcwd())
from pages.Page import *
@pytest.fixture(scope='session')
def driver():
    """Session-wide Chrome driver pointed at the demo food-delivery site."""
    driver = Chrome(executable_path='../drivers/chromedriver.exe')
    driver.maximize_window()
    # implicit wait applies to every element lookup in the session
    driver.implicitly_wait(20)
    driver.get('https://sleepy-brook-65250.herokuapp.com/')
    yield driver
    driver.close()
@allure.story('Registration')
@pytest.mark.parametrize('fname,lname,email,mobile,pwd,attachment',
                         [('John', 'Sam', f'john{randint(1, 1000)}@xyz.com', '999999999', 'P@ssword',
                           'C:/Angappan/Automation/Practice/FoodToDoor/data/data.txt')])
def test_signup(driver, fname, lname, email, mobile, pwd, attachment):
    """Register a new user and assert the confirmation banner text.

    The random suffix in the email keeps reruns from colliding with accounts
    created by earlier runs.
    """
    allure.dynamic.description('Validate Registration')
    home_page = HomePage(driver)
    home_page.click_register()
    register_page = RegisterPage(driver)
    register_page.fill_info(fname, lname, email, mobile, pwd, attachment)
    register_page.accept_terms()
    assert register_page.get_confirmation_msg() == 'You are registered please Login!!'
|
from django.contrib.auth import authenticate ,login as django_login
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect, Http404
from django.shortcuts import render
# Create your views here.
from core.views import prepare_parameters
from dealer.forms.dealer_form import DealerForm
from restaurant.models import Item
@login_required
def current(request,item_id):
    """Render the current-order page for one restaurant item.

    Raises Http404 when the item does not exist.
    """
    item_parameters = {}
    try:
        item = Item.objects.get(pk=item_id)
        # flatten the fields the template needs, including restaurant coordinates
        item_parameters = {'item': {
            'id': item.id,
            'name': item.name,
            'description': item.description,
            'price': item.price,
            'restaurant_lat': item.restaurant.latitude,
            'restaurant_log': item.restaurant.longitude,
        }}
    except Item.DoesNotExist:
        raise Http404()
    params = prepare_parameters(request)
    params.update(item_parameters)
    """
    dealers = Dealers.objects.filter()
    dealers_serializer = DealerSerializer(restaurants, many=True)
    params.update({
    'dealers': dealers_serializer.data,
    })
    """
    return render(request, 'current_order.html', params)
@login_required
def dealer_h(request):
    """Render the dealer home page with the common template parameters."""
    return render(request, 'dealer_home.html', prepare_parameters(request))
def dealer_sign_in_view(request):
    """Register a dealer and log the new user in.

    GET: render the registration form.
    POST (valid): save the dealer, authenticate and redirect to /home/,
        or show the login error page if authentication fails.
    POST (invalid): re-render the registration form with its bound errors.
        (BUGFIX: this branch previously returned None, which made Django
        raise "view didn't return an HttpResponse" — a 500 for the user.)
    """
    dealer_form = DealerForm(request.POST or None)
    if request.method == 'POST' and dealer_form.is_valid():
        dealer = dealer_form.save()
        user = authenticate(request, username=dealer.user.username,
                            password=dealer_form.cleaned_data['password'])
        if user is not None:
            django_login(request, user)
            return HttpResponseRedirect('/home/')
        return render(request, 'login.html', {'error': True})
    # GET, or invalid POST: the bound form carries the field errors to show
    return render(request, 'reg_client.html', {'form': dealer_form})
import os
import csv
import requests
from bs4 import BeautifulSoup
os.system("clear")
def get_each_brand(each_brand_url, max_page):
    """Scrape up to max_page listing pages for one brand and return rows of
    [location, title, worktime, pay, posted-date] strings.

    Any cell that cannot be found in a row is recorded as the string 'Empty'.
    (Cleanup: the original built an each_info list, appended loc to it, and
    then overwrote it wholesale — that dead code is removed.)
    """
    alba_list = []
    for page in range(max_page):
        # the site paginates with a 1-based ?page= query parameter
        brand_url = requests.get(f"{each_brand_url}?page={page+1}")
        brand_soup = BeautifulSoup(brand_url.text, 'html.parser')
        table = brand_soup.select_one('#NormalInfo > table')
        tbody = table.find("tbody")
        # only rows with an empty class attribute are real listings
        tr = tbody.find_all("tr", {"class": ""})
        for row in tr:
            try:
                loc = row.find("td", {"class": "local first"}).get_text()
                loc = loc.replace("\xa0", "")
            except AttributeError:
                loc = 'Empty'
            try:
                title_link = row.find("td", {"class": "title"}).find("a")
                alba_title = str(title_link.find("span", {"class": "company"}).string)
            except AttributeError:
                alba_title = 'Empty'
            try:
                worktime = str(row.find("td", {"class": "data"}).find("span").string)
            except AttributeError:
                worktime = 'Empty'
            try:
                pay_td = row.find("td", {"class": "pay"})
                pay_period = str(pay_td.find("span", {"class": "payIcon"}).string)
                pay_amount = str(pay_td.find("span", {"class": "number"}).string)
                pay = pay_period + ' ' + pay_amount
            except AttributeError:
                pay = 'Empty'
            try:
                reg_time = row.find("td", {"class": "regDate"}).get_text()
            except AttributeError:
                reg_time = 'Empty'
            alba_list.append([loc, alba_title, worktime, pay, reg_time])
    return alba_list
|
##############################################################################
#
# Copyright (c) 2008 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""
$Id$
"""
import os.path
from zope import interface, component
from zope.app.file.image import getImageInfo
from zope.contenttype import guess_content_type
from zope.filerepresentation.interfaces import IFileFactory
from zojax.content.type.item import Item
from zojax.filefield.data import DownloadResultFly, File as OrigFile
from interfaces import ILocalFsFolder, ILocalFsFile
class FileMixin(object):
    """Mixin overriding show() so non-empty content is streamed on the fly."""
    def show(self, *kv, **kw):
        res = self._show(*kv, **kw)
        # any non-empty result is wrapped so the publisher streams the payload
        if res != '':
            return DownloadResultFly(self)
        return res
class File(FileMixin, OrigFile):
    """zojax filefield File whose show() streams via DownloadResultFly."""
    pass
class LocalFsFile(Item):
    """Content item wrapping a file on the local filesystem (Python 2 / Zope)."""
    interface.implements(ILocalFsFile)
    # class-level default; shadowed by the `data` property below
    data = None
    def __init__(self, name, abspath, content_type, **kw):
        self.__name__ = name
        self.abspath = abspath
        self.contentType = content_type
        super(LocalFsFile, self).__init__(**kw)
    @property
    def data(self):
        # Build an in-memory File from the on-disk file; '' if unreadable.
        # NOTE(review): the handle returned by open() is never closed and the
        # file is read in text mode -- confirm binary safety before reuse.
        try:
            f = File()
            f.data = open(self.abspath).read()
            f.mimeType = self.contentType
            f.filename = self.__name__
            f.size = self.size
            #f.modified =
            return f
        except (IOError, OSError), e:
            return ''
    @property
    def size(self):
        # 0 when the file is missing or unreadable
        try:
            return os.path.getsize(self.abspath)
        except (IOError, OSError), e:
            return 0
    @property
    def title(self):
        return self.__name__
    @property
    def disposition(self):
        # return self.data.previewIsAvailable and 'inline' or 'attachment'
        # canDownload = contentDisposition='attachment'
        # canPreview = contentDisposition='inline'
        return 'attachment'
    @property
    def canDownload(self):
        return True
    @property
    def canPreview(self):
        return False
class FileFactory(object):
    """IFileFactory adapter creating LocalFsFile items inside a local-fs folder,
    guessing the content type when none is supplied."""
    component.adapts(ILocalFsFolder)
    interface.implements(IFileFactory)
    def __init__(self, context):
        self.context = context
    def __call__(self, name, content_type, data):
        # try image sniffing first, then extension-based guessing
        if not content_type and data:
            content_type, width, height = getImageInfo(data)
        if not content_type:
            content_type, encoding = guess_content_type(name, '', '')
        res = LocalFsFile(
            name, os.path.join(self.context.abspath, name), content_type)
        res.__parent__ = self.context
        return res
|
import multiprocessing
import multiprocessing.managers
import requests
import re
import lxml
import lxml.etree
import time
def pagexpath(url):
    """Scrape one 51job-style search-result page and return its rows formatted
    as 'job # company # address # salary # date' strings."""
    headers = {"User-Agent": "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0);"}
    # the site serves GBK-encoded HTML
    response = requests.get(url,headers=headers).content.decode("gbk")
    # print(response)
    mytree = lxml.etree.HTML(response)
    joblist = mytree.xpath("//*[@id=\"resultList\"]//div[@class=\"el\"]")
    datalist = []
    for line in joblist:
        mystr = ""
        job = line.xpath("./p/span/a/text()")[0].strip()
        company = line.xpath("./span[1]/a/text()")[0].strip()
        addr = line.xpath("./span[2]/text()")[0].strip()
        # salary column may be missing entirely
        money = line.xpath("./span[3]/text()")
        if len(money) == 0:
            money = ""
        else:
            money = money[0].strip()
        datetime = line.xpath("./span[4]/text()")[0].strip()
        mystr += job
        mystr += " # "
        mystr += company
        mystr += " # "
        mystr += addr
        mystr += " # "
        mystr += money
        mystr += " # "
        mystr += datetime
        mystr += "\r\n"
        datalist.append(mystr)
    # print datalist
    return datalist
class QueueManger(multiprocessing.managers.BaseManager):  # subclass so shared queues can be registered
    pass
if __name__=="__main__":
    # Worker client: pull page URLs from a remote task queue, scrape each page,
    # and push the formatted rows to the result queue.
    QueueManger.register("get_task")    # register the server-side queue accessors
    QueueManger.register("get_result")
    manger=QueueManger(address=("127.0.0.1",8888),authkey=b"100000")
    manger.connect()  # connect to the queue server
    task= manger.get_task()
    result =manger.get_result()  # task queue, result queue
    for i in range(1000):
        time.sleep(1)
        try:
            url=task.get()
            print ("client get",url)
            datalist= pagexpath(url)
            for line in datalist:  # push each scraped row to the result queue
                print (line)
                result.put(line)
        except:
            # NOTE(review): the bare except hides real failures (network errors,
            # parse errors, even KeyboardInterrupt) -- consider narrowing it.
            print ("error")
            pass
|
import FWCore.ParameterSet.Config as cms
process = cms.Process("READ")
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring("file:ref_merge_prod1.root",
"file:ref_merge_prod2.root")
)
testProcess = cms.Process("TEST")
process.subProcess = cms.SubProcess(testProcess)
testProcess.tester = cms.EDAnalyzer("OtherThingAnalyzer",
other = cms.untracked.InputTag("d","testUserTag"))
testProcess.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string('refInSubProcess.root')
)
testProcess.e = cms.EndPath(testProcess.tester*testProcess.out)
|
def biggieSize(aList):
    """Replace every positive element with the string "big"; mutates and returns aList."""
    for idx, value in enumerate(aList):
        if value > 0:
            aList[idx] = "big"
    return aList
print(f"#1: {biggieSize([-1, 3, 5, -5])}")
def countPositives(aList):
    """Overwrite the last element with the count of positive values; mutates and returns aList."""
    positives = 0
    for value in aList:
        if value > 0:
            positives += 1
    aList[-1] = positives
    return aList
print(f"#2: {countPositives([-1,1,1,1])}")
print(f"#2: {countPositives([1,6,-4,-2,-7,-2])}")
def sumTotal(aList):
    """Return the sum of all elements in aList."""
    total = 0
    for value in aList:
        total += value
    return total
print(f"#3: {sumTotal([1,2,3,4])}")
print(f"#3: {sumTotal([6,3,-2])}")
def average(aList):
    """Return the arithmetic mean of aList (raises ZeroDivisionError when empty)."""
    total = 0
    for value in aList:
        total += value
    return total / len(aList)
print(f"#4: {average([1,2,3,4])}")
def length(aList):
    """Return the number of elements in aList."""
    return len(aList)
print(f"#5: {length([37,2,1,-9])}")
print(f"#5: {length([])}")
def minimum(aList):
    """Return the smallest element, or False when aList is empty."""
    if not aList:
        return False
    smallest = aList[0]
    for value in aList[1:]:
        if value < smallest:
            smallest = value
    return smallest
print(f"#6: {minimum([37,2,1,-9])}")
print(f"#6: {minimum([])}")
def maximum(aList):
    """Return the largest element, or False when aList is empty."""
    if not aList:
        return False
    largest = aList[0]
    for value in aList[1:]:
        if value > largest:
            largest = value
    return largest
print(f"#7: {maximum([37,2,1,-9])}")
print(f"#7: {maximum([])}")
def ultimateAnalysis(aList):
    """Return a dict with sumTotal, average, minimum, maximum and length.

    Requires a non-empty list (raises IndexError on an empty one, like the
    original).
    """
    smallest = largest = running = aList[0]
    for value in aList[1:]:
        if value < smallest:
            smallest = value
        if value > largest:
            largest = value
        running += value
    return {'sumTotal': running, 'average': running/len(aList), 'minimum': smallest, 'maximum': largest, 'length': len(aList)}
print(f"#9: {ultimateAnalysis([37,2,1,-9])}")
def reverseList(aList):
    """Reverse aList in place and return it.

    BUGFIX: the original checked its break condition only AFTER swapping, so
    on even-length lists of 4+ elements the final iteration performed one
    swap too many and undid the middle pair (e.g. [1,2,3,4] -> [4,2,3,1]).
    A standard two-pointer sweep fixes this for every length.
    """
    left = 0
    right = len(aList) - 1
    while left < right:
        aList[left], aList[right] = aList[right], aList[left]
        left += 1
        right -= 1
    return aList
print(f"#9: {reverseList([37,2,1,-9,4,2,7,4,6,8,4])}")
|
import re
import os
import errno
def is_valid_ipv4(ip_string):
    """Return True iff ip_string is a dotted-quad IPv4 address.

    Each octet must be 0-255 with no leading zeros (so '01.2.3.4' is invalid,
    '0.0.0.0' is valid).
    """
    if re.search(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$', ip_string) is None:
        return False
    for octet in ip_string.split('.'):
        # reject leading zeros such as '01' (but allow a bare '0')
        if octet != '0' and octet.startswith('0'):
            return False
        if int(octet) > 255:
            return False
    return True
class FileSaver(object):
    """Saves downloaded attachment content under base_dir/<folder>/<file>."""
    # default root for all saved attachments, fixed at import time
    base_dir = os.path.join(os.getcwd(), 'attachments')
    def __init__(self):
        # absolute paths of every file written by this saver, in write order
        self.resources_path = []
    def download(self, filename, foldername, content, overwrite=True):
        """Write *content* (bytes) to base_dir/foldername/filename.

        Existing files are skipped unless *overwrite* is True; each path
        actually written is recorded in self.resources_path.
        """
        folderpath = os.path.join(self.base_dir, foldername)
        filepath = os.path.join(folderpath, filename)
        if os.path.exists(filepath) and not overwrite:
            return
        # replaces the manual errno.EEXIST dance with the Python 3 idiom
        os.makedirs(folderpath, exist_ok=True)
        with open(filepath, 'wb') as fp:
            fp.write(content)
        self.resources_path.append(filepath)
if __name__ == '__main__':
    pass
|
from django.http import HttpResponse
from poll.models import Poll, Item, Vote, Choice
from django.db import transaction
from django.utils import simplejson
from utils import set_cookie
def authpass(user, queue):
    """Return True when `user` may act on `queue`.

    Access is denied only when the queue exists, requires
    authentication, and the user is anonymous; in every other case
    (including queue is None) the check passes.
    """
    if queue and queue.auth:
        return bool(user.is_authenticated())
    return True
#TODO: Need to optimize
@transaction.commit_on_success
def poll_ajax_vote(request, poll_pk):
    # AJAX endpoint: records one Vote (plus per-item Choices) for poll
    # `poll_pk`, then sets a cookie so the client can detect a repeat
    # vote. Any failure maps to HTTP 400.
    if request.is_ajax():
        try:
            poll = Poll.objects.get(pk=poll_pk)
            if poll.queue:
                # Queue-gated polls may require a logged-in user.
                if not authpass(request.user, poll.queue):
                    return HttpResponse('Non-authenticated users can\'t vote',
                                        status=400)
            chosen_items = simplejson.loads(request.POST['chosen_items'])
        # NOTE(review): bare except also hides programming errors; consider
        # catching (Poll.DoesNotExist, KeyError, ValueError) explicitly.
        except:
            return HttpResponse('Wrong parameters', status=400)
        # Anonymous votes are stored with user=None; the IP is kept either way.
        if request.user.is_authenticated():
            user = request.user
        else:
            user = None
        vote = Vote.objects.create(poll=poll,
                                   ip=request.META['REMOTE_ADDR'],
                                   user=user)
        try:
            # chosen_items maps item pk -> user-entered value; the value is
            # only persisted for "userbox" items.
            for item_pk, value in chosen_items.items():
                item = Item.objects.get(pk=item_pk)
                if item.userbox:
                    Choice.objects.create(vote=vote, item=item,
                                          uservalue=value)
                else:
                    Choice.objects.create(vote=vote, item=item)
        except:
            # NOTE(review): the exception is swallowed here, so
            # commit_on_success may still commit the partially-populated
            # Vote created above -- confirm this is intended.
            return HttpResponse('Data recognition failed', status=400)
        response = HttpResponse(status=200)
        # Cookie marks this browser as having voted on this poll.
        set_cookie(response, poll.get_cookie_name(), poll_pk)
        return response
    return HttpResponse(status=400)
def poll_ajax_result(request, poll_pk):
    """AJAX endpoint returning the vote tallies of a poll as JSON.

    The payload maps each item pk to {index, title, count} plus a
    'total' key with the overall vote count; non-AJAX requests and
    unknown polls get HTTP 400.
    """
    if not request.is_ajax():
        return HttpResponse(status=400)
    try:
        poll = Poll.objects.get(pk=poll_pk)
    except:
        return HttpResponse('Wrong parameters', status=400)
    # Per-item tallies for the results view.
    data = {
        item.pk: {
            'index': item.index,
            'title': item.value,
            'count': Choice.objects.filter(item=item).count(),
        }
        for item in Item.objects.filter(poll=poll)
    }
    data['total'] = Vote.objects.filter(poll=poll).count()
    return HttpResponse(simplejson.dumps(data))
|
#!/usr/bin/env python
#
# file: setup.py
# author: Cyrus Harrison <cyrush@llnl.gov>
#
# distutils gen + setup script for the 'visit_testing' module.
#
import sys
import os
import shutil
from os.path import join as pjoin
from distutils.core import setup
#
# Support run w/ visit's cli.
#
using_visit = False
try:
    # the command line string passed to cli
    # will confuse distutils, so modify
    # sys.argv to only have args passed after
    # '-s setup.py'
    # Argv() exists only inside VisIt's cli; a plain python run raises
    # NameError here and leaves using_visit False.
    args = Argv()
    sys.argv = [__file__]
    sys.argv.extend(args)
    using_visit = True
except:
    pass
# Note ideal, but we auto gen a src dir to make
# a sane package from the script layout in the visit repo to use
#with distutils
def generate():
    """Assemble the 'py_src' package directory from the repo layout.

    Wipes any previous ./py_src, copies the test-suite modules and data
    files that live next to this script into it, and writes the package
    __init__.py.
    """
    build_base = "py_src"
    # special case for VisIt build:
    # find if --build-base was passed by VisIt's CMake, if so
    # use it to generate the source dir
    #for arg in sys.argv:
    #    if arg.startswith("--build-base"):
    #        build_base = arg.split("=")[1]
    #        build_base = os.path.abspath(pjoin(build_base,"..","py_src"))
    if os.path.isdir(build_base):
        shutil.rmtree(build_base)
    os.mkdir(build_base)
    # __visit_script_file__ is injected by VisIt's cli; fall back to
    # __file__ for a plain python run.
    if using_visit:
        src_path = __visit_script_file__
    else:
        src_path = __file__
    src_dir = os.path.split(os.path.abspath(src_path))[0]
    print("[generating module source at: %s]" % build_base)
    module_items = ["HtmlDiff.py",
                    "HtmlPython.py",
                    "visit_test_common.py",
                    "visit_test_main.py",
                    "visit_test_reports.py",
                    "visit_test_suite.py",
                    "visit_test_ctest.py",
                    "nobaseline.pnm",
                    "notext.txt",
                    "report_templates"]
    for itm in module_items:
        itm_path = pjoin(src_dir,itm)
        print("[copying %s]" % itm_path)
        if os.path.isfile(itm_path):
            shutil.copy(itm_path,build_base)
        else:
            # Directories (e.g. report_templates) are copied recursively.
            des_dir = pjoin(build_base,itm)
            if os.path.isdir(des_dir):
                shutil.rmtree(des_dir)
            shutil.copytree(itm_path,des_dir)
    init_src ="\n".join(["#",
                         "# file: __init__.py",
                         "# description: module init for `visit_testing'",
                         "from visit_test_suite import run_visit_test, run_visit_tests",
                         ""])
    # Fix: close the handle deterministically (the original used
    # open(...).write(...), leaking the file object until GC).
    with open(pjoin(build_base,"__init__.py"),"w") as init_file:
        init_file.write(init_src)
# Build the py_src package dir, then hand off to distutils.
generate()
setup(name='visit_testing',
      version='0.1',
      author = 'VisIt Team',
      description='VisIt Testing Module',
      package_dir = {'visit_testing':'py_src'},
      packages=['visit_testing'],
      package_data={'visit_testing': ["nobaseline.pnm",
                                      "notext.txt",
                                      pjoin("report_templates","css","*"),
                                      pjoin("report_templates","js","*"),
                                      pjoin("report_templates","index.html")]})
# Under VisIt's cli the script would otherwise fall through into the
# cli's own main loop, so exit explicitly.
if using_visit:
    sys.exit(0)
from app_voetbalelo.uefa_euro2016.expand_elo import expand_elo
from app_voetbalelo.uefa_euro2016.get_games_data import get_games_data
from app_voetbalelo.uefa_euro2016.game_to_team import make_standing, rank
from app_voetbalelo.uefa_euro2016.montecarlo import montecarlo
from app_voetbalelo.uefa_euro2016.milp_montecarlo import milp_montecarlo
import copy
import datetime
import json
import pickle
import ftplib
import time
import numpy as np
import pandas as pd
import scipy.stats # For Poisson Distribution, numpy doesn't have it4
# Get Input data
games = get_games_data()
# Number of Monte Carlo tournament simulations to run.
simulations = 20000
print("########################################")
print("Uefa Euro 2016 Algorithm")
print("########################################")
# Initialize output
team_data = dict()
group_data = dict()
# Expand games data with elo data
print("Get Elo Data from Sway's Country Elo Algorithm")
output = expand_elo(games)
games = output[0]
elo = output[1]
# Generate standing and rank it correctly according to the UEFA tiebreaking procedure
print("Generate Group Standing and Rank according to Uefa rules")
standing = make_standing(games)
standing = rank(standing, games)
# Montecarlo
print("Montecarlo Algorithm")
output_1 = montecarlo(games,elo,simulations)
game_data = output_1[1]
# Make Integers of output
# Collect the teams of each group stage pool ("Group A", ... -- knockout
# rounds are filtered out by the "Group" substring test).
groups = dict()
group_list = list(set(games.Group))
for group in group_list:
    if "Group" in group:
        groups[group] = list(set(games[games.Group == group].HomeTeam))
output_2 = milp_montecarlo(output_1[0],output_1[2],groups)
output_int = output_2[0]
knockout_odds = output_2[1]
# Write json teams
initial_ranking = pd.read_csv("app_voetbalelo/uefa_euro2016/data/initial_ranking.csv")
colors = pd.read_csv("app_voetbalelo/uefa_euro2016/data/teams.csv")
# Per-team export: odds, elo, group, styling/info, standing and the
# team's group-stage fixtures.
for team in output_int.keys():
    team_data[team] = dict()
    team_data[team]["knockout_odds"] = knockout_odds[team]
    team_data[team]["odds"] = output_int[team]
    # Assumes a team's elo is identical across all its fixtures, so the
    # de-duplicated union collapses to one value -- TODO confirm.
    team_data[team]["elo"] = int(list(set(list(set(games[games.HomeTeam == team].HomeElo)) + list(set(games[games.AwayTeam == team].AwayElo))))[0])
    team_data[team]["group"] = list(set(list(set(games[games.HomeTeam == team].Group)) + list(set(games[games.AwayTeam == team].Group))))[0]
    team_data[team]["color"] = colors[colors.country == team].color.iloc[0]
    team_data[team]["info"] = [["Elo Rating", elo[team]["elo"]],
                               ["Head Coach", colors[colors.country == team].manager.iloc[0]],
                               # ["Captain", colors[colors.country == team].captain.iloc[0]],
                               # ["Top Scorer", colors[colors.country == team].topscorer.iloc[0]],
                               ["Appearances", colors[colors.country == team].euro_appearances.iloc[0].astype(str)],
                               ["Titles", colors[colors.country == team].euro_titles.iloc[0].astype(str)]]
    team_data[team]["standing"] = dict()
    for standing_key in standing.keys():
        if standing_key != "Country":
            if standing_key == "Group":
                team_data[team]["standing"]["G"] = standing[standing.Country == team][standing_key].iloc[0]
            # # THIS PART ONLY UNTIL TOURNAMENT STARTS
            # elif standing_key == "R":
            #     for position in ["1","2","3","4"]:
            #         if initial_ranking[(initial_ranking.group == standing[standing.Country == team][standing_key].iloc[0])][position].iloc[0] == team:
            #             team_data[team]["standing"]["GP"] = position
            #             break
            else:
                team_data[team]["standing"][standing_key] = str(standing[standing.Country == team][standing_key].iloc[0])
    team_data[team]["games"] = list()
    for i in range(len(games)):
        if "Group" in games.Group.iloc[i]:
            # NOTE(review): membership is a substring test ("team in
            # HomeTeam"); verify no team name is a substring of another.
            if team in games.HomeTeam.iloc[i]:
                game = dict()
                game["HomeTeam"] = team
                game["AwayTeam"] = games.AwayTeam.iloc[i]
                # Bug fix: "Group" was assigned games.AwayTeam.iloc[i]
                # (copy-paste error); it now carries the group name.
                game["Group"] = games.Group.iloc[i]
                game["Date"] = games.index[i].value/10**9
                game["Location"] = games.Location.iloc[i]
                game["HomeElo"] = games.HomeElo.iloc[i]
                game["AwayElo"] = games.AwayElo.iloc[i]
                game["HomeWin"] = games.HomeWin.iloc[i]
                game["AwayWin"] = games.AwayWin.iloc[i]
                game["Draw"] = games.Draw.iloc[i]
                team_data[team]["games"].append(game)
            elif team in games.AwayTeam.iloc[i]:
                game = dict()
                game["HomeTeam"] = games.HomeTeam.iloc[i]
                game["AwayTeam"] = team
                # Bug fix: same copy-paste error as the home branch.
                game["Group"] = games.Group.iloc[i]
                game["Date"] = games.index[i].value/10**9
                game["Location"] = games.Location.iloc[i]
                game["HomeElo"] = games.HomeElo.iloc[i]
                game["AwayElo"] = games.AwayElo.iloc[i]
                game["HomeWin"] = games.HomeWin.iloc[i]
                game["AwayWin"] = games.AwayWin.iloc[i]
                game["Draw"] = games.Draw.iloc[i]
                team_data[team]["games"].append(game)
# Write json games: only knockout-round fixtures are exported.
games_json = dict()
for i in range(len(games)):
    stage = games.Group.iloc[i]
    if stage not in ("1/8 Final", "Quarter Final", "Semi Final", "Final"):
        continue
    game_id = games.Game.iloc[i]
    games_json[game_id] = {
        "HomeTeam": games.HomeTeam.iloc[i],
        "AwayTeam": games.AwayTeam.iloc[i],
        "Location": games.Location.iloc[i],
        "Date": games.index[i].value/10**9,
    }
    # From the quarter finals on, HomeTeam/AwayTeam appear to hold the
    # id of the feeding game (the keys must already exist in games_json),
    # so link that entry forward to its next-round slot.
    if stage != "1/8 Final":
        games_json[games.HomeTeam.iloc[i]]["To"] = game_id + " HomeTeam"
        games_json[games.AwayTeam.iloc[i]]["To"] = game_id + " AwayTeam"
# Topojson France: keep only the metropolitan-France geometry.
with open("app_voetbalelo/uefa_euro2016/data/topo_ADM0_FRA.json", "r") as topo_file:
    topo = json.load(topo_file)
topo_result = copy.deepcopy(topo)
topo_result["objects"]["geo_ADM0_FRA"]["geometries"] = []
for subunit in topo["objects"]["geo_ADM0_FRA"]["geometries"]:
    if subunit["properties"]["n"] == "France":
        topo_result["objects"]["geo_ADM0_FRA"]["geometries"].append(subunit)
# Cities / stadium metadata for the map view.
cities = pd.read_csv("app_voetbalelo/uefa_euro2016/data/cities.csv", index_col=False)
cities_result = []
for i in range(len(cities)):
    cities_result.append({"City": cities.city.iloc[i],
                          "Stadium": cities.stadium.iloc[i],
                          "Capacity": cities.capacity.iloc[i].astype(str),
                          "Lat": cities.lat.iloc[i].astype(str),
                          "Long": cities.long.iloc[i].astype(str)})
teams = list(team_data.keys())
# Fix: write all result files via context managers -- the original
# leaked every file handle it opened (json.dump(open(...))).
result_dir = "app_voetbalelo/uefa_euro2016/result/"
for result_name, result_obj in [("topo_france.json", topo_result),
                                ("cities.json", cities_result),
                                ("team_data.json", team_data),
                                ("game_data.json", game_data),
                                ("games.json", games_json),
                                ("teams.json", teams)]:
    with open(result_dir + result_name, "w") as result_file:
        json.dump(result_obj, result_file)
# Write to FTP site
# SECURITY NOTE(review): credentials are hard-coded in source; move them
# to environment variables or a config file outside version control.
session = ftplib.FTP('ftp.sway-blog.be','sway-blog.be','Will0870')
session.cwd('/www/data/elo-uefa-euro2016')
# Upload each result file (opened buffered, the only way ftplib works).
# team_data.json is deliberately stored remotely as data.json -- the
# front end expects that name.
for local_name, remote_name in [("game_data.json", "game_data.json"),
                                ("team_data.json", "data.json"),
                                ("teams.json", "teams.json"),
                                ("games.json", "games.json"),
                                ("topo_france.json", "topo_france.json"),
                                ("cities.json", "cities.json")]:
    with open(result_dir + local_name, "rb") as data:
        session.storbinary('STOR ' + remote_name, data)
# Create dict with last update date
# Save as json and load buffered
last_update = {"date": datetime.datetime.now().strftime("%d/%m/%Y")}
with open(result_dir + "last_update.json", "w") as update_file:
    json.dump(last_update, update_file)
with open(result_dir + "last_update.json", "rb") as update_file:
    session.storbinary('STOR date.json', update_file)
session.quit()
# 標準ライブラリ
from configparser import ConfigParser
import argparse
import datetime
import os
import time
import logging
# サードパーティライブラリ
from redash_dynamic_query import RedashDynamicQuery
import pandas as pd
# 自作ライブラリ
from utils.about_settings import make_setting
# ロガーの設置
logger = logging.getLogger(__name__)
# モジュールへのパス
MODULE_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# データ保管場所へのパス
DATA_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data")
class RedashQueryGetter:
    """
    Fetch query results from Redash as pandas DataFrames.

    Arguments
        :param start_date: str  first day of the requested range (YYYY-MM-DD)
        :param end_date: str  last day of the requested range (YYYY-MM-DD)
        :param query_id: int  Redash query id to execute
        :param client: str  section of setting.ini holding the credentials
        :return None
    """
    def __init__(self, start_date: str, end_date: str, query_id: int, client: str) -> None:
        # Read endpoint/credentials from setting.ini; if the section is
        # missing, ask for it via make_setting() and retry.
        logger.info("setting.iniから情報を取得しています。")
        while True:
            os.chdir(MODULE_PATH)
            config = ConfigParser()
            try:
                config.read("setting.ini")
                api_key = config[client]["api_key"]
                data_source_id = int(config[client]["data_source_id"])
                end_point = config[client]["end_point"]
                break
            except KeyError:
                logger.warning("setting.iniにデータが存在しません。")
                make_setting(section=client, key=["end_point", "api_key", "data_source_id"])
        # Ranges of 90+ days are split into ~2-month chunks to limit the
        # load on the database side.
        self.start_date, self.end_date, self._sep_date = self._split_date(args_start_date=start_date,
                                                                          args_end_date=end_date)
        self.redash = RedashDynamicQuery(endpoint=end_point, apikey=api_key, data_source_id=data_source_id)
        self.query_id = query_id
        self.client = client
    def _get_query(self):
        """
        Run the query over [start_date, end_date] and return the rows as a
        pandas DataFrame; chunked requests are concatenated.
        :return: pandas.DataFrame
        """
        result = None
        # Multiple requests when the range was split; single shot otherwise.
        if self._sep_date:
            # TODO: implement process-based parallelism later (probably not
            # needed for runtime, but educational).
            for i, date in enumerate(self._sep_date):
                bind = {"start_date": date[0], "end_date": date[1], "gender": "M-F"}
                result_tmp = self.redash.query(query_id=self.query_id, bind=bind, as_csv=True)
                result_tmp_csv = self._conv_csv_to_dataframe(result_tmp)
                logger.info("{}のクエリID{}のデータにおいて{}から{}までの{}列のデータを取得しました。".format(self.client,
                                                                             self.query_id, date[0], date[1],
                                                                             len(result_tmp_csv)))
                if not i:
                    result = result_tmp_csv
                else:
                    result = pd.concat([result_tmp_csv, result], ignore_index=True)
                # Pause between chunks to be gentle on the server.
                time.sleep(10)
            logger.info("{}のクエリID{}のデータにおいて合計{}列のデータを取得しました。".format(self.client,
                                                                    self.query_id, len(result)))
            return result.reset_index(drop=True)
        else:
            bind = {"start_date": self.start_date, "end_date": self.end_date, "gender": "M-F"}
            result_tmp = self.redash.query(query_id=self.query_id, bind=bind, as_csv=True)
            result = self._conv_csv_to_dataframe(result_tmp)
            logger.info("{}のクエリID{}のデータにおいて合計{}列のデータを取得しました。".format(self.client,
                                                                    self.query_id, len(result)))
            return result.reset_index(drop=True)
    @staticmethod
    def _save_df(dir_name: str, result: pd.DataFrame, client: str, query_id: int,
                 name="Redash_data", excel=True) -> None:
        """
        Save the DataFrame to the given directory as xlsx (default) or csv.

        Fix: the annotation was `pd.DataFrame()` (a call), which built and
        threw away a DataFrame every time the function object was created;
        the class itself is the correct annotation.
        :param dir_name: str
        :param result: pd.DataFrame
        :param client: str
        :param query_id: int
        :param name: str
        :param excel: bool
        :return: None
        """
        os.chdir(dir_name)
        date_now = datetime.datetime.now().strftime("%Y-%m-%d")
        save_data_name = "{}_{}_{}_{}".format(name, client, query_id, date_now)
        if excel:
            result.to_excel(save_data_name + ".xlsx")
        else:
            result.to_csv(save_data_name + ".csv")
    @staticmethod
    def _conv_csv_to_dataframe(result: str) -> pd.DataFrame:
        """
        Parse the returned CSV text by writing it to a scratch file in the
        data directory and reading it back with pandas.read_csv().
        :param result: str
        :return: pandas.DataFrame
        """
        os.chdir(DATA_PATH)
        with open("data_tmp.csv", "w") as csvfile:
            csvfile.write(result)
        result_csv = pd.read_csv("data_tmp.csv")
        return result_csv
    @staticmethod
    def _split_date(args_start_date: str, args_end_date: str) -> tuple:
        """
        Split a 90+ day range into inclusive ~2-month [start, end] pairs.

        Returns (start_date, end_date, segments) as datetimes; segments is
        [] when the range is shorter than 90 days.
        :param args_start_date: str, "%Y-%m-%d"
        :param args_end_date: str, "%Y-%m-%d"
        :return: tuple
        """
        try:
            start_date = datetime.datetime.strptime(args_start_date, "%Y-%m-%d")
            end_date = datetime.datetime.strptime(args_end_date, "%Y-%m-%d")
        except ValueError:
            raise ValueError("'%Y-%m-%d'のフォーマットに調整して下さい。")
        # Long ranges burden the server, so the request window is chunked.
        if (end_date - start_date).days >= 90:
            st_date = start_date
            result_list = []
            while True:
                ed_date = st_date + datetime.timedelta(days=60)
                result_list.append([st_date, ed_date])
                st_date = ed_date + datetime.timedelta(days=1)
                if (ed_date - end_date).days >= 0:
                    # Clamp the final segment to the requested end date.
                    result_list[-1][1] = end_date
                    break
            _sep_date = result_list
        else:
            _sep_date = []
        return start_date, end_date, _sep_date
    @staticmethod
    def save_mongodb():
        # Placeholder: persistence to MongoDB is not implemented yet.
        pass
    @classmethod
    def get_and_save_all_data(cls, query_id: int, client: str, start_date: str, end_date: str, dir_path=None) \
            -> pd.DataFrame:
        """
        One-shot helper: fetch the data and optionally save it to dir_path
        (falling back to DATA_PATH when dir_path does not exist).
        :param query_id: int
        :param client: str
        :param start_date: str
        :param end_date: str
        :param dir_path: str or None
        :return: pandas.DataFrame
        """
        logger.info("Redashからデータを取得しています。")
        inst = cls(query_id=query_id, start_date=start_date, end_date=end_date, client=client)
        result = inst._get_query()
        if dir_path:
            try:
                os.chdir(dir_path)
                logger.info("{}にデータを保存します。".format(dir_path))
                cls._save_df(dir_path, result, client=client, query_id=query_id)
            except FileNotFoundError:
                os.chdir(DATA_PATH)
                logger.info("{}に保存が失敗したので、{}にデータを保存します。".format(dir_path, DATA_PATH))
                cls._save_df(DATA_PATH, result, client=client, query_id=query_id)
        logger.info("Redashからデータ取得を完了しました。")
        return result
def handle_commandline():
    """Build the CLI parser and return the parsed arguments namespace."""
    cli = argparse.ArgumentParser(
        prog="redash_getter.py",
        usage="redash_getter.py start_date end_date query_id client --dir_name dir_path",
        description="""このスクリプトは、Redashからデータをpandasデータフレーム形式でデータを取得します。
        またオプションで取得したデータフレームをcsv, xlsx形式で保存する事が可能です。
        引数として、データ取得開始日、データ取得終了日、クライアント、RedashのクエリID、保存するディレクトリ名を取ります。""",
        epilog="end",
        add_help=True
    )
    cli.add_argument("start_date", help="取得するデータの開始日を指定するパラメーターです。", type=str)
    cli.add_argument("end_date", help="取得するデータの終了日を指定するパラメーターです。", type=str)
    cli.add_argument("query_id", help="取得するクエリ番号を指定するパラメーターです。", type=int)
    cli.add_argument("client", help="取得するデータのクライアントを指定するパラメータです。MatchとPairsを想定しています",
                     type=str, choices=["match", "pairs"])
    cli.add_argument("-d", "--dir_name",
                     help="取得したデータフレームを保存するディレクトリを指定し、指定しない場合保存しません。",
                     type=str, default=None)
    return cli.parse_args()
if __name__ == '__main__':
    # CLI entry point: parse the arguments and run the fetch+save flow.
    args = handle_commandline()
    RedashQueryGetter.get_and_save_all_data(query_id=args.query_id, client=args.client, start_date=args.start_date,
                                            end_date=args.end_date, dir_path=args.dir_name)
|
# Read five integers, then report the list and the positions where its
# largest and smallest values occur.
lista = []
for i in range(0, 5):
    lista.append(int(input(f'Digite o valor na posicao {i}: ')))
# Idiom fix: the running max/min bookkeeping inside the input loop is
# replaced by the built-ins once all values are known.
maior = max(lista)
menor = min(lista)
print(f'Voce digitou os valores {lista}')
print(f'O maior valor digitado foi {maior} nas posicoes ', end='')
for i, v in enumerate(lista):
    if v == maior:
        print(f'{i}... ', end='')
print(f'\nO menor valor digitado foi {menor} nas posicoes ', end='')
for i, v in enumerate(lista):
    if v == menor:
        print(f'{i}... ', end='')
|
class Solution:
    def findUnsortedSubarray(self, nums):
        """
        Length of the shortest contiguous subarray which, once sorted,
        makes the whole array sorted; 0 for an already-sorted array.
        :type nums: List[int]
        :rtype: int
        """
        size = len(nums)
        # Sentinels chosen so a fully sorted input yields -2 - (-1) + 1 == 0.
        left = -1
        right = -2
        running_max = nums[0]
        running_min = nums[size - 1]
        for idx, value in enumerate(nums):
            mirror = nums[size - 1 - idx]
            running_max = max(running_max, value)
            running_min = min(running_min, mirror)
            # An element below the prefix max is out of place (rightmost wins);
            # an element above the suffix min is out of place (leftmost wins).
            if value < running_max:
                right = idx
            if mirror > running_min:
                left = size - 1 - idx
        return right - left + 1
from distutils.core import setup
# NOTE(review): distutils is deprecated since Python 3.10 and removed in
# 3.12; setuptools.setup is the drop-in replacement.
version = '1.0.0'
# Packages shipped with the distribution.
packages = [
    'stack'
]
setup(
    name='dsa',
    version=version,
    url='https://github.com/qzlzwhx/python_dsa.git',
    author='charles qiao ',
    author_email='qiaozhanlei@weizoom.com',
    packages=packages
)
|
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.sensors import SqlSensor
from airflow.operators.hive_operator import HiveOperator
from airflow.operators.mysql_operator import MySqlOperator
from settings import default_args
# Daily DW load for commerce contract data:
# - sales contracts
# - refunds
dag = DAG('dw_commerce_contract_d', default_args=default_args,
          schedule_interval='4 1 * * *')
# Block until the upstream dw_sso_basic_d job has published today's
# completion row into etl.signal.
# NOTE(review): the templates call macros.ds(ti) -- confirm this macro
# exists in this deployment; stock Airflow exposes {{ ds }} directly.
start = SqlSensor(
    task_id='start',
    conn_id='etl_db',
    sql="SELECT * FROM etl.signal WHERE name='dw_sso_basic_d' AND value='{{ macros.ds(ti) }}';",
    dag=dag
)
# Each source table follows the same 4-step pattern: drop today's staging
# partition, re-extract via dataship/embulk, re-add the partition, then
# merge staging into ODS.
# contract
del_partiton_stg_contract = HiveOperator(
    task_id='del_partiton_stg_contract',
    hive_cli_conn_id='spark_thrift',
    hql="alter table stg.newuuabc_contract drop if exists PARTITION (etl_date='{{ macros.ds(ti) }}');\n ",
    dag=dag)
src_stg_contract = BashOperator(
    task_id='src_stg_contract',
    bash_command='dataship extract uuold_newuuabc.contract {{ macros.ds(ti) }} {{ macros.tomorrow_ds(ti) }}',
    pool="embulk_pool",
    dag=dag)
add_partiton_stg_contract = HiveOperator(
    task_id='add_partiton_stg_contract',
    hive_cli_conn_id='spark_thrift',
    hql="alter table stg.newuuabc_contract add PARTITION (etl_date='{{ macros.ds(ti) }}');\n ",
    dag=dag)
stg_ods_contract = HiveOperator(
    task_id='stg_ods_contract',
    hive_cli_conn_id='spark_thrift',
    hql='scripts/stg2ods.newuuabc_contract_insert.sql',
    dag=dag
)
# contract_template
del_partiton_stg_contract_template = HiveOperator(
    task_id='del_partiton_stg_contract_template',
    hive_cli_conn_id='spark_thrift',
    hql="alter table stg.newuuabc_contract_template drop if exists PARTITION (etl_date='{{ macros.ds(ti) }}');\n ",
    dag=dag)
src_stg_contract_template = BashOperator(
    task_id='src_stg_contract_template',
    bash_command='dataship extract uuold_newuuabc.contract_template {{ macros.ds(ti) }} {{ macros.tomorrow_ds(ti) }}',
    pool="embulk_pool",
    dag=dag)
add_partiton_stg_contract_template = HiveOperator(
    task_id='add_partiton_stg_contract_template',
    hive_cli_conn_id='spark_thrift',
    hql="alter table stg.newuuabc_contract_template add PARTITION (etl_date='{{ macros.ds(ti) }}');\n ",
    dag=dag)
stg_ods_contract_template = HiveOperator(
    task_id='stg_ods_contract_template',
    hive_cli_conn_id='spark_thrift',
    hql='scripts/stg2ods.newuuabc_contract_template_insert.sql',
    dag=dag
)
# contract_refund (same drop/extract/add/merge pattern as above)
delpart_stg_contract_refund = HiveOperator(
    task_id='delpart_stg_contract_refund',
    hive_cli_conn_id='spark_thrift',
    hql="alter table stg.newuuabc_contract_refund drop if exists PARTITION (etl_date='{{ macros.ds(ti) }}');\n ",
    dag=dag)
stg_contract_refund = BashOperator(
    task_id='stg_contract_refund',
    bash_command='dataship extract uuold_newuuabc.contract_refund {{ macros.ds(ti) }} {{ macros.tomorrow_ds(ti) }}',
    pool="embulk_pool",
    dag=dag)
addpart_stg_contract_refund = HiveOperator(
    task_id='addpart_stg_contract_refund',
    hive_cli_conn_id='spark_thrift',
    hql="alter table stg.newuuabc_contract_refund add PARTITION (etl_date='{{ macros.ds(ti) }}');\n ",
    dag=dag)
stg_ods_contract_refund = HiveOperator(
    task_id='stg_ods_contract_refund',
    hive_cli_conn_id='spark_thrift',
    hql='scripts/stg2ods.newuuabc_contract_refund_insert.sql',
    dag=dag)
# contract_details
delpart_stg_contract_details = HiveOperator(
    task_id='delpart_stg_contract_details',
    hive_cli_conn_id='spark_thrift',
    hql="alter table stg.newuuabc_contract_details drop if exists PARTITION (etl_date='{{ macros.ds(ti) }}');\n ",
    dag=dag)
stg_contract_details = BashOperator(
    task_id='stg_contract_details',
    bash_command='dataship extract uuold_newuuabc.contract_details {{ macros.ds(ti) }} {{ macros.tomorrow_ds(ti) }}',
    pool="embulk_pool",
    dag=dag)
addpart_stg_contract_details = HiveOperator(
    task_id='addpart_stg_contract_details',
    hive_cli_conn_id='spark_thrift',
    hql="alter table stg.newuuabc_contract_details add PARTITION (etl_date='{{ macros.ds(ti) }}');\n ",
    dag=dag)
stg_ods_contract_details = HiveOperator(
    task_id='stg_ods_contract_details',
    hive_cli_conn_id='spark_thrift',
    hql='scripts/stg2ods.newuuabc_contract_details_insert.sql',
    dag=dag)
# contract_payment
delpart_stg_contract_payment = HiveOperator(
    task_id='delpart_stg_contract_payment',
    hive_cli_conn_id='spark_thrift',
    hql="alter table stg.newuuabc_contract_payment drop if exists PARTITION (etl_date='{{ macros.ds(ti) }}');\n ",
    dag=dag)
stg_contract_payment = BashOperator(
    task_id='stg_contract_payment',
    bash_command='dataship extract uuold_newuuabc.contract_payment {{ macros.ds(ti) }} {{ macros.tomorrow_ds(ti) }}',
    pool="embulk_pool",
    dag=dag)
addpart_stg_contract_payment = HiveOperator(
    task_id='addpart_stg_contract_payment',
    hive_cli_conn_id='spark_thrift',
    hql="alter table stg.newuuabc_contract_payment add PARTITION (etl_date='{{ macros.ds(ti) }}');\n ",
    dag=dag)
stg_ods_contract_payment = HiveOperator(
    task_id='stg_ods_contract_payment',
    hive_cli_conn_id='spark_thrift',
    # NOTE(review): this path has an extra 'ods/' segment, unlike every
    # other stg2ods script above -- confirm which location is correct.
    hql='scripts/ods/stg2ods.newuuabc_contract_payment_insert.sql',
    dag=dag)
# Barrier: all five ODS loads must finish before the DW builds start.
wait = DummyOperator(
    task_id='wait',
    dag=dag)
dw_contract = HiveOperator(
    task_id='dw_contract',
    hive_cli_conn_id='spark_thrift',
    hql='scripts/dw/commerce_contract__contract.sql',
    dag=dag)
dw_refund = HiveOperator(
    task_id='dw_refund',
    hive_cli_conn_id='spark_thrift',
    hql='scripts/dw/commerce_contract__refund.sql',
    dag=dag)
# Publish this DAG's completion signal so downstream DAGs can sense it
# (same etl.signal table the start sensor reads).
end = MySqlOperator(
    task_id='end',
    mysql_conn_id='etl_db',
    sql="INSERT INTO etl.signal VALUES('{1}', '{0}') ON DUPLICATE KEY UPDATE value='{0}'; ".format(
        "{{ macros.ds(ti) }}", "{{ dag.dag_id }}"),
    database='etl',
    dag=dag
)
# Five parallel extract/load chains fan in to `wait`, then the two DW
# builds run before the completion signal.
start >> del_partiton_stg_contract >> src_stg_contract >> add_partiton_stg_contract >> stg_ods_contract >> wait
start >> del_partiton_stg_contract_template >> src_stg_contract_template >> add_partiton_stg_contract_template >> stg_ods_contract_template >> wait
start >> delpart_stg_contract_refund >> stg_contract_refund >> addpart_stg_contract_refund >> stg_ods_contract_refund >> wait
start >> delpart_stg_contract_details >> stg_contract_details >> addpart_stg_contract_details >> stg_ods_contract_details >> wait
start >> delpart_stg_contract_payment >> stg_contract_payment >> addpart_stg_contract_payment >> stg_ods_contract_payment >> wait
wait >> [dw_contract, dw_refund] >> end
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : mofei
# @Time : 2018/9/17 20:34
# @File : 045 Jump Game II.py
# @Software: PyCharm
"""
Given an array of non-negative integers, you are initially positioned at the first index of the array.
Each element in the array represents your maximum jump length at that position.
Your goal is to reach the last index in the minimum number of jumps.
Example:
Input: [2,3,1,1,4]
Output: 2
Explanation: The minimum number of jumps to reach the last index is 2.
Jump 1 step from index 0 to 1, then 3 steps to the last index.
Note:
You can assume that you can always reach the last index.
"""
class Solution(object):
    def jump(self, nums):
        """
        Minimum number of jumps needed to reach the last index, computed
        greedily: each while-iteration advances one BFS "level" of
        reachable indices.
        :type nums: List[int]
        :rtype: int
        """
        begin = 0   # first index of the current level
        end = 1     # one past the last index of the current level
        step = 0
        while end < len(nums):
            # Furthest index reachable from anywhere in the current level.
            # (Renamed from `next`, which shadowed the builtin.)
            furthest = -1
            for i in range(begin, end):
                furthest = max(furthest, nums[i] + i)
            begin = end
            end = furthest + 1
            step += 1
        return step
if __name__ == '__main__':
    # Worst case for this input: all 1s forces one jump per index (999).
    res = Solution().jump([1 for i in range(1000)])
    print(res)
|
#var_type.py
# Demonstrates type() and len() on an int and a str.
a = 3
b = "Hello"
type_a = type(a)
type_b = type(b)
len_b = len(b)
for value in (type_a, type_b, len_b):
    print(value)
def main():
    """Read one line and print the length of its longest run of equal characters.

    Robustness fix: an empty input line used to crash with IndexError
    (list1[0] on an empty list); it now prints 0. The sort-to-find-max
    bookkeeping list is replaced by a running maximum.
    """
    n = input()
    if not n:
        print(0)
        return
    longest = 1
    current_run = 1
    # Compare each character with its predecessor; a mismatch resets the run.
    for prev, cur in zip(n, n[1:]):
        if cur == prev:
            current_run += 1
            if current_run > longest:
                longest = current_run
        else:
            current_run = 1
    print(longest)
if __name__ == "__main__":
    main()
|
from functools import partial
from os import listdir, mkdir
from shutil import move
from sys import argv, exit
from time import time
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """UI layout for the file-organizer window: label + path field + button.

    This looks pyuic-generated; if so, regenerate from the .ui file
    rather than editing by hand -- TODO confirm provenance.
    """
    def setupUi(self, MainWindow):
        # Build the widget tree and install it into MainWindow.
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(553, 86)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
        self.gridLayout.setObjectName("gridLayout")
        # Row 0: label | line edit | push button.
        self.label = QtWidgets.QLabel(self.centralwidget)
        self.label.setObjectName("label")
        self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
        self.lineEdit = QtWidgets.QLineEdit(self.centralwidget)
        self.lineEdit.setObjectName("lineEdit")
        self.gridLayout.addWidget(self.lineEdit, 0, 1, 1, 1)
        self.pushButton = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton.setObjectName("pushButton")
        self.gridLayout.addWidget(self.pushButton, 0, 2, 1, 1)
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 553, 23))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        # Apply the (translatable) user-visible captions.
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "文件整理工具"))
        self.label.setText(_translate("MainWindow", "输入文件夹路径:"))
        self.pushButton.setText(_translate("MainWindow", "整理"))
def start(ui):
    # Sort every file in the chosen folder into a subfolder named after
    # its extension (e.g. report.pdf -> .pdf\report.pdf).
    path = ui.lineEdit.text()
    print(path)
    e = listdir(path)
    allList = []
    # Windows-style separator; the tool assumes Windows paths.
    path = path+'\\'
    for i in e:
        try:
            r = i.split('.')
            print(i)
            # Skip dot-files and names without an extension.
            if r[0] == '' or len(r) == 1:
                continue
            print('.'.join(r))
            allList.append('.'.join(r))
            # '.'.join(r) reconstructs the original filename i.
            name = '.'.join(r)
            try:
                mkdir(path+'.'+r[-1])
            except:
                # Folder already exists: move the file into it.
                try:
                    move(path+name, path+'.'+r[-1])
                except FileExistsError:
                    # NOTE(review): this builds a *source* path that cannot
                    # exist (the unique time() prefix is prepended to the
                    # source, not the destination), so this rename-on-collision
                    # branch can never succeed -- confirm intent and fix.
                    move(path+'('+str(time()*100000)+')'+name, path+'.'+r[-1])
                continue
            move(path+name, path+'.'+r[-1])
        except:
            # Best-effort: any failure on one file is silently skipped.
            pass
if __name__ == '__main__':
    # Standard PyQt5 bootstrap: build the window and enter the event loop.
    app = QApplication(argv)
    MainWindow = QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(MainWindow)
    # partial() binds the ui so the click handler can read the path field.
    ui.pushButton.clicked.connect(partial(start,ui))
    MainWindow.show()
    exit(app.exec_())
|
# -*-coding: utf-8-*-
# Author : Christopher Lee
# License: Apache License
# File : utils.py
# Date : 2017-05-18 09-01
# Version: 0.0.1
# Description: description of this file.
import pickle
from string import ascii_uppercase, ascii_lowercase
import hashlib
# Map each uppercase letter to '_' + its lowercase twin, e.g. 'A' -> '_a'.
ASCII_MAPPING = {upper: '_{}'.format(lower)
                 for upper, lower in zip(ascii_uppercase, ascii_lowercase)}
def camel_to_underscore(key):
    """Convert a CamelCase word to underscore style.

    Each uppercase letter becomes '_' + lowercase; a leading underscore
    produced by an initial capital is stripped.
    :param key: word to be converted
    :return: str
    """
    converted = ''.join(
        ASCII_MAPPING[ch] if ch in ASCII_MAPPING else ch for ch in key)
    return converted.strip('_')
def get_query_fingerprint(query, hash_method='md5'):
    """
    Generate a unique fingerprint for the given query.

    :param query: mapping of query parameters
    :param hash_method: name of a hashlib constructor (e.g. 'md5', 'sha1')
    :return: hex digest string
    """
    normalized = []
    for key in sorted(query.keys()):
        val = query.get(key)
        # Sort container values so element order does not change the hash;
        # unsortable values (ints, None, ...) are kept as-is.
        try:
            normalized.append((key, sorted(val)))
        except TypeError:
            normalized.append((key, val))
    digest = getattr(hashlib, hash_method)(pickle.dumps(normalized))
    return digest.hexdigest()
def make_cache_key_for_object(obj):
    """
    Build a cache key for *obj* by hashing its string representation.

    The key is only as stable as ``str(obj)``: two objects with the same
    str/repr map to the same key.

    :param obj: any object
    :return: 32-character md5 hex digest of ``str(obj)``
    """
    # The previous revision re-imported pickle/hashlib locally and wrapped
    # this in a try/except for pickle errors, but nothing here pickles:
    # hashing str(obj) can never raise PickleError, so that handler was dead
    # code and has been removed.
    return hashlib.md5(str(obj).encode('utf-8')).hexdigest()
class Person(object):
    """Tiny value object used to demonstrate cache-key generation."""

    def __init__(self, name, age):
        self.name = name
        self.age = age

    def __repr__(self):
        return f'<Person name={self.name}, age={self.age}>'
if __name__ == '__main__':
    # Ad-hoc demo: only the first object's key is printed; objects with
    # different repr() values would yield different keys.
    # d = {'select': ['folder_id', 'icon_url', 'name', 'create_at'], 'descending': True, 'limit': 20, 'where': {},
    #      'order_by': ['folder_id', ]}
    # a = d
    # c = d
    # print(get_query_fingerprint(d))
    # print(get_query_fingerprint(a))
    # print(get_query_fingerprint(c))
    a = Person('chris', 24)
    b = Person('chris', 25)
    c = Person('chris', 26)
    print(make_cache_key_for_object(a))
|
#-*- coding:utf-8 -*-
import os,re,urllib2,urllib,gzip
from io import BytesIO
import randomUA,random
import time
import cookielib
import socket
# Different User-Agents use different multipart boundary prefixes.
UA_Boundary = {'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.73 Safari/537.36':'----WebKitFormBoundary',
               'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; rv:11.0) like Gecko':'-------------------------',
               'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:43.0) Gecko/20100101 Firefox/43.0':'---------------------------'}
def rand(flag):
    """Return a random boundary suffix whose shape depends on the UA *flag*.

    Firefox and IE UAs get 14 characters drawn from the full alphanumeric
    set; any other flag gets 15 characters drawn from the digits only
    (indices 52-61 of the alphabet).
    """
    possible = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"
    firefox_ua = 'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:43.0) Gecko/20100101 Firefox/43.0'
    msie_ua = 'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; rv:11.0) like Gecko'
    if flag == firefox_ua or flag == msie_ua:
        count, lo, hi = 14, 0, 61
    else:
        count, lo, hi = 15, 52, 61
    return ''.join(possible[random.randint(lo, hi)] for _ in range(count))
def post_request(rmdownloadurl, random_ua):
    """Build the POST request for rmdown.com's download endpoint.

    The Referer is the page the torrent hash came from; the other headers
    mimic a desktop Chrome browser. (random_ua is accepted but unused, as
    in the original interface.)
    """
    req = urllib2.Request(url='http://www.rmdown.com/download.php')
    header_pairs = [
        ('User-Agent', 'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.73 Safari/537.36'),
        ('Connection', 'keep-alive'),
        ('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'),
        ('Referer', rmdownloadurl.replace('\n', '')),
        ('Accept-Encoding', 'gzip, deflate'),
    ]
    for header_name, header_value in header_pairs:
        req.add_header(header_name, header_value)
    return req
def download_start():
    """Fetch every torrent listed in 1024.txt from rmdown.com and write the
    decompressed .torrent files to F:\\1024torrent\\."""
    # 1024.txt stores all the download links
    fs = open('1024.txt','r')
    url_list = fs.readlines()
    fs.close()
    flag = 0
    interval = 10
    torrent_list = []
    for rmdownloadurl in url_list:
        print rmdownloadurl
        random_ua = randomUA.random_ua()
        request = urllib2.Request(rmdownloadurl)
        request.add_header('User-Agent',random_ua)
        request.add_header('Refer',rmdownloadurl)
        request.add_header('Upgrade-Insecure-Requests','1')
        request.add_header('Accept','text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8')
        # After the first iteration, reuse the session cookie captured below.
        if flag != 0:
            request.add_header('Cookie',cookie_value)
        # Create the cookie handler/opener.
        cookie = cookielib.CookieJar()
        handler=urllib2.HTTPCookieProcessor(cookie)
        opener = urllib2.build_opener(handler)
        # opener.open behaves like urllib2.urlopen and also accepts a Request.
        try:
            response = opener.open(request,timeout=30)
            downloadpage = response.read()
            # Read the boundary "reff" token from the download page's form.
            regex_download_pattern = re.compile('')
            reff_reg = re.compile('<INPUT .*?reff.*?value="(.*?)">')
            reff = re.findall(reff_reg,downloadpage)[0]
        except socket.timeout as e:
            print '-----------------------------------------'
        except:
            print '-----------------Game over---------------'
        # Capture the session cookie once, on the first successful request.
        if flag == 0:
            for item in cookie:
                cookie_value = item.name+'='+item.value
                break
            flag = 1
        # Issue the POST request that triggers the actual download.
        post_req = post_request(rmdownloadurl,random_ua)
        #boundary = '------WebKitFormBoundary'+rand()
        boundary = UA_Boundary[random_ua]+rand(random_ua)
        hashvalue = rmdownloadurl.split('=')[1]
        # Boundary format is browser (UA) dependent.
        post_req.add_header('Content-type','multipart/form-data; boundary='+boundary)
        post_req.add_header('Cookie',cookie_value)
        payload = '--'+boundary+'\r\n'+'Content-Disposition: form-data; name="ref"'+'\r\n\r\n'+hashvalue.replace('\n','')+'\r\n'+'--'+boundary \
            +'\r\n'+'Content-Disposition: form-data; name="reff"'+'\r\n\r\n'+reff+'\r\n'+'--'+boundary+'\r\n'+'Content-Disposition: form-data; name="submit"'+'\r\n\r\n' \
            +'download'+'\r\n'+'--'+boundary+'--'+'\r\n'
        post_req.add_data(payload)
        try:
            req = urllib2.urlopen(post_req,timeout=30)
            data = BytesIO(req.read())
            torrent_list.append(data)
        except socket.timeout as e:
            time.sleep(30)
            print '-----------------------------------------'
            # Re-queue the timed-out URL at the end of the list.
            url_list.append(rmdownloadurl)
        except:
            print 'boom!'
        # StringIO would fail here (binary payload), hence BytesIO above.
        response.close()
        req.close()
        print len(torrent_list)
        #interval += 10
        #time.sleep(0.1)
    # Responses are gzip-compressed; decompress and save each torrent under
    # a random (digits-only) filename.
    for item in torrent_list:
        gziper = gzip.GzipFile(fileobj = item)
        decode = gziper.read()
        torrent_fs = open('F:\\1024torrent\\'+rand('1')+'.torrent','wb')
        torrent_fs.write(decode)
        torrent_fs.close()
if __name__ == '__main__':
    # Script entry point: crawl everything listed in 1024.txt.
    download_start()
|
from django.urls import path
from .views import LodgeListView, LodgeDetailView, LodgeCreateView, LodgeUpdateView, LodgeDeleteView
from . import views
# URL routes for the lodgment app; the route names are referenced from
# templates via {% url %} and from the class-based views' redirects.
urlpatterns = [
    path('', LodgeListView.as_view(), name='lodgment-home'),
    path('lodge/<int:pk>/', LodgeDetailView.as_view(), name='lodge-detail'),
    path('lodge/new/', LodgeCreateView.as_view(), name='lodge-create'),
    path('lodge/<int:pk>/update/', LodgeUpdateView.as_view(), name='lodge-update'),
    path('lodge/<int:pk>/delete/', LodgeDeleteView.as_view(), name='lodge-delete'),
]
import pandas as pd
import numpy as np
import random
from itertools import combinations
from .geo_dist import waypointGeo
import requests
import flexpolyline as fp
#import sys
def gatherData(waypoints):
    """
    Query the HERE routing API for every pair of waypoints, recording the
    driving distance (m), duration (s) and encoded polyline per pair.

    Side effect: writes my-waypoints-dist-dur.tsv for readData() to load.

    :param waypoints: iterable of place names resolvable by waypointGeo
    :return: dict mapping frozenset({a, b}) -> encoded polyline string
    """
    all_waypoints = waypoints
    waypoint_distances = {}
    waypoint_durations = {}
    coord_dict = waypointGeo(all_waypoints)
    polyline_dict = {}
    for (waypoint1, waypoint2) in combinations(all_waypoints, 2):
        try:
            lat_lng_1=coord_dict[waypoint1][0]+","+coord_dict[waypoint1][1]
            lat_lng_2=coord_dict[waypoint2][0]+","+coord_dict[waypoint2][1]
            # NOTE(review): "apiKey=apikey" looks like a placeholder -- a real
            # key must be substituted for the request to succeed.
            src = "https://router.hereapi.com/v8/routes?transportMode=car&origin={}&destination={}&routingMode=short&return=polyline,travelSummary&apiKey=apikey".format(lat_lng_1, lat_lng_2)
            route = requests.get(src)
            route = route.json()
            distance = route['routes'][0]['sections'][0]['travelSummary']['length']
            duration = route['routes'][0]['sections'][0]['travelSummary']['duration']
            polyline = route['routes'][0]['sections'][0]['polyline']
            waypoint_distances[frozenset([waypoint1, waypoint2])] = distance
            waypoint_durations[frozenset([waypoint1, waypoint2])] = duration
            polyline_dict[frozenset([waypoint1, waypoint2])] = polyline
        except Exception as e:
            print(e)
            print("Error with finding the route between %s and %s." % (waypoint1, waypoint2))
    # Persist the pairwise measurements so later runs can use readData().
    with open("my-waypoints-dist-dur.tsv", "w") as out_file:
        out_file.write("\t".join(["waypoint1",
                                  "waypoint2",
                                  "distance_m",
                                  "duration_s"]))
        for (waypoint1, waypoint2) in waypoint_distances.keys():
            out_file.write("\n" +
                           "\t".join([waypoint1,
                                      waypoint2,
                                      str(waypoint_distances[frozenset([waypoint1, waypoint2])]),
                                      str(waypoint_durations[frozenset([waypoint1, waypoint2])])]))
    return polyline_dict
def readData():
    """Load the waypoint distance/duration table written by gatherData().

    :return: (all_waypoints, waypoint_distances, waypoint_durations) where
             the dicts are keyed by frozenset({waypoint1, waypoint2}).
    """
    distances = {}
    durations = {}
    waypoints = set()
    table = pd.read_csv("my-waypoints-dist-dur.tsv", sep="\t")
    for row in table.itertuples(index=False):
        pair = frozenset([row.waypoint1, row.waypoint2])
        distances[pair] = row.distance_m
        durations[pair] = row.duration_s
        waypoints.update([row.waypoint1, row.waypoint2])
    return waypoints, distances, durations
def compute_fitness(solution, waypoint_distances):
    """
    Return the total distance traveled on the current road trip.

    The trip is treated as a closed loop: the leg from the last waypoint
    back to the first is included (the original `solution[index - 1]` at
    index 0 wrapped around the same way). The genetic algorithm favors
    road trips with shorter total distances.
    """
    total = 0.0
    for here, there in zip(solution, solution[1:]):
        total += waypoint_distances[frozenset([here, there])]
    if solution:
        # Closing leg: last -> first.
        total += waypoint_distances[frozenset([solution[-1], solution[0]])]
    return total
def generate_random_agent(all_waypoints):
    """Create one random road trip: a shuffled tuple of all waypoints."""
    itinerary = list(all_waypoints)
    random.shuffle(itinerary)
    return tuple(itinerary)
def mutate_agent(agent_genome, max_mutations=3):
    """
    Apply 1..`max_mutations` point mutations to the given road trip.

    A point mutation swaps the positions of two distinct waypoints.
    """
    genome = list(agent_genome)
    last = len(genome) - 1
    for _ in range(random.randint(1, max_mutations)):
        i = random.randint(0, last)
        j = i
        while j == i:
            j = random.randint(0, last)
        genome[i], genome[j] = genome[j], genome[i]
    return tuple(genome)
def shuffle_mutation(agent_genome):
    """
    Apply a single shuffle mutation: cut a random 2-20 element sub-section
    out of the road trip and splice it back in at another position.
    """
    genome = list(agent_genome)
    start = random.randint(0, len(genome) - 1)
    size = random.randint(2, 20)
    chunk = genome[start:start + size]
    remainder = genome[:start] + genome[start + size:]
    where = random.randint(0, len(remainder) + len(chunk) - 1)
    return tuple(remainder[:where] + chunk + remainder[where:])
def generate_random_population(pop_size, all_waypoints):
    """Generate a list of `pop_size` random road trips."""
    return [generate_random_agent(all_waypoints) for _ in range(pop_size)]
def run_genetic_algorithm(all_waypoints, waypoint_distances, generations=5000, population_size=100):
    """
    The core of the Genetic Algorithm.

    `generations` and `population_size` must be a multiple of 10.

    :param all_waypoints: iterable of waypoint names
    :param waypoint_distances: dict keyed by frozenset({a, b}) -> distance
    :param generations: number of breeding rounds
    :param population_size: number of candidate trips per round
    :return: the best route (tuple) captured on the last logging pass
    """
    final_sequence = []
    population_subset_size = int(population_size / 10.)
    generations_10pct = int(generations / 10.)
    # Create a random population of `population_size` number of solutions.
    population = generate_random_population(population_size, all_waypoints)
    # For `generations` number of repetitions...
    for generation in range(generations):
        # Compute the fitness of the entire current population
        # (deduplicated: identical genomes are scored once).
        population_fitness = {}
        for agent_genome in population:
            if agent_genome in population_fitness:
                continue
            population_fitness[agent_genome] = compute_fitness(agent_genome, waypoint_distances)
        # Take the top 10% shortest road trips and produce offspring each from them
        new_population = []
        for rank, agent_genome in enumerate(sorted(population_fitness,
                                                   key=population_fitness.get)[:population_subset_size]):
            # Log progress (and remember the current best) every 10% of the
            # run and on the final generation.
            if (generation % generations_10pct == 0 or generation == generations - 1) and rank == 0:
                print("Generation %d best: %d | Unique genomes: %d" % (generation,
                                                                       population_fitness[agent_genome],
                                                                       len(population_fitness)))
                print(agent_genome)
                final_sequence = agent_genome
                print("")
            # Create 1 exact copy of each of the top road trips
            new_population.append(agent_genome)
            # Create 2 offspring with 1-3 point mutations
            for offspring in range(2):
                new_population.append(mutate_agent(agent_genome, 3))
            # Create 7 offspring with a single shuffle mutation
            for offspring in range(7):
                new_population.append(shuffle_mutation(agent_genome))
        # Replace the old population with the new population of offspring
        for i in range(len(population))[::-1]:
            del population[i]
        population = new_population
    return final_sequence
'''
CALL THIS FUNCTION FROM YOUR CODE
'''
def getHamiltonian(all_waypoints):
    '''
    INPUT > List of waypoints. Example-["A","B","C"]
    OUTPUT> complete polyline (string)
            total distance (number)
            total duration (number)
            optimized sequence of waypoints ("B","A","C")

    Note: currently only the decoded coordinate list is returned; the
    distance/duration/sequence return is commented out below.
    '''
    complete_polyline = ""
    total_distance = 0
    total_duration = 0
    all_coordinates = []
    # Fetch pairwise routes from the HERE API; distances/durations are also
    # persisted to disk and re-read via readData().
    polyline_dict = gatherData(all_waypoints)
    all_waypoints, waypoint_distances, waypoint_durations = readData()
    # Small GA run -- sized for a handful of waypoints.
    seq = run_genetic_algorithm(all_waypoints, waypoint_distances, generations=500, population_size=20)
    for i in range(len(seq)-1):
        complete_polyline += polyline_dict[frozenset([seq[i], seq[i+1]])]
        total_distance += waypoint_distances[frozenset([seq[i], seq[i+1]])]
        total_duration += waypoint_durations[frozenset([seq[i], seq[i+1]])]
    # NOTE(review): concatenating several encoded polylines and decoding the
    # result in one call assumes fp.decode tolerates multiple headers --
    # verify against the flexpolyline library.
    all_coordinates.extend(fp.decode(complete_polyline))
    #return total_distance,total_duration,seq
    return all_coordinates
'''
USE THIS FOR TESTING
print(getHamiltonian(["Mumbai","Delhi","Agra","Nagpur","Pune","Kalyan"]))
'''
|
import math
import numpy as np
def convolution_test(imgIn, kernel):
    """
    Correlate `imgIn` with `kernel`, normalizing each window by taking the
    mean of the element-wise products (as the original implementation did).

    Fixes over the previous revision:
      * The output no longer aliases the input (`imgConv = imgIn` made every
        window read pixels that had already been overwritten, which is what
        turned edge-detection kernels into a "big mess").
      * Arithmetic happens in float64, so uint8 inputs no longer wrap or
        saturate when multiplied by the kernel.

    Border handling is unchanged: the window is clamped so it always lies
    fully inside the image (edge pixels reuse a shifted full-size window).

    :param imgIn: 2-D numpy array (H, W), any numeric dtype
    :param kernel: 2-D numpy array with an odd number of rows and columns
    :return: float64 numpy array of shape (H, W)
    :raises Exception: if the kernel has an even number of rows or columns
    """
    imgInHeight, imgInWidth = imgIn.shape[0], imgIn.shape[1]
    kernelHeight, kernelWidth = kernel.shape[0], kernel.shape[1]

    # The kernel must have a center pixel, i.e. odd rows and columns.
    if kernelWidth % 2 == 0:
        raise Exception("Kernel has an even number of columns")
    if kernelHeight % 2 == 0:
        raise Exception("Kernel has an even number of rows")

    padY = kernelHeight // 2
    padX = kernelWidth // 2

    # Read from an untouched float copy; write into a separate output array.
    imgSrc = imgIn.astype(np.float64)
    imgConv = np.empty_like(imgSrc)

    for iY in range(imgInHeight):
        for iX in range(imgInWidth):
            # Clamp the window so it stays inside the image bounds.
            Ystart = min(max(iY - padY, 0), imgInHeight - kernelHeight)
            Yend = min(max(iY + padY, kernelHeight - 1), imgInHeight - 1)
            Xstart = min(max(iX - padX, 0), imgInWidth - kernelWidth)
            Xend = min(max(iX + padX, kernelWidth - 1), imgInWidth - 1)

            # Slice ends are exclusive in Python, hence the +1 to include
            # Yend/Xend in the window.
            window = imgSrc[Ystart:Yend + 1, Xstart:Xend + 1] * kernel
            imgConv[iY, iX] = window.mean()
        # Print every 100 rows to show progress.
        if iY % 100 == 0:
            print("Row " + str(iY) + "/" + str(imgInHeight) + " completed!")
    return imgConv
from django.contrib import admin
from Register.models import Employee
class AdminEmployee(admin.ModelAdmin):
    """Admin configuration: columns shown in the Employee change list."""
    list_display = ['fullname', 'position', 'emp_code', 'phone' ]


# Expose Employee in the Django admin with the configuration above.
admin.site.register(Employee, AdminEmployee)
|
# Generated by Django 4.2.2 on 2023-07-02 01:58
import uuid
import django.db.models.deletion
import django.utils.timezone
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the api app: a custom UUID-keyed User, the
    Feed/FeedEntry models, and the mapping tables linking users to feeds,
    entries and subscription-import progress. (Auto-generated by Django;
    edit with care.)"""

    initial = True

    dependencies = [
        ("auth", "0012_alter_user_first_name_max_length"),
    ]

    operations = [
        # Custom user model keyed by UUID instead of an integer id.
        migrations.CreateModel(
            name="User",
            fields=[
                ("password", models.CharField(max_length=128, verbose_name="password")),
                (
                    "last_login",
                    models.DateTimeField(
                        blank=True, null=True, verbose_name="last login"
                    ),
                ),
                (
                    "is_superuser",
                    models.BooleanField(
                        default=False,
                        help_text="Designates that this user has all permissions without explicitly assigning them.",
                        verbose_name="superuser status",
                    ),
                ),
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4, primary_key=True, serialize=False
                    ),
                ),
                ("email", models.EmailField(max_length=254, unique=True)),
                ("is_staff", models.BooleanField(default=False)),
                ("is_active", models.BooleanField(default=True)),
                ("created_at", models.DateTimeField(default=django.utils.timezone.now)),
                ("attributes", models.JSONField(default=dict)),
            ],
            options={
                "abstract": False,
            },
        ),
        # A syndication feed; update_backoff_until throttles refresh attempts.
        migrations.CreateModel(
            name="Feed",
            fields=[
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4, primary_key=True, serialize=False
                    ),
                ),
                ("feed_url", models.TextField(unique=True)),
                ("title", models.TextField()),
                ("home_url", models.TextField(null=True)),
                (
                    "published_at",
                    models.DateTimeField(default=django.utils.timezone.now),
                ),
                ("updated_at", models.DateTimeField(null=True)),
                (
                    "db_created_at",
                    models.DateTimeField(default=django.utils.timezone.now),
                ),
                ("db_updated_at", models.DateTimeField(null=True)),
                (
                    "update_backoff_until",
                    models.DateTimeField(default=django.utils.timezone.now),
                ),
            ],
        ),
        # One article/item within a feed ("id" is the entry's own id from
        # the feed XML, not the primary key).
        migrations.CreateModel(
            name="FeedEntry",
            fields=[
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4, primary_key=True, serialize=False
                    ),
                ),
                ("id", models.TextField(null=True)),
                ("created_at", models.DateTimeField(null=True)),
                (
                    "published_at",
                    models.DateTimeField(default=django.utils.timezone.now),
                ),
                ("updated_at", models.DateTimeField(null=True)),
                ("title", models.TextField()),
                ("url", models.TextField()),
                ("content", models.TextField()),
                ("author_name", models.TextField(null=True)),
                (
                    "db_created_at",
                    models.DateTimeField(default=django.utils.timezone.now),
                ),
                ("db_updated_at", models.DateTimeField(null=True)),
                (
                    "feed",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="feed_entries",
                        to="api.feed",
                    ),
                ),
            ],
        ),
        # Tracks a user's bulk feed-subscription (OPML import) progress.
        migrations.CreateModel(
            name="FeedSubscriptionProgressEntry",
            fields=[
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4, primary_key=True, serialize=False
                    ),
                ),
                (
                    "status",
                    models.IntegerField(
                        choices=[(0, "Not Started"), (1, "Started"), (2, "Finished")],
                        default=0,
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
        ),
        # User-defined grouping of feeds.
        migrations.CreateModel(
            name="UserCategory",
            fields=[
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4, primary_key=True, serialize=False
                    ),
                ),
                ("text", models.TextField()),
                (
                    "feeds",
                    models.ManyToManyField(
                        related_name="user_categories", to="api.feed"
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="user_categories",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
        ),
        # Through table for User.subscribed_feeds, with an optional
        # per-user feed title override.
        migrations.CreateModel(
            name="SubscribedFeedUserMapping",
            fields=[
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4, primary_key=True, serialize=False
                    ),
                ),
                ("custom_feed_title", models.TextField(null=True)),
                (
                    "feed",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.feed"
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
        ),
        # Through table for User.read_feed_entries, with the read timestamp.
        migrations.CreateModel(
            name="ReadFeedEntryUserMapping",
            fields=[
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4, primary_key=True, serialize=False
                    ),
                ),
                ("read_at", models.DateTimeField(default=django.utils.timezone.now)),
                (
                    "feed_entry",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE, to="api.feedentry"
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
        ),
        # One line item (feed URL) within a subscription-progress entry.
        migrations.CreateModel(
            name="FeedSubscriptionProgressEntryDescriptor",
            fields=[
                (
                    "uuid",
                    models.UUIDField(
                        default=uuid.uuid4, primary_key=True, serialize=False
                    ),
                ),
                ("feed_url", models.TextField()),
                ("custom_feed_title", models.TextField(null=True)),
                ("user_category_text", models.TextField(null=True)),
                ("is_finished", models.BooleanField(default=False)),
                (
                    "feed_subscription_progress_entry",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="api.feedsubscriptionprogressentry",
                    ),
                ),
            ],
        ),
        migrations.AddIndex(
            model_name="feed",
            index=models.Index(
                fields=["update_backoff_until"], name="api_feed_update__033cc4_idx"
            ),
        ),
        # Many-to-many relations on User, added after the related models exist.
        migrations.AddField(
            model_name="user",
            name="favorite_feed_entries",
            field=models.ManyToManyField(
                related_name="favorite_user_set", to="api.feedentry"
            ),
        ),
        migrations.AddField(
            model_name="user",
            name="groups",
            field=models.ManyToManyField(
                blank=True,
                help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
                related_name="user_set",
                related_query_name="user",
                to="auth.group",
                verbose_name="groups",
            ),
        ),
        migrations.AddField(
            model_name="user",
            name="read_feed_entries",
            field=models.ManyToManyField(
                related_name="read_user_set",
                through="api.ReadFeedEntryUserMapping",
                to="api.feedentry",
            ),
        ),
        migrations.AddField(
            model_name="user",
            name="subscribed_feeds",
            field=models.ManyToManyField(
                related_name="subscribed_user_set",
                through="api.SubscribedFeedUserMapping",
                to="api.feed",
            ),
        ),
        migrations.AddField(
            model_name="user",
            name="user_permissions",
            field=models.ManyToManyField(
                blank=True,
                help_text="Specific permissions for this user.",
                related_name="user_set",
                related_query_name="user",
                to="auth.permission",
                verbose_name="user permissions",
            ),
        ),
        # Uniqueness rules and lookup indexes.
        migrations.AlterUniqueTogether(
            name="usercategory",
            unique_together={("user", "text")},
        ),
        migrations.AlterUniqueTogether(
            name="subscribedfeedusermapping",
            unique_together={("user", "feed"), ("user", "custom_feed_title")},
        ),
        migrations.AlterUniqueTogether(
            name="readfeedentryusermapping",
            unique_together={("feed_entry", "user")},
        ),
        migrations.AddIndex(
            model_name="feedentry",
            index=models.Index(fields=["id"], name="api_feedent_id_6f9c14_idx"),
        ),
        migrations.AddIndex(
            model_name="feedentry",
            index=models.Index(fields=["url"], name="api_feedent_url_a60cf6_idx"),
        ),
        # (feed, url) must be unique; two constraints because NULL
        # updated_at values never compare equal in a plain unique index.
        migrations.AddConstraint(
            model_name="feedentry",
            constraint=models.UniqueConstraint(
                condition=models.Q(("updated_at__isnull", True)),
                fields=("feed", "url"),
                name="unique__feed__url__when__updated_at__null",
            ),
        ),
        migrations.AddConstraint(
            model_name="feedentry",
            constraint=models.UniqueConstraint(
                fields=("feed", "url", "updated_at"),
                name="unique__feed__url__when__updated_at__not_null",
            ),
        ),
    ]
|
from cast import *
eye_point = Point(0.0,0.0,-14.0)
sphere_list = [Sphere(Point(1.0, 1.0, 0.0), 2.0, Color(0.0, 0.0, 1.0), Finish(Color(0.2, 0.2, 0.2), 0.4, 0.5, 0.05)),
Sphere(Point(0.5, 1.5, -3.0), 0.5, Color(1.0, 0.0, 0.0), Finish(Color(0.4, 0.4, 0.4), 0.4, 0.5, 0.05))]
light = Light(Point(-100.0, 100.0, -100.0), Color(1.5, 1.5, 1.5))
print 'P3'
print '1024 768'
print '255'
outfile = open('image.ppm', 'w')
outfile.write(cast_all_rays(-10.0, 10.0, -7.5, 7.5, 1024, 768, eye_point, sphere_list, Color(1.0, 1.0, 1.0), light)/n)
|
import mama
##
# Explore Mama docs at https://github.com/RedFox20/Mama
#
class libgtest(mama.BuildTarget):
    """Mama build target for GoogleTest: nothing is compiled here, the
    static libraries and headers are just exported for consumers."""
    workspace = 'build'

    def dependencies(self):
        # Mark this target as having nothing of its own to build.
        self.nothing_to_build()

    def configure(self):
        pass

    def package(self):
        # Export the static archives and the public headers.
        self.export_libs('lib', ['.a'], src_dir=True)
        self.export_include('include')
|
import math, random
text = "Programming in Python is awesome!"
# Pad with spaces so the length is a multiple of 8 (one cube = 8 chars).
text = text + " " * (-len(text) % 8)
num_cubes = len(text) // 8
# Each cube is a list of 8 single characters.
cubes = [list(text[i * 8:(i + 1) * 8]) for i in range(num_cubes)]
print("Number of cubes: {}".format(num_cubes))
for i in range(num_cubes):
    print("Cube n. {}: {}".format(i + 1, cubes[i]))
def left(cube):
    """Rotate the cube 'left': reorder the 8 corners by a fixed permutation."""
    order = [3, 2, 6, 7, 0, 1, 5, 4]
    return [cube[k] for k in order]
def right(cube):
    """Rotate the cube 'right' (inverse of left): fixed corner permutation."""
    order = [4, 5, 1, 0, 7, 6, 2, 3]
    return [cube[k] for k in order]
def up(cube):
    """Rotate the cube 'up': reorder the 8 corners by a fixed permutation."""
    order = [4, 0, 3, 7, 5, 1, 2, 6]
    return [cube[k] for k in order]
def down(cube):
    """Rotate the cube 'down' (inverse of up): fixed corner permutation."""
    order = [1, 5, 6, 2, 0, 4, 7, 3]
    return [cube[k] for k in order]
# 5-10 random moves for each cube.
possible_rotations = ["L","R","U","D"]
all_cubes_moves = [[random.choice(possible_rotations) for j in range(random.randint(5,10))] for i in range(num_cubes)]
for i in range(num_cubes):
    print("Moves for cube n. {}: {}".format(i+1, all_cubes_moves[i]))
# Dispatch tables: decryption maps each move to its inverse rotation.
encrypt_dic = {"L":left, "R":right, "U":up, "D":down}
decrypt_dic = {"L":right, "R":left, "U":down, "D":up}
def encrypt(cubes, all_cubes_moves):
    """Apply each cube's move sequence in order, via the rotation table."""
    for idx, moves in enumerate(all_cubes_moves):
        for move in moves:
            cubes[idx] = encrypt_dic[move](cubes[idx])
    return cubes
def decrypt(cubes, all_cubes_moves):
    """Undo encrypt(): apply the inverse of each move, in reverse order."""
    for idx, moves in enumerate(all_cubes_moves):
        for move in reversed(moves):
            cubes[idx] = decrypt_dic[move](cubes[idx])
    return cubes
def read(cubes):
    """Concatenate every cube's characters back into a single string."""
    #or: strip the padding with "".join("".join(cube).split())
    return "".join("".join(cube) for cube in cubes)
# Round-trip demo: encrypt scrambles each cube, decrypt restores the text.
print("Original text: {}".format(read(cubes)))
cubes = encrypt(cubes,all_cubes_moves)
print("Encrypted text: {}".format(read(cubes)))
cubes = decrypt(cubes,all_cubes_moves)
print("Decrypted text: {}".format(read(cubes)))
|
from setuptools import setup, find_packages
# The long description comes straight from the README so PyPI renders it.
with open("README.md", "r") as fh:
    long_description = fh.read()

setup(
    name='ez-utils',
    version='1.1.0',
    packages=find_packages(),
    url='https://www.github.com/nielsvaes/ez_utils',
    long_description=long_description,
    long_description_content_type="text/markdown",
    license='MIT',
    author='Niels Vaes',
    author_email='nielsvaes@gmail.com',
    description='Some super simple one-liners to do common Python tasks'
)
|
#!/usr/bin/python3
#The floor of x: the largest integer not greater than x
# import math
# math.fabs( x )
import math # This will import math module

# math.floor(x) returns the largest integer not greater than x.
a = 2.1
b = math.floor(a)
print(b)
# Note the negative case: floor(-45.17) is -46, not -45.
print ("math.floor(-45.17) : ", math.floor(-45.17))
print ("math.floor(100.12) : ", math.floor(100.12))
print ("math.floor(100.72) : ", math.floor(100.72))
print ("math.floor(math.pi) : ", math.floor(math.pi))
import json
from preprocess import Preprocess
import numpy as np
from sklearn.feature_extraction.text import TfidfVectorizer
# Collect the unique candidate sentences (fields A, B, C of every record).
se = set()
with open("data/input.txt", "r", encoding="utf8") as f:
    for line in f:
        x = json.loads(line)
        se.add(x["A"])
        se.add(x["B"])
        se.add(x["C"])
contents = list(se)
preprocess = Preprocess("data/stopwords.txt")
sentences = preprocess.preprocess_text(contents)
# Build the tf-idf model from the training sentences; the token pattern
# keeps single-character tokens (important for Chinese text).
tfidf_model = TfidfVectorizer(token_pattern=r"(?u)\b\w+\b").fit(sentences)
sparse_data = tfidf_model.transform(sentences)
# print(sparse_data)
f = open("data/input.txt", "r", encoding="utf8")
ouf = open("data/output_tfidf.txt", "w", encoding="utf8")
for line in f:
    x = json.loads(line)
    y = preprocess.preprocess_text([x["A"], x["B"], x["C"]])
    y = tfidf_model.transform(y)
    y = y.todense()
    # Dot-product similarity of the tf-idf rows: v1 = A.B, v2 = A.C;
    # answer with whichever candidate is closer to A (ties go to C).
    v1 = np.sum(np.dot(y[0], np.transpose(y[1])))
    v2 = np.sum(np.dot(y[0], np.transpose(y[2])))
    if v1 > v2:
        print("B", file=ouf)
    else:
        print("C", file=ouf)
f.close()
ouf.close()
from collections import deque
# Read m and a list of ints; greedily build a sequence by appending each
# value (in descending order) to the tail when it is strictly smaller than
# the tail, or to the head when it is strictly smaller than the head.
m = int(input())
b = [int(i) for i in input().split()]
b.sort(reverse=True)
ans = deque()
ans.append(b[0])
for i in b[1:]:
    if i < ans[-1]:
        ans.append(i)
    elif i < ans[0]:
        ans.appendleft(i)
print(len(ans))
print(' '.join([str(i) for i in ans]))
from app import db
class User(db.Model):
    """Application user; currently only stores a login name."""
    # BUG FIX: the keyword was misspelled "autoincremnet", so the intended
    # autoincrement option never reached SQLAlchemy.
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    login = db.Column(db.String(250), nullable=False)

    def __init__(self, login):
        self.login = login
|
"""
Turner, Mann, Clandinin: Compute connectivity matrices from Neuprint hemibrain data.
https://github.com/mhturner/SC-FC
mhturner@stanford.edu
"""
from neuprint import (Client)
import os
from scfc import anatomical_connectivity, bridge
import datetime
import time
import numpy as np
t0 = time.time()

# Paths and the neuPrint auth token come from the user's config file.
data_dir = bridge.getUserConfiguration()['data_dir']
analysis_dir = bridge.getUserConfiguration()['analysis_dir']
token = bridge.getUserConfiguration()['token']

# start client
neuprint_client = Client('neuprint.janelia.org', dataset='hemibrain:v1.2', token=token)

# get rois of interest
mapping = bridge.getRoiMapping()

WeakConnections, MediumConnections, StrongConnections, Connectivity, WeightedSynapseNumber, TBars, body_ids = anatomical_connectivity.computeConnectivityMatrix(neuprint_client, mapping)

print('Finished computing connectivity matrix (total time = {:.1f} sec)'.format(time.time()-t0))

# %%
# Save each connectivity matrix pickled with a YYYYMMDD date suffix.
d = datetime.datetime.today()
datestring ='{:02d}'.format(d.year)+'{:02d}'.format(d.month)+'{:02d}'.format(d.day)
WeakConnections.to_pickle(os.path.join(data_dir, 'WeakConnections_computed_{}.pkl'.format(datestring)))
MediumConnections.to_pickle(os.path.join(data_dir, 'MediumConnections_computed_{}.pkl'.format(datestring)))
StrongConnections.to_pickle(os.path.join(data_dir, 'StrongConnections_computed_{}.pkl'.format(datestring)))
Connectivity.to_pickle(os.path.join(data_dir, 'Connectivity_computed_{}.pkl'.format(datestring)))
WeightedSynapseNumber.to_pickle(os.path.join(data_dir, 'WeightedSynapseNumber_computed_{}.pkl'.format(datestring)))
TBars.to_pickle(os.path.join(data_dir, 'TBars_computed_{}.pkl'.format(datestring)))
np.save(os.path.join(data_dir, 'body_ids_{}.npy'.format(datestring)), body_ids)
|
# -*- coding: utf-8 -*-
import argparse
from core.dispatch_center import Dispatcher
from settings.config import DEFAULT_USER_ID
def parse_args():
    """Build and parse the command-line arguments for the crawler."""
    parser = argparse.ArgumentParser(description='WeiBo Terminator. Jin Fagang')
    parser.add_argument('-i', '--id', default=DEFAULT_USER_ID,
                        help='set user id. or if_file contains multi ids.')
    parser.add_argument('-f', '--filter', default='1',
                        help='set weibo filter flag. if filter is 0, then weibo are all original,'
                             ' if 1, weibo contains repost one. default is 0.')
    parser.add_argument('-d', '--debug', default='1',
                        help='debug mode for develop. set 1 on, set 0 off.')
    return parser.parse_args()
if __name__ == '__main__':
    args = parse_args()

    # BUG FIX: `if not '/' or '\\' in uid` parsed as
    # `(not '/') or ('\\' in uid)`, which always ignored '/'. The intent
    # (per the print messages) is to detect whether --id is a plain user id
    # or a path to a file containing many ids.
    # BUG FIX: the filter branches assigned args.filter only when it was
    # falsy (and 0 otherwise), so filter_flag was always falsy regardless of
    # the flag. Use the provided value, falling back to 0.
    uid = args.id
    filter_flag = args.filter if args.filter else 0
    is_id_file = '/' in uid or '\\' in uid

    if args.debug == '1':
        if is_id_file:
            print('debug mode not support id file.')
        else:
            print('[debug mode] crawling weibo from id {}'.format(uid))
            dispatcher = Dispatcher(id_file_path=None, mode='single', uid=uid, filter_flag=filter_flag)
            dispatcher.execute()
    elif args.debug == '0':
        if not is_id_file:
            print('crawling weibo from id {}'.format(uid))
            dispatcher = Dispatcher(id_file_path=None, mode='single', uid=uid, filter_flag=filter_flag)
            dispatcher.execute()
        else:
            print('crawling weibo from multi id file {}'.format(uid))
            dispatcher = Dispatcher(id_file_path=uid, mode='multi', uid=None, filter_flag=filter_flag)
            dispatcher.execute()
    else:
        print('debug mode error, set 1 on, set 0 off.')
|
# to use CPU
import os
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" # see issue #152
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
import random
import tensorflow as tf
# Limit TensorFlow to CPU-only execution with a small thread pool.
# NOTE(review): tf.ConfigProto is a TF1 API, and this config object is not
# passed to a session anywhere in this file -- confirm it is consumed
# elsewhere (e.g. by the imported model code).
config = tf.ConfigProto(intra_op_parallelism_threads=5,
                        inter_op_parallelism_threads=5,
                        allow_soft_placement=True,
                        device_count = {'CPU' : 1,
                                        'GPU' : 0}
                        )
from model import get_model_emotions
from utils import clean_text, tokenize_words
from config import embedding_size, sequence_length
from preprocess import categories
from keras.preprocessing.sequence import pad_sequences
import pickle
import speech_recognition as sr
from gtts import gTTS
import numpy as np
#from mpyg321.mpyg321 import MPyg321Player()
# Question prompts read to the user; one synthesized mp3 per prompt.
Questionlist = []
mytext1 = 'Good evening Tim! How was your day!'
mytext2 = 'Hi Tim! What did you do today?'
mytext3 = 'Hi Tim,Could you describe your day?'
mytext4 = 'Hi Tim,How are you really feeling today?'
mytext5 = 'Hello Tim, Did you enjoy listening to your music recommendation?'
mytext6 = 'Good evening Tim! Did you enjoy your boxing session?'
mytext7 = 'Hi Tim,Did you read anything interesting today?'
Textlist = [mytext1, mytext2, mytext3, mytext4, mytext5, mytext6, mytext7]
Textlist = np.array(Textlist)
language = 'en'
# BUG FIX: question7.mp3 was synthesized from mytext6 (copy-paste error).
# Iterating over Textlist guarantees each file matches its prompt.
for q_index, q_text in enumerate(Textlist, start=1):
    mp3_name = "question{}.mp3".format(q_index)
    gTTS(text=str(q_text), lang=language, slow=False).save(mp3_name)
    Questionlist.append(mp3_name)
#player = MPyg321Player()
# Ask a random question aloud, then record the user's spoken answer.
r = sr.Recognizer()
with sr.Microphone() as source:
    print("Say something!")
    # Pick a random question; its mp3 was generated above.
    index = random.randint(0,len(Questionlist)-1)
    c = Questionlist[index]
    # Play the question via the external mpg321 player (blocking call).
    os.system("mpg321 "+ c)
    #player.play_song("question.mp3")
    audio = r.listen(source)
# Load the vocabulary mapping and the pre-trained emotion model.
# NOTE(review): the path contains a colon ("Mood:Emotion Code") — confirm it is
# valid on the target filesystem.
print("Loading vocab2int")
vocab2int = pickle.load(open("Mood:Emotion Code/data/vocab2int.pickle", "rb"))
model = get_model_emotions(len(vocab2int), sequence_length=sequence_length, embedding_size=embedding_size)
model.load_weights("results/model_v1_0.59_0.76.h5")
if __name__ == "__main__":
    import argparse
    # parser = argparse.ArgumentParser(description="Emotion classifier using text")
    # parser.add_argument("text", type=str, help="The text you want to analyze")
    # args = parser.parse_args()
    # Transcribe the recorded audio, tokenize it, and pad to the model's input length.
    # NOTE(review): recognize_google is called twice (network round-trips) — the
    # second call below could reuse this transcript.
    text = tokenize_words(clean_text(r.recognize_google(audio)), vocab2int)
    x = pad_sequences([text], maxlen=sequence_length)
    # Predicted class index and per-class probabilities.
    prediction = model.predict_classes(x)[0]
    probs = model.predict(x)[0]
    # print("hi:",index)
    print("Question asked: ",Textlist[index])
    print("You said: " + r.recognize_google(audio))
    print("Probs:")
    for i, category in categories.items():
        print(f"{category.capitalize()}: {probs[i]*100:.2f}%")
    print("The most dominant emotion:", categories[prediction])
import os
import sys
import ctypes
import arrow
import logging
import numpy as np
from PIL import ImageFont, ImageDraw, Image
from tinydb import TinyDB, Query, where
from datetime import datetime
# Run relative to this script's directory so the font/db paths resolve.
ABSPATH = os.path.abspath(__file__)
CWD = os.path.dirname(ABSPATH)
os.chdir(CWD)
# Timestamps: dt_string for display, arrow_now (JSON-serializable) for storage.
now = datetime.now()
dt_string = now.strftime("%d/%m/%Y %H:%M:%S")
arrow_now = arrow.now().for_json()
# Where the rendered wallpaper image is written before being applied.
wallpaper_path = r'C:\Users\vivek.ravindran\Desktop\SideProject\red.jpg'
# Rendering palette and geometry (width is the full canvas size, W x H).
text_color = (255, 255, 0)
background_color = (13, 2, 8)
width = (1360, 768)
font_size = 15
todos = []
font_color = (255, 255, 0)
box_color = (87, 232, 107)
# TinyDB storage: one table each for todos, follow-ups, and the work timer.
db = TinyDB('db.json')
table = db.table('todos')
followup = db.table('followups')
timer = db.table('timer')
font = 'fonts/whitrabt.ttf'
Timer_font = 'fonts/Minalis Double Demo.otf'
Timer_font_size = 55
# Canvas and drawing context shared by all render helpers below.
img = Image.new('RGB', width, color=background_color )
fnt = ImageFont.truetype(font, font_size)
timerfont = ImageFont.truetype(Timer_font, Timer_font_size)
d = ImageDraw.Draw(img)
w, h = 1360, 768  # screen resolution; assumed fixed — TODO confirm
# Outline box in the lower-left corner (daily-task area).
shape = [(0, h), (w - 1170, h - 180)]
d.rectangle(shape, fill=background_color, outline=font_color)
# Raw CLI arguments; arg[1] selects the action, the rest is payload.
arg = sys.argv
filename = arg[0]
def differhuman(starttime, endtime):
    """Return a human-readable description of the span between two timestamps."""
    delta = arrow.get(starttime) - arrow.get(endtime)
    span_days = delta.days
    span_hours, leftover = divmod(delta.seconds, 3600)
    span_minutes, span_seconds = divmod(leftover, 60)
    # Shift "now" backwards by the computed span so arrow can humanize it.
    anchor = arrow.utcnow().shift(
        days=-span_days, hours=-span_hours, minutes=-span_minutes, seconds=-span_seconds
    )
    return anchor.humanize(only_distance=True)
def redraw():
    """Re-render the wallpaper image from the DB tables and apply it as the
    Windows desktop background."""
    print("Redraw Function called")
    # Pending rows only; completed rows keep their history but are not drawn.
    Todo = Query()
    todolist = table.search(Todo.status == 'pending')
    Followup = Query()
    followuplist = followup.search(Followup.status == 'pending')
    off = 20  # vertical spacing per todo line
    offset = 0
    d.text((0, 0), "Must Finish Today! :", font=fnt, fill=font_color)
    present = arrow.get(arrow_now)
    future = present.shift(hours=9)  # NOTE(review): unused — confirm intent
    # time_left_string = "Finish work " + test
    last_updated = "Last Updated :- " + dt_string
    daily_tasks_tile = "Daily Task"
    todays_followups = "Todays Follow ups"
    bar = "--------------"
    daily_task_list = ["Test Task 1", "Test Task 2", "Test Task 3", "Test Task 4"]
    # Static labels: footer timestamp, follow-up header, daily-task box.
    d.text((1000, h-50), last_updated, font=fnt, fill=font_color)
    d.text((1000, h-300), todays_followups, font=fnt, fill=font_color)
    d.text((50, h - 160), daily_tasks_tile, font=fnt, fill=font_color)
    d.text((30, h - 140), bar, font=fnt, fill=font_color)
    d.text((30, h - 180), bar, font=fnt, fill=font_color)
    daily_task_display_offset = h - 150
    daily_off = 20
    followup_list_display_offset = h-300
    followup_offset = 20
    offset = 0
    # Work-timer banner: time remaining out of a 12-hour day since "Started Work".
    timer_data = timer.all()
    print(timer_data)
    for startworktime in timer_data:
        print(startworktime['timestamp'])
        test_arrow = arrow.get(startworktime['timestamp'])
        print(test_arrow)
        workend = test_arrow.shift(hours=+12)
        # NOTE(review): the "Focus!!" string is immediately overwritten below —
        # confirm which banner text is intended.
        time_left_string = "Focus!! Only " + workend.humanize(only_distance=True, granularity=["hour", "minute"]) + " Left!"
        time_left_string = workend.humanize(only_distance=True, granularity=["hour", "minute"])
        d.text((600, 10), time_left_string, font=timerfont, fill=font_color)
    # Pending follow-ups with a human-readable age.
    for followupitem in followuplist:
        # print(followupitem)
        # print(followupitem['todo'])
        item = str(followupitem['todo'])
        f_timestamp = followupitem['timestamp']
        humantime = differhuman(arrow_now, f_timestamp)
        followup_list_display_offset += followup_offset
        display_followup = item + " - " + humantime
        d.text((1000, followup_list_display_offset), display_followup, font=fnt, fill=font_color)
    # Hard-coded daily task placeholders in the lower-left box.
    for daily_task in daily_task_list:
        # print(daily_task)
        daily_task_display_offset += daily_off
        d.text((10, daily_task_display_offset), daily_task, font=fnt, fill=font_color)
    # Pending todos: "<index> <age> <summary>" stacked below the header.
    for item in todolist:
        summary = item['todo']
        i = item['int']
        # status = item['status']
        times = item['timestamp']
        savedAt = arrow.get(times)
        humantime = differhuman(savedAt , arrow_now)
        todo = str(i) + " " + humantime + " " + summary
        offset += off + 1
        print(todo)
        d.text((10, offset), todo, font=fnt, fill=text_color)
    img.save(wallpaper_path)
    # SPI_SETDESKWALLPAPER (20); flags 3 = update ini + broadcast change.
    ctypes.windll.user32.SystemParametersInfoW(20, 0, wallpaper_path, 3)
def addEntry(newentry):
    """Insert a new pending todo row; its index is the current row count."""
    next_index = len(table.all())
    table.insert({'int': next_index, 'timestamp': arrow_now, 'todo': newentry, 'status': 'pending'})
def delEntry(itr):
    """Soft-delete: mark the todo whose index equals itr[0] as Done."""
    print("LOG")
    target = Query()
    table.update({'status': 'Done'}, target.int == int(itr[0]))
def input_todo(arg):
    """Strip the program name and action token from an argv-style list.

    Mutates `arg` in place (preserving the original remove-based behavior)
    and returns the same list. Fixes in this revision: removed a dead `pass`,
    removed an unreachable `else` that could leave `clean` unbound, and no
    longer raises IndexError when the list has exactly one element.
    """
    print(arg)
    # Drop the first two entries (script name + action flag) when present.
    del arg[:2]
    return arg
def addFollowup(item):
    """Insert a new pending follow-up row, indexed by the current row count."""
    print(item)
    next_index = len(followup.all())
    followup.insert({'int': next_index, 'timestamp': arrow_now, 'todo': item, 'status': 'pending'})
def reset():
    """Wipe the todos, follow-ups and timer tables."""
    print("Reset Function called")
    for tbl in (table, followup, timer):
        tbl.purge()
def save():
    """Archive timer, todo and follow-up rows to data/<Mon-DD-YYYY>.txt,
    then clear all tables and redraw the wallpaper."""
    print("save function called")
    stopwork()
    filename = now.strftime("%b-%d-%Y")
    filename_ext = "data/" + filename + ".txt"
    separator = "-------------------------------------------------------------" + '\n'
    todolist = table.all()
    timer_data = timer.all()
    followup_data = followup.all()
    print(todolist)
    # BUG FIX: use a context manager so the file is closed even if a row is
    # malformed (the original left the handle open on any exception).
    with open(filename_ext, "w+") as f_save:
        f_save.write("TIMER" + '\n')
        f_save.write(separator)
        for times in timer_data:
            f_save.write(times['timestamp'] + " - " + times['action'] + '\n')
        f_save.write(separator)
        for item in todolist:
            f_save.write(item['status'] + " - " + item['todo'] + '\n')
        f_save.write(separator)
        for followup_item in followup_data:
            f_save.write(followup_item['status'] + " - " + followup_item['todo'] + '\n')
    reset()
    redraw()
def startwork():
    """Clear the timer table and record a fresh 'Started Work' row."""
    timer.purge()
    print("startwork called")
    # Table was just purged, so the index is always the new row count (0).
    timer.insert({'int': len(timer.all()), 'timestamp': arrow_now, 'action': 'Started Work'})
def stopwork():
    """Append a 'Stop Work' row to the timer table."""
    next_index = len(timer.all())
    timer.insert({'int': next_index, 'timestamp': arrow_now, 'action': 'Stop Work'})
# CLI dispatch: arg[1] selects the action; the remaining words are the payload.
if len(arg) > 1:
    action = arg[1]
    print(action)
    print("************************************************")
    if action == "a":
        print("Add Todos", action)
        todo_item = input_todo(arg)
        newentry = ' '.join(word for word in todo_item)
        addEntry(newentry)
        redraw()
    elif action == "d":
        print("Delete Todos", action)
        todo_item = input_todo(arg)
        delEntry(todo_item)
        redraw()
    elif action == "save":
        print("Save action", action)
        save()
    elif action == "f":
        print("Create Follow up", action)
        todo_item = input_todo(arg)
        newfollowup = ' '.join(word for word in todo_item)
        addFollowup(newfollowup)
        # BUG FIX: a stray trailing comma made this statement a useless 1-tuple.
        redraw()
    elif action == "r":
        # NOTE(review): labelled "Reset Todos" but only redraws; confirm whether
        # reset() was intended here before changing behavior.
        print("Reset Todos", action)
        redraw()
    elif action == "startwork":
        print("Start Work", action)
        startwork()
    elif action == "stopwork":
        print("Stop Work", action)
        stopwork()
    print("************************************************")
from django.urls import path
from . import views
# URL routes for the chat app.
urlpatterns = [
    # Landing page: the main chat view.
    path('', views.chat, name = 'chat'),
    # Message thread for the conversation with primary key `pk`.
    path('messages/<int:pk>', views.messagechat, name = 'messagechat')
]
#!/usr/bin/env python
from datetime import date
import pandas as pd
import numpy as np
import pyflux as pf
import click
import json
import uuid
import os
from SPARQLWrapper import SPARQLWrapper, JSON
GRAPHDB_CLIENT = SPARQLWrapper("http://ec2-3-120-140-142.eu-central-1.compute.amazonaws.com:7200//repositories/crys/statements")
FUSEKI_CLIENT = SPARQLWrapper("http://ec2-3-120-140-142.eu-central-1.compute.amazonaws.com:7201/crys/update")
INSERT_PREDICTION_TEMPLATE = """
PREFIX crys: <http://example.com/crys#>
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
INSERT DATA {{
crys:{prediction_id} rdf:type crys:Prediction ;
crys:for crys:{symbol} ;
crys:predicted_date "{predicted_date}"^^xsd:date ;
crys:predicted_price "{price}"^^xsd:float ;
crys:generated_at "{generated_at}"^^xsd:date .
}}
"""
@click.command()
@click.option('--symbol', '-s', help='Symbol of cryptocurrency to retrieve '
                                     'price history for', multiple=True)
@click.option('--from-coinmarketcap', help='Use coinmarketcap json to get '
                                           'symbols to retrieve price info for',
              type=click.File(), required=False)
@click.option('--historical-price-directory', help='Path to directory containing'
                                                   ' CSVs with historic data',
              type=click.Path(file_okay=False), required=True)
@click.option('--publish/--no-publish', is_flag=True, default=False, help='If True, it will '
                                                                          'insert the predictions '
                                                                          'in the GraphDB database')
def cli(symbol, from_coinmarketcap, historical_price_directory, publish):
    """Fit an ARIMA model per symbol on daily price CSVs, predict the next
    7 days of 'high' prices, and either print or publish the resulting
    SPARQL INSERT statements to GraphDB and Fuseki."""
    # full coinmarketcap json passed, ignore -s passed and parse symbols
    if from_coinmarketcap:
        coinmarketcap = json.load(from_coinmarketcap)
        symbol = [coin['symbol'] for coin in sorted(coinmarketcap.values(),
                                                    key=lambda x: x['cmc_rank'])]
    for indx, s in enumerate(symbol):
        print('Predicting values for {} ({}/{})'.format(s, indx + 1, len(symbol)))
        # CSV layout assumed: column 0 = timestamp (epoch s), column 1 = high.
        price_file_path = os.path.join(historical_price_directory,
                                       s + '_daily.csv')
        data = pd.read_csv(price_file_path)
        data.index = data['timestamp'].values
        data['timestamp'] = pd.to_datetime(data['timestamp'], unit='s')
        training = data.iloc[:, [0, 1]]
        # uncomment if you need to check performance
        # training = data.iloc[:-7, [0, 1]]
        # actual = data.iloc[-7:, [0, 1]]
        # ARIMA(4,4) with Gaussian innovations; fit by maximum likelihood.
        model = pf.ARIMA(data=training, ar=4, ma=4, target='high',
                         family=pf.Normal())
        x = model.fit("MLE")
        predicted = model.predict(h=7)
        predicted['timestamp'] = pd.to_datetime(predicted.index.values,
                                                unit='s')
        predicted = predicted[['timestamp', 'high']]
        # uncomment if you need to check performance
        # mape = np.mean(np.abs(predicted['high'] - actual['high']) / np.abs(
        #     actual['high'])) * 100  # MAPE
        # print(s, mape)
        # One INSERT per predicted day, keyed by a fresh UUID.
        for index, series in predicted.iterrows():
            id = "predictions-" + str(uuid.uuid4())
            predicted_date = str(series['timestamp']).split()[0]
            price = float("{0:.2f}".format(series['high']))
            generated_at = date.today()
            query = INSERT_PREDICTION_TEMPLATE.format(prediction_id=id, symbol=s,
                                                      predicted_date=predicted_date,
                                                      price=price,
                                                      generated_at=generated_at)
            if publish:
                # insert in grapdb
                GRAPHDB_CLIENT.setQuery(query)
                GRAPHDB_CLIENT.queryType = "INSERT"
                GRAPHDB_CLIENT.method = "POST"
                GRAPHDB_CLIENT.setReturnFormat(JSON)
                results = GRAPHDB_CLIENT.query().convert()
                # insert in fuseki
                FUSEKI_CLIENT.setQuery(query)
                FUSEKI_CLIENT.queryType = "INSERT"
                FUSEKI_CLIENT.method = "POST"
                FUSEKI_CLIENT.setReturnFormat(JSON)
                results = FUSEKI_CLIENT.query().convert()
            else:
                print(query)
# Invoke the click command when run as a script.
if __name__ == '__main__':
    cli()
|
def isfib(num):
    """Return True if num is a Fibonacci number.

    Otherwise print the bracketing pair (prev, num, next), then the nearest
    Fibonacci number(s) — both when equidistant — and return False.
    """
    if num in (0, 1):
        return True
    prev, cur = 0, 1
    while True:
        nxt = prev + cur
        if nxt == num:
            return True
        if nxt > num:
            print(cur, num, nxt)
            if num - cur <= nxt - num:
                print(cur)
            if num - cur >= nxt - num:
                print(nxt)
            return False
        prev, cur = cur, nxt
# Read an integer from stdin and report whether it is a Fibonacci number.
print(isfib(int(input())))
|
# Punto 8. Una empresa quiere hacer una compra de varias piezas de la misma
# clase a una fábrica de refacciones. La empresa, dependiendo del
# monto total de la compra, decidirá que hacer para pagar al fabricante.
# Si el monto total de la compra excede de $500.000 la empresa tendrá
# la capacidad de invertir de su propio dinero un 55% del monto de la
# compra, pedir prestado al banco un 30% y el resto lo pagará
# solicitando un crédito al fabricante. Si el monto total de la compra no
# excede de $500.000 la empresa tendrá capacidad de invertir de su
# propio dinero un 70% y el restante 30% lo pagará solicitando crédito
# al fabricante. El fabricante cobra por concepto de interes un 20%
# sobre la cantidad que se le pague a crédito. Obtener la cantidad a
# inverir, valor del préstamo, valor del crédito y los intereses.
# Read the total purchase amount and split the payment per the rules above.
monto = float(input('Monto total: '))
compra_grande = monto > 500000
if compra_grande:
    # Large purchase: 55% own funds, 30% bank loan, 15% manufacturer credit.
    inver_emp = monto * 0.55
    pres_banco = monto * 0.3
    cred_fabric = monto * 0.15
else:
    # Small purchase: 70% own funds, 30% manufacturer credit.
    inver_emp = monto * 0.7
    cred_fabric = monto * 0.3
# The manufacturer charges 20% interest on whatever is paid on credit.
interes_fabri = cred_fabric * 0.2
if compra_grande:
    print(F'Inversion: { inver_emp }, prestamo: { pres_banco }, credito a fabricante: { cred_fabric } e interes { interes_fabri }')
else:
    print(F'Inversion: { inver_emp }, credito a fabricante: { cred_fabric } e interes { interes_fabri }')
import requests
import mmh3
import sys
import base64

# Normalize the target URL so it always points at /favicon.ico.
url = sys.argv[1]
if not url.endswith("favicon.ico"):
    if url.endswith("/"):
        url = url + "favicon.ico"
    else:
        url = url + "/favicon.ico"
req = requests.get(url=url, timeout=10, verify=False)
# Compute the Shodan/FOFA-style favicon hash once instead of twice
# (mmh3 over the base64-encoded body, as both engines expect).
favicon_hash = mmh3.hash(base64.encodebytes(req.content).decode())
print("http.favicon.hash:%s" % (favicon_hash))
print("icon_hash=\"%s\"" % (favicon_hash))
|
# coding: utf-8
import re
import time
from slackclient import SlackClient
from importlib import import_module
from message import *
from settings import *
class OmokBot(object):
    """Slack RTM bot that runs Omok (five-in-a-row) games via pluggable
    command-handler modules."""

    def __init__(self):
        self.client = SlackClient(TOKEN)
        self.game = {}      # active games, keyed by user id
        self.commands = {}  # command name -> handler module

    def load(self):
        """Import one handler module per (name, module) pair in COMMANDS."""
        # BUG FIX: use integer division — in Python 3 `len(COMMANDS) / 2` is a
        # float and range() raises TypeError. `//` is identical under Python 2.
        for i in range(0, len(COMMANDS) // 2):
            name = COMMANDS[i * 2]
            func = COMMANDS[i * 2 + 1]
            self.commands[name] = import_module('commands.' + func)

    def dispatch(self, events):
        """Turn raw RTM events into Message objects.

        The first word of the text is the command; additionally, a bare board
        coordinate (e.g. "H8") from a user with an active game is dispatched
        as a move command.
        """
        messages = []
        for e in events:
            user = e.get('user', '')
            channel = e.get('channel', '')
            text = e.get('text', '')
            command = text.split(' ')[0]
            content = text.replace(command + ' ', '', 1)
            if user and channel and command:
                msg = Message()
                msg.set(user, channel, command, content)
                messages.append(msg)
            if user in self.game and re.match("[A-O][1-9][0-9]?", text):
                msg = Message()
                msg.set(user, channel, u'놓기', text)
                messages.append(msg)
        return messages

    def handle(self, messages):
        """Route each message to its registered command handler, if any."""
        for msg in messages:
            if msg.command in self.commands:
                self.commands[msg.command].run(self, msg)

    def run(self):
        """Connect to Slack RTM and poll for events once per second, forever."""
        self.client.rtm_connect()
        while True:
            events = self.client.rtm_read()
            if events:
                messages = self.dispatch(events)
                self.handle(messages)
            time.sleep(1)
# Script entry point: construct the bot, load command handlers, start polling.
if '__main__' == __name__:
    bot = OmokBot()
    bot.load()
    bot.run()
|
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
"""
Cortex XSOAR CrowdSec Integration
"""
from CommonServerUserPython import * # noqa
import urllib3
from typing import Dict, Any
# Disable insecure warnings
urllib3.disable_warnings()
""" CONSTANTS """
DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ" # ISO8601 format with UTC, default in XSOAR
TABLE_HEADERS = [
"IP",
"IP Range",
"AS Name",
"AS Num",
"Country",
"Reverse DNS",
"Behaviors",
"First Seen",
"Last Seen",
"Activity in days",
"Attacks Details",
"Confidence",
"CrowdSec Score",
"Background Noise Score",
"CrowdSec Console Link",
"CrowdSec Taxonomy"
]
CROWDSEC_CTI_API_URL = "https://cti.api.crowdsec.net/v2/"
""" CLIENT CLASS """
class Client(BaseClient):
    """
    Client class to interact with the CrowdSec CTI API.
    """

    def get_ip_information(self, ip: str) -> Dict:
        """
        Returns a simple python dict with the enriched information
        about the provided IP.

        :type ip: ``str``
        :param ip: ip to check against CrowdSec CTI

        :return: dict as {"ip": ip, "ip_range": ip_range ...}
        """
        # BUG FIX: 429 added to ok_codes — without it BaseClient raises a
        # generic DemistoException before the explicit rate-limit branch
        # below could ever run (it was dead code).
        response = self._http_request(
            method="GET", url_suffix=f"/smoke/{ip}", resp_type="response", ok_codes=(200, 404, 429)
        )
        if response.status_code == 429:
            raise Exception(
                "You have been rate limited by CrowdSec CTI API. Please upgrade to Pro or wait."
            )
        return response.json()

    def test_module(self, ip: str):
        # 403 is accepted here so the test command can report a clear
        # authorization error instead of a raised exception.
        return self._http_request(
            method="GET", url_suffix=f"/smoke/{ip}", resp_type="response", ok_codes=(200, 403, 404)
        )
""" HELPER FUNCTIONS """
def format_readable(ip: str, data: dict, status: int) -> str:
    """Render CrowdSec enrichment data for one IP as war-room markdown."""
    behaviors_readable = ""
    for behavior in data.get("behaviors", list()):
        behaviors_readable += behavior["label"] + "\n"
    cves_readable = ""
    for attack_detail in data.get("attack_details", list()):
        cves_readable += attack_detail["label"] + "\n"
    history = data.get("history", {})
    overall_score = data.get("scores", {}).get("overall", {})
    table_data = [
        {
            "IP": ip,
            "Status": status,
            "IP Range": data.get("ip_range"),
            "AS Name": data.get("as_name"),
            "AS Num": data.get("as_num"),
            # BUG FIX: key renamed from "AS Country" to "Country" to match
            # TABLE_HEADERS — with the old key the country column was
            # silently dropped from the rendered table.
            "Country": data.get("location", {}).get("country"),
            "Reverse DNS": data.get("reverse_dns"),
            "Behaviors": behaviors_readable,
            "First Seen": history.get("first_seen", None),
            "Last Seen": history.get("last_seen", None),
            "Activity in days": history.get("days_age", None),
            "Attacks Details": cves_readable,
            "Confidence": f'{overall_score.get("trust", "0")}/5',
            "CrowdSec Score": f'{overall_score.get("total", "0")}/5',
            "Background Noise Score": f'{data.get("background_noise_score", 0)}/10',
            "CrowdSec Console Link": f"https://app.crowdsec.net/cti/{ip}",
            "CrowdSec Taxonomy": "https://docs.crowdsec.net/docs/next/cti_api/taxonomy"
        }
    ]
    ret = f"### IP {ip} status: {scoreToReputation(status)}\n"
    ret += tableToMarkdown(
        name="CrowdSec IP Enrichment",
        t=table_data,
        headers=TABLE_HEADERS,
        removeNull=True,
    )
    return ret
""" COMMAND FUNCTIONS """
def test_module(client: Client) -> str:
    """Tests API connectivity and authentication.

    Queries the CrowdSec CTI API with a fixed IP; 200/404 both prove the key
    works (404 just means the IP is unknown).

    :type client: ``Client``
    :param client: client to use

    :return: 'ok' if test passed, anything else will fail the test.
    :rtype: ``str``
    """
    # Removed a no-op `except DemistoException as e: raise e` wrapper — it
    # re-raised unchanged, so exceptions now simply propagate as before.
    test_ip = "1.1.1.1"
    response = client.test_module(test_ip)
    if response.status_code in [200, 404]:
        return "ok"
    if response.status_code == 403:
        return "Authorization Error: make sure API Key is correctly set"
    return "Something went wrong"
def ip_command(
    client: Client, reliability: str, args: Dict[str, Any]
) -> List[CommandResults]:
    """Enrich each IP in args['ip'] via CrowdSec CTI, returning one
    CommandResults (with DBot score and IP indicator) per address.

    Raises ValueError for a missing 'ip' argument or an invalid address.
    """
    ips = argToList(args.get('ip'))
    if not ips or len(ips) == 0:
        raise ValueError("'ip' argument not specified")
    command_results: List[CommandResults] = []
    for ip in ips:
        if not is_ip_valid(ip):
            raise ValueError("Invalid IP '{}'".format(ip))
        # Call the Client function and get the raw response
        result = client.get_ip_information(ip)
        # Score mapping: unknown IP -> NONE; overall total >3 BAD, >=2
        # SUSPICIOUS, else GOOD.
        if "message" in result and result["message"] == "IP address information not found":
            score = Common.DBotScore.NONE
        elif result["scores"]["overall"]["total"] > 3:
            score = Common.DBotScore.BAD
        elif result["scores"]["overall"]["total"] >= 2:
            score = Common.DBotScore.SUSPICIOUS
        else:
            score = Common.DBotScore.GOOD
        dbot_score = Common.DBotScore(
            indicator=ip,
            indicator_type=DBotScoreType.IP,
            integration_name="CrowdSec",
            score=score,
            malicious_description="IP detected by CrowdSec",
            reliability=reliability,
        )
        if score == Common.DBotScore.NONE:
            # Unknown IP: bare indicator, no enrichment fields.
            ip_indicator = Common.IP(
                ip=ip,
                dbot_score=dbot_score,
            )
        else:
            # Tags combine behavior names and classification names.
            tags = [behavior["name"] for behavior in result.get("behaviors", list())]
            tags.extend(
                [
                    classification["name"]
                    for classification in result["classifications"].get(
                        "classifications", list()
                    )
                ]
            )
            ip_indicator = Common.IP(
                ip=ip,
                dbot_score=dbot_score,
                asn=result["as_num"],
                as_owner=result["as_name"],
                hostname=result["reverse_dns"],
                geo_country=result["location"]["country"],
                geo_latitude=result["location"]["latitude"],
                geo_longitude=result["location"]["longitude"],
                tags=",".join(tags),
                publications=[
                    Common.Publications(
                        title="CrowdSec CTI",
                        source="CrowdSec",
                        timestamp=datetime.now().strftime(DATE_FORMAT),
                        link=f"https://app.crowdsec.net/cti/{ip}",
                    ),
                    Common.Publications(
                        title="CrowdSec CTI Taxonomy",
                        source="CrowdSec",
                        timestamp=datetime.now().strftime(DATE_FORMAT),
                        link="https://docs.crowdsec.net/docs/next/cti_api/taxonomy",
                    )
                ],
            )
        command_results.append(CommandResults(
            outputs_prefix="CrowdSec.Info",
            outputs_key_field="ip",
            outputs=result,
            indicator=ip_indicator,
            readable_output=format_readable(ip, result, score),
        ))
    return command_results
""" MAIN FUNCTION """
def main() -> None:
    """Entry point: build the Client from integration params and route the
    incoming XSOAR command ('test-module' or 'ip')."""
    api_key = demisto.params().get("apikey")
    verify_certificate = not demisto.params().get("insecure", False)
    proxy = demisto.params().get("proxy", False)
    demisto.debug(
        f"Command being called is {demisto.command()} with args {demisto.args()} and params {demisto.params()}"
    )
    try:
        # Validate and normalize the configured source reliability.
        reliability = demisto.params().get(
            "integrationReliability", "B - Usually reliable"
        )
        if DBotScoreReliability.is_valid_type(reliability):
            reliability = DBotScoreReliability.get_dbot_score_reliability_from_str(
                reliability
            )
        else:
            raise Exception(
                "Please provide a valid value for the Source Reliability parameter."
            )
        headers: Dict = {"x-api-key": api_key}
        client = Client(
            base_url=CROWDSEC_CTI_API_URL,
            verify=verify_certificate,
            headers=headers,
            proxy=proxy,
        )
        if demisto.command() == "test-module":
            result = test_module(client)
            return_results(result)
        elif demisto.command() == "ip":
            return_results(ip_command(client, reliability, demisto.args()))
        else:
            raise NotImplementedError(f'Command "{demisto.command()}" is not implemented.')
    # Log exceptions and return errors
    except Exception as e:
        return_error(
            f"Failed to execute {demisto.command()} command.\nError:\n{str(e)}"
        )
""" ENTRY POINT """
# XSOAR convention: run under direct execution and both py2/py3 builtin names.
if __name__ in ("__main__", "__builtin__", "builtins"):
    main()
|
from MisClases import Punto
def lanzar_puntos(*, n):
    """Return a list of n (x, y) points drawn uniformly from the unit square.

    NOTE(review): the module imports Punto from MisClases; plain tuples are
    used here because Punto's constructor is not visible from this file —
    swap in Punto objects if its interface requires it.
    """
    import random
    return [(random.random(), random.random()) for _ in range(n)]
def aproximar_pi(*, n):
    """Monte Carlo estimate of pi from n uniform points in the unit square.

    Counts the fraction landing inside the quarter circle of radius 1 and
    multiplies by 4. Returns 0.0 for n <= 0. (The original was an empty stub,
    which made the final formatted print crash on None.)
    """
    import random
    if n <= 0:
        return 0.0
    dentro = sum(
        1 for _ in range(n)
        if random.random() ** 2 + random.random() ** 2 <= 1.0
    )
    return 4.0 * dentro / n
def correr_experimento(*, veces, n):
    """Run the Monte Carlo pi estimate `veces` times with n points each and
    return the mean estimate; 0.0 when veces or n is not positive.

    Implemented self-contained (it does not rely on the sibling helpers) so
    this function works even while those remain stubs.
    """
    import random
    if veces <= 0 or n <= 0:
        return 0.0
    total = 0.0
    for _ in range(veces):
        dentro = sum(
            1 for _ in range(n)
            if random.random() ** 2 + random.random() ** 2 <= 1.0
        )
        total += 4.0 * dentro / n
    return total / veces
# Run the experiment: 1000 repetitions of 1000 points each.
# NOTE(review): while the functions above are empty stubs this returns None
# and the :.5f format spec below raises TypeError — implement them first.
pi = correr_experimento(veces=1000, n=1000)
print(f"pi se aproxima a {pi:.5f}")
|
from keras.models import Model
from keras import layers
from keras.regularizers import l2
def get_post(x_in):
    """Post-processing applied after each conv: LeakyReLU then batch norm."""
    activated = layers.LeakyReLU()(x_in)
    return layers.BatchNormalization()(activated)
def get_block(x_in, ch_in, ch_out, regularizer):
    """One EffNet block: 1x1 pointwise conv, separable 1x3 / 3x1 depthwise
    convs with a (2,1) max-pool between them, then a strided 2x1 conv that
    doubles channels while halving width."""
    # Pointwise bottleneck to ch_in channels.
    x = layers.Conv2D(ch_in,
                      kernel_size=(1, 1),
                      padding='same',
                      use_bias=False,
                      kernel_regularizer=regularizer)(x_in)
    x = get_post(x)
    # Horizontal half of the separable depthwise convolution.
    x = layers.DepthwiseConv2D(kernel_size=(1, 3),
                               padding='same',
                               use_bias=False,
                               depthwise_regularizer=regularizer)(x)
    x = get_post(x)
    x = layers.MaxPool2D(pool_size=(2, 1),
                         strides=(2, 1))(x)  # Separable pooling
    # Vertical half of the separable depthwise convolution.
    x = layers.DepthwiseConv2D(kernel_size=(3, 1),
                               padding='same',
                               use_bias=False,
                               depthwise_regularizer=regularizer)(x)
    x = get_post(x)
    # Expansion to ch_out channels; stride (1, 2) halves the width dimension.
    x = layers.Conv2D(ch_out,
                      kernel_size=(2, 1),
                      strides=(1, 2),
                      padding='same',
                      use_bias=False,
                      kernel_regularizer=regularizer)(x)
    x = get_post(x)
    return x
def Effnet(input_shape, nb_classes, include_top=True, weights=None, regularizer=l2(0.1)):
    """Build the EffNet model: three EffNet blocks plus an optional
    flatten + softmax classification head; optionally load weights by name."""
    x_in = layers.Input(shape=input_shape)
    x = x_in
    # Channel progression of the three stacked blocks.
    for block_in, block_out in ((32, 64), (64, 128), (128, 256)):
        x = get_block(x, block_in, block_out, regularizer)
    if include_top:
        x = layers.Flatten()(x)
        x = layers.Dense(nb_classes, activation='softmax')(x)
    model = Model(inputs=x_in, outputs=x)
    if weights is not None:
        model.load_weights(weights, by_name=True)
    return model
|
#!/usr/bin/env python
'''**************************************************************************
File: lora_layer.py
Language: Python 3.6.8
Author: Juliette Zerick (jzerick@iu.edu)
for the WildfireDLN Project
OPeN Networks Lab at Indiana University-Bloomington
This contains the higher-level protocol and switch behaviors. It interfaces
with lora_c.cpp, which quickly filters relevant messages that are passed
to lora_layer.py.
A number of threads were used to logically separate production and consumption
of messages for ease of troubleshooting. The use of queues adds a cushion to
avoid lost messages, providing some resiliency.
Last modified: October 21, 2019
****************************************************************************'''
import argparse
import bridge
from vessel import *
#from sim import *
# Command-line option parsing solution based off an example in the docs, i.e.
# Python Documentation on argparse, available at
# <https://docs.python.org/3/library/argparse.html>
# last accessed: August 20, 2019
def handle_opts():
    """Parse CLI options, validate mode combinations and coordinates, and
    copy the results into the `bridge` module's globals.

    Returns True on success, False on invalid input (caller bails out).
    """
    parser = argparse.ArgumentParser(description='Life, the universe, and everything.')
    parser.add_argument('-m', '--manual', help='run lora_c manually',action="store_true")
    parser.add_argument('-r', '--receiver', help='receive only',action="store_true")
    parser.add_argument('-t', '--transmitter', help='transmit only',action="store_true")
    parser.add_argument('-e', '--emcee', help='use the emcee',action="store_true")
    parser.add_argument('-f', '--defcoords', help='set default GPS coordinates as \'(lat,long)\'')
    parser.add_argument('-b', '--buoy', \
        help='use fixed coordinates with artificial noise',action="store_true")
    parser.add_argument('-s', '--sim', help='simulation mode',action="store_true")
    args = parser.parse_args()
    '''
    e.g. output:

    minion@runabout:~/repobin/minionfest/whisper$ python3 parse_test.py --transmitter -f '(10,10)'
    Namespace(anim=False, buoy=False, defcoords='(10,10)', demo=False, receiver=False, sim=False, transmitter=True)
    '''
    # check for invalid combinations first
    # can't be in receive-only/transmit-only/emcee-only modes simultaneously
    # note that boolean addition (+) reverts to integer addition
    if sum([args.receiver,args.transmitter,args.emcee]) > 1:
        log.error('more than one mode was selected. ' + \
            'if you are not using the default transceiver behavior, ' + \
            'please select only ONE of the following modes:\n' + \
            '\treceiver mode\t\t--receiver\n'
            '\ttransmitter mode\t\t--transmitter\n'
            '\temcee mode\t\t--emcee')
        return False
    # now sanity checks on input data
    # check whether coordinates make sense
    if args.defcoords != None:
        if not are_plausible_GPS_coordinates(args.defcoords):
            log.error('specified GPS coordinates are implausible.')
            return False
        # otherwise, extract the data here
        arg_lat, arg_long = pluck_GPS_coordinates(args.defcoords)
        bridge.DEFAULT_LATITUDE = arg_lat
        # NOTE(review): latitude goes to DEFAULT_LATITUDE but longitude goes to
        # BLOOMINGTON_LONGITUDE — confirm this asymmetric naming is intended.
        bridge.BLOOMINGTON_LONGITUDE = arg_long
    # extract the remaining data into bridge's module-level configuration
    bridge.SIM_MODE = args.sim
    bridge.RECEIVE_ONLY = args.receiver
    bridge.TRANSMIT_ONLY = args.transmitter
    bridge.USE_EMCEE = args.emcee
    bridge.USE_BUOY_EFFECT = args.buoy
    bridge.USING_lora_c_HANDLER = not args.manual
    bridge.update_proc_call()
    return True
def begin():
    """Install the SIGINT handler, run preflight checks, then either start
    the fleet simulation or run a single vessel until shutdown is signalled.

    NOTE(review): `signal`, `time`, `SNOOZE_TIME` etc. are presumably pulled
    in via `from vessel import *` — confirm.
    """
    signal.signal(signal.SIGINT, signal_handler)
    if not preflight_checks():
        log.critical('preflight checks failed, bailing!')
        exit(1)
    if bridge.SIM_MODE:
        log.info('simulation starting')
        sim = fleet()
        sim.run()
    else:
        # Single-node mode: run this host's vessel and idle until
        # bridge.closing_time is set (e.g. by the signal handler).
        my_name = get_hostname()
        M = vessel(my_name)
        M.begin()
        while not bridge.closing_time:
            time.sleep(SNOOZE_TIME)
# Script entry point: parse CLI options into bridge globals, then start.
if __name__ == "__main__":
    if not handle_opts():
        log.critical('cannot understand command-line arguments, bailing!')
        exit(1)
    begin()
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jan 20 17:41:24 2018
@author: Ira
"""
import numpy as np
import cv2
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import glob
from moviepy.editor import VideoFileClip
# Sliding-window search parameters.
window_width = 50
window_height = 80  # Break image into 9 vertical layers since image height is 720
margin = 100  # How much to slide left and right for searching
# Hard-coded perspective transform and its inverse.
# NOTE(review): presumably precomputed from source/destination points for this
# camera setup — confirm against the calibration that produced them.
M = np.array([[-0.50772,-1.49582,951.33],
[-3.21965e-15,-1.98816,914.554],
[-4.98733e-18,-0.00238604,1]
])
Minv = np.array([[0.192187,-0.766859,518.5],
[1.77636e-15,-0.502977,460],
[-1.73472e-18,-0.00120012,1]
])
# Cross-frame smoothing state: last five left/right fits, current and
# previous-frame polynomial fits and fitted x values.
prv5L = []
prv5R = []
current_left_fit = []
current_right_fit = []
prv = 0
previous = 0
prv_left_fit = []
prv_right_fit = []
prv_left_fitx = []
prv_right_fitx = []
def direction_threshold(sobelx, sobely, thresh=(0, np.pi/2), deg_range=(40, 75)):
    """Binary mask (0/255) of pixels whose gradient direction, in degrees,
    lies within deg_range (inclusive).

    `thresh` (radians) was accepted but never used by the original; it is kept
    for interface compatibility. The previously hard-coded 40-75 degree window
    is now the default of the new `deg_range` parameter, so default behavior
    is unchanged while callers can tune the window.
    """
    absgraddir = np.arctan2(sobely, sobelx)
    absgraddir_degree = (absgraddir / np.pi) * 180
    binary_output = np.zeros_like(absgraddir)
    binary_output[(absgraddir_degree >= deg_range[0]) & (absgraddir_degree <= deg_range[1])] = 255
    # Return the binary image
    return binary_output
# Define a function that applies Sobel x and y,
# then computes the magnitude of the gradient
# and applies a threshold
def magnitude_thresh(img, sobel_kernel=3, mag_thresh=(0, 255), s_thresh=(170, 255)):
    """Gradient-magnitude threshold on an RGB image.

    Returns (binary_mask, |sobel_x|, |sobel_y|), with the mask set to 255
    where sqrt(sx^2 + sy^2), scaled to 8-bit, falls within mag_thresh.
    `s_thresh` is unused and kept only for interface compatibility.
    """
    # 1) Convert to grayscale
    img_gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    # 2) Take the x and y derivatives.
    # BUG FIX: sobel_kernel was passed positionally, which lands in
    # cv2.Sobel's `dst` slot rather than `ksize`; pass it by keyword.
    # Default behavior is unchanged since cv2's default ksize is also 3.
    sobelx = cv2.Sobel(img_gray, cv2.CV_64F, 1, 0, ksize=sobel_kernel)
    sobely = cv2.Sobel(img_gray, cv2.CV_64F, 0, 1, ksize=sobel_kernel)
    # 3) Calculate the magnitude (per-axis absolutes are part of the return).
    abs_sobelx = np.absolute(sobelx)
    abs_sobely = np.absolute(sobely)
    abs_sobelxy = np.sqrt(sobelx**2 + sobely**2)
    # 4) Scale magnitude to 8-bit (0 - 255). The per-axis scaled/binary arrays
    #    the original also computed were never returned and have been removed
    #    as dead work.
    scaled_sobel = np.uint8(255*abs_sobelxy/np.max(abs_sobelxy))
    # 5) Create a binary mask where mag thresholds are met
    sxbinary = np.zeros_like(scaled_sobel)
    sxbinary[(scaled_sobel >= mag_thresh[0]) & (scaled_sobel <= mag_thresh[1])] = 255
    return sxbinary, abs_sobelx, abs_sobely
def getCurvature(ploty, left_fit, right_fit, leftx, rightx):
    """Return (left, right) radius of curvature in meters, evaluated at the
    bottom of the image (max y).

    `left_fit`/`right_fit` (pixel-space fits) are kept for interface
    compatibility but are no longer used: the original's pixel-space
    curvature computation was dead code — its result was immediately
    overwritten by the meter-space values below (and it divided by
    2*left_fit[0], failing on a zero leading coefficient).
    """
    # Evaluate at the maximum y-value, corresponding to the bottom of the image.
    y_eval = np.max(ploty)
    # Conversions from pixel space to meters.
    ym_per_pix = 30/720   # meters per pixel in y dimension
    xm_per_pix = 3.7/700  # meters per pixel in x dimension
    # Fit new polynomials to x, y in world (meter) space.
    left_fit_cr = np.polyfit(ploty*ym_per_pix, leftx*xm_per_pix, 2)
    right_fit_cr = np.polyfit(ploty*ym_per_pix, rightx*xm_per_pix, 2)
    # Radius of curvature of x = A*y^2 + B*y + C: (1 + (2Ay + B)^2)^1.5 / |2A|.
    left_curverad = ((1 + (2*left_fit_cr[0]*y_eval*ym_per_pix + left_fit_cr[1])**2)**1.5) / \
                    np.absolute(2*left_fit_cr[0])
    right_curverad = ((1 + (2*right_fit_cr[0]*y_eval*ym_per_pix + right_fit_cr[1])**2)**1.5) / \
                     np.absolute(2*right_fit_cr[0])
    # Radii are now in meters.
    return (left_curverad, right_curverad)
# Example values: 632.1 m 626.2 m
def region_of_interest(img, vertices):
    """Keep only the polygonal region of `img` described by `vertices`.

    Every pixel outside the polygon is set to black. Works for both
    single-channel and multi-channel images.
    """
    mask = np.zeros_like(img)
    # The fill value must match the channel count of the input image.
    if len(img.shape) > 2:
        fill_color = (255,) * img.shape[2]  # e.g. 3 or 4 channels
    else:
        fill_color = 255
    # Paint the polygon interior white on the blank mask.
    cv2.fillPoly(mask, vertices, fill_color)
    # Keep the image only where the mask is nonzero.
    masked_image = cv2.bitwise_and(img, mask)
    return masked_image
def weighted_img(img, initial_img, α=0.8, β=1., λ=0.):
    """Blend an overlay onto a base image.

    `img` is typically a black image with lines/shapes drawn on it and
    `initial_img` the unmodified frame; both must have the same shape.
    The result is computed as initial_img * α + img * β + λ.
    """
    blended = cv2.addWeighted(initial_img, α, img, β, λ)
    return blended
def calibrate_camera(Image_Path):
    """Calibrate the camera from 9x6 chessboard images matching `Image_Path`.

    Parameters
    ----------
    Image_Path : str
        Glob pattern selecting the calibration images.

    Returns
    -------
    tuple : (ret, mtx, dist, rvecs, tvecs) from cv2.calibrateCamera.

    Notes
    -----
    Fails inside cv2.calibrateCamera if the glob matched nothing (`gray`
    undefined) or no chessboard was detected in any image. Also resets the
    module-level `counter` global.
    """
    global counter
    # Termination criteria for the sub-pixel corner refinement.
    criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
    objpoints = []  # 3-D chessboard points (z = 0 plane), one copy per image
    imgpoints = []  # matching 2-D corner detections
    objp = np.zeros((6*9, 3), np.float32)
    objp[:, :2] = np.mgrid[0:9, 0:6].T.reshape(-1, 2)
    images = glob.glob(Image_Path)
    counter = 0
    for fname in images:
        img = cv2.imread(fname)
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        # Find the chess board corners
        ret, corners = cv2.findChessboardCorners(gray, (9, 6), None)
        # If found, add object points, image points (after refining them)
        if ret == True:
            objpoints.append(objp)
            # BUG FIX: cornerSubPix returns the refined corner positions (see
            # OpenCV docs); the original discarded that return value and
            # stored the unrefined detections.
            corners = cv2.cornerSubPix(gray, corners, (11, 11), (-1, -1), criteria)
            imgpoints.append(corners)
            # Draw and display the corners
            cv2.drawChessboardCorners(img, (9, 6), corners, ret)
    ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, gray.shape[::-1], None, None)
    return ret, mtx, dist, rvecs, tvecs
def perspectiveTransform(img):
    """Return (M, Minv): forward and inverse perspective matrices mapping the
    fixed road trapezoid to a bird's-eye rectangle.

    NOTE(review): `img` is unused — the source/destination quadrilaterals are
    hard-coded for a 1280x720 frame.
    """
    road_quad = np.array([[(587, 446), (153, 673), (1126, 673), (691, 446)]], dtype=np.float32)
    birdseye_quad = np.array([[(200, 0), (200, 720), (1080, 720), (1080, 0)]], dtype=np.float32)
    forward = cv2.getPerspectiveTransform(road_quad, birdseye_quad)
    inverse = cv2.getPerspectiveTransform(birdseye_quad, road_quad)
    return (forward, inverse)
def hls_mask(img):
    """Return (white_mask, yellow_mask): binary masks of white/yellow pixels.

    Thresholding is done in HLS space: white is any hue with high lightness,
    yellow a narrow hue band with high saturation.
    """
    # Convert the scale from RGB to HLS.
    hls_img = cv2.cvtColor(img, cv2.COLOR_RGB2HLS)
    white_bounds = (np.array([0, 210, 0]), np.array([255, 255, 255]))
    yellow_bounds = (np.array([20, 0, 100]), np.array([30, 220, 255]))
    white_mask = cv2.inRange(hls_img, *white_bounds)
    yellow_mask = cv2.inRange(hls_img, *yellow_bounds)
    return white_mask, yellow_mask
def processFrame(image):
    """Detect lane lines in one RGB video frame and return an annotated copy.

    Pipeline: undistort -> gradient/color thresholding -> perspective warp ->
    sliding-window search (first frame) or margin search around the previous
    fits (later frames) -> quadratic fit -> draw lane + curvature/offset text.

    Reads module-level globals `mtx`, `dist`, `M`, `Minv` and reads/updates
    `start`, `prv_left_fit`, `prv_right_fit`, `prv_curvature`.
    """
    global start
    global prv_left_fit
    global prv_right_fit
    global prv_curvature
    y_eval = 700  # pixel row near the image bottom where curvature/offset are read
    midx = 640  # assumed horizontal image center, in pixels
    xm_per_pix = 3.7/660.0  # meters per pixel in x dimension
    ym_per_pix = 30/720  # meters per pixel in y dimension
    nwindows = 9  # number of sliding windows in the initial search
    margin = 100  # half-width of each search window, in pixels
    minpix = 50  # minimum pixel hits before a window is re-centered
    # undistort the image
    dst = cv2.undistort(image, mtx, dist, None, mtx)
    # find the magnitude of the gradient
    mag_binary, sobel_absX, sobel_absY = magnitude_thresh(dst,
                                                          sobel_kernel=3,
                                                          mag_thresh=(30, 150),
                                                          s_thresh=(170, 255))
    # find the direction of the gradient
    dir_binary = direction_threshold(sobel_absX, sobel_absY, thresh=(0.7, 1.3))
    combined_MagDir = np.zeros_like(mag_binary)
    combined_MagDir[((mag_binary == 255) & (dir_binary == 255))] = 255
    # Color masks: white OR yellow pixels form the lane candidate image.
    w_color, y_color = hls_mask(dst)
    combined = np.zeros_like(w_color)
    combined[((w_color == 255) | (y_color == 255))] = 255
    # (a no-op statement re-assigning 255 where `combined` was already 255
    # has been removed here)
    # temp = np.zeros_like(w_color)
    # temp[((combined == 255)|(combined_MagDir== 255))] = 255
    #
    # combined = temp
    warped = cv2.warpPerspective(combined, M, (1280, 720), flags=cv2.INTER_LINEAR)
    # BUG FIX: np.int was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin int is the documented replacement (used throughout below).
    window_height = int(warped.shape[0]/nwindows)
    if start:
        # First frame: locate the lane-line bases from a histogram of the
        # lower half of the warped binary image.
        histogram = np.sum(warped[int(warped.shape[0]/2):, :], axis=0)
        midpoint = int(histogram.shape[0]/2)
        leftx_base = np.argmax(histogram[:midpoint])
        rightx_base = np.argmax(histogram[midpoint:]) + midpoint
        nonzero = warped.nonzero()
        nonzeroy = np.array(nonzero[0])
        nonzerox = np.array(nonzero[1])
        leftx_current = leftx_base
        rightx_current = rightx_base
        # Create empty lists to receive left and right lane pixel indices
        left_lane_inds = []
        right_lane_inds = []
        # Step through the windows one by one
        for window in range(nwindows):
            win_y_low = warped.shape[0] - (window+1)*window_height
            win_y_high = warped.shape[0] - window*window_height
            win_xleft_low = leftx_current - margin
            win_xleft_high = leftx_current + margin
            win_xright_low = rightx_current - margin
            win_xright_high = rightx_current + margin
            good_left_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
                              (nonzerox >= win_xleft_low) & (nonzerox < win_xleft_high)).nonzero()[0]
            good_right_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
                               (nonzerox >= win_xright_low) & (nonzerox < win_xright_high)).nonzero()[0]
            left_lane_inds.append(good_left_inds)
            right_lane_inds.append(good_right_inds)
            # Re-center the next window on the mean hit position when enough
            # pixels were found.
            if len(good_left_inds) > minpix:
                leftx_current = int(np.mean(nonzerox[good_left_inds]))
            if len(good_right_inds) > minpix:
                rightx_current = int(np.mean(nonzerox[good_right_inds]))
        # Concatenate the arrays of indices
        left_lane_inds = np.concatenate(left_lane_inds)
        right_lane_inds = np.concatenate(right_lane_inds)
        # Extract left and right line pixel positions
        leftx = nonzerox[left_lane_inds]
        lefty = nonzeroy[left_lane_inds]
        rightx = nonzerox[right_lane_inds]
        righty = nonzeroy[right_lane_inds]
        # Fit a second order polynomial to each
        left_fit = np.polyfit(lefty, leftx, 2)
        right_fit = np.polyfit(righty, rightx, 2)
        prv_right_fit = right_fit
        prv_left_fit = left_fit
        # Curvature in meters from the pixel-space fit via the unit conversions.
        y1 = (2*right_fit[0]*y_eval + right_fit[1])*xm_per_pix/ym_per_pix
        y2 = 2*right_fit[0]*xm_per_pix/(ym_per_pix**2)
        curvature = ((1 + y1*y1)**(1.5))/np.absolute(y2)
        if (curvature) < 500:
            prv_curvature = 0.75*curvature + 0.25*(((1 + y1*y1)**(1.5))/np.absolute(y2))
        start = 0
    else:
        # Later frames: restrict the pixel search to ±margin around the
        # previous frame's polynomial fits.
        nonzero = warped.nonzero()
        nonzeroy = np.array(nonzero[0])
        nonzerox = np.array(nonzero[1])
        left_fit = prv_left_fit
        right_fit = prv_right_fit
        left_lane_inds = ((nonzerox > (left_fit[0]*(nonzeroy**2) + left_fit[1]*nonzeroy + left_fit[2] - margin)) &
                          (nonzerox < (left_fit[0]*(nonzeroy**2) + left_fit[1]*nonzeroy + left_fit[2] + margin)))
        right_lane_inds = ((nonzerox > (right_fit[0]*(nonzeroy**2) + right_fit[1]*nonzeroy + right_fit[2] - margin)) &
                           (nonzerox < (right_fit[0]*(nonzeroy**2) + right_fit[1]*nonzeroy + right_fit[2] + margin)))
        # Again, extract left and right line pixel positions
        leftx = nonzerox[left_lane_inds]
        lefty = nonzeroy[left_lane_inds]
        rightx = nonzerox[right_lane_inds]
        righty = nonzeroy[right_lane_inds]
        # Fit a second order polynomial to each
        left_fit = np.polyfit(lefty, leftx, 2)
        right_fit = np.polyfit(righty, rightx, 2)
        # Exponentially smooth the stored fits when the leading coefficient
        # did not jump too far from the previous frame.
        left_error = ((prv_left_fit[0] - left_fit[0]) ** 2).mean(axis=None)
        right_error = ((prv_right_fit[0] - right_fit[0]) ** 2).mean(axis=None)
        if left_error < 0.01:
            prv_left_fit = 0.75 * prv_left_fit + 0.25 * left_fit
        if right_error < 0.01:
            prv_right_fit = 0.75 * prv_right_fit + 0.25 * right_fit
        y1 = (2*right_fit[0]*y_eval + right_fit[1])*xm_per_pix/ym_per_pix
        y2 = 2*right_fit[0]*xm_per_pix/(ym_per_pix**2)
        curvature = ((1 + y1*y1)**(1.5))/np.absolute(y2)
        prv_curvature = curvature
    # Evaluate both fits over the full image height and draw the lane polygon.
    ploty = np.linspace(0, warped.shape[0]-1, warped.shape[0])
    left_fitx = left_fit[0]*ploty**2 + left_fit[1]*ploty + left_fit[2]
    right_fitx = right_fit[0]*ploty**2 + right_fit[1]*ploty + right_fit[2]
    warp_zero = np.zeros_like(warped).astype(np.uint8)
    color_warp = np.dstack((warp_zero, warp_zero, warp_zero))
    # Recast the x and y points into usable format for cv2.fillPoly()
    pts_left = np.array([np.transpose(np.vstack([left_fitx, ploty]))])
    pts_right = np.array([np.flipud(np.transpose(np.vstack([right_fitx, ploty])))])
    pts = np.hstack((pts_left, pts_right))
    # Draw the lane onto the warped blank image
    cv2.fillPoly(color_warp, np.int_([pts]), (0, 255, 0))
    # Warp the lane polygon back to the original perspective and overlay it.
    newwarp = cv2.warpPerspective(color_warp, Minv, (image.shape[1], image.shape[0]))
    # Combine the result with the original image
    result = cv2.addWeighted(image, 1, newwarp, 0.3, 0)
    cv2.putText(result, 'Radius of Curvature: %.2fm' % curvature, (20, 40),
                cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)
    # Vehicle offset: distance between lane center and image center, in meters.
    x_left_pix = left_fit[0]*(y_eval**2) + left_fit[1]*y_eval + left_fit[2]
    x_right_pix = right_fit[0]*(y_eval**2) + right_fit[1]*y_eval + right_fit[2]
    position_from_center = ((x_left_pix + x_right_pix)/2.0 - midx) * xm_per_pix
    if position_from_center < 0:
        text = 'left'
    else:
        text = 'right'
    cv2.putText(result, 'Distance From Center: %.2fm %s' % (np.absolute(position_from_center), text), (20, 80),
                cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)
    return result
# calibrate the camera
# Module-level driver: calibrate once, then process the project video frame by
# frame with processFrame (which reads/updates the globals initialized below).
ret, mtx, dist, rvecs, tvecs = calibrate_camera('.\\camera_cal\\*.jpg')
frameCnt = 0
start = 1  # 1 -> processFrame runs the full sliding-window search on the next frame
prv_left_fit = [np.array([False])]  # placeholders until the first real fit
prv_right_fit = [np.array([False])]
prv_curvature = 0
# NOTE(review): processFrame reads globals M/Minv — confirm
# perspectiveTransform() is called to set them (not visible in this chunk).
#test_images = glob.glob('.\\test_Images\\*.jpg')
#for fname in test_images:
#    img = mpimg.imread(fname)
#    temp = fname.split('\\')
#    filename = temp[2].split('.jpg')
#    temp1 ='.\\test_Images\\'+ filename[0]+'_out.jpg'
#    result = processFrame(img)
#    lab = hls_mask(img)
#    cv2.imwrite(temp1,result)
# load the video and process frame by frame
undist_output = 'output_images/project_video_undistorted.mp4'
clip2 = VideoFileClip('project_video.mp4')
yellow_clip = clip2.fl_image(processFrame, apply_to=[])
yellow_clip.write_videofile(undist_output, audio=False)
|
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
import random
import time
class Brute_Force:
    """Brute-force collision detection between two random point sets."""

    def random_point_generator(self, test_size):
        """Return two lists of `test_size` random [x, y] points with 1 <= x, y <= 100."""
        first, second = [], []
        for _ in range(test_size):
            # Generate points pairwise so the RNG call order matches callers'
            # expectations under a fixed seed.
            first.append([random.randint(1, 100), random.randint(1, 100)])
            second.append([random.randint(1, 100), random.randint(1, 100)])
        return first, second

    def BF(self, size):
        """Report the first coordinate shared by two fresh random point sets.

        Quadratic scan by design (this class exists to benchmark brute force);
        prints the first collision found and stops, returns None either way.
        """
        points_a, points_b = self.random_point_generator(size)
        for pa in points_a:
            for pb in points_b:
                if pa == pb:
                    print('collision detected, collided point is ', pa)
                    return
# Benchmark driver: time the brute-force collision check for growing inputs
# and print the list of elapsed wall-clock seconds (expected ~quadratic growth).
B = Brute_Force()
t_bf = []
test_size = [512, 1024, 2048, 4096, 8192, 16384]
for i in range(len(test_size)):
    time_start = time.time()
    B.BF(test_size[i])
    t_bf.append(time.time() - time_start)
print(t_bf)
|
import requests
import base64
def get_token():
    """Fetch a Baidu AIP OAuth access token via the client-credentials grant.

    Returns the 'access_token' string from the JSON response; raises KeyError
    if the response contains no such key (e.g. bad credentials).

    SECURITY NOTE(review): API credentials are hard-coded below — move them to
    environment/configuration before sharing or deploying this script.
    """
    token_url = 'https://aip.baidubce.com/oauth/2.0/token'
    data = {
        'grant_type': 'client_credentials',
        'client_id': 'tID6Y7Es4uPQ11X4wBO3xogG',
        'client_secret': 'ISVzQFzUuHiXrsqdqjhjxvqn3LfmqUru',
    }
    request = requests.post(token_url, data=data)
    return request.json()['access_token']
def get_text_fromsound(atoken):
    """Send the local image '04.jpg' to a Baidu image-classify endpoint and
    print the raw JSON response.

    NOTE(review): despite the name, this performs *image* (logo) recognition,
    not sound processing. `atoken` is the OAuth access token from get_token().
    """
    # Vehicle information recognition
    # speed_url = 'https://aip.baidubce.com/rest/2.0/image-classify/v1/car?access_token=' + atoken
    # Dish recognition
    # speed_url = 'https://aip.baidubce.com/rest/2.0/image-classify/v2/dish?access_token=' + atoken
    # Logo recognition (the active endpoint)
    speed_url = 'https://aip.baidubce.com/rest/2.0/image-classify/v2/logo?access_token=' + atoken
    with open('04.jpg', 'rb') as f:
        data = f.read()
    args_data = {
        # 'top_num': 5
    }
    # The API expects the image as base64 text in a form-encoded body.
    args_data['image'] = base64.b64encode(data).decode('utf8')
    header = {'Content-Type': 'application/x-www-form-urlencoded'}
    # print(args_data)
    resp = requests.post(speed_url, data=args_data, headers=header)
    info = resp.text
    print(info)
get_text_fromsound(get_token()) |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@author: yash
"""
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
from random import choice
from matplotlib import cm
import numpy as np
import networkx as nx
from random import choice
import datetime
from tqdm import tqdm
import random
# Hopfield-network demo: train Hebbian weights on random ±1 memory patterns,
# corrupt one pattern, then relax the network and track Hamming distances.
# NOTE(review): `type` shadows the builtin of the same name — rename if edited.
type = int(input('Enter network type: 1 for a fully connected network, 2 for a watts-strogatz network'))
n = int(input('Enter # of neurons'))
m = int(input('Enter # of memory states'))
if (type == 2):
    k = int(input('Enter # of nearest neighbours k'))
    p = float(input('Enter probability of rewiring p'))
n_i = int(input('Enter # of iterations'))
# One random ±1 memory pattern per row; W is the (symmetric) weight matrix.
MemoryMatrix = np.zeros((m, n))
W = np.zeros((n, n))
for a in range(0, m):
    mem = []
    for i in range(0, n):
        x = choice([1, -1])
        mem.append(x)
    MemoryMatrix[a] = mem
x = 0
print(MemoryMatrix)
# Corrupt 25% of the target pattern to form the initial network state.
flip = round(0.25 * n)
Y = []
X = []
u = []
u = MemoryMatrix[1].copy()  # copy target M to initial state
# NOTE(review): indexing MemoryMatrix[1] requires m >= 2 — confirm inputs.
rs = random.sample(range(0, n), flip)
for z in list(rs):  # randomly pick up 25 percent and flip them
    u[z] = (MemoryMatrix[1][z] * -1)
if (type == 1):
    g = nx.complete_graph(n)
if (type == 2):
    g = nx.watts_strogatz_graph(n, k, p)  # use for watts strogatz network
st = 0
for st in range(0, n):
    g.nodes[st]['state'] = u[st]
alpha = (m/n)  # memory load ratio (unused below)
# For the 16-neuron case, lay the nodes on a 4x4 grid and show the start state.
if (n == 16):
    g.pos = {}
    for x in range(4):
        for y in range(4):
            g.pos[y * 4 + x] = (x, -y)
    nx.draw(g, pos=g.pos, cmap=cm.jet, vmin=-1, vmax=1,
            node_color=[g.nodes[i]['state'] for i in g.nodes])
    plt.show()
    print("Initial Graph")
# train the network with m memory states (Hebbian outer-product rule)
for i, j in g.edges:
    weight = 0
    for zeta in range(0, m):
        weight = weight + (MemoryMatrix[zeta][i] * MemoryMatrix[zeta][j])
    g.edges[i, j]['weight'] = (weight / n)
    W[i][j] = g.edges[i, j]['weight']
    W[j][i] = W[i][j]
plt.imshow(W, cmap='gray')
plt.show()
hamming_distance = np.zeros((n_i, m))
# evolve according to hopfield dynamics, n_i iterations (async single-node updates)
for z in range(0, n_i):
    i = choice(list(g.nodes))
    s = sum([g.edges[i, j]['weight'] * g.nodes[j]['state'] for j in g.neighbors(i)])
    g.nodes[i]['state'] = 1 if s > 0 else -1 if s < 0 else g.nodes[i]['state']
    # Record the Hamming distance to every stored memory after this update.
    for q in range(m):
        for i in list(g.nodes):
            hamming_distance[z, q] += (abs(g.nodes[i]['state'] - MemoryMatrix[q][i])/2)
    z = z + 1  # NOTE(review): no effect — z is reassigned by the for loop
if (n == 16):
    nx.draw(g, pos=g.pos, cmap=cm.jet, vmin=-1, vmax=1,
            node_color=[g.nodes[i]['state'] for i in g.nodes])
    plt.show()
    print("Final Graph")
fig = plt.figure(figsize=(8, 8))
plt.plot(hamming_distance)
plt.xlabel('No of Iterations')
plt.ylabel('Hamming Distance')
plt.ylim([0, n])
plt.plot(hamming_distance)
plt.show()
|
# Generated by Django 2.1.7 on 2019-12-21 06:10
from django.db import migrations, models
class Migration(migrations.Migration):
    """Move the free-text `reason` field from Query to Appointment."""

    dependencies = [
        ('dashboard_app', '0015_query_reason'),
    ]

    operations = [
        # Drop Query.reason ...
        migrations.RemoveField(
            model_name='query',
            name='reason',
        ),
        # ... and recreate it on Appointment. Existing Query.reason data is
        # NOT copied over by this migration.
        migrations.AddField(
            model_name='appointment',
            name='reason',
            field=models.TextField(blank=True, null=True),
        ),
    ]
|
# Ruchella Kock
# 12460796
# this program will take a given height and print a pyramid with two spaces in between
from cs50 import get_int
# prompt user for positive integer
# Keep prompting until the user gives a height in the accepted range 0..23.
while True:
    height = get_int("height: ")
    if 0 <= height <= 23:
        break

# One row per level: leading spaces to right-align the left half, then the
# left hashes, a two-space gap, and the mirrored right hashes.
for row in range(height):
    half = "#" * (row + 1)
    print(" " * (height - row - 1) + half + "  " + half)
# coding: utf-8
# Removes alternating letters from a word (keeps even-indexed characters)
# author: raquel ambrozio
palavra = raw_input()
nova = ""
for i in range(len(palavra)):
if i % 2 == 0:
nova += palavra[i]
print nova
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# python 学习笔记 之 常用内建模块(1)
# 日期和时间标准库 (datetime)
# 快速打印
from util import p
import re
from datetime import datetime, timedelta, timezone
# 当前时间
# datetime standard-library walkthrough: creation, timestamp conversion,
# parsing/formatting, arithmetic, and timezone handling, printed via p().
now = datetime.now()
p(now)
p(type(now))
# Build a datetime for a specific moment
p1 = datetime(2017, 12, 24, 00, 59)
p(p1)
# -------- datetime -> timestamp
pt1 = p1.timestamp()  # convert datetime to a POSIX timestamp
p(pt1)
p(datetime.fromtimestamp(pt1))  # timestamp -> local datetime
# NOTE(review): utcfromtimestamp/utcnow are deprecated since Python 3.12 in
# favor of timezone-aware calls; behavior kept as-is here.
p(datetime.utcfromtimestamp(pt1))  # timestamp -> UTC datetime
# -------- str -> datetime
cd = datetime.strptime('2017-11-16 11:08:22', '%Y-%m-%d %H:%M:%S')
p(cd)
# -- datetime -> str
cs = cd.strftime('%a, %b %d %H:%M')
p(cs)
# ----- datetime arithmetic
cdt = cd + timedelta(hours=5)  # 5 hours after
p(cdt)
cdp = cd - timedelta(days=2)  # 2 days before
p(cdp)
cdf = cd + timedelta(days=2, hours=5)  # 2d5h after
p(cdf)
# ----- attach a UTC+8 timezone to the naive local time
now_utc8 = timezone(timedelta(hours=8))  # UTC+8 (China Standard Time)
tz_utc8 = now.replace(tzinfo=now_utc8)  # replace tzinfo; does not shift the clock
p(tz_utc8)
# ----- timezone conversion
utc_dt = datetime.utcnow().replace(tzinfo=timezone.utc)  # mark current UTC time as UTC
p(utc_dt)
pk_dt = utc_dt.astimezone(timezone(timedelta(hours=8)))  # convert to Beijing time
p('Peking time', pk_dt)
tky_dt = utc_dt.astimezone(timezone(timedelta(hours=9)))  # convert to Tokyo time
p('Tokyo time', tky_dt)
k2p_dt = tky_dt.astimezone(timezone(timedelta(hours=8)))  # Tokyo -> Beijing
p2k_dt = pk_dt.astimezone(timezone(timedelta(hours=9)))  # Beijing -> Tokyo
p(p2k_dt.timestamp())  # POSIX timestamp is timezone-independent for one instant
# Interactive part: read a wall-clock time, treat it as UTC, then shift it by
# a user-supplied "UTC+H:00" offset and print the resulting timestamp.
idst = input('输入时间(%Y-%m-%d %H:%M:%S):')
try:
    idt = datetime.strptime(idst, '%Y-%m-%d %H:%M:%S')  # parse the input
    i0dt = idt.replace(tzinfo=timezone.utc)  # interpret it as UTC
    time_mill = i0dt.timestamp()  # baseline seconds value
    itz = input('输入时区(UTC+5:00):')
    # NOTE(review): this pattern accepts offsets 0-9, 11 and 12 but NOT 10 —
    # likely an unintended gap in `(0?[0-9]|1[12])`.
    rex = re.compile(r'^UTC([+\-])(0?[0-9]|1[12]):00$')
    mc = rex.match(itz)  # validate the input
    # BUG FIX: the original compared with `is`, which tests object identity,
    # not string equality; `==` is the correct comparison.
    if mc and mc.group(1) == '+':  # matched, eastern offset
        utc_num = int(mc.group(2))
        total = time_mill - utc_num * 60 * 60  # shift west of the UTC baseline
        p('timezone = %d, timestamp = %s' % (utc_num, total))
    elif mc and mc.group(1) == '-':  # matched, western offset
        utc_num = int(mc.group(2))
        total = time_mill + utc_num * 60 * 60  # shift east of the UTC baseline
        p('timezone = %d, timestamp = %s' % (utc_num, total))
    else:  # no match
        p('时区输入有误')
except Exception as e:
    p(e)  # debug
|
from django.utils.safestring import mark_safe
from eulexistdb.manager import Manager
from eulexistdb.models import XmlModel
from eulxml.xmlmap.core import XmlObject
from eulxml.xmlmap.dc import DublinCore
from eulxml.xmlmap.fields import StringField, NodeField, StringListField, NodeListField
from eulxml.xmlmap.teimap import Tei, TeiDiv, _TeiBase, TEI_NAMESPACE, xmlmap, TeiInterpGroup, TeiInterp
class Text(XmlModel):
    """eXist-db-backed model mapping a TEI <text> element."""
    ROOT_NAMESPACES = {'tei' : TEI_NAMESPACE}
    # Query scope for the eXist-db manager: all tei:text nodes.
    objects = Manager('/tei:text')
    # Full text content of the element.
    text = xmlmap.StringField('tei:text')
    # The introduction division, if present.
    intro = xmlmap.StringField('tei:div[@type="introduction"]')
|
# Read numbers until the user answers anything other than 'S';
# track the minimum, maximum and running sum, then print the summary.
resposta = 'S'
quantidade = 0
total = 0
while resposta == 'S':
    valor = int(input("Informe o valor: "))
    if quantidade == 0:
        # The first value initializes both extremes.
        menor = maior = valor
    if valor < menor:
        menor = valor
    elif valor > maior:
        maior = valor
    quantidade += 1
    total += valor
    resposta = str(input("Deseja continuar escrevendo valore?[S/N]")).upper()
print("O maior valor foi {};\nO menor valor foi {}.\nA Media dos valores Digitados foi {}".format(maior, menor, total / quantidade))
|
import os
import subprocess
import urllib.request
import re
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException,WebDriverException
from selenium.webdriver.support.ui import Select
from selenium.webdriver.chrome.options import Options
# Module-level Selenium setup: one headless Chrome driver shared by get_File.
chrome_options = Options()
###
chrome_options.add_argument("--headless")  # run Chrome without a visible window
driver = webdriver.Chrome(options=chrome_options)
###
#driver = webdriver.Chrome(executable_path=r'H:\Programs\chromedriver_win32 (2)\chromedriver.exe')
#
#
def get_File(lat, lon, year, month, day, hour, height):
    """Drive the NOAA READY HYSPLIT trajectory web form and download the result.

    NOTE(review): year/month/day/hour are *select-box indices* (the Nth option),
    not literal values — confirm against the live form. lat/lon/height are
    typed into the form directly. Uses the module-level `driver`; downloads the
    trajectory text file to a hard-coded local path below.
    """
    driver.get("https://ready.arl.noaa.gov/hypub-bin/trajsrc.pl")
    select = Select(driver.find_element_by_name('metdata'))
    # Selecting GFS 192h (always the same option)
    select.select_by_value('GFS')
    # Now entering sources in First Page src1, src2
    src1 = driver.find_element_by_name('Lat')
    src1.send_keys(lat)
    src2 = driver.find_element_by_name('Lon')
    src2.send_keys(lon)
    # Clicking on next: First Click
    driver.find_element_by_xpath("//input[@value='Next>>']").click()
    # Second Click
    driver.find_element_by_xpath("//input[@value='Next>>']").click()
    print('Before Radio Button')
    #driver.find_element_by_xpath('//input[text()="Isobaric"]')
    # Select the vertical-motion radio option (value '1').
    driver.find_element_by_xpath("//input[@value='1']").click()
    # Now entering Year, Month, Day, hour Data .....
    # Year
    select = Select(driver.find_element_by_name('Start year'))
    select.select_by_index(year)
    # Month
    select = Select(driver.find_element_by_name('Start month'))
    select.select_by_index(month)
    # Day
    select = Select(driver.find_element_by_name('Start day'))
    select.select_by_index(day)
    # Hour
    select = Select(driver.find_element_by_name('Start hour'))
    select.select_by_index(hour)
    print('Completed Year,Month,Day,Hour Info')
    # Entering Level1 height Info ...
    level_1 = driver.find_element_by_name('Source hgt1')
    level_1.clear()
    level_1.send_keys(height)
    # PDF File 'No'
    driver.find_element_by_xpath("//input[@name='pdffile'][@value='No']").click()
    print('Hurrah')
    # Pressing 'Trajectory'
    driver.find_element_by_xpath("//input[@type='submit'][@value='Request trajectory (only press once!)']").click()
    print('Clicked on Trajectory')
    print('Now start waiting for 30 seconds')
    # Implicit wait so the results page has time to render the download link.
    driver.implicitly_wait(35)
    down_link = driver.find_element_by_xpath("//*[@id='page_center']/table/tbody/tr/td/div[4]/table[1]/tbody/tr/td/font/ul/li[2]/b/a")
    print(down_link.get_attribute("href"))
    down_link = down_link.get_attribute("href")
    # Extract the parenthesized path from the javascript-style href.
    Path = re.search('\(([^)]+)', down_link).group(1)
    print('Path Without splitting ..', Path)
    print('Path After splitting ..', Path[1:27])
    # NOTE(review): the fixed [1:27] slice assumes a constant-length path.
    url = Path[1:27]
    # Downloading File on System...
    url = 'https://ready.arl.noaa.gov' + url
    print('Complete URL', url)
    print('Now Downloading File')
    urllib.request.urlretrieve(url, 'C:/Users/Sulal/Desktop/New Challenge/YourFile.txt')  # Here You need to give your address, where you want to save the file
    print('Your File Downloaded ...')
print('Calling Function')
# Parameters
# Latitude, Longitude, Year, Month, Day, Hour, (Level 1 Height)
# NOTE(review): per get_File, the middle four are option indices, not values.
get_File(33, 9, 0, 1, 5, 2, 13500)  # This is the Function, Where you will put the parameters
print('Funtion Finished')
|
__version__ = (1, 1, 0)
__author__ = 'Nathaniel Wilson'
__email__ = 'unlisted'
class Stepper:
    """Integer progress counter that runs from 0 up to a fixed limit.

    `step()` advances the counter without ever exceeding the limit; with
    `_loop` enabled, stepping past the limit wraps back to zero.
    """

    def __init__(self, scale):
        # `scale` is the inclusive upper bound of the counter.
        self._current = 0
        self._limit = scale
        self._loop = False  # when True, step() past the limit resets to 0

    def step(self, by=1):
        """Advance by `by` if that stays within the limit; wrap when looping.

        NOTE(review): if `by` does not evenly divide the remaining distance,
        the counter freezes just below the limit and `finished` never becomes
        True — confirm this is the intended semantics.
        """
        if (self._current + by) <= self._limit:
            self._current += by
        elif self._loop:
            self.reset()

    def reset(self, default=0):
        """Rewind the counter to `default` (0 unless specified)."""
        self._current = default

    @property
    def inprogress(self):
        """True while the counter has not reached the limit."""
        return not self.finished

    # Backward-compatible alias: the original public name was misspelled.
    inprogess = inprogress

    @property
    def finished(self):
        """True once the counter equals the limit exactly."""
        return self._current == self._limit

    @property
    def value(self):
        """Current counter value."""
        return self._current

    @property
    def as_per(self):
        """Progress as a fraction in [0, 1]; raises ZeroDivisionError if limit is 0."""
        return self._current / self._limit
class Point2D:
    """Mutable 2-D point backed by a {'x': ..., 'y': ...} component dict."""

    def __init__(self, x, y):
        self.components = {'x': x, 'y': y}

    @property
    def x(self):
        return self.components['x']

    @x.setter
    def x(self, val):
        self.components['x'] = val

    @property
    def y(self):
        return self.components['y']

    @y.setter
    def y(self, val):
        self.components['y'] = val

    def lerp(self, otherPoint, time_division):
        """Return the point `time_division` of the way toward `otherPoint`.

        NOTE: the result components are truncated to int (unlike Point3D).
        """
        assert 1.0 >= time_division >= 0
        tX = (1 - time_division) * self.x + otherPoint.x * time_division
        tY = (1 - time_division) * self.y + otherPoint.y * time_division
        return Point2D(int(tX), int(tY))

    def self_lerp(self, otherPoint, time_division):
        """In-place version of lerp().

        BUG FIX: the original rebound the local name `self`, which had no
        effect on the instance; the components are now actually updated.
        """
        self.components.update(self.lerp(otherPoint, time_division).components)

    @property
    def as_array(self):
        """Components as a [x, y] list (insertion order of the dict)."""
        return [self.components[k] for k in self.components]

    @as_array.setter
    def as_array(self, val):
        assert len(val) == len(self.components)
        for i, k in enumerate(self.components):
            self.components[k] = val[i]

    def findDistance(self, otherPoint):
        """Euclidean distance to `otherPoint`."""
        dX = (self.x - otherPoint.x) ** 2
        dY = (self.y - otherPoint.y) ** 2
        return (dX + dY) ** 0.5

    def __repr__(self):
        return str(self.components)
class Point3D:
    """Mutable 3-D point backed by a {'x', 'y', 'z'} component dict.

    `w` is accepted for backward compatibility but ignored (the original
    constructor required it without ever storing it).
    """

    def __init__(self, x, y, z, w=None):
        # BUG FIX: `w` now defaults to None — the original required 4 args
        # but lerp() constructed Point3D with only 3, raising TypeError.
        self.components = {'x': x, 'y': y, 'z': z}

    @property
    def x(self):
        return self.components['x']

    @x.setter
    def x(self, val):
        self.components['x'] = val

    @property
    def y(self):
        return self.components['y']

    @y.setter
    def y(self, val):
        self.components['y'] = val

    @property
    def z(self):
        return self.components['z']

    @z.setter
    def z(self, val):
        self.components['z'] = val

    def lerp(self, otherPoint, time_division):
        """Return the point `time_division` of the way toward `otherPoint`.

        Unlike Point2D.lerp, components keep their float precision.
        """
        assert 1.0 >= time_division >= 0
        tX = (1 - time_division) * self.x + otherPoint.x * time_division
        tY = (1 - time_division) * self.y + otherPoint.y * time_division
        tZ = (1 - time_division) * self.z + otherPoint.z * time_division
        return Point3D(tX, tY, tZ)

    def self_lerp(self, otherPoint, time_division):
        """In-place version of lerp().

        BUG FIX: the original rebound the local name `self` (a no-op); the
        components are now actually updated.
        """
        self.components.update(self.lerp(otherPoint, time_division).components)

    def findDistance(self, otherPoint):
        """Euclidean distance to `otherPoint`."""
        dX = (self.x - otherPoint.x) ** 2
        dY = (self.y - otherPoint.y) ** 2
        dZ = (self.z - otherPoint.z) ** 2
        return (dX + dY + dZ) ** 0.5

    def __repr__(self):
        return str(self.components)
# Demo: interpolate from AA to BB over the stepper's 100 increments,
# printing each intermediate point.
AA = Point2D(0, 0)
BB = Point2D(100, 0)
step = Stepper(100)
while step.inprogess:
    tmp = AA.lerp(BB, step.as_per)
    print(step.value, tmp)
    step.step()
else:
    # while/else: runs once when the loop exits normally — prints the final
    # point at as_per == 1.0.
    tmp = AA.lerp(BB, step.as_per)
    print(step.value, tmp)
|
import FWCore.ParameterSet.Config as cms
#!
#! PROCESS
#!
# CMSSW configuration: apply L2L3 jet energy corrections to several jet
# collections from a RelVal TTbar sample and histogram the corrected jet pT.
process = cms.Process("JEC")

#!
#! INPUT
#!
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(1000) )
process.source = cms.Source(
    'PoolSource',
    fileNames = cms.untracked.vstring('/store/relval/CMSSW_3_8_0_pre7/RelValTTbar/GEN-SIM-RECO/START38_V4-v1/0002/DC19EA07-4286-DF11-BD2B-0030487CD16E.root')
)

#!
#! SERVICES
#!

#!
#! JET CORRECTION
#!
process.load('Configuration.StandardSequences.Services_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.GlobalTag.globaltag = 'START38_V8::All'
#process.GlobalTag.connect = 'sqlite_file:START38_V6.db'
process.load('JetMETCorrections.Configuration.DefaultJEC_cff')
process.load('JetMETCorrections.Configuration.JetCorrectionProducersAllAlgos_cff')
process.TFileService = cms.Service("TFileService", fileName=cms.string('histos.root'))

#!
#! MAKE SOME HISTOGRAMS
#!
# Shared histogram spec: corrected jet pT, 50 bins over 50-500 GeV.
jetPtHistogram = cms.PSet(min          = cms.untracked.double( 50),
                          max          = cms.untracked.double( 500),
                          nbins        = cms.untracked.int32 ( 50),
                          name         = cms.untracked.string('JetPt'),
                          description  = cms.untracked.string( ''),
                          plotquantity = cms.untracked.string( 'pt')
                          )
process.ak5CaloL2L3Histos = cms.EDAnalyzer(
    'CandViewHistoAnalyzer',
    src = cms.InputTag('ak5CaloJetsL2L3'),
    histograms = cms.VPSet(jetPtHistogram)
    )
# Clone the analyzer for each corrected jet collection, changing only the source.
process.ak7CaloL2L3Histos  = process.ak5CaloL2L3Histos.clone(src = 'ak7CaloJetsL2L3')
process.kt4CaloL2L3Histos  = process.ak5CaloL2L3Histos.clone(src = 'kt4CaloJetsL2L3')
#process.kt6CaloL2L3Histos = process.ak5CaloL2L3Histos.clone(src = 'kt6CaloJetsL2L3')
process.ak5PFL2L3Histos    = process.ak5CaloL2L3Histos.clone(src = 'ak5PFJetsL2L3')
process.ak7PFL2L3Histos    = process.ak5CaloL2L3Histos.clone(src = 'ak7PFJetsL2L3')
process.kt4PFL2L3Histos    = process.ak5CaloL2L3Histos.clone(src = 'kt4PFJetsL2L3')
#process.kt6PFL2L3Histos   = process.ak5CaloL2L3Histos.clone(src = 'kt6PFJetsL2L3')
process.ak5JPTL2L3Histos   = process.ak5CaloL2L3Histos.clone(src = 'ak5JPTJetsL2L3')
process.ak5TrackL2L3Histos = process.ak5CaloL2L3Histos.clone(src = 'ak5TrackJetsL2L3')

#
# RUN!
#
process.run = cms.Path(
    #------ create the corrected calojet collection and run the histogram module ------
    process.ak5CaloJetsL2L3 * process.ak5CaloL2L3Histos * process.ak7CaloJetsL2L3 * process.ak7CaloL2L3Histos *
    process.kt4CaloJetsL2L3 * process.kt4CaloL2L3Histos *
    #process.kt6CaloJetsL2L3 * process.kt6CaloL2L3Histos *
    #------ create the corrected pfjet collection and run the histogram module --------
    process.ak5PFJetsL2L3 * process.ak5PFL2L3Histos * process.ak7PFJetsL2L3 * process.ak7PFL2L3Histos *
    process.kt4PFJetsL2L3 * process.kt4PFL2L3Histos *
    #process.kt6PFJetsL2L3 * process.kt6PFL2L3Histos *
    #------ create the corrected jptjet collection and run the histogram module -------
    process.ak5JPTJetsL2L3 * process.ak5JPTL2L3Histos *
    #------ create the corrected trackjet collection and run the histogram module -----
    process.ak5TrackJetsL2L3 * process.ak5TrackL2L3Histos
    )
|
import random
import numpy as np
def main():
    """Simulate 1000 rolls of a fair six-sided die and print summary statistics.

    Printed labels are Japanese: mean, median, mode, standard deviation.
    """
    rolls = np.array([random.randint(1, 6) for _ in range(1000)])
    average = np.mean(rolls)
    center = np.median(rolls)
    # Mode: the face with the highest occurrence count.
    mode = np.argmax(np.bincount(rolls))
    std = np.std(rolls)
    print("平均:" + str(average))
    print("中央値:" + str(center))
    print("最頻値:" + str(mode))
    print("標準偏差:" + str(std))
# Run the simulation only when executed as a script, not on import.
if __name__ == '__main__':
    main()
|
import argparse
import os
import random
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.utils.data
import torchvision.datasets as dset
import torchvision.transforms as transforms
import torchvision.utils as vutils
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
# Compute approximate per-channel mean/std of an image-folder dataset for use
# in a transforms.Normalize(...) call.
dataroot = "./simpsons_dataset"
batch_size = 1
dataset = dset.ImageFolder(root = dataroot,transform=transforms.Compose([
    transforms.RandomResizedCrop(224),
    transforms.ToTensor()
]))
dataloader = torch.utils.data.DataLoader(dataset, batch_size=batch_size ,shuffle=False)
means = np.array([0.0,0.0,0.0]).astype(float)
std = np.array([0.0,0.0,0.0]).astype(float)
# NOTE(review): this averages per-image means/stds; with the random crop the
# result is stochastic, and averaging per-image stds only approximates the
# dataset-wide std.
for data in dataloader:
    for i in range(3):
        #print(data[0].size())
        means[i] += data[0][:,i,:,:].mean()
        std[i] += data[0][:,i,:,:].std()
print(means, std)
print("Done")
print("Mean: ", means/len(dataset))
print("Std: ",std/len(dataset))
'''
real_batch = next(iter(dataloader))
print(real_batch[0][0].size())
#plt.imshow(np.transpose(vutils.make_grid(real_batch[0]),(1,2,0)))
print(torch.transpose(real_batch[0][0],0,2).size())
plt.imshow(torch.transpose(real_batch[0][1],0,2).numpy())
plt.show()
'''
#Total 20932 |
from flask_wtf import Form
from wtforms import SubmitField,PasswordField,StringField,validators
from wtforms.fields.html5 import DateTimeLocalField
from flask_wtf.file import FileField, FileAllowed, FileRequired
from flask_uploads import UploadSet, IMAGES
# Shared upload set restricting uploaded post/profile images to image extensions.
images = UploadSet('images', IMAGES)
class PwdForm(Form):
    """Login form: username and password, each 4-25 characters."""
    username = StringField('Username', [validators.Length(min=4, max=25)])
    password = PasswordField("Password", [validators.Length(min=4, max=25)])
    submit = SubmitField("Login")
class PostForm(Form):
    """New-post form: title, body, and an optional image attachment."""
    title = StringField("Title")
    body = StringField("Body")
    # Only files matching the shared `images` upload set are accepted.
    post_image = FileField('image', validators=[FileAllowed(images, 'Images only!')])
    submit = SubmitField("Post")
class UserRegistration(Form):
    """New-user sign-up form with an optional profile picture.

    NOTE(review): `validators.required` is a deprecated alias of DataRequired
    in WTForms — confirm the installed version still provides it.
    """
    username = StringField("Username", [validators.required(True)])
    password = PasswordField("Password", [validators.required(True)])
    first_name = StringField("Name", [validators.required(True)])
    last_name = StringField("Last Name", [validators.required(True)])
    profile_pic = FileField('image', validators=[FileAllowed(images, 'Images only!')])
    email = StringField("Email", [validators.required(True)])
    submit = SubmitField("register", [validators.required(True)])
class DeleteButton(Form):
    """Delete confirmation form carrying the target id in a hidden field."""
    id_hidden = StringField("Hidden Field")
    delete = SubmitField("delete")
class RegisterWeights(Form):
    """Weight-log entry: timestamp, optional proof video, and the weight value."""
    time_date = DateTimeLocalField("Date And Time", [validators.required(True)], format='%Y-%m-%dT%H:%M')
    # No FileAllowed/FileRequired validator here: any file type is accepted.
    video = FileField("Video Proof", )
    weight = StringField("Weight", [validators.required(True)])
    submit = SubmitField("submit")
import tvm
import tensorizer
import logging
import sys
import numpy as np
from tvm import relay
from tvm import autotvm
import topi
from tvm.relay import op
#t0, t1 = eval(input())
#n, c, h, w = map(int, t0)
#oc, ic, kh, kw = map(int, t1)
# Problem size from stdin as ten space-separated ints: batch n, channels c,
# input h x w, output channels oc, kernel input channels ic, kernel kh x kw,
# strides sh x sw.
n, c, h, w, oc, ic, kh, kw, sh, sw = map(int, input().split())
# Output spatial dims for a VALID (no-padding) convolution.
oh = (h - kh) // sh + 1
ow = (w - kw) // sw + 1
import time
timing = -1  # wall-clock start of the pass currently being traced
def tracer(module, info, is_before):
    """Pass-instrumentation hook: print the wall-clock milliseconds each
    compiler pass takes, using the module-level `timing` as scratch state.
    """
    global timing
    if bool(is_before):
        # Entering a pass: remember when it started.
        timing = time.time()
        return
    # Leaving a pass: report its name and elapsed time.
    elapsed_ms = (time.time() - timing) * 1000
    print('Executes: ', info.name, elapsed_ms)
from tensorizer import tune
tune.enable = False
# Grid-search over the padding strategy i (None/'fuse'/'pad') and the split-K
# factor j (doubling from 16), keeping the fastest measured configuration in
# (result, info). Each candidate builds and benchmarks the conv2d on GPU.
result = info = 1e9
for i in [None, 'fuse', 'pad'] if ow < 32 else [None]:
    j = 16
    while True:
        diffc = diffoc = diffh = diffw = 0
        # The commented-out block below is an abandoned NCHW16c padding
        # exploration, kept for reference.
        #if c % 64:
        #    diffc = 64 - c % 64
        #if oc % 32:
        #    diffoc = 32 - oc % 32
        #can_fuse = can_pad = True
        #if i == 'pad':
        #    can_fuse = False
        #if i == 'fuse':
        #    can_pad = False
        #if not ((oh * ow % 32 == 0 and 32 % ow == 0) or ow % 32 == 0):
        #    first_h = sh - (h - kh) % sh
        #    first_w = sw - (w - kw) % sw
        #    max_diff_h = 32 - oh % 32
        #    max_diff_w = 32 - ow % 32
        #    diffh = diffw = 1e9
        #    for i in range(max_diff_h + 1):
        #        for j in range(max_diff_w + 1):
        #            if (((oh + i) * (ow + j) % 32 == 0 and 32 % (ow + j) == 0 and can_fuse) or ((ow + j) % 32 == 0 and can_pad)) and i + j < diffh + diffw:
        #                def to_pad(padding, first, stride):
        #                    if padding == 0:
        #                        return 0
        #                    assert padding >= 1
        #                    return (padding - 1) * stride + first
        #                diffh, diffw = to_pad(i, first_h, sh), to_pad(j, first_w, sw)
        #                #assert (height + diffh - kh + 1) * (width + diffw - kw + 1) % 32 == 0
        #var_x = relay.var('x', shape=(n, (c + diffc) // 16, (h + diffh), (w + diffw), 16), dtype='float16')
        #var_w = relay.const(tvm.nd.array((np.random.randn((oc + diffoc) // 16, (c + diffc) // 16, kh, kw, 16, 16) * 128).astype('float16')))
        #conv2d = relay.nn.conv2d(var_x, var_w, out_dtype='float32', kernel_size=(kh, kw), channels=oc + diffoc, strides=(sh, sw), data_layout='NCHW16c', kernel_layout='OIHW16i16o')
        #if diffc or diffoc or diffh or diffw:
        #    y = relay.strided_slice(conv2d,
        #                            begin=relay.const(tvm.nd.array([0, 0, 0, 0])),
        #                            end=relay.const(tvm.nd.array([n, oc, oh, ow])))
        #else:
        #    y = conv2d
        # Build the Relay module: one conv2d with random constant weights.
        var_x = relay.var('x', shape=(n, c, h, w), dtype='float32')
        var_w = relay.const(tvm.nd.array((np.random.randn(oc, ic, kh, kw) * 128).astype('float32')))
        var_b = relay.const(tvm.nd.array((np.random.randn(1, oc, 1, 1) * 128).astype('float32')))
        conv2d = relay.nn.conv2d(var_x, var_w, out_dtype='float32', kernel_size=(kh, kw), channels=oc, strides=(sh, sw), out_layout='NCHW16c')
        y = conv2d
        func = relay.Function([var_x], y)
        module = tvm.IRModule()
        module['main'] = func
        # Feed the candidate configuration to the tensorizer rewrite pass.
        tune.padding = i
        tune.splitk = j
        passes = [(1, tensorizer.rewrite)]
        with tvm.transform.PassContext(opt_level=0, trace=tracer, config={'tir.add_lower_pass': passes}):
            #with tvm.transform.PassContext(opt_level=4, trace=tracer):
            #graph, lib, params = tvm.relay.build(module, target='cuda -libs=cublas,cudnn')
            graph, lib, params = tvm.relay.build(module, target='nvptx -libs=cublas,cudnn')
        from tvm.contrib import graph_runtime as runtime
        from tvm.contrib.debugger import debug_runtime as runtime
        func = runtime.create(graph, lib, tvm.gpu())
        x_ = (np.random.randn(n, c, h, w) * 128).astype('float32')
        func.set_input('x', x_)
        # Benchmark; re-run until the timing variance settles below 1e-5.
        timer = func.module.time_evaluator('run', ctx=tvm.gpu(), number=2, repeat=10)
        timed = timer()
        while np.var(timed.results) > 1e-5:
            timed = timer()
        if timed.mean < result:
            result = timed.mean
            info = (i, j)
        # Drop cached compilations so the next candidate rebuilds from scratch.
        relay.backend.compile_engine.get().clear()
        j <<= 1
        if j > tune.total_idx:
            break
class Script:
    """Filter a fixed table of city crime rates, keeping those above 500."""

    @staticmethod
    def main():
        crime_rates = [749, 371, 828, 503, 1379, 425, 408, 542, 1405, 835, 1288, 647, 974, 1383, 455, 658, 675, 615, 2122, 423, 362, 587, 543, 563, 168, 992, 1185, 617, 734, 1263, 784, 352, 397, 575, 481, 598, 1750, 399, 1172, 1294, 992, 522, 1216, 815, 639, 1154, 1993, 919, 594, 1160, 636, 752, 130, 517, 423, 443, 738, 503, 413, 704, 363, 401, 597, 1776, 722, 1548, 616, 1171, 724, 990, 169, 1177, 742]
        # Keep every rate strictly greater than 500, preserving order.
        five_hundred_list = [rate for rate in crime_rates if rate > 500]
        print(str(five_hundred_list))
class python_internal_ArrayImpl:
    """Haxe-style array helper: out-of-range reads yield None, not errors."""

    @staticmethod
    def _get(x, idx):
        # Valid indices are 0 .. len(x)-1; anything else returns None
        # instead of raising IndexError.
        return x[idx] if 0 <= idx < len(x) else None
Script.main() |
"""
对链表进行插入排序。
插入排序的动画演示如上。从第一个元素开始,该链表可以被认为已经部分排序(用黑色表示)。
每次迭代时,从输入数据中移除一个元素(用红色表示),并原地将其插入到已排好序的链表中。
插入排序算法:
插入排序是迭代的,每次只移动一个元素,直到所有元素可以形成一个有序的输出列表。
每次迭代中,插入排序只从输入数据中移除一个待排序的元素,找到它在序列中适当的位置,并将其插入。
重复直到所有输入数据插入完为止。
示例 1:
输入: 4->2->1->3
输出: 1->2->3->4
示例 2:
输入: -1->5->3->4->0
输出: -1->0->3->4->5
"""
# Definition for singly-linked list.
class ListNode:
    """One node of a singly linked list."""

    def __init__(self, x):
        # A freshly built node has no successor yet.
        self.next = None
        self.val = x
class Solution:
    """
    First attempt — exceeds the time limit on large inputs
    (O(n^2) traversal plus one new node allocated per element).
    """
    def insertionSortList(self, head: ListNode) -> ListNode:
        """Return a NEW sorted list built from the values of *head*."""
        if head is None:
            return head
        # Dummy head for the output list; seeded with the first value.
        dummy_node = ListNode(-1)
        dummy_node.next = ListNode(head.val)
        cur = head.next
        while cur:
            #print(cur.val)
            sort_node = dummy_node
            insert_node = ListNode(cur.val)
            while sort_node.next:
                if sort_node.next.val > cur.val:
                    # Insert at the front of the output list.  The order of
                    # the two statements below matters: writing
                    # dummy_node.next first would make sort_node.next point
                    # at the new node and loop forever:
                    # dummy_node.next = insert_node
                    # insert_node.next = sort_node.next
                    insert_node.next = sort_node.next
                    dummy_node.next = insert_node
                    break
                else:
                    if sort_node.next.next:
                        nexts = sort_node.next.next
                        if cur.val <= nexts.val:
                            # Insert between sort_node.next and nexts.
                            insert_node.next = nexts
                            sort_node.next.next = insert_node
                            break
                        else:
                            sort_node = sort_node.next
                            #print(sort_node.val)
                    else:
                        # Reached the tail: append at the end.
                        sort_node.next.next = insert_node
                        break
            cur = cur.next
        return dummy_node.next
class Solution:
    """In-place insertion sort on a linked list.

    Keeps a dummy head; repeatedly skips the already-sorted prefix and
    splices the first out-of-order node back into its correct spot.
    (Fix over the original: the local variable ``next`` shadowed the
    builtin of the same name — renamed to ``nxt``.)
    """

    def insertionSortList(self, head: ListNode) -> ListNode:
        """Sort the list starting at *head* ascending; return the new head."""
        if head is None:
            return None
        dummy_node = ListNode(-1)
        dummy_node.next = head
        cur = head
        while True:
            # Advance while the list is already in order at `cur`.
            while cur.next is not None and cur.val <= cur.next.val:
                cur = cur.next
            if cur.next is None:
                # Reached the tail: the whole list is sorted.
                break
            else:
                # Here cur.val > cur.next.val, so cur.next must be moved.
                # Scan from the head for its insertion point.
                pre = dummy_node
                nxt = cur.next
                while pre.next.val < nxt.val:
                    pre = pre.next
                # Splice nxt out of its old position and in after pre.
                cur.next = nxt.next
                nxt.next = pre.next
                pre.next = nxt
        return dummy_node.next
# Ad-hoc smoke test: build -1 -> 5 -> 3 -> 4 -> 0, sort it, print values.
a = ListNode(-1)
b = ListNode(5)
c = ListNode(3)
d = ListNode(4)
f = ListNode(0)
a.next = b
b.next = c
c.next = d
d.next = f
# NOTE(review): ListNode defines no __str__/__repr__, so this prints the
# default object repr rather than the list's contents.
print(a)
e = Solution().insertionSortList(a)
cur = e
while cur:
    print(cur.val)
    cur = cur.next
from django.urls import include, path
from . import views
from . import api
app_name = 'job'  # URL namespace: reverse as 'job:<name>'
urlpatterns = [
    path('',views.job_list,name='job_list'),
    # 'add' must stay registered before the '<str:slug>' catch-all below,
    # otherwise it would be captured as a slug.
    path('add',views.add_job,name='add_job'),
    path('<str:slug>',views.job_detail,name='job_detail'),
    ## api
    path('api/jobs', api.job_list_api, name='job_list_api'),
    path('api/jobs/<int:id>', api.job_detail_api, name='job_detail_api'),
    ## class based views
    # NOTE(review): these reuse the names 'job_list_api'/'job_detail_api'
    # from the function-based routes above, so reverse() can only resolve
    # one of each pair — confirm this is intended.
    path('api/v2/jobs',api.JobListApi.as_view(), name='job_list_api'),
    path('api/v2/jobs/<int:id>', api.JobDetail.as_view(), name='job_detail_api'),
]
|
"""
version__ = '0.0.2'
date__ = '2019.7.18'
version__ = '0.0.3'
date__ = '2019.9.7'
version__ = '0.0.6'
date__ = '2019.10.9'
version__ = '0.0.7'
date__ = '2019.11.18'
version__ = '0.0.8'
date__ = '2019.12.18'
"""
from .get_snuid import get_snuid # noqa: F401
from .sogou_tr import sogou_tr
# from sogou_tr.sogou_tr import main as __main__
__version__ = "0.0.11"
__date__ = "2020.09.23"
VERSION = __version__.split(".")
__all__ = ["get_snuid", "sogou_tr"]
|
import sys
from qa_code import QuestionParser
if __name__ == '__main__':
    # Two parsers: the default model plus a logistic-regression fallback.
    qp1 = QuestionParser()
    qp2 = QuestionParser(model='logistic')
    # Banner (Ukrainian): the program answers geography questions;
    # type 'exit' (or the Ukrainian equivalent) to quit.
    sys.stdout.write("Ця програма може відповісти на деякі запитання про різноманітні географічні факти.\n")
    sys.stdout.write('Будь ласка, ставте запитання!\n')
    sys.stdout.write("(щоб завершити роботу, введіть 'exit' або 'геть')\n\n")
    while True:
        q_text = input()
        if q_text.strip() == 'exit' or q_text.strip() == 'геть':
            sys.stdout.write('Завершення роботи.\n')
            break
        answer = qp1.answer_the_question(q_text)
        # Fall back to the second model when the first found no answer
        # ('не знайшлась' = "was not found").
        if not answer or 'не знайшлась' in answer:
            answer = qp2.answer_the_question(q_text)
        sys.stdout.write(answer + '\n\n')
# from dreal import *
import numpy as np
import time
import matplotlib.pyplot as plt
import matplotlib.lines as mlines
import matplotlib.patches as mpatches
import matplotlib.animation as animation
from utils import samplePointsOnAARectangle
from data import get_dataloader
from tensorboardX import SummaryWriter
np.random.seed(1024)
# plot
from examples.vanderpol import TC_Simulate
benchmark_name = 'vanderpol'
T_MAX = 4.0
# centers = [[0.15, 0.15], [1.4, 2.3]]
# rs = [0.075, 0.15, 0.3]
# from examples.jet_engine import TC_Simulate
# benchmark_name = 'jet_engine'
# T_MAX = 20.0
#
# centers = [[0.8, 0.8], [0.4, 0.8], [0.4, 1.2]]
# rs = [0.075, 0.15, 0.3]
# from examples.Brusselator import TC_Simulate
# benchmark_name = 'Brusselator'
# T_MAX = 10.0
#
# centers = [[0.9, 0.15], [0.4, 0.15], [1.4, 0.15], [0.4, 0.3], [1.4, 0.3], [0.9, 0.3]]
# rs = [0.075, 0.15, 0.3]
#
# center = [0.15, 0.15]
# r = 0.15
#
# center = [0.15, 0.15]
# r = 0.075
#
# center = [0.15, 0.15]
# r = 0.075
c = [0.5, 0.5] # vandepol on limit circle
r = 0.3
# c = centers[0]
# dist = 0.15
# 100 angles evenly spaced around the full circle of initial conditions.
thetas = np.arange(0,1,0.01) * 2 * np.pi
cmap = plt.get_cmap('gnuplot')
# colors = [cmap(i) for i in np.linspace(0, 1, len(thetas))]
# NOTE: as written every entry is the same colour, cmap(0).
colors = [cmap(0) for i in np.linspace(0, 1, len(thetas))]
# Subsample each simulated trace, keeping every 10th point.
time_step = 10
traces = [np.array(TC_Simulate('Default', c, T_MAX))[::time_step],]
# Trace columns: 0 = time, 1 and 2 = the two state variables.
plt.plot(traces[0][:, 1], traces[0][:, 2], 'o', markersize=1.0, color='r', label='ref')
for i, theta in enumerate(thetas):
    # Initial point on the circle of radius r around the reference c.
    point = r * np.array([np.cos(theta), np.sin(theta)])
    point = np.array(c) + point
    traces.append(np.array(TC_Simulate('Default', point.tolist(), T_MAX))[::time_step])
    plt.plot(traces[-1][:, 1], traces[-1][:, 2], 'o', markersize=1.0, color=colors[i])
    # point = dist * 2 * np.array([np.cos(theta), np.sin(theta)])
    # point = np.array(c) + point
    # traces.append(np.array(TC_Simulate('Default', point.tolist(), T_MAX)))
    # plt.plot(traces[-1][:, 1], traces[-1][:, 2], 'o', markersize=1.0, color=colors[i])
# Draw arrows from the reference trace to the LAST perturbed trace at
# each (subsampled) time step.
# NOTE(review): `colors[i]` uses `i` left over from the loop above (its
# final value); harmless only because all entries of `colors` are equal.
for t in range(traces[0].shape[0]):
    plt.arrow(traces[0][t, 1], traces[0][t, 2], traces[-1][t, 1] - traces[0][t, 1], traces[-1][t, 2] - traces[0][t, 2], color=colors[i])
plt.show()
# from examples.drone import TC_Simulate
# set_lower = np.array([1-0.5, 1-0.5, 4.5, -0.1, -0.1, -0., -0., -0., 0.0])
# set_higher = np.array([1+0.5, 1+0.5, 5.5, 0.1, 0.1, 0., 0., 0., 2 * np.pi - 1e-6])
# # goal = [2,2,6]
# # import ipdb; ipdb.set_trace()
# initial_states = samplePointsOnAARectangle(np.array([set_lower, set_higher]).T.reshape(-1), K=10)
# traces = []
# for i in range(initial_states.shape[0]):
# initial_state = initial_states[i,:]
# trace = TC_Simulate(initial_state, 10.)#, goal)
# traces.append(trace)
# traces = np.array(traces)
# import matplotlib as mpl
# from mpl_toolkits.mplot3d import Axes3D
# import numpy as np
# import matplotlib.pyplot as plt
# mpl.rcParams['legend.fontsize'] = 10
# plt.ion()
# fig = plt.figure()
# ax = fig.gca(projection='3d')
# ax.set_xlim3d([0, 3])
# ax.set_ylim3d([0, 3])
# ax.set_zlim3d([4, 7])
# ax.scatter(initial_states[:,0], initial_states[:,1], initial_states[:,2], marker='o', color='k')#, s=12)
# current = ax.plot(initial_states[:,0], initial_states[:,1], initial_states[:,2], 'o', color='b')[0]
# for t in range(traces[0].shape[0]):
# current.set_data(traces[:,t,1], traces[:,t,2])
# current.set_3d_properties(traces[:,t,3])
# fig.canvas.draw()
# fig.canvas.flush_events()
# # fig = plt.figure()
# # ax = fig.gca(projection='3d')
# # import ipdb; ipdb.set_trace()
# # ax.legend()
# # plt.show()
# # dimensions = len(trace[0])
# # init_delta_array = [0.5,0.5,0.5] + [0.1] * (dimensions - 4)
# # k = [1] * (dimensions - 1)
# # gamma = [0] * (dimensions - 1)
# # tube = bloatToTube(k, gamma, init_delta_array, trace, dimensions)
# # gazebotube = tube[:][1:4]
# # gazebotrace = trace[:][1:4]
# # print(tube)
# # plt.plot(trace[:,1], trace[:,3])
# # safety, reach = _verify_reach_tube(np.zeros((9,)), "[2; 2; 5]", 2.5, [])
# #print("reach: ", reach.tube)
# # plt.show()
|
input = open("input.txt","r+")
cases = int(input.readline())
solution = open("solve.txt","w")
caseNum = 1
def solve(word):
    """Build the lexicographically-largest 'last word' for one case.

    Scan letters left to right: a letter at least as large as the
    current leading letter goes to the front, anything smaller to the
    back.  Returns the resulting word.
    """
    letters = []
    for ch in word.strip():
        if letters and ch >= letters[0]:
            letters.insert(0, ch)
        else:
            letters.append(ch)
    return ''.join(letters)
# Solve every case, writing "Case #k: <answer>" lines to solve.txt.
for x in range(0,cases):
    ans = solve(input.readline())
    solution.write("Case #"+str(caseNum)+": "+ans+"\n")
    caseNum+=1
#imports
import random
import time
#modules
def playerChoice():
    """Prompt the human for a square (1-9) and place an 'X' on a free cell.

    Re-prompts on non-numeric input and on already-occupied squares.
    """
    if gameRunning == True:
        time.sleep(0.5)
        print("make first move")
        answer = str(input("Enter a number between 1 and 9: "))
        valid = ("1", "2", "3", "4", "5", "6", "7", "8", "9")
        while True:
            if answer in valid:
                cell = int(answer)
                if bo[cell] == "-":
                    bo[cell] = "X"
                    return
                answer = str(input("Position already taken, Re-enter a number: "))
            else:
                answer = str(input("Incorrect Value, Re-enter a number between 1 and 9: "))
def aiChoice():
    """Make the AI's move ('O').

    Priority (matching the original): first complete a line to BLOCK the
    player's two-in-a-row, then complete the AI's own two-in-a-row, then
    take the centre, else a random free cell.

    Fixes over the original elif chain: two conditions were impossible
    (``bo[7] == "X" and bo[7] == "-"``) and two tested the wrong cells
    for the bottom row, so those threats were never handled; the random
    fallback also had a redundant double assignment.
    """
    if gameRunning == True:
        time.sleep(0.5)
        print("ai Selecting")
        # All eight winning lines of the board (cells are numbered 1-9).
        lines = ((1, 2, 3), (4, 5, 6), (7, 8, 9),   # rows
                 (1, 4, 7), (2, 5, 8), (3, 6, 9),   # columns
                 (1, 5, 9), (3, 5, 7))              # diagonals
        # Block the player first, then try to win (original priority).
        for mark in ("X", "O"):
            for a, b, c in lines:
                cells = [bo[a], bo[b], bo[c]]
                if cells.count(mark) == 2 and cells.count("-") == 1:
                    empty = (a, b, c)[cells.index("-")]
                    bo[empty] = "O"
                    return
        # No immediate threat or win: prefer the centre.
        if bo[5] == "-":
            bo[5] = "O"
            return
        # Otherwise pick a random free cell.
        boxChoice = random.randint(1, 9)
        while bo[boxChoice] != "-":
            boxChoice = random.randint(1, 9)
        bo[boxChoice] = "O"
def showUI():
    """Print the current 3x3 board, one row per line."""
    for row_start in (1, 4, 7):
        print(bo[row_start], bo[row_start + 1], bo[row_start + 2])
def winCheckP():
    """Return True when the player ('X') owns a full row, column or diagonal."""
    wins = ((7, 8, 9), (4, 5, 6), (1, 2, 3),   # rows
            (7, 4, 1), (8, 5, 2), (9, 6, 3),   # columns
            (7, 5, 3), (9, 5, 1))              # diagonals
    for a, b, c in wins:
        if bo[a] == "X" and bo[b] == "X" and bo[c] == "X":
            return True
    return False
def winCheckA():
    """Return True when the AI ('O') owns a full row, column or diagonal."""
    wins = ((7, 8, 9), (4, 5, 6), (1, 2, 3),   # rows
            (7, 4, 1), (8, 5, 2), (9, 6, 3),   # columns
            (7, 5, 3), (9, 5, 1))              # diagonals
    for a, b, c in wins:
        if bo[a] == "O" and bo[b] == "O" and bo[c] == "O":
            return True
    return False
def isBoardFull():
    """Return True when no playable cell remains (the game is a tie).

    Fix: the original tested the whole list including the placeholder
    at index 0, which is never written and always stays "-", so the tie
    branch could never trigger.  Only cells 1-9 are playable.
    """
    return "-" not in bo[1:]
#vars
# Board cells 1-9; index 0 is an unused placeholder so cell numbers map
# directly onto list indices.
bo = ["-", "-", "-", "-", "-", "-", "-", "-", "-", "-"]
gameRunning = True
#game running
# Main loop: human moves first, then the AI; win/tie checks after each move.
while gameRunning == True:
    playerChoice()
    showUI()
    if winCheckP() == True:
        showUI()
        print("Player Wins")
        exit()
    if winCheckA() == True:
        showUI()
        print("AI Wins")
        exit()
    if isBoardFull() == True:
        showUI()
        print('The game is a tie!')
        exit()
    if gameRunning == True:
        aiChoice()
        showUI()
        if winCheckP() == True:
            showUI()
            print("Player Wins")
            exit()
        if winCheckA() == True:
            showUI()
            print("AI Wins")
            exit()
        showUI()
        if isBoardFull() == True:
            showUI()
            print('The game is a tie!')
            exit()
|
def ordem4():
    """Read 16 integers into a 4x4 matrix; print it, its mean, and its
    main diagonal (via diagonal())."""
    matrix = []
    row = []
    for position in range(1, 17):
        row.append(int(input("Digite um numero: ")))
        if position % 4 == 0:   # a row is complete every 4 inputs
            matrix.append(row)
            row = []
    total = 0
    for line in matrix:
        for item in line:
            total += item
    print(matrix)
    print("Average: {}".format(total / 16))
    diagonal(matrix)
def diagonal(vector):
    """Print each main-diagonal element of the square matrix *vector*,
    then print and return the diagonal's sum.

    Fixes: the original scanned all n^2 cells to reach the n diagonal
    entries — one loop over the row index suffices — and returned
    nothing (callers previously ignored the implicit None, so returning
    the sum is backward-compatible).
    """
    summ = 0
    for k in range(len(vector)):
        print(vector[k][k])
        summ += vector[k][k]
    print(summ)
    return summ
def matriz_5x5():
    """Read a 5x5 integer matrix and report several sums: row 4,
    column 2, the secondary diagonal, and all elements."""
    matriz = []
    buffer = []
    for count in range(1, 26):
        buffer.append(int(input("Digite o elemento da matriz: ")))
        if count % 5 == 0:   # close a row every 5 inputs
            matriz.append(buffer)
            buffer = []
    row4 = col2 = all_in = sumdia = 0
    for i in range(len(matriz[0])):
        print(matriz[i])
        row4 += matriz[3][i]        # 4th row (index 3)
        col2 += matriz[i][1]        # 2nd column (index 1)
        all_in += sum(matriz[i])
        sumdia += matriz[i][4 - i]  # secondary diagonal
    print("A soma da linha 4 é",row4)
    print("A soma da coluna 2 é",col2)
    print("A soma da diagonal secundária é",sumdia)
    print("A soma de todos os elementos é", all_in)
def gemeo():
    """Read an interval [a, b] and print every twin-prime pair inside it."""
    a = int(input("Digite o primeiro valor: "))
    b = int(input("Digite o segundo valor: "))
    for candidate in range(a, b + 1):
        # Twin primes: both numbers prime and the pair fits inside [a, b].
        if primo(candidate) and primo(candidate + 2) and candidate + 2 <= b:
            print(candidate, candidate + 2)
def primo(x):
    """Return True when *x* is prime, False otherwise.

    Fix: the original counted every divisor from 1 to x (O(x) loop
    iterations per call); trial division up to sqrt(x) gives the same
    truth table in O(sqrt(x)).  Values below 2 are not prime.
    """
    if x < 2:
        return False
    if x < 4:
        return True      # 2 and 3
    if x % 2 == 0:
        return False
    d = 3
    while d * d <= x:
        if x % d == 0:
            return False
        d += 2
    return True
#matriz_5x5()
|
import os
import random
import pyowm
import telebot

# NOTE(review): the bot token is hard-coded (and thereby published) here;
# it should be revoked and loaded from an environment variable instead.
bot = telebot.TeleBot('1922989323:AAHBy0wEnTgfrFYhbcZlbG6jgwsPWHk0Bos')
@bot.message_handler(content_types=['new_chat_members'])
def send_welcome(message):
    """Greet each user who joins the chat and send the welcome animation.

    Fix: the animation file is now opened in a ``with`` block so the
    handle is closed after sending (the original leaked it).
    """
    chatID = message.chat.id
    bot.reply_to(message,
                 """ Привіт! \nРаді бачити вас у нашому ламповому чатику, що об'єднює ходорівську громаду! \nЯ - Бот, що імітує всім відомого Женіка, любителя сказати "Е".Вам розв'язані руки для обговорення що до будь-які теми, та \nпам'ятайте, у кожної теми є свої межі(правила), почувайтеся як у дома,та хорошого спілкування!""")
    with open("ezgif.com-add-text.gif.mp4", "rb") as f:
        bot.send_animation(chatID, f)
@bot.message_handler(content_types="text")
def send_text(message):
chatID = message.chat.id
text = message.text.lower()
if text == "!h" or text == "!д":
bot.send_message(chatID,
"""Привіт усім новим користувачам.\n\nАдміністрація чату рада вас вітати!\n\nПросимо
дотримуватися усіх правил. \n\nУ випадку виявлення порушень з боку інших учасників просимо
повідомити нашу командуs.\n\nПриємного спілкування!""")
elif text == "!info" or text == "!інфо":
bot.send_message(chatID,
"Привіт, я Женік!\n\nЯ створений задля покращення цього чату.\n\nЯ унікальний бот, "
"який з часом буде ставати лише кращим")
elif text == "сергій" or text == "сергій":
bot.reply_to(message, "Найкращий в світі!!!")
elif text == "!к" and "!к" or text == "!команди" and "!команди":
f = open("Команди.txt", "rb")
bot.send_document(chatID, f)
elif text == "шо робиш" and "шо робиш" or text == "шо робиш?" and "шо робиш?" or text == "що робиш" and "що робиш" or text == "що робиш?" and "що робиш?":
f = open("ібу_і_пю.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "хуй" and "хуй" or text == "пизда" and " пизда":
f = open("хуя_собі_свого_зніми.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "сука" and "сука" or text == "блять" and "блять" or text == "блядь":
f = open("тикурвайобанаблять.mp3", "rb")
bot.send_audio(chatID, f)
elif text == " їбати" and " їбати" or text == "єбати" and "єбати" or text == "єбу" and "єбу" or text == "я єбу" and "я єбу" or text == "в'єбати" and "в'єбати" or text == "вєбати" and "вєбати":
f = open("я_ше_можу_вебати.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "ого" and "ого":
f = open("ніхуясобіблять.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "марічка" and "марічка":
bot.reply_to(message, "Найкраща в світі!!!")
elif text == "тихо" and "тихо" or text == "рот закрий" and "рот закрий":
f = open("завалю_по_єбалу_точно.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "уєбав" and "уєбав" or text == "пішов ти" and "пішов ти":
f = open("уебав.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "мля" and "мля" or text == "бля" and "бля":
f = open("тваюматьблять....mp3", "rb")
bot.send_audio(chatID, f)
elif text == "блять" and "блять" or text == "блядь" and "блядь" or text == "соси" and "соси" and "блядь" or text == "курва" and "курва":
f = open("зара_точно_перехуяру.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "шо" and "шо" or text == "не поняв" and "не поняв" or text == "ясно" and "ясно":
f = open("издєуиууиуиу_непонятне_бубніння.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "закрий рот" and "закрий рот" or text == "тихо буть" and "тихо буть" or text == "тихо будь" and "тихо будь":
f = open("завалю_по_єбалу_точно.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "е" and "е" or text == "e" and "e":
f = open("е.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "даун?" and "даун?" or text == "дурачок?" and "дурачок?":
f = open("бля_ти_далбайоб.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "хуйня" and "хуйня" or text == "пизда" and "пизда" or text == "не знімай" and "не знімай" or text == "зніми" and "зніми" or text == "відео" and "відео" or text == "хуя собі свого зніми" and "хуя собі свого зніми":
f = open("хуя_собі_свого_зніми.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "ти дебіл" and "ти дебіл" or text == "ти тупий" and "ти тупий" or text == "далбайоб" and "далбайоб" or text == "ти дебіл?" and "ти дебіл?" or text == "ти тупий?" and "ти тупий?" or text == "довбойоб" and "довбойоб":
f = open("бля_ти_далбайоб.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "йди нахуй" and "йди нахуй" or text == "йди ти" and "йди ти":
f = open("диннахуй.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "не нервуй" and "не нервуй" or text == "пішов ти" and "пішов ти":
f = open("йди_ннахуй_заєбав.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "куда?" and "куда?" or text == "куда" and "куда":
f = open("куда.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "ти поганий" and "ти поганий" or text == "ти погана" and "ти погана":
f = open("тикурвайобанаблять.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "шо сказав?" and "шо сказав?" or text == "що сказав?" and "що сказав?" or text == "шо бля?" and "шо бля":
f = open("куда.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "жостко" and "жостко" or text == "ніхуя собі" and "ніхуя собі" or text == "ніхуя собі блять" and "ніхуя собі блять":
f = open("ніхуясобіблять_тивєбав_е.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "міст" and "міст" or text == "моста" and "моста" or text == "кличко" and "кличко" or text == "впав" and "впав":
f = open("який_міст_впизду_бля.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "вікна" and "вікна" or text == "йоб тваю мать з тими вікнками" and "йоб тваю мать з тими вікнками":
f = open("йобтваюматьзтимивікнкаминахуйблять.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "ало" and "ало" or text == "альо" and "альо":
f = open("альобля.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "га" and "га" or text == "га?" and "га?":
f = open("га.mp3", "rb")
bot.send_audio(chatID, f)
elif text == "!w" and "!w" or text == "!п" and "!п" or text == "!погода" and "!погода":
owm = pyowm.OWM('0cee35c4d29f2fcc16a2c9ae2d965d3d')
mgr = owm.weather_manager()
place = "Ходорів"
observation = mgr.weather_at_place(place)
w = observation.weather
temp = w.temperature('celsius')["temp"]
if temp <= 0:
bot.send_message(chatID, "Ходорів: " + str(temp) + "❄")
elif 0 < temp < 10:
bot.send_message(chatID, "Ходорів: " + str(temp) + "☁")
elif 10 < temp < 20:
bot.send_message(chatID, "Ходорів: " + str(temp) + "⛅")
elif temp > 20:
bot.send_message(chatID, "Ходорів: " + str(temp) + "☀")
if __name__ == "__main__":
bot.get_updates()
bot.polling(none_stop=True)
|
import matplotlib.pyplot as plt
import numpy as np
from Automaton import Automaton
from dfs_bfs import dfs, bfs
from astar import astar
def save_image(
        output_name : str,
        board : Automaton) -> None:
    """Render *board* as an axis-free image and save it to '<output_name>.png'.

    Fix: the figure is explicitly closed after saving; the original left
    every figure open, so repeated calls accumulated memory.
    """
    fig = plt.figure(figsize=(3, 3))
    ax = plt.axes()
    ax.set_axis_off()
    ax.imshow(np.array(board.board), interpolation='none', cmap='RdPu')
    plt.savefig('{output_name}.png'.format(output_name=output_name), dpi=300)
    plt.close(fig)  # release the figure's memory
if __name__ == '__main__':
    # 50x50 board; the meaning of the third argument (254) is defined by
    # the Automaton class — presumably a rule/seed value, TODO confirm.
    board = Automaton(50, 50, 254)
    output_name = "test"
    board.transform_to_rgb()
    #save_image(output_name, board)
    # Run depth-first search over the board, mark the found path, render it.
    has_path, path = dfs(board)
    board.mark_visited(path)
    save_image(output_name+"dfs", board)
# Generated by Django 3.0.2 on 2020-01-18 17:41
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the TermsOfServices model to the singular TermsOfService."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('tos', '0001_initial'),
    ]

    operations = [
        migrations.RenameModel(
            old_name='TermsOfServices',
            new_name='TermsOfService',
        ),
    ]
|
# -*- coding: utf-8 -*-
#
# python-netfilter - Python modules for manipulating netfilter rules
# Copyright (C) 2007-2009 Bolloré Telecom
# See AUTHORS file for a full list of contributors.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
import re
import types
import netfilter.parser
# define useful regexps
# Matches a long-form iptables option ("--name"), capturing the bare name.
re_extension_opt = re.compile(r'^--(.*)$')
class Extension:
    """Base class for iptables match and target extensions.

    Fixes over the original:
    * Python 3 compatibility: ``types.ListType`` and ``dict.has_key``
      no longer exist; ``isinstance(x, list)`` and ``in`` work on both
      Python 2 and 3.
    * the mutable default argument ``rewrite_options={}`` is replaced
      by the ``None`` sentinel idiom.
    """

    # Matches a long-form option ("--name"), capturing the bare name.
    _OPTION_RE = re.compile(r'^--(.*)$')

    def __init__(self, name, options, rewrite_options=None):
        """*options* may be a pre-split word list or a raw option string;
        *rewrite_options* maps long option names to canonical short ones."""
        self.__name = name
        self.__options = {}
        self.__rewrite_options = {} if rewrite_options is None else rewrite_options
        if options:
            self.__parse_options(options)

    def __eq__(self, other):
        if isinstance(other, Extension):
            return self.__name == other.__name and \
                self.__options == other.__options
        else:
            return NotImplemented

    def __ne__(self, other):
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def __parse_options(self, options):
        """Parse *options* into the internal {name: [values]} mapping."""
        if isinstance(options, list):
            bits = options
        else:
            bits = netfilter.parser.split_words(options)
        pos = 0
        cur_opt = []
        while pos < len(bits):
            # a lone '!' negates the option that follows it
            if bits[pos] == '!':
                cur_opt.append(bits[pos])
                pos += 1
                continue
            # get option name
            m = self._OPTION_RE.match(bits[pos])
            if not m:
                raise Exception("expected option, got: %s" % bits[pos])
            pos += 1
            # rewrite option to its canonical name
            tmp_opt = m.group(1)
            if tmp_opt in self.__rewrite_options:
                tmp_opt = self.__rewrite_options[tmp_opt]
            cur_opt.append(tmp_opt)
            # collect value(s) up to the next option token
            vals = []
            while pos < len(bits) and not self._OPTION_RE.match(bits[pos]):
                vals.append(bits[pos])
                pos += 1
            # store option
            opt = ' '.join(cur_opt)
            self.__options[opt] = vals
            # reset current option name
            cur_opt = []

    def log(self, level, prefix=''):
        """Write the contents of the Extension to the logging system."""
        logging.log(level, "%sname: %s", prefix, self.__name)
        logging.log(level, "%soptions: %s", prefix, self.__options)

    def name(self):
        """Accessor for the Extension's name."""
        return self.__name

    def options(self):
        """Accessor for the Extension's options mapping."""
        return self.__options

    def specbits(self):
        """Return the iptables argument list for this Extension."""
        bits = []
        for opt in self.__options:
            # handle the case where this is a negated option
            m = re.match(r'^! (.*)', opt)
            if m:
                bits.extend(['!', "--%s" % m.group(1)])
            else:
                bits.append("--%s" % opt)
            optval = self.__options[opt]
            if isinstance(optval, list):
                bits.extend(optval)
            else:
                bits.append(optval)
        return bits
class Match(Extension):
    """An iptables match extension, for instance 'multiport'.

    Long-form port option names are canonicalised to their short
    equivalents so that equivalent rules compare equal.
    """

    def __init__(self, name, options=None):
        # long option name -> canonical short name
        canonical = {
            'destination-port': 'dport',
            'destination-ports': 'dports',
            'source-port': 'sport',
            'source-ports': 'sports',
        }
        Extension.__init__(self, name, options, canonical)
class Target(Extension):
    """An iptables target, usable in a rule's 'jump'/'goto' statement."""

    def __init__(self, name, options=None):
        # Targets need no option rewriting; delegate straight to Extension.
        Extension.__init__(self, name, options)
class Rule:
    """Represents a single iptables rule.

    Fixes over the original: Python 3 compatibility —
    ``dict.iteritems`` and ``types.ListType`` no longer exist;
    ``items()`` and ``isinstance(x, list)`` behave the same on
    Python 2 and 3.
    """

    def __init__(self, **kwargs):
        """Create a rule; keyword arguments may set any rule field
        (protocol, source, destination, in_interface, out_interface,
        goto, jump, matches)."""
        # initialise rule definition
        self.protocol = None
        self.destination = None
        self.source = None
        self.goto = None
        self.jump = None
        self.in_interface = None
        self.out_interface = None
        self.matches = []
        # initialise counters
        self.packets = 0
        self.bytes = 0
        # assign supplied arguments
        for k, v in kwargs.items():
            self.__setattr__(k, v)

    def __eq__(self, other):
        if isinstance(other, Rule):
            return other.protocol == self.protocol and \
                other.in_interface == self.in_interface and \
                other.out_interface == self.out_interface and \
                other.source == self.source and \
                other.destination == self.destination and \
                other.goto == self.goto and \
                other.jump == self.jump and \
                other.matches == self.matches
        else:
            return NotImplemented

    def __ne__(self, other):
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def __setattr__(self, name, value):
        if name == 'source' or name == 'destination':
            # produce "canonical" form of a source / destination
            # FIXME: we need to handle arbitrary netmasks here
            if value is not None and value.endswith('/32'):
                value = value[:-3]
        elif name == 'goto' or name == 'jump':
            # wrap bare target names in a Target instance
            if value is not None and not isinstance(value, Target):
                value = Target(value)
        elif name == 'matches':
            if not isinstance(value, list):
                raise Exception("matches attribute requires a list")
        self.__dict__[name] = value

    def find(self, rules):
        """Convenience method: return the first rule in *rules* equal to
        this one, or None."""
        for rule in rules:
            if self == rule:
                return rule
        return None

    def log(self, level, prefix=''):
        """Write the contents of the Rule to the logging system."""
        logging.log(level, "%sin interface: %s", prefix, self.in_interface)
        logging.log(level, "%sout interface: %s", prefix, self.out_interface)
        logging.log(level, "%ssource: %s", prefix, self.source)
        logging.log(level, "%sdestination: %s", prefix, self.destination)
        logging.log(level, "%smatches:", prefix)
        for match in self.matches:
            match.log(level, prefix + ' ')
        if self.jump:
            logging.log(level, "%sjump:", prefix)
            self.jump.log(level, prefix + ' ')

    def specbits(self):
        """Return the iptables argument list for this Rule."""
        bits = []
        if self.protocol:
            bits.extend(['-p', "%s" % self.protocol])
        if self.in_interface:
            bits.extend(['-i', "%s" % self.in_interface])
        if self.out_interface:
            bits.extend(['-o', "%s" % self.out_interface])
        if self.source:
            bits.extend(['-s', "%s" % self.source])
        if self.destination:
            bits.extend(['-d', "%s" % self.destination])
        for mod in self.matches:
            bits.extend(['-m', mod.name()])
            bits.extend(mod.specbits())
        if self.goto:
            bits.extend(['-g', self.goto.name()])
            bits.extend(self.goto.specbits())
        elif self.jump:
            bits.extend(['-j', self.jump.name()])
            bits.extend(self.jump.specbits())
        return bits
|
# Sum the odd multiples of 3 in [1, 500] and count how many there are.
s = sum(n for n in range(1, 501, 2) if n % 3 == 0)
con = sum(1 for n in range(1, 501, 2) if n % 3 == 0)
print(s)
print(con)
import pytest
from cfme.infrastructure import pxe
from utils.randomness import generate_random_string
from utils.update import update
pytestmark = [pytest.mark.usefixtures("logged_in")]
def test_system_image_type_crud():
    """Run a System Image Type through create -> update -> delete."""
    image_type = pxe.SystemImageType(
        name=generate_random_string(size=8),
        provision_type='Vm')
    image_type.create()
    # Rename in place; the `update` context manager pushes the change.
    with update(image_type):
        image_type.name = "{}_update".format(image_type.name)
    image_type.delete(cancel=False)
|
#!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as plt
import esutil
from scipy import ndimage
import sys
import galsim
import matplotlib.cm as cm
import cdmodel_functions
# Input HST WFC3/IR multiaccum ("ima") FITS file to analyse.  The
# alternative datasets below are kept commented out for quick switching.
#file_path="/Users/amalagon/WFIRST/WFC3_data/data/omega-cen-all_data/omega-cen-ima-files/ibcj09ksq_ima.fits"
#file_path="/Users/amalagon/WFIRST/WFC3_data/data/omega-cen-all_data/omega-cen-ima-files/ibcj09kkq_ima.fits"
#file_path="/Users/amalagon/WFIRST/WFC3_data/multiaccum_ima_files_omega_cen/ibcf81qkq_ima.fits"
#file_path="/Users/amalagon/WFIRST/WFC3_data/data/standard-stars-hst/GD153/ibcf0cvmq_ima.fits"
file_path="/Users/amalagon/WFIRST/WFC3_data/data/standard-stars-hst/GD71_G191B2B/ibcf90i1q_ima.fits"
#file_path = "../../Data/ibcj09ksq_ima.fits" # path to file on Huff's machine.
#file_path = "../../Data/ibcf0cvmq_ima.fits" # path to file on Huff's machine.
def apply_cdmodel (im, factor=1):
    """Apply the charge-deflection ('brighter-fatter') model to an image.

    Uses galsim's implementation of the Antilogus et al. (2014) model as
    performed by Gruen et al. (2015).  `factor` scales all four deflection
    matrices uniformly.
    """
    aL, aR, aB, aT = cdmodel_functions.readmeanmatrices()
    model = galsim.cdmodel.BaseCDModel(
        factor * aL, factor * aR, factor * aB, factor * aT)
    return model.applyForward(im)
def get_image_array(file=file_path, ext_per_image = 5,
                    exclude_last = 3):
    """Read the science/error/mask extension triplets of a multi-extension
    FITS file and return them as three stacked arrays.

    Extensions are read starting at 1 in strides of `ext_per_image`
    (sci at ext, err at ext+1, mask at ext+2); the last `exclude_last`
    reads are dropped.  The mask is returned as floats.
    """
    header = esutil.io.read_header(file)
    n_ext = header.get("NEXTEND")
    sci, err, mask = [], [], []
    for first_ext in np.arange(1, n_ext, ext_per_image):
        sci.append(esutil.io.read(file, ext=first_ext))
        err.append(esutil.io.read(file, ext=first_ext + 1))
        mask.append(esutil.io.read(file, ext=first_ext + 2))
    # Drop the trailing reads before stacking.
    sci_arr = np.array(sci[0:-exclude_last])
    err_arr = np.array(err[0:-exclude_last])
    mask_arr = np.array(mask[0:-exclude_last])
    return sci_arr, err_arr, 1.0 * mask_arr
def get_simulated_array (delta_time=10, n_ext=5, doCD = False, factor=1):
    """Simulate a stack of n_ext noisy reads of a Gaussian star with galsim,
    mimicking the sci/err/mask triplets that get_image_array() returns.

    delta_time: time between consecutive reads (arbitrary units).
    doCD:       when True, apply the charge-deflection model to each read.
    factor:     scaling passed through to apply_cdmodel().

    Returns (sci_arr, err_arr, mask_arr): sci_arr is the cumulative signal
    divided by elapsed time (an accumulating rate, like an up-the-ramp
    sample), err_arr is a constant noise level, mask_arr is all zeros.
    """
    import galsim
    ext_all = np.arange(1,n_ext+1)
    sci = []
    err = []
    mask = []
    time=[]
    sigma_noise = .02
    gal_flux_rate = 2.e3
    gal_fwhm = 0.5 # arcsec
    pixel_scale=0.13
    #time_vec = np.linspace (0., n_ext*delta_time, n_ext)
    # Fixed seed so the simulated ramps are reproducible between calls.
    random_seed = 1234567
    rng = galsim.BaseDeviate(random_seed)
    base_size=64
    for ext in ext_all:
        # Draw the same Gaussian profile each read; only the noise
        # realisation (and optionally the CD model) differs per read.
        profile=galsim.Gaussian(flux=gal_flux_rate, fwhm=gal_fwhm)
        sci_im=profile.drawImage(image=galsim.Image(base_size, base_size, dtype=np.float64), scale=pixel_scale)
        sci_im.addNoise(galsim.GaussianNoise(rng = rng, sigma=sigma_noise))
        if doCD is True:
            sci_im=apply_cdmodel(sci_im,factor=factor)
        sci.append( sci_im.array )
        err.append( sci_im.array*0.+ sigma_noise )
        mask.append( sci_im.array * 0 )
        time.append( sci_im.array*0 + ext*delta_time )
    time=np.array(time)
    # Cumulative sum over the read axis divided by elapsed time converts
    # the per-read draws into an accumulating signal expressed as a rate.
    sci_arr = np.cumsum(np.array(sci), axis=0)/ time
    err_arr = np.array(err)
    mask_arr = np.array(mask)
    return sci_arr, err_arr, mask_arr
def make_chi2_map(sci_arr, err_arr, mask_arr):
    """Build a per-pixel reduced chi-square map across an image stack.

    sci_arr, err_arr, mask_arr: (n_reads, ny, nx) arrays of signal, noise
    and bad-pixel flags.

    Returns (chisq_dof, mask_summed): the chi-square per degree of freedom
    at each pixel (0 where masked), and a boolean 2-D map of bad pixels.
    """
    # Inverse-variance weighted mean image over the read axis.
    mean_image = np.average(sci_arr,weights=1.0/err_arr**2, axis=0)
    deviant_arr = sci_arr - np.expand_dims(mean_image,axis=0)
    # BUG FIX: the residuals must be squared for a chi-square statistic;
    # the original used **1, whose weighted residuals largely cancel and
    # yield a meaningless near-zero map.
    chisq = np.sum((deviant_arr/err_arr)**2,axis=0)
    chisq_dof = chisq*1./sci_arr.shape[0]
    # A pixel is bad if flagged in any read, or if its chi2 is non-finite.
    mask_summed = (np.sum(mask_arr,axis=0) > 0) | ~np.isfinite(chisq) # bad guys
    chisq_dof[mask_summed] = 0.
    return chisq_dof, mask_summed
#def make_quadratict_map (sci_arr, err_arr, mask_arr):
# mean_image=np.mean(sci_arr, axis=0)
# deviant_arr = sci_arr - np.expand_dims(mean_image,axis=0)
def plot_average_pixel_trend(sci_arr, err_arr, mask_arr, scale = 0.13,
                             doPlot = True, doCD = False,factor=1):
    """Group pixels by the strength of their charge-deflection signature
    and track each group's mean residual flux over the read sequence.

    Returns a list of per-quantile time series (one array per bin), each
    offset vertically so they can be over-plotted without overlapping.
    """
    chi2Map, ubermask = make_chi2_map(sci_arr, err_arr, mask_arr)
    use = ~ubermask #Negating the bad guys
    # Inverse-variance weighted mean image over the read axis.
    image = np.average(sci_arr,axis=0, weights = 1./err_arr**2)
    image[ubermask] = -np.inf
    '''
    image_filtered = image * 0.
    a=0.1
    theFilter = np.array([[0.,a, 0.], [a, -4*a, a], [0., a, 0.]]) #Laplacian
    for i in xrange(3):
        for j in xrange(3):
            image_filtered = image_filtered + theFilter[i,j] * image
    image_filtered-=image
    '''
    # CD-model prediction minus the original: where the model moves charge.
    image_filtered = apply_cdmodel (galsim.Image(image,scale=scale), factor= factor).array - image
    image_filtered[ubermask | ~np.isfinite(image_filtered)] = 0.
    # Bin the filtered image values into quantiles.
    nq = 10
    quant = np.percentile(image_filtered[use].flatten(), np.linspace(0,100,nq))
    deviant_arr = sci_arr - np.expand_dims(image,axis=0)
    max_interval = 0.
    timeseries = []
    for i in xrange(nq-1):
        # Pixels in this quantile bin; exact zeros are excluded because
        # masked / non-finite pixels were zeroed above.
        these_pixels = ( (image_filtered > quant[i]) &
                         (image_filtered <= quant[i+1]) &
                         (image_filtered != 0) )
        these_pixels_3d = np.repeat(np.expand_dims(these_pixels,axis=0),sci_arr.shape[0],axis=0)
        this_dev_array = deviant_arr.copy()
        this_dev_array[~these_pixels_3d] = 0.
        this_npix = np.sum(these_pixels)
        # Mean residual of the selected pixels at each read.
        this_timeseries = np.sum(np.sum(this_dev_array,axis=1),axis=1) * 1./this_npix
        timeseries.append(this_timeseries)
        # Track the largest peak-to-peak swing for the plot offsets below.
        this_interval = np.abs(np.max(this_timeseries) - np.min(this_timeseries))
        if this_interval > max_interval:
            max_interval = this_interval
    # Vertical offsets, centred on zero, spaced by the largest swing.
    offset_array = (np.arange(nq) - np.mean(np.arange(nq))) * max_interval
    if doPlot is True:
        fig,(ax1,ax2,ax3) = plt.subplots(nrows=1,ncols=3,figsize=(28,6))
        colors = cm.seismic(np.linspace(0, 1, nq-1))
        ax1.imshow(np.arcsinh(image))
        ax2.imshow(image_filtered,cmap=cm.seismic,vmin = quant[1],vmax=-quant[1])
        ax2.set_title("Laplacian-filtered image")
        for i in xrange(nq-1):
            # [::-1] flips the series -- presumably so time increases along
            # the x axis; confirm against the data ordering.
            ax3.plot((timeseries[i] + offset_array[i])[::-1],color=colors[i],marker='.')
        fig.savefig("linearity_timeseries_trend.png")
        ax3.set_xlabel ("Time (arbitrary units)")
        ax3.set_ylabel ("Corrected pixel flux (e/sec)")
        fig.tight_layout()
        fig.subplots_adjust(wspace=0.3)
        fig.show()
    timeseries_offset = [ts + os for ts,os in zip(timeseries,offset_array)]
    return timeseries_offset
def main(argv):
    """Driver: simulate read ramps with and without charge deflection (CD),
    compute the binned pixel time series for each, and plot the difference
    of the trends per quantile bin.
    """
    #sci_arr, err_arr, mask_arr = get_image_array()
    sci_arr_nocd, err_arr_nocd, mask_arr_nocd = get_simulated_array(delta_time=5, n_ext=10, doCD = False)
    sci_arr_cd, err_arr_cd, mask_arr_cd = get_simulated_array(delta_time=5, n_ext=10, doCD = True, factor=1)
    timeseries_nocd = plot_average_pixel_trend(sci_arr_nocd, err_arr_nocd, mask_arr_nocd,doPlot= True)
    timeseries_cd = plot_average_pixel_trend(sci_arr_cd, err_arr_cd, mask_arr_cd,doPlot= False)
    fig,ax = plt.subplots()
    for tnocd,tcd in zip(timeseries_nocd,timeseries_cd):
        # Difference of the CD and no-CD trends for each quantile bin.
        ax.plot((tcd - tnocd)[::-1])
    fig.show()
    # NOTE(review): `stop` is an undefined name -- it raises NameError here,
    # which the __main__ handler turns into a pdb post-mortem session.
    # Presumably a deliberate breakpoint; confirm before removing.
    stop
    '''
    chi2Map, ubermask = make_chi2_map(sci_arr, err_arr, mask_arr)
    fig,(ax1,ax2) = plt.subplots(nrows=1,ncols=2,figsize=(14,7))
    im1 = ax1.imshow(chi2Map,vmin=0,vmax=2)
    image = np.mean(sci_arr,axis=0)
    image_sharp = image - ndimage.gaussian_filter(image,5.)
    use = ~ubermask #Negating the bad guys
    ## IPC:
    #u = 0.0011
    #v = 0.0127
    #w = 0.936
    #x = 0.0164
    ## Laplacian kernel
    a=0.02
    image_filtered = image * 0.
    theFilter = np.array([[0.,a, 0.], [a, -4*a, a], [0., a, 0.]]) #Laplacian
    for i in xrange(3):
        for j in xrange(3):
            image_filtered = image_filtered + theFilter[i,j] * image
    image_filtered-=image
    ax1.set_title("chisq dof map")
    im2 = ax2.imshow(np.arcsinh(np.mean(sci_arr,axis=0)/0.1),cmap=plt.cm.Greys)
    ax2.set_title("mean science image")
    fig.savefig("chisq_nonlin_map.png")
    fig.show()
    from matplotlib.colors import LogNorm
    root= file_path.split('/')[-1].split('.')[0]
    header=esutil.io.read_header (file_path)
    name, time = header.get('TARGNAME').strip(), int(header.get ('EXPTIME'))
    root='simulated_gaussian'
    name='andres'
    time=100
    fig2, ax4 = plt.subplots(nrows=1,ncols=1,figsize=(7,7))
    #ax3.plot(image.flatten(),chi2Map.flatten(),',')
    im_filtered_min, im_filtered_max = np.percentile ((image_filtered[use].flatten()), [5,95] )
    ax4.hist2d((image_filtered[use].flatten()),(chi2Map[use].flatten()),norm=LogNorm(),
               bins = [np.linspace(im_filtered_min - 0.5*np.abs(im_filtered_min), im_filtered_max + 0.5*np.abs(im_filtered_max),100),np.linspace(-2.0,2.0,100)])
    #norm=LogNorm())
    ax4.axhline (0., linestyle="--")
    ax4.set_xlabel("(Laplacian filtered image value)**2")
    ax4.set_ylabel("chi2")
    fig2.savefig("flux_chi2_corr_%s_%s_%g.png" %(root, name, time) )
    fig2.show()
    stop
    '''
if __name__ == "__main__":
    import pdb, traceback
    try:
        main(sys.argv)
    # Narrowed from a bare `except:` so that KeyboardInterrupt and
    # SystemExit still terminate the process instead of dropping into
    # the debugger.
    except Exception:
        thingtype, value, tb = sys.exc_info()
        traceback.print_exc()
        # Drop into post-mortem debugging at the point of failure.
        pdb.post_mortem(tb)
|
from .duet_core import *
from functools import reduce
import math
import numpy as np
class SensEnv:
    """A sensitivity environment: maps variable names to sensitivity values.

    Instances are treated as immutable values -- every operation returns a
    new SensEnv rather than mutating in place.
    """
    def __init__(self, sens=None):
        # Fix: the original used a mutable default argument (`sens={}`),
        # which is a single dict shared by every call that omits `sens`.
        self.sens = {} if sens is None else sens

    def _add_dicts(self, a, b):
        """Pointwise sum of two dicts; missing keys count as 0."""
        return {x: a.get(x, 0) + b.get(x, 0) for x in set(a).union(b)}

    def __add__(self, other):
        """Combine two environments by adding sensitivities per variable."""
        return SensEnv(self._add_dicts(self.sens, other.sens))

    def extend(self, var, sens):
        """Return a copy with `sens` added onto variable `var`."""
        return self + SensEnv({var: DuetReal(sens)})

    def scale(self, amount):
        """Multiply every sensitivity by `amount`."""
        return SensEnv({k: amount * v for (k, v) in self.sens.items()})

    def exp(self):
        """Apply exp() to every sensitivity."""
        return SensEnv({k: np.exp(v) for (k, v) in self.sens.items()})

    def sqrt(self):
        """Apply sqrt() to every sensitivity."""
        return SensEnv({k: np.sqrt(v) for (k, v) in self.sens.items()})

    def truncate(self, amount):
        """Replace every sensitivity with the constant `amount`."""
        return SensEnv({k: amount for k in self.sens})

    def clip(self, amount):
        """Replace every sensitivity with the constant `amount`.

        NOTE(review): identical to truncate(); presumably the two names
        reflect different source-language operations -- confirm.
        """
        return SensEnv({k: amount for k in self.sens})

    def shrug(self, epsilon, delta):
        """Convert to an (epsilon, delta) privacy environment with the given
        constant costs on every tracked variable.
        """
        epsilons = {k: epsilon for k in self.sens}
        deltas = {k: delta for k in self.sens}
        return EDPrivEnv(epsilons, deltas)

    def renyi_shrug(self, alpha, epsilon):
        """Convert to a Renyi privacy environment with the given constant
        (alpha, epsilon) on every tracked variable.
        """
        alphas = {k: alpha for k in self.sens}
        epsilons = {k: epsilon for k in self.sens}
        return RenyiPrivEnv(alphas, epsilons)

    def get_max(self):
        """Largest sensitivity in the environment, or 0 when empty."""
        return max(self.sens.values()) if self.sens else 0

    def __str__(self):
        entries = ','.join(f'{k}: {v}' for (k, v) in self.sens.items())
        return f'[{entries}]'
from abc import ABC, abstractmethod

class PrivEnv(ABC):
    """Abstract interface for privacy-cost environments (maps from variable
    names to privacy costs)."""

    @abstractmethod
    def _add_dicts(self, a, b):
        """Combine two cost dictionaries by per-key addition."""

    @abstractmethod
    def _max_dicts(self, a, b):
        """Combine two cost dictionaries by per-key maximum."""

    @abstractmethod
    def penv_max(self, other):
        """Pointwise maximum of this environment and `other`."""

    @abstractmethod
    def extend(self):
        """Add a variable to the environment."""

    @abstractmethod
    def remove(self):
        """Drop a variable from the environment."""
class SinglePrivEnv(PrivEnv):
    """Base for environments tracking a single cost value per variable.

    All hooks are currently no-op placeholders (they return None).
    """

    def _add_dicts(self, a, b):
        """Placeholder: per-key addition is not implemented yet."""

    def _max_dicts(self, a, b):
        """Placeholder: per-key maximum is not implemented yet."""

    def penv_max(self, other):
        """Placeholder: pointwise maximum is not implemented yet."""

    def extend(self):
        """Placeholder: variable extension is not implemented yet."""

    def remove(self):
        """Placeholder: variable removal is not implemented yet."""
class DoublePrivEnv(PrivEnv):
    """Privacy environment carrying two cost dictionaries per variable
    (`fp` and `sp`, e.g. epsilon and delta in subclasses).
    """
    def __init__(self, fp=None, sp=None):
        # Fix: the original used mutable default arguments (`fp={}, sp={}`),
        # sharing a single dict pair between all default-constructed
        # instances.
        self.fp = {} if fp is None else fp
        self.sp = {} if sp is None else sp

    def _add_dicts(self, a, b):
        """Pointwise sum of two dicts; missing keys count as 0."""
        return {x: a.get(x, 0) + b.get(x, 0) for x in set(a).union(b)}

    def _sum_costs(self):
        """Total cost across all variables, as [fp_total, sp_total]."""
        a = 0
        b = 0
        for i in set(self.fp):
            a += self.fp.get(i,0)
        for i in set(self.sp):
            b += self.sp.get(i,0)
        return [a,b]

    def __add__(self, other):
        """Sequential composition: both cost dicts add pointwise."""
        all_fps = self._add_dicts(self.fp, other.fp)
        all_sps = self._add_dicts(self.sp, other.sp)
        return type(self)(all_fps, all_sps)

    def truncate(self,fp,sp):
        """Replace every variable's costs with the constants (fp, sp)."""
        fps = {k : fp for (k, v) in self.fp.items()}
        sps = {k : sp for (k, v) in self.sp.items()}
        return type(self)(fps, sps)

    def _max_dicts(self, a, b):
        """Pointwise maximum of two dicts; missing keys count as 0."""
        return {x: max(a.get(x, 0), b.get(x, 0)) for x in set(a).union(b)}

    def penv_max(self, other):
        """Pointwise maximum of two environments (both cost dicts)."""
        all_fps = self._max_dicts(self.fp, other.fp)
        all_sps = self._max_dicts(self.sp, other.sp)
        return type(self)(all_fps, all_sps)

    def extend(self, var, fp , sp):
        """Return a copy with costs (fp, sp) added onto variable `var`."""
        return self + type(self)({var: DuetReal(fp)}, {var: DuetReal(sp)})

    def remove(self, var):
        """Return a copy without `var`; missing variables are ignored.

        NOTE(review): if `var` exists in fp but not in sp, fp is popped
        before the KeyError fires, leaving a partial removal -- presumably
        the two dicts always share keys; confirm.
        """
        e = dict(self.fp)
        d = dict(self.sp)
        try:
            e.pop(var)
            d.pop(var)
        except KeyError:
            pass
        return type(self)(e, d)

    def __str__(self):
        # NOTE(review): assumes sp has every key fp has -- confirm.
        vals = [f'{k}: ({self.fp[k]}, {self.sp[k]})' for k in self.fp.keys()]
        es = ', '.join(vals)
        return f'[{es}]'
class EpsPrivEnv(SinglePrivEnv):
    """Single-cost environment; presumably plain epsilon-DP -- confirm."""
    pass
class ZCPrivEnv(SinglePrivEnv):
    """Single-cost environment; presumably zero-concentrated DP -- confirm."""
    pass
class TCPrivEnv(DoublePrivEnv):
    """Two-cost environment; presumably truncated concentrated DP -- confirm."""
    pass
class RenyiPrivEnv(DoublePrivEnv):
    """Privacy environment for Renyi differential privacy (RDP).

    `fp` holds per-variable Renyi orders (alpha); `sp` holds the
    corresponding epsilon costs.
    """
    def ed(self,delta):
        """Convert this RDP guarantee into an (epsilon, delta)-DP one,
        using eps' = eps + log(1/delta) / (alpha - 1) per variable.
        """
        alphas = self.fp
        epsilons = self.sp
        new_epsilon = {x: epsilons[x] + (np.log(1/delta) / (alphas[x] - 1)) for x
                       in self.sp.keys()}
        new_delta = {x: delta for x in self.sp.keys()}
        return EDPrivEnv(new_epsilon, new_delta)
    def _add_alphas(self, a, b):
        # When both environments use the same alpha for a variable, keep
        # that single alpha; otherwise fall back to adding them.
        return {x: a.get(x, 0) if (a.get(x, 0) == b.get(x, 0)) else a.get(x, 0) + b.get(x, 0) for x in set(a).union(b)}
    def __add__(self, other):
        """RDP composition: alphas combine via _add_alphas, epsilons add."""
        all_fps = self._add_alphas(self.fp, other.fp)
        all_sps = self._add_dicts(self.sp, other.sp)
        return type(self)(all_fps, all_sps)
    def _sum_costs(self):
        """Return [alpha, epsilon_total].

        NOTE(review): `a =` (not `+=`) keeps only one alpha value --
        probably intentional, since alphas are expected to be equal across
        variables (see _add_alphas), but confirm against callers.
        """
        a = 0
        b = 0
        for i in set(self.fp):
            a = self.fp.get(i,0)
        for i in set(self.sp):
            b += self.sp.get(i,0)
        return [a,b]
class EDPrivEnv(DoublePrivEnv):
    """(epsilon, delta) differential-privacy environment: `fp` holds the
    epsilons and `sp` the deltas, per variable.
    """
    def advcomp(self, k, delta_prime):
        """Apply advanced composition for k-fold composition with slack
        delta_prime: each epsilon becomes 2*eps*sqrt(2*k*ln(1/delta')) and
        each delta becomes k*delta + delta'.
        """
        growth = np.sqrt(2 * k * np.log(1 / delta_prime))
        new_epsilon = {var: 2 * eps * growth for (var, eps) in self.fp.items()}
        new_delta = {var: k * d + delta_prime for (var, d) in self.sp.items()}
        return EDPrivEnv(new_epsilon, new_delta)
class VariantException(Exception):
    """Error reserved for privacy-variant problems (no raise sites visible
    in this module; semantics defined by callers elsewhere)."""
    pass
class PrivacyFilterException(Exception):
    """Error reserved for privacy-filter violations (raised elsewhere)."""
    pass
class PartitionException(Exception):
    """Error reserved for data-partitioning problems (raised elsewhere)."""
    pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
@File : string.py
@Time : 2021/04/05 21:12:24
@Author : Liu ChaoYang
@Version : 1.0
@Contact : 2218932687@qq.com
'''
# here put the import lib
'''
word = '字符串'
sentence = "这是个句子"
paragraph = """
这是个段落
可以又多行组成
"""
print(word)
print(sentence)
print(paragraph)
'''
# String literal basics and escape characters.
my_str1 = "i am a student"
print(my_str1)
# my_str2 = 'i'm a student'  # would be a syntax error: unescaped quote
my_str2 = 'i\'m a student' # escape character
my_str = "he said \"he like you\"" # escape character
my_str = 'he said "he like you"' # same string, no escapes needed
print(my_str)
'''
常用
\'
\"
\n
\t
\r 回车
'''
# Renamed from `str`, which shadowed the built-in str() type.
city = "chengdu" # a string behaves like an indexable array
# print(city)
# print(city[0:6]) # [start:stop:step]
# print(city[0:6:2])
# print(city[5:])
# print(city[:5])
# print(city + ",hello")
# print('\n')
# print(city * 3)
print("hello\nchengdu")
print(r"hello\nchengdu") # raw string: escapes are not processed
'''
bytes.decode(encoding='utf-8',errors='strict')
join()
spilt()
'''
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.