text stringlengths 38 1.54M |
|---|
import pygame
from pygame.sprite import Sprite
class Player(Sprite):
    """The player-controlled Mario sprite.

    Tracks motion state ("idle" / "running" / "jumping"), facing
    direction, jump and bounce arcs, and cycles the run animation.
    """

    def __init__(self, hub, pos_x=50, pos_y=50):
        """Initialize default values.

        Args:
            hub: central game object exposing the screen, controller,
                camera, gamemode and constants (RIGHT, LEFT, GRAVITY).
            pos_x: starting x coordinate in pixels.
            pos_y: starting y coordinate in pixels.
        """
        super().__init__()
        self.hub = hub
        self.screen = hub.main_screen
        self.screen_rect = self.screen.get_rect()
        self.controller = hub.controller
        self.camera = hub.camera
        self.gamemode = hub.gamemode
        self.mario_motion_state = "idle"
        self.mario_upgrade_state = "regular"
        self.mario_facing_direction = hub.RIGHT
        self.mario_image_flipped = False
        # Which run-cycle frame is shown and how often (ms) it advances.
        self.index = 0
        self.change_freq = 120
        self.player_clock = pygame.time.get_ticks() + self.change_freq
        # Regular Mario sprites.
        self.image_idle = pygame.image.load("imgs/Mario/RegularMario/MarioStanding.png")
        self.image_run = [pygame.image.load('imgs/Mario/RegularMario/MarioRun01.gif'),
                          pygame.image.load('imgs/Mario/RegularMario/MarioRun02.gif'),
                          pygame.image.load('imgs/Mario/RegularMario/MarioRun03.gif')]
        self.image_jump = pygame.image.load('imgs/Mario/RegularMario/MarioJumping.png')
        # Scale every sprite to the 50x50 collision size.
        self.prep_mario_images()
        # Current image and its collision rect.
        self.current_image = self.image_idle
        self.rect = self.current_image.get_rect()
        # Set initial position.
        self.rect.x = pos_x
        self.rect.y = pos_y
        # Fall rate, run velocity and jump/bounce state.
        self.gravity = self.hub.GRAVITY
        self.velocity = 10
        self.is_jumping = False
        self.is_bouncing = False
        # Distance already travelled in the current jump/bounce arc.
        self.counter_jump = 0
        self.jump_max_height = 350
        self.jump_velocity = 25  # How fast the player rises per frame
        self.counter_bounce = 0
        self.bounce_max_height = 100
        self.bounce_velocity = 35
        self.is_dead = False

    def update(self):
        """Update the player logic for one frame."""
        self.update_state()
        # Apply gravity.
        self.rect.y += self.gravity
        # Apply horizontal movement.
        if self.controller.move_right:
            self.rect.x += self.velocity
            self.mario_motion_state = "running"
            self.mario_facing_direction = self.hub.RIGHT
        if self.controller.move_left:
            self.rect.x -= self.velocity
            self.mario_motion_state = "running"
            self.mario_facing_direction = self.hub.LEFT
        if not self.controller.move_left and not self.controller.move_right:
            if not self.gamemode.mario_in_air:
                self.mario_motion_state = "idle"
                self.reset_animations()
        if self.controller.jump:
            # Consume the press so holding the space bar does not
            # re-trigger the jump.
            self.controller.jump = False
            # BUGFIX: was `not self.is_jumping or not self.is_bouncing`,
            # which is true whenever either flag is clear and therefore
            # allowed starting a jump mid-jump/bounce; a new jump must
            # start only when *neither* arc is active.
            if not self.is_jumping and not self.is_bouncing:
                self.jump()
        # Advance the jump arc until its maximum height is reached.
        if self.is_jumping:
            if self.counter_jump < self.jump_max_height:
                self.counter_jump += self.jump_velocity
                self.rect.y -= self.jump_velocity
            if self.counter_jump > self.jump_max_height:
                self.is_jumping = False
        # Advance the bounce arc the same way (shorter and faster).
        if self.is_bouncing:
            if self.counter_bounce < self.bounce_max_height:
                self.counter_bounce += self.bounce_velocity
                self.rect.y -= self.bounce_velocity
            if self.counter_bounce > self.bounce_max_height:
                self.is_bouncing = False
        self.check_collision()

    def draw(self):
        """Blit the current sprite frame at the player's rect."""
        self.screen.blit(self.current_image, self.rect)

    def check_collision(self):
        """Check the player's collision with screen edges and camera."""
        # Keep the player inside the left screen edge.
        if self.rect.left < self.screen_rect.left:
            self.rect.left = self.screen_rect.left
        # Past the middle of the screen the camera scrolls instead of
        # the player moving further right.
        if self.rect.right > self.screen_rect.right / 2:
            if not self.camera.camera_hit_right_screen:
                self.rect.right = self.screen_rect.width / 2
            # Once the camera reached the level's right edge the player
            # may keep moving across the right half of the screen.
            # NOTE(review): this reads `camera_hit_right_screen` but sets
            # `player_hit_right_screen` — confirm both attributes exist
            # on the camera and are intended to differ.
            if self.rect.right > self.screen_rect.right:
                self.rect.right = self.screen_rect.right
                self.camera.player_hit_right_screen = True
            elif self.rect.right < self.screen_rect.right:
                self.camera.player_hit_right_screen = False
            # Move camera respective to player movement.
            self.camera.moveCamera(self.velocity)
        # Falling below the screen kills Mario.
        if self.rect.top > self.screen_rect.bottom:
            self.die()

    def jump(self):
        """Start a jump arc."""
        self.is_jumping = True
        self.gamemode.mario_in_air = True
        self.mario_motion_state = "jumping"

    def bounce(self):
        """Start a bounce arc, e.g. after landing on an enemy."""
        self.is_bouncing = True
        self.gamemode.mario_in_air = True
        self.mario_motion_state = "jumping"
        print("Mario Bounced off AI")

    def throw(self):
        """Throw a projectile (not implemented yet)."""
        pass

    def get_bigger(self):
        """Upgrade to big Mario (not implemented yet)."""
        pass

    def get_smaller(self):
        """Downgrade to small Mario (not implemented yet)."""
        pass

    def die(self):
        """Kill Mario once: decrement lives and flag the gamemode."""
        if not self.is_dead:
            print("mario is dead")
            self.gamemode.lives -= 1
            self.gamemode.mario_is_dead = True
            self.is_dead = True

    def become_fire_mario(self):
        """Upgrade to fire Mario (not implemented yet)."""
        pass

    def set_image_direction(self):
        """Mirror the current image when Mario faces left."""
        if self.mario_facing_direction == self.hub.LEFT:
            self.current_image = pygame.transform.flip(self.current_image, True, False)

    def prep_mario_images(self):
        """Scale all regular-Mario sprites to 50x50 pixels."""
        self.image_idle = pygame.transform.scale(self.image_idle, (50, 50))
        for i in range(len(self.image_run)):
            self.image_run[i] = pygame.transform.scale(self.image_run[i], (50, 50))
        self.image_jump = pygame.transform.scale(self.image_jump, (50, 50))

    def reset_jump(self):
        """Reset mario's jump when mario hits the ground."""
        self.gamemode.mario_in_air = False
        self.is_jumping = False
        self.counter_jump = 0

    def reset_bounce(self):
        """Reset Mario's bounce when mario hits the ground or enemy."""
        self.gamemode.mario_in_air = False
        self.is_bouncing = False
        self.counter_bounce = 0

    def update_state(self):
        """Pick the sprite frame matching the current motion state."""
        # BUGFIX: the string comparisons below used `is`, which tests
        # object identity and only worked via CPython string interning
        # (emits a SyntaxWarning on modern Pythons); use `==`.
        if self.mario_motion_state == "jumping" or self.gamemode.mario_in_air:
            self.current_image = self.image_jump
            self.set_image_direction()
        else:
            if self.mario_motion_state == "idle":
                self.current_image = self.image_idle
                self.set_image_direction()
            if self.mario_motion_state == "running":
                # Advance the run animation on a fixed millisecond clock.
                if pygame.time.get_ticks() > self.player_clock:
                    self.player_clock = pygame.time.get_ticks() + self.change_freq
                    self.index += 1
                    self.index %= len(self.image_run)
                self.current_image = self.image_run[self.index]
                self.set_image_direction()

    def reset_animations(self):
        """Restart the run cycle from its first frame."""
        self.index = 0
        self.player_clock = pygame.time.get_ticks()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import math
import time
def compute(word):
    """Build the 'last word': place each letter at the front when it is
    greater than or equal to the current first letter, otherwise at the
    back, and return the resulting string.
    """
    out = word[0]
    for ch in word[1:]:
        out = ch + out if ch >= out[0] else out + ch
    return out
if __name__ == "__main__":
    # Input file: first line is the number of cases, then one word per line.
    with open(sys.argv[1], 'r') as f:
        num_cases = int(f.readline())
        for case_no in range(1, num_cases + 1):
            word = f.readline().strip()
            print('Case #{}: {}'.format(case_no, compute(word)))
|
# Count how many characters of t also occur in s, then print that count + 1.
s = input()
t = input()
chars_in_s = set(s)
ans = sum(1 for ch in t if ch in chars_in_s)
print(ans + 1)
|
import os
import sys
# Limit OpenBLAS threading before numpy is imported.
os.environ['OPENBLAS_NUM_THREADS'] = '1'
import numpy as np
import pickle
import dill

# Command-line arguments for one fragment calculation.
directory = sys.argv[1] #should be the directory of mim level
single_frag = sys.argv[2]  # fragment number, used in file names
folder = sys.argv[3]       # working folder containing `directory`
tmpdir = sys.argv[4]       # Slurm $TMPDIR where the status marker goes
frag_name = "fragment" + single_frag + ".dill"
print(folder)
print(directory)
print("fragment name:", frag_name)
os.chdir(folder)
os.chdir(directory)
# status stays 1 on success, set to -1 when the backend run fails.
status = 1
#undill and run e, g
infile = open(frag_name, 'rb')
new_class = dill.load(infile)
try:
    new_class.qc_backend()
# NOTE(review): bare except — catches *everything* (including
# KeyboardInterrupt); any failure is treated as "job died".
except:
    print("Job died:", frag_name)
    status = -1
    # Write a redo script that re-dills the fragment (after optional
    # manual edits) and resubmits the job via sbatch.
    name = "redo_frag" + single_frag + ".py"
    #submit_line ='sbatch %s -J %s -o "%s" --export=LEVEL="%s",BATCH="%s",FOLDER="%s" slurm_pbs.sh'%(name+"redo", directory+"/"+name+"redo.log", directory, name, folder) ##For TinkerCliffs/Huckleberry
    submit_line = 'sbatch -J %s -o "%s" -c "%s" --export=LEVEL="%s",FOLDER="%s" slurm_pbs.sh "%s"'%(name+"redo", directory+"/"+name+"redo.log", 1, directory, folder, single_frag) ##For TinkerCliffs/Huckleberry
    # Template for the generated redo script; {name} and {submit_line}
    # are filled in by .format(**context) below.
    lines = """import os
import sys
import dill
old_frag = open('{name}', 'rb')
fragment_obj = dill.load(old_frag)
#make changes to fragment_obj
# e.g. fragment_obj.qc_class.spin = 0
new_frag = open('{name}', 'wb')
dill.dump(fragment_obj, new_frag)
old_frag.close()
new_frag.close()
cmd = '{submit_line}'
os.chdir('../../')
os.system(cmd)
"""
    context = {
        "name":frag_name,
        "submit_line":submit_line
    }
    with open(name, "w") as myfile:
        myfile.write(lines.format(**context))
        # Redundant: the `with` block already closes the file.
        myfile.close()
finally:
    ##redill with updated fragment e, g, hess, apt, etc
    infile.close()
    outfile = open(frag_name, "wb")
    dill.dump(new_class, outfile)
    outfile.close()
    #update status of calculation
    status_name = frag_name.replace(".dill", ".status")
    out_stat = open(status_name, "wb")
    dill.dump(status, out_stat)
    out_stat.close()
    #put status file in $TMPDIR from Slurm
    os.chdir(tmpdir)
    out_stat = open(status_name, "w")
    out_stat.write("status is done")
    out_stat.close()
    os.chdir('../')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import glob
from lxml import objectify
def formatdate(date):
    """Convert an ISO timestamp ('YYYY-MM-DDThh:mm:ss') to 'DDMMYYYY'."""
    year, month, day = date.split('T')[0].split('-')
    return day + month + year
# Group the lines of a SPED fiscal file by their record type (field 1),
# then append one 0150 participant record per CT-e XML found in cte/.
sped = dict()
part_list = []
cte_list = []
arq = open('1.txt', 'r')
texto = arq.readlines()
for line in texto:
    # SPED lines are pipe-delimited; field [1] is the record code.
    line_split = line.split("|")
    if line_split[1] not in sped:
        sped[line_split[1]] = []
    sped[str(line_split[1])].append(line)
arq.close()
for file in glob.glob('cte/*.xml'):
    arq = open(file, 'r')
    texto = arq.read()
    # lxml.objectify.fromstring expects bytes when the XML declares an
    # encoding, hence the explicit encode.
    texto = texto.encode('utf-8')
    arq.close()
    cte = objectify.fromstring(texto)
    # Build a 0150 (participant) record from the CT-e issuer data.
    participante = "|0150|" + str(cte.cteProc.CTe.infCte.emit.CNPJ) + "|" + \
                   str(cte.cteProc.CTe.infCte.emit.xNome) + "|" + "1058|" + \
                   str(cte.cteProc.CTe.infCte.emit.CNPJ) + "||" + \
                   str(cte.cteProc.CTe.infCte.emit.IE) + "|" + \
                   str(cte.cteProc.CTe.infCte.emit.enderEmit.cMun) + "||" + \
                   str(cte.cteProc.CTe.infCte.emit.enderEmit.xLgr) + "|" + \
                   str(cte.cteProc.CTe.infCte.emit.enderEmit.nro) + "|"
    # Address complement (xCpl) is optional in the CT-e schema.
    if hasattr(cte.cteProc.CTe.infCte.emit.enderEmit, 'xCpl'):
        participante = participante + str(cte.cteProc.CTe.infCte.emit.enderEmit.xCpl) + "|"
    else:
        participante = participante + "|"
    participante = participante + str(cte.cteProc.CTe.infCte.emit.enderEmit.xBairro) + "|"
    # NOTE(review): raises KeyError if 1.txt contains no 0150 records —
    # confirm the input always has at least one.
    sped['0150'].append(participante)
|
"""Modules for making crystallographic plane surfaces."""
from jarvis.core.atoms import Atoms
from jarvis.core.utils import ext_gcd
import numpy as np
from jarvis.analysis.structure.spacegroup import Spacegroup3D
from numpy.linalg import norm
from numpy import gcd
from collections import OrderedDict
def wulff_normals(miller_indices=[], surface_energies=[]):
    """Obtain Wulff Normals.

    Args:
        miller_indices : Miller indices
        surface_energies : corresponding surface energies

    Returns: list of [scaled value, miller index] pairs, where each
    value is the energy times the norm of its index, divided by the
    lowest surface energy.
    """
    lowest_energy = min(surface_energies)
    return [
        [energy * np.linalg.norm(index) / float(lowest_energy), index]
        for index, energy in zip(miller_indices, surface_energies)
    ]
class Surface(object):
    """Get surface object of arbitrary atoms object and miller index."""

    def __init__(
        self,
        atoms=None,
        indices=(0, 0, 1),
        layers=3,
        thickness=25,
        vacuum=18.0,
        tol=1e-10,
        from_conventional_structure=True,
    ):
        """Initialize the class.

        Args:
            atoms: jarvis.core.Atoms object
            indices: Miller indices
            layers: Number of surface layers
            thickness: Provide thickness instead of layers
            vacuum: vacuum padding
            tol: tolerance during dot product
            from_conventional_structure: whether to use the conv. atoms
        """
        # NOTE: `indices` default is an immutable tuple (was a shared
        # mutable list default); np.array() accepts either form.
        self.indices = np.array(indices)
        self.from_conventional_structure = from_conventional_structure
        if self.from_conventional_structure:
            self.atoms = Spacegroup3D(atoms).conventional_standard_structure
        else:
            self.atoms = atoms
        self.tol = tol
        self.vacuum = vacuum
        self.layers = layers
        self.thickness = thickness
        # Note thickness overwrites layers

    def to_dict(self):
        """Convert to a dictionary."""
        d = OrderedDict()
        d["atoms"] = self.atoms.to_dict()
        d["indices"] = self.indices
        d["tol"] = self.tol
        d["vacuum"] = self.vacuum
        d["layers"] = self.layers
        d["from_conventional_structure"] = self.from_conventional_structure
        return d

    @classmethod
    def from_dict(cls, d=None):
        """Construct class from a dictionary.

        BUGFIX: the first parameter of this classmethod was named
        `self` and the default was a shared mutable dict; use `cls`
        and a None sentinel instead.
        """
        if d is None:
            d = {}
        return cls(
            atoms=Atoms.from_dict(d["atoms"]),
            indices=d["indices"],
            tol=d["tol"],
            vacuum=d["vacuum"],
            layers=d["layers"],
            from_conventional_structure=d["from_conventional_structure"],
        )

    def make_surface(self):
        """Generate specified surface. Modified from ase package."""
        atoms = self.atoms
        h_index, k_index, l_index = self.indices
        # Element-wise comparison: which Miller indices are zero.
        h0, k0, l0 = self.indices == 0
        if h0 and k0 or h0 and l0 or k0 and l0:  # if two indices are zero
            # Axis-aligned surface: pick a permutation of the unit cell.
            if not h0:
                c1, c2, c3 = [(0, 1, 0), (0, 0, 1), (1, 0, 0)]
            if not k0:
                c1, c2, c3 = [(0, 0, 1), (1, 0, 0), (0, 1, 0)]
            if not l0:
                c1, c2, c3 = [(1, 0, 0), (0, 1, 0), (0, 0, 1)]
        else:
            # General (hkl): build an in-plane basis via the extended
            # Euclidean algorithm (same construction as ASE's surface()).
            p, q = ext_gcd(k_index, l_index)
            a1, a2, a3 = self.atoms.lattice_mat  # .lat_lengths()
            # constants describing the dot product of basis c1 and c2:
            # dot(c1,c2) = k1+i*k2, i in Z
            k1 = np.dot(
                p * (k_index * a1 - h_index * a2)
                + q * (l_index * a1 - h_index * a3),
                l_index * a2 - k_index * a3,
            )
            k2 = np.dot(
                l_index * (k_index * a1 - h_index * a2)
                - k_index * (l_index * a1 - h_index * a3),
                l_index * a2 - k_index * a3,
            )
            if abs(k2) > self.tol:
                # Choose i to make the in-plane vectors as orthogonal
                # as possible.
                i = -int(round(k1 / k2))
                p, q = p + i * l_index, q - i * k_index
            a, b = ext_gcd(p * k_index + q * l_index, h_index)
            c1 = (p * k_index + q * l_index, -p * h_index, -q * h_index)
            c2 = np.array((0, l_index, -k_index)) // abs(gcd(l_index, k_index))
            c3 = (b, a * p, a * q)
        lattice = atoms.lattice_mat  # .lat_lengths()
        basis = np.array([c1, c2, c3])
        # Re-express fractional coordinates in the new surface basis and
        # wrap them into the unit cell.
        scaled = np.linalg.solve(basis.T, np.array(atoms.frac_coords).T).T
        scaled -= np.floor(scaled + self.tol)
        new_coords = scaled
        tmp_cell = np.dot(basis, lattice)
        M = np.linalg.solve(lattice, tmp_cell)
        cart_coords = np.dot(scaled, lattice)
        new_coords = np.dot(cart_coords, M)
        new_atoms = Atoms(
            lattice_mat=tmp_cell,
            coords=new_coords,
            elements=atoms.elements,
            cartesian=True,
        )
        # Note: thickness (when given) overrides the layer count.
        if self.thickness is not None and (self.thickness) > 0:
            self.layers = int(self.thickness / new_atoms.lattice.c) + 1
        surf_atoms = new_atoms.make_supercell_matrix([1, 1, self.layers])
        new_lat = surf_atoms.lattice_mat  # lat_lengths()
        a1 = new_lat[0]
        a2 = new_lat[1]
        a3 = new_lat[2]
        # Make the third lattice vector perpendicular to the surface plane.
        new_lat = np.array(
            [
                a1,
                a2,
                np.cross(a1, a2)
                * np.dot(a3, np.cross(a1, a2))
                / norm(np.cross(a1, a2)) ** 2,
            ]
        )
        a1 = new_lat[0]
        a2 = new_lat[1]
        a3 = new_lat[2]
        # Rotate into a standard orientation: a1 along x, a2 in the
        # x-y plane, a3 along z.
        latest_lat = np.array(
            [
                (np.linalg.norm(a1), 0, 0),
                (
                    np.dot(a1, a2) / np.linalg.norm(a1),
                    np.sqrt(
                        np.linalg.norm(a2) ** 2
                        - (np.dot(a1, a2) / np.linalg.norm(a1)) ** 2
                    ),
                    0,
                ),
                (0, 0, np.linalg.norm(a3)),
            ]
        )
        M = np.linalg.solve(new_lat, latest_lat)
        new_cart_coords = surf_atoms.cart_coords  # np.dot(scaled,lattice)
        new_coords = np.dot(new_cart_coords, M)
        new_atoms = Atoms(
            lattice_mat=latest_lat,
            elements=surf_atoms.elements,
            coords=new_coords,
            cartesian=True,
        ).center_around_origin()
        # Wrap fractional coordinates back into [0, 1).
        frac_coords = new_atoms.frac_coords
        frac_coords[:] = frac_coords[:] % 1
        new_atoms = Atoms(
            lattice_mat=latest_lat,
            elements=surf_atoms.elements,
            coords=frac_coords,
            cartesian=False,
        )
        new_lat = new_atoms.lattice_mat
        new_cart_coords = new_atoms.cart_coords
        elements = new_atoms.elements
        # Pad the slab with vacuum along z.
        new_lat[2][2] = new_lat[2][2] + self.vacuum
        with_vacuum_atoms = Atoms(
            lattice_mat=new_lat,
            elements=elements,
            coords=new_cart_coords,
            cartesian=True,
        )
        return with_vacuum_atoms
"""
if __name__ == "__main__":
box = [[2.715, 2.715, 0], [0, 2.715, 2.715], [2.715, 0, 2.715]]
coords = [[0, 0, 0], [0.25, 0.25, 0.25]]
elements = ["Si", "Si"]
Si = Atoms(lattice_mat=box, coords=coords, elements=elements)
Surface(atoms=Si, indices=[1, 1, 1]).make_surface()
su = [
0.8582640971273426,
0.9334963319196496,
0.9360461382184894,
0.9419095687284446,
0.9802042233627004,
0.9875446840480956,
1.0120634294466684,
1.0126231880823566,
1.0241538763302507,
1.0315901848682645,
1.0318271257831195,
1.0331286888257398,
1.0344297141291043,
1.0388709097092674,
1.040277640596931,
1.042494119906149,
1.04453679643896,
1.0450598648770613,
1.045076130339553,
1.0469310544190567,
1.0491015867538047,
1.0495494553198788,
1.0534717916897114,
1.0535201391639715,
1.054233162444997,
1.0579157863887743,
1.0595676718662346,
1.0601381085497692,
1.109580394178689,
]
ml = [
[0, 0, 1],
[2, 0, 3],
[2, 0, 1],
[1, 0, 1],
[3, 0, 2],
[1, 0, 3],
[3, 1, 1],
[3, 0, 1],
[3, 1, 3],
[3, -1, 1],
[3, 1, 0],
[3, 2, 1],
[3, 3, 1],
[1, 0, 0],
[2, 2, 1],
[3, -1, 3],
[3, -1, 2],
[3, 3, 2],
[3, 2, 2],
[2, -1, 3],
[3, 2, 0],
[3, 2, 3],
[1, 1, 1],
[1, 0, 2],
[3, 1, 2],
[2, -1, 2],
[3, -1, 0],
[2, 2, 3],
[1, 1, 0],
]
nm = wulff_normals(miller_indices=ml, surface_energies=su)
print(nm)
from jarvis.core.lattice import Lattice
lat = Lattice([[4.05, 0, 0], [0, 4.05, 0], [0, 0, 4.05]])
pmg_wulff = WulffShape(lat, ml, su)
print(pmg_wulff.facets)
"""
|
import network_functions as nf
import matplotlib.pyplot as plt
import generic_plot_functions as pf
if __name__ == '__main__':
    cities = nf.get_list_cities_names()
    area_population_file = 'results/all/json/area_population.json'
    # Load info about areas and populations for each city and plot them.
    area_population_dict = nf.load_json(area_population_file)
    areas = nf.get_list_sorted_values('area', area_population_dict)
    populations = nf.get_list_sorted_values('population', area_population_dict)
    fig1 = pf.plot_bars_with_subplots(2, 1, cities,
                                      [areas, populations],
                                      ['r', 'b'],
                                      ['areas', 'population'])
    fig_name = './results/all/plots/basic_measures/area_population_plot.png'
    plt.savefig(fig_name)
    plt.close()
|
# plot how the singular strategy and derivative of the fitness gradient varies with a parameter

import matplotlib.pyplot as plt
import pandas as pd


def read_varied_rows(fname, par_name, id_default):
    """Read a results csv and return the rows where `par_name` varies.

    Keeps every row whose `par_name` differs from the default run, plus
    the default run itself, sorted by `par_name` so lines plot smoothly.
    """
    df = pd.read_csv(fname)
    # find all the rows where par_name is not at its default value
    par_val_default = df.iloc[id_default][par_name]
    varied = df[df[par_name] != par_val_default]
    # BUGFIX: DataFrame.append was deprecated and removed in pandas 2.0;
    # concatenate the one-row default slice instead (same resulting rows).
    varied = pd.concat([varied, df.iloc[[id_default]]])
    return varied.sort_values(by=[par_name])  # sort so we can plot the line nicely


# parameters
# ---

'''
par_name = 'f'
idx_include = list(range(37))

par_name = 'r'
idx_include = list(range(28))

par_name = 'c'
idx_include = list(range(28))
'''

par_name = 'p_cat'
idx_include = None  # None means use every varied row

# xlabels
xlabels = {
    'f': r'proportion mainland disperse, $f$',
    'c': r'dispersal cost, $c$',
    'r': r'intrinsic growth rate, $r$',
    'p_cat': r'probability of catastrophe, $p_c$',
}

# which results to use
suffix = '_1'    # which parameter set to use
ID_default = 0   # where is the default value

# where results will be stored
dir_results = '../../results/circular/'

# get the singular strategy + divergence strength
# ---
df2 = read_varied_rows(dir_results + 'sing_strat' + suffix + '.csv',
                       par_name, ID_default)
if idx_include is None:
    fV = df2[par_name].values
    m_ssV = df2['m_ss'].values
    ddfit_ssV = df2['ddfit_ss'].values
else:
    fV = df2[par_name].values[idx_include]
    m_ssV = df2['m_ss'].values[idx_include]
    ddfit_ssV = df2['ddfit_ss'].values[idx_include]

# get the high-dispersal strategy
# ---
df2 = read_varied_rows(dir_results + 'dimorph_steady_state' + suffix + '.csv',
                       par_name, ID_default)
f2V = df2[par_name].values
m_res2V = df2['m_res2_ss'].values

# plot stacked
# ---
ylabel_coords = (-0.14, 0.5)
fig, (ax1, ax2, ax3) = plt.subplots(3, sharex=True, figsize=(7,7))
ax1.plot(fV, m_ssV, color='black')
ax2.plot(fV, ddfit_ssV, color='black')
ax3.plot(f2V, m_res2V, color='black')
from matplotlib import ticker
ax1.set_ylabel('singular strategy,\n' + r'$m^*$', fontsize='large')
ax1.get_yaxis().set_label_coords(*ylabel_coords)
ax1.ticklabel_format(axis="y", style="sci", scilimits=(0,0))
ax2.set_ylabel('divergent selection\nstrength', fontsize='large')
ax2.get_yaxis().set_label_coords(*ylabel_coords)
ax3.set_ylabel('high-dispersal\nmorph, ' + r'$m_H^*$', fontsize='large')
ax3.get_yaxis().set_label_coords(*ylabel_coords)
ax3.set_xlabel(xlabels[par_name], fontsize='x-large')
plt.tight_layout()
plt.savefig(dir_results + 'combined_vary_' + par_name + '.pdf')
plt.close()
|
import pygame
def handle_keys(key):
    """Translate a pygame event into an action dict.

    Returns {"move": (dx, dy)} for arrow keys, {"exit": True} for
    Escape or a window-close event, {"fullscreen": True} for
    Alt+Enter, and {} for anything else.
    """
    if key.type == pygame.QUIT:
        return {"exit": True}
    if key.type != pygame.KEYDOWN:
        return {}
    button = key.key
    # movement keys
    movement = {
        pygame.K_UP: (0, -1),
        pygame.K_DOWN: (0, 1),
        pygame.K_LEFT: (-1, 0),
        pygame.K_RIGHT: (1, 0),
    }
    if button in movement:
        return {"move": movement[button]}
    if button == pygame.K_ESCAPE:
        return {"exit": True}
    mods = pygame.key.get_pressed()
    if button == pygame.K_RETURN and mods[pygame.K_RALT]:
        # Alt+Enter: toggle fullscreen
        return {"fullscreen": True}
    return {}
|
class Solution:
    def canConstruct(self, ransomNote: str, magazine: str) -> bool:
        """Return True when every character of ransomNote can be taken
        from magazine without reusing any magazine character."""
        available = {}
        # Tally how many of each character the magazine provides.
        for letter in magazine:
            available[letter] = available.get(letter, 0) + 1
        # Spend characters; fail as soon as one is unavailable.
        for letter in ransomNote:
            remaining = available.get(letter, 0)
            if remaining == 0:
                return False
            if remaining == 1:
                del available[letter]
            else:
                available[letter] = remaining - 1
        return True
# Quick manual check of the solution.
print(Solution().canConstruct('aa', 'aab'))
|
import discord
from discord.ext import commands
import json
from mojang import MojangAPI
async def get_data(ctx, member, users):
    """Build and send a "Lookup" embed summarising a guild member.

    Args:
        ctx: command context, used for the guild's role list and the reply.
        member: the discord.Member being looked up.
        users: parsed Economy.json mapping user-id strings to records.

    Members absent from `users` are silently ignored (no reply is sent).
    """
    if str(member.id) in users:
        bal = users[str(member.id)]["purse"]
        hasvip = "User does not have vip"
        vip = discord.utils.find(
            lambda r: r.name == '-V.I.P-', ctx.message.guild.roles)
        vipPluss = discord.utils.find(
            lambda r: r.name == 'VIP+', ctx.message.guild.roles)
        # Either VIP role yields the same label.
        if vip in member.roles:
            hasvip = "User has vip"
        if vipPluss in member.roles:
            hasvip = "User has vip"
        # member.roles is ordered lowest-to-highest; reverse so the
        # member's highest role comes first.
        roles = member.roles
        roles.reverse()
        highest = roles[0]
        highestRole = discord.utils.find(
            lambda r: r.name == str(highest), ctx.message.guild.roles)
        # NOTE(review): assumes a display name containing '.' carries a
        # single trailing marker character that must be stripped to get
        # the Minecraft name — confirm this convention.
        if "." in member.display_name:
            nickname = member.display_name[:-1]
        if not "." in member.display_name:
            nickname = member.display_name
        embed = discord.Embed(
            title="Lookup", description="Veiw basic stats of this user.", color=0xed1212)
        embed.set_author(name="Lifeless SMP")
        embed.add_field(name="Does member have vip",
                        value=hasvip, inline=False)
        embed.add_field(name="Balance", value=bal, inline=False)
        embed.add_field(name="Highest role", value=highestRole, inline=False)
        embed.add_field(name="Discord id", value=member.id)
        embed.add_field(name="Minecraft Name",
                        value=nickname, inline=False)
        embed.add_field(name="UUID", value=MojangAPI.get_uuid(nickname))
        embed.set_footer(text="Enjoy your stay with us! The prfix is -")
        await ctx.send(embed=embed)
class misc(commands.Cog):
    """Miscellaneous commands cog: -sus and -lookup."""

    def __init__(self, bot):
        self.bot = bot

    @commands.command(description="For sus people, Usage '-sus'")
    async def sus(self, ctx):
        """Reply with a meme embed."""
        embed = discord.Embed(title="Cought🎥In📷4K📸", color=0xc90808)
        await ctx.send(embed=embed)

    @commands.command(description="Lookup a user with their id or mention")
    async def lookup(self, ctx, member: discord.Member):
        """Show economy/role stats for a member, read from Economy.json."""
        with open("Economy.json", "r") as f:
            users = json.load(f)
        await get_data(ctx, member, users)

    @lookup.error
    async def lookup_error(self, ctx, error):
        """Error handler for -lookup: friendly message on missing arg,
        otherwise log and tell the user something went wrong."""
        if isinstance(error, commands.MissingRequiredArgument):
            await ctx.send("You didnt specify a user to lookup")
        else:
            print(error)
            await ctx.send("Something happend dm xbtq")
def setup(bot):
    """Entry point used by discord.py's load_extension to register the cog."""
    bot.add_cog(misc(bot))
|
from django.db import models
class Subscriber(models.Model):
    """A subscriber with a name, phone number and free-form notes."""
    name = models.CharField(max_length=128)
    phone = models.CharField(max_length=128)
    descriptions = models.TextField()

    def __str__(self):
        # Human-readable label shown e.g. in the admin:
        # "Пользователь <name> <phone>" (Russian for "User").
        return "Пользователь %s %s" % (self.name, self.phone)

    class Meta:
        verbose_name = 'MySubscriber'
        verbose_name_plural = 'A lot of Subscribers'
|
# Generated by Django 2.1.7 on 2019-08-25 03:02
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: widen Country.name to max_length=200.

    Do not edit by hand beyond comments — Django generated this file.
    """

    dependencies = [
        ('stockapp', '0003_auto_20190825_0830'),
    ]

    operations = [
        migrations.AlterField(
            model_name='country',
            name='name',
            field=models.CharField(max_length=200),
        ),
    ]
|
# Python 2 script: plays a 440 Hz reference tone followed by either the
# same tone or a 445 Hz tone, and scores the listener's same/different
# guesses over 10 trials.
import pyaudio
import numpy as np
import random
p = pyaudio.PyAudio()
volume = 0.5     # range [0.0, 1.0]
fs = 44100       # sampling rate, Hz, must be integer
duration = 1.0   # in seconds, may be float
f = 440.0        # sine frequency, Hz, may be float
f2 = 445.0       # sine frequency, Hz, may be float
stream = p.open(format=pyaudio.paFloat32,
                channels=1,
                rate=fs,
                output=True)
# Reference tone at frequency f.
samples = (np.sin(2*np.pi*np.arange(fs*duration)*f/fs)).astype(np.float32)
#stream.write(volume*samples)
correct=0
wrong=0
for i in range(10):
    # NOTE(review): prints i+0 (i.e. trials numbered from 0) and reveals
    # the random draw x to the listener — presumably debug leftovers.
    print "try number:", i+0
    print "correct:", correct
    print "wrong:", wrong
    x=random.random()
    print x
    # x > 0.5: play the different (445 Hz) tone; otherwise the same tone.
    if x>0.5:
        f3=f2
    else:
        f3=f
    samples2 = (np.sin(2*np.pi*np.arange(fs*duration)*f3/fs)).astype(np.float32)
    stream.write(volume*samples)
    stream.write(volume*samples2)
    z=raw_input( "Same? [y/n]:")
    # play. May repeat with different volume values (if done interactively)
    # "y" is correct when the tones matched (x <= 0.5), "n" when they differed.
    if ('y' in z and x<=0.5) or ('n' in z and x > 0.5):
        correct+=1
        print "right!"
    else:
        wrong+=1
        print "wrong!"
stream.stop_stream()
stream.close()
p.terminate()
print "Your score. Correct:", correct, "wrong", wrong
|
"""
Getting existing single job webhook configuration info in Mitto instance.
"""
import os
import sys
from dotenv import load_dotenv
from create_job_webhook import main as created_job_webhook
from mitto_sdk import Mitto
load_dotenv()
BASE_URL = os.getenv("MITTO_BASE_URL")
API_KEY = os.getenv("MITTO_API_KEY")
WEBHOOK = {
"url": "https://webhook.site/83d6607a-0118-478d-a68c-cf2ab4645314",
"method": "POST",
"event_type": "JOB_COMPLETE",
"content_type": "application/json",
"body": '{ "text": "hello world" }',
"enabled": True
}
def main(base_url=BASE_URL, api_key=API_KEY, webhook=WEBHOOK):
    """Create a job webhook and return its configuration info.

    Args:
        base_url: Mitto instance base URL.
        api_key: Mitto API key.
        webhook: webhook configuration dict to create.

    Returns:
        The webhook configuration as reported by the Mitto API.
    """
    # BUGFIX: the function previously ignored its parameters and used
    # the module-level globals; honour the arguments instead.
    mitto = Mitto(
        base_url=base_url,
        api_key=api_key
    )
    webhook = created_job_webhook(webhook=webhook)
    job_id = webhook["id"]
    single_webhook = mitto.get_job_webhooks(job_id=job_id)
    return single_webhook


if __name__ == "__main__":
    sys.exit(main(base_url=BASE_URL, api_key=API_KEY, webhook=WEBHOOK))
|
import os
from flask import Flask
from flask_restful import Api
from flask_jwt import JWT
from security import authenticate, identity
from resources.user import UserRegister
from resources.item import Item, ItemList
from resources.store import Store, StoreList
from db import db
app = Flask(__name__)
#app.config['SQLALCHEMY_DATABASE_URI'] ='sqlite:///data.db'
#Changing it to the heroku database url but if we wanna test locally it won't be available
#hence we give two params to get if first not found second will be used
app.config['SQLALCHEMY_DATABASE_URI']= os.environ.get('DATABASE_URL', 'sqlite:///data.db')
# Disable the SQLAlchemy event system (saves memory; Flask-SQLAlchemy
# has its own change tracking).
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
#app.config['PROPAGATE_EXCEPTIONS'] = True
# Secret key signs the JWT tokens issued by flask_jwt.
app.secret_key = 'taz'
api = Api(app)
# Creates the /auth endpoint using the authenticate/identity callbacks.
jwt = JWT(app, authenticate, identity)
# REST resource routing.
api.add_resource(Store, '/store/<string:name>')
api.add_resource(Item, '/item/<string:name>')
api.add_resource(ItemList, '/items')
api.add_resource(StoreList, '/stores')
api.add_resource(UserRegister, '/register')
if __name__ == '__main__':
    app.run(port=5000, debug=True) # important to mention debug=True
|
from django.shortcuts import render, HttpResponse
from django.conf import settings
from rest_framework.decorators import api_view
from rest_framework.response import Response
import random
from .models import Planet
from .serializers import PlanetSerializer
from api.serializers import GenericSerializer
from api.views import BaseRandomView, BaseIdView
from api.utils import validate_request, set_options_response
MODEL = Planet
@api_view(['GET', 'OPTIONS'])
def index(request):
    """List planets, filterable by name/affiliation/region, paginated.

    Query params:
        name, affiliation, region: case-insensitive substring filters.
        page: 1-based page number; invalid values fall back to page 1.

    Returns a 404-style error payload when no planet matches.
    """
    if request.method == 'OPTIONS':
        return set_options_response()
    result = validate_request(request)
    if 'error' in result:
        return Response({"error": result['error']}, status=result['status'], headers=settings.CORS_HEADERS)
    name = request.GET.get('name', None)
    affiliation = request.GET.get('affiliation', None)
    region = request.GET.get('region', None)
    page = request.GET.get('page', 0)
    planets_set = Planet.objects.all().order_by('id')
    if name:
        planets_set = planets_set.filter(name__icontains=name)
    if affiliation:
        planets_set = planets_set.filter(info__affiliation__icontains=affiliation)
    if region:
        planets_set = planets_set.filter(info__region__icontains=region)
    if page:
        try:
            page = int(page)
        # BUGFIX: was a bare `except:`; only conversion errors should
        # fall back to the first page.
        except (TypeError, ValueError):
            page = 1
        start = settings.RESOURCE_LIMIT * (page - 1)
        end = start + settings.RESOURCE_LIMIT
        planets_set = planets_set[start:end]
    else:
        planets_set = planets_set[0:settings.RESOURCE_LIMIT]
    serializer = PlanetSerializer(planets_set, many=True)
    # If nothing matches queries
    if not serializer.data:
        return Response({"error": settings.MSG_404}, status=404, headers=settings.CORS_HEADERS)
    return Response(serializer.data, headers=settings.CORS_HEADERS)
class RandomPlanetView(BaseRandomView):
    """Random-planet endpoint: wires the Planet model into the shared base view."""
    model = MODEL


class PlanetIdView(BaseIdView):
    """Planet-by-id endpoint: wires the Planet model into the shared base view."""
    model = MODEL
import os
import psycopg2
from dotenv import load_dotenv
load_dotenv()
def create_connection():
    """Open a new PostgreSQL connection using the DATABASE_URI env var."""
    database_uri = os.environ.get("DATABASE_URI")
    return psycopg2.connect(database_uri)
# Copyright 2015 Metaswitch Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Usage:
calicoctl profile show [--detailed]
calicoctl profile add <PROFILE>
calicoctl profile remove <PROFILE> [--no-check]
calicoctl profile <PROFILE> tag show
calicoctl profile <PROFILE> tag (add|remove) <TAG>
calicoctl profile <PROFILE> rule add (inbound|outbound) [--at=<POSITION>]
(allow|deny) [(
(tcp|udp) [(from [(ports <SRCPORTS>)] [(tag <SRCTAG>)] [(cidr <SRCCIDR>)])]
[(to [(ports <DSTPORTS>)] [(tag <DSTTAG>)] [(cidr <DSTCIDR>)])] |
icmp [(type <ICMPTYPE> [(code <ICMPCODE>)])]
[(from [(tag <SRCTAG>)] [(cidr <SRCCIDR>)])]
[(to [(tag <DSTTAG>)] [(cidr <DSTCIDR>)])] |
[(from [(tag <SRCTAG>)] [(cidr <SRCCIDR>)])]
[(to [(tag <DSTTAG>)] [(cidr <DSTCIDR>)])]
)]
calicoctl profile <PROFILE> rule remove (inbound|outbound) (--at=<POSITION>|
(allow|deny) [(
(tcp|udp) [(from [(ports <SRCPORTS>)] [(tag <SRCTAG>)] [(cidr <SRCCIDR>)])]
[(to [(ports <DSTPORTS>)] [(tag <DSTTAG>)] [(cidr <DSTCIDR>)])] |
icmp [(type <ICMPTYPE> [(code <ICMPCODE>)])]
[(from [(tag <SRCTAG>)] [(cidr <SRCCIDR>)])]
[(to [(tag <DSTTAG>)] [(cidr <DSTCIDR>)])] |
[(from [(tag <SRCTAG>)] [(cidr <SRCCIDR>)])]
[(to [(tag <DSTTAG>)] [(cidr <DSTCIDR>)])]
)])
calicoctl profile <PROFILE> rule show
calicoctl profile <PROFILE> rule json
calicoctl profile <PROFILE> rule update
Description:
Modify available profiles and configure rules or tags.
Options:
--detailed Show additional information.
--no-check Remove a profile without checking if there are endpoints
associated with the profile.
--at=<POSITION> Specify the position in the chain where the rule should
be placed. Default: append at end.
Examples:
Add and set up a rule to prevent all inbound traffic except pings from the 192.168/16 subnet
$ calicoctl profile add only-local-pings
$ calicoctl profile only-local-pings rule add inbound deny icmp
$ calicoctl profile only-local-pings rule add inbound --at=0 allow from 192.168.0.0/16
"""
import sys
import re
from prettytable import PrettyTable
from pycalico.datastore import Rule
from pycalico.datastore import Rules
from connectors import client
from utils import print_paragraph
from utils import validate_characters
from utils import validate_cidr
def validate_arguments(arguments):
"""
Validate argument values:
<PROFILE>
<SRCTAG>
<SRCCIDR>
<DSTTAG>
<DSTCIDR>
<ICMPTYPE>
<ICMPCODE>
Arguments not validated:
<SRCPORTS>
<DSTPORTS>
<POSITION>
:param arguments: Docopt processed arguments
"""
# Validate Profiles
profile_ok = True
if arguments.get("<PROFILE>") is not None:
profile = arguments.get("<PROFILE>")
profile_ok = validate_characters(profile)
# Validate tags
tag_src_ok = (arguments.get("<SRCTAG>") is None or
validate_characters(arguments["<SRCTAG>"]))
tag_dst_ok = (arguments.get("<DSTTAG>") is None or
validate_characters(arguments["<DSTTAG>"]))
# Validate IPs
cidr_ok = True
for arg in ["<SRCCIDR>", "<DSTCIDR>"]:
if arguments.get(arg) is not None:
cidr_ok = validate_cidr(arguments[arg])
icmp_ok = True
for arg in ["<ICMPCODE>", "<ICMPTYPE>"]:
if arguments.get(arg) is not None:
try:
value = int(arguments[arg])
if not (0 <= value < 255): # Felix doesn't support 255
raise ValueError("Invalid %s: %s" % (arg, value))
except ValueError:
icmp_ok = False
# Print error message
if not profile_ok:
print_paragraph("Profile names must be < 40 character long and can "
"only contain numbers, letters, dots, dashes and "
"underscores.")
if not (tag_src_ok and tag_dst_ok):
print_paragraph("Tags names can only contain numbers, letters, dots, "
"dashes and underscores.")
if not cidr_ok:
print "Invalid CIDR specified."
if not icmp_ok:
print "Invalid ICMP type or code specified."
# Exit if not valid
if not (profile_ok and tag_src_ok and tag_dst_ok
and cidr_ok and icmp_ok):
sys.exit(1)
def profile(arguments):
    """
    Main dispatcher for profile commands. Calls the corresponding helper
    function based on which docopt keywords are set.

    The branch order matters: "tag" and "rule" subcommands are checked
    before the bare add/remove/show forms, because docopt sets the shared
    "add"/"remove"/"show" keys for those subcommands as well.

    :param arguments: A dictionary of arguments already processed through
    this file's docstring with docopt
    :return: None
    """
    validate_arguments(arguments)
    # "profile <PROFILE> tag ..." forms (but not "rule ... tag <TAG>").
    if arguments.get("tag") and not arguments.get("rule"):
        if arguments.get("show"):
            profile_tag_show(arguments.get("<PROFILE>"))
        elif arguments.get("add"):
            profile_tag_add(arguments.get("<PROFILE>"),
                            arguments.get("<TAG>"))
        elif arguments.get("remove"):
            profile_tag_remove(arguments.get("<PROFILE>"),
                               arguments.get("<TAG>"))
    elif arguments.get("rule"):
        # "profile <PROFILE> rule ..." forms.
        if arguments.get("show"):
            profile_rule_show(arguments.get("<PROFILE>"),
                              human_readable=True)
        elif arguments.get("json"):
            profile_rule_show(arguments.get("<PROFILE>"),
                              human_readable=False)
        elif arguments.get("update"):
            profile_rule_update(arguments.get("<PROFILE>"))
        elif arguments.get("add") or arguments.get("remove"):
            # Collapse the docopt booleans into the keyword arguments the
            # rule helper expects.
            operation = "add" if arguments.get("add") else "remove"
            action = "allow" if arguments.get("allow") else "deny"
            direction = ("inbound" if arguments.get("inbound")
                         else "outbound")
            if arguments.get("tcp"):
                protocol = "tcp"
            elif arguments.get("udp"):
                protocol = "udp"
            elif arguments.get("icmp"):
                protocol = "icmp"
            else:
                protocol = None
            src_ports = parse_ports(arguments.get("<SRCPORTS>"))
            dst_ports = parse_ports(arguments.get("<DSTPORTS>"))
            # --at arrives as a string; a non-numeric value exits silently
            # with status 1.
            position = arguments.get("--at")
            if position is not None:
                try:
                    position = int(position)
                except ValueError:
                    sys.exit(1)
            profile_rule_add_remove(
                operation,
                arguments.get("<PROFILE>"),
                position,
                action,
                direction,
                protocol=protocol,
                icmp_type=arguments.get("<ICMPTYPE>"),
                icmp_code=arguments.get("<ICMPCODE>"),
                src_net=arguments.get("<SRCCIDR>"),
                src_tag=arguments.get("<SRCTAG>"),
                src_ports=src_ports,
                dst_net=arguments.get("<DSTCIDR>"),
                dst_tag=arguments.get("<DSTTAG>"),
                dst_ports=dst_ports,
            )
    elif arguments.get("add"):
        # Bare "profile add <PROFILE>".
        profile_add(arguments.get("<PROFILE>"))
    elif arguments.get("remove"):
        profile_remove(arguments.get("<PROFILE>"), arguments.get("--no-check"))
    elif arguments.get("show"):
        profile_show(arguments.get("--detailed"))
def profile_add(profile_name):
"""
Create a policy profile with the given name.
:param profile_name: The name for the profile.
:return: None.
"""
# Check if the profile exists.
if client.profile_exists(profile_name):
print "Profile %s already exists." % profile_name
else:
# Create the profile.
client.create_profile(profile_name)
print "Created profile %s" % profile_name
def profile_remove(profile_name, nocheck):
"""
Remove a profile as long as it does not contain any endpoints.
Allow user to explicitly remove the profile if desired.
:param profile_name: The name of the profile to remove.
:param nocheck: Flag saying to remove profile regardless of endpoints.
:return: None.
"""
# Check if the profile exists.
if client.profile_exists(profile_name):
rm_profile = False
# Check that the nocheck flag was used
if nocheck:
rm_profile = True
else:
# Check if the the profile has endpoints associated with it
members = client.get_profile_members(profile_name)
if not members:
rm_profile = True
# Remove the profile if criteria was met
if rm_profile:
client.remove_profile(profile_name)
print "Deleted profile %s" % profile_name
else:
# Members must exist if this branch is reached
print "Cannot remove profile - profile in use by endpoint(s).\n" + \
"Use the '--no-check' flag to remove the profile anyway."
else:
print "Profile %s not found." % profile_name
def profile_show(detailed):
profiles = client.get_profile_names()
if detailed:
x = PrettyTable(["Name", "Host", "Orchestrator ID", "Workload ID",
"Endpoint ID", "State"])
for name in profiles:
members = client.get_profile_members(name)
if not members:
x.add_row([name, "None", "None", "None", "None", "None"])
continue
for endpoint in members:
x.add_row([name,
endpoint.hostname,
endpoint.orchestrator_id,
endpoint.workload_id,
endpoint.endpoint_id,
endpoint.state])
else:
x = PrettyTable(["Name"])
for name in profiles:
x.add_row([name])
print x.get_string(sortby="Name")
def profile_tag_show(name):
"""Show the tags on the profile."""
try:
profile = client.get_profile(name)
except KeyError:
print "Profile %s not found." % name
sys.exit(1)
for tag in profile.tags:
print tag
def profile_tag_add(name, tag):
"""
Add a tag to the profile.
:param name: Profile name
:param tag: Tag name
:return: None
"""
try:
profile = client.get_profile(name)
except KeyError:
print "Profile %s not found." % name
sys.exit(1)
profile.tags.add(tag)
client.profile_update_tags(profile)
print "Tag %s added to profile %s" % (tag, name)
def profile_tag_remove(name, tag):
"""
Remove a tag from the profile.
:param name: Profile name
:param tag: Tag name
:return: None
"""
try:
profile = client.get_profile(name)
except KeyError:
print "Profile %s not found." % name
sys.exit(1)
try:
profile.tags.remove(tag)
except KeyError:
print "Tag %s is not on profile %s" % (tag, name)
sys.exit(1)
client.profile_update_tags(profile)
print "Tag %s removed from profile %s" % (tag, name)
def profile_rule_show(name, human_readable=False):
"""Show the rules on the profile."""
try:
profile = client.get_profile(name)
except KeyError:
print "Profile %s not found." % name
sys.exit(1)
if human_readable:
print "Inbound rules:"
for i, rule in enumerate(profile.rules.inbound_rules, start=1):
print " %3d %s" % (i, rule.pprint())
print "Outbound rules:"
for i, rule in enumerate(profile.rules.outbound_rules, start=1):
print " %3d %s" % (i, rule.pprint())
else:
print profile.rules.to_json(indent=2)
print ""
def profile_rule_update(name):
"""Update the rules on the profile"""
try:
profile = client.get_profile(name)
except KeyError:
print "Profile %s not found." % name
sys.exit(1)
# Read in the JSON from standard in.
rules_str = sys.stdin.read()
rules = Rules.from_json(rules_str)
if rules.id != name:
print 'Rules JSON "id"=%s doesn\'t match profile name %s.' % \
(rules.id, name)
sys.exit(1)
profile.rules = rules
client.profile_update_rules(profile)
print "Successfully updated rules on profile %s" % name
def profile_rule_add_remove(
operation,
name, position, action, direction,
protocol=None,
icmp_type=None, icmp_code=None,
src_net=None, src_tag=None, src_ports=None,
dst_net=None, dst_tag=None, dst_ports=None):
"""
Add or remove a rule from a profile.
Arguments not documented below are passed through to the rule.
:param operation: "add" or "remove".
:param name: Name of the profile.
:param position: Position to insert/remove rule or None for the default.
:param action: Rule action: "allow" or "deny".
:param direction: "inbound" or "outbound".
:return:
"""
if icmp_type is not None:
icmp_type = int(icmp_type)
if icmp_code is not None:
icmp_code = int(icmp_code)
# Convert the input into a Rule.
rule_dict = {k: v for (k, v) in locals().iteritems()
if k in Rule.ALLOWED_KEYS and v is not None}
rule_dict["action"] = action
if (protocol not in ("tcp", "udp")) and (src_ports is not None or
dst_ports is not None):
print "Ports are not valid with protocol %r" % protocol
sys.exit(1)
rule = Rule(**rule_dict)
# Get the profile.
try:
profile = client.get_profile(name)
except KeyError:
print "Profile %s not found." % name
sys.exit(1)
if direction == "inbound":
rules = profile.rules.inbound_rules
else:
rules = profile.rules.outbound_rules
if operation == "add":
if position is None:
# Default to append.
position = len(rules) + 1
if not 0 < position <= len(rules) + 1:
print "Position %s is out-of-range." % position
if rule in rules:
print "Rule already present, skipping."
return
rules.insert(position - 1, rule) # Accepts 0 and len(rules).
else:
# Remove.
if position is not None:
# Position can only be used on its own so no need to examine the
# rule.
if 0 < position <= len(rules): # 1-indexed
rules.pop(position - 1)
else:
print "Rule position out-of-range."
else:
# Attempt to match the rule.
try:
rules.remove(rule)
except ValueError:
print "Rule not found."
sys.exit(1)
client.profile_update_rules(profile)
def parse_ports(ports_str):
"""
Parse a string representing a port list into a list of ports and
port ranges.
Returns None if the input is None.
:param StringTypes|NoneType ports_str: string representing a port list.
Examples: "1" "1,2,3" "1:3" "1,2,3:4"
:return list[StringTypes|int]|NoneType: list of ports or None.
"""
if ports_str is None:
return None
# We allow ranges with : or - but convert to :, which is what the data
# model uses.
if not re.match(r'^(\d+([:-]\d+)?)(,\d+([:-]\d+)?)*$',
ports_str):
print_paragraph("Ports: %r are invalid; expecting a comma-separated "
"list of ports and port ranges." % ports_str)
sys.exit(1)
splits = ports_str.split(",")
parsed_ports = []
for split in splits:
m = re.match(r'^(\d+)[:-](\d+)$', split)
if m:
# Got a range, canonicalise it.
min = int(m.group(1))
max = int(m.group(2))
if min > max:
print "Port range minimum (%s) > maximum (%s)." % (min, max)
sys.exit(1)
if not (0 <= min <= 65535):
print "Port minimum (%s) out-of-range." % min
sys.exit(1)
if not (0 <= max <= 65535):
print "Port maximum (%s) out-of-range." % max
sys.exit(1)
parsed_ports.append("%s:%s" % (min, max))
else:
# Should be a lone port, convert to int.
port = int(split)
if not (0 <= port <= 65535):
print "Port (%s) out-of-range." % min
sys.exit(1)
parsed_ports.append(port)
return parsed_ports
|
from bs4 import BeautifulSoup
from requests import request
import os
class Parther_clss(object):
    """Parses a saved HTML page and extracts anchor (``<a>``) titles.

    Attributes:
        path_file_html: path of the HTML file to parse.
        filter_: collection of titles to exclude from the result.
    """

    def __init__(self, path_file_html, filter_l):
        self.path_file_html = path_file_html
        self.filter_ = filter_l

    def soup_parth(self):
        """Return a list of "<title>\\n" strings for every <a> tag whose
        title attribute is not in the filter collection."""
        anime_input = []
        # BUGFIX: use a context manager so the file handle is closed
        # deterministically (the original leaked it).
        with open(self.path_file_html, encoding='utf8') as html_file:
            html = html_file.read()
        soup = BeautifulSoup(html, 'lxml')
        for a in soup.find_all('a'):
            # get('title') may return None; str() preserves the original
            # behaviour of comparing the literal string 'None'.
            link = str(a.get('title'))
            if link not in self.filter_:
                anime_input.append(link + '\n')
        return anime_input
class Filters(object):
    """Cleanup filters applied to a scraped list of title strings.

    Attributes:
        anime: list of title strings to filter.
        path_txt / path_html: file paths (stored for callers; unused here).
    """

    def __init__(self, anime, path_txt, path_html):
        self.anime = anime
        self.path_txt = path_txt
        self.path_html = path_html

    def filter_see(self):
        """Drop every entry containing 'Смотреть ' ("watch" links).

        BUGFIX: the original popped items from the list while iterating
        over it, which skipped the element following each removal, so
        consecutive matches survived.  Rebuild the list *in place* so
        callers holding a reference to the same list object still observe
        the filtered contents.

        :return: the (mutated) list of surviving entries.
        """
        self.anime[:] = [entry for entry in self.anime
                         if 'Смотреть ' not in entry]
        return self.anime

    def filter_duble(self):
        """Return one copy of each entry that is immediately preceded by
        an identical entry (adjacent duplicates)."""
        anime_out = []
        for i in range(1, len(self.anime)):
            if self.anime[i] == self.anime[i - 1]:
                anime_out.append(self.anime[i])
        return anime_out

    def filter_replay(self):
        """Return the list with duplicates removed, preserving first
        occurrence order."""
        anime_out = []
        for entry in self.anime:
            if entry not in anime_out:
                anime_out.append(entry)
        return anime_out
def write_in_file(anime, path_file_txt):
    """Write the titles to a text file, one numbered line per entry.

    Output format: ``1) <entry>`` where each entry normally already ends
    with a newline.

    :param anime: list of title strings.
    :param path_file_txt: destination file path.
    """
    # Context manager guarantees the file is closed even if a write fails
    # (the original only closed it on the happy path).
    with open(path_file_txt, "w", encoding='utf-8') as out_file:
        for number_line, entry in enumerate(anime, start=1):
            out_file.write(str(number_line) + ') ' + entry)
def main_parther_filter(filter_l, path_file_html, path_file_txt):
    """Full parse-and-filter pipeline.

    Parses the saved HTML page, removes 'Смотреть' entries, keeps
    adjacent duplicates, then de-duplicates the result.

    :param filter_l: titles to exclude during parsing.
    :param path_file_html: path of the saved HTML page.
    :param path_file_txt: text file path (passed through to Filters).
    :return: final filtered list of titles.
    """
    parther=Parther_clss(path_file_html,filter_l)
    anime=parther.soup_parth()
    filter_anime=Filters(anime,path_file_txt,path_file_html)
    # NOTE(review): filter_see() operates on the same list object that
    # filter_duble() reads next via self.anime — the shared reference is
    # intentional here.
    anime=filter_anime.filter_see()
    anime_l=filter_anime.filter_duble()
    # A second Filters instance de-duplicates the adjacent-duplicates list.
    filter_anime=Filters(anime_l,path_file_txt,path_file_html)
    anime=filter_anime.filter_replay()
    return anime
def download_page(page, path_file_html):
    """Fetch *page* with an HTTP GET and save its body to path_file_html."""
    body = request('GET', page).text
    with open(path_file_html, 'w', encoding='utf-8') as out_file:
        out_file.write(body)
def dell_file(path_file):
    """Best-effort removal of *path_file*, resolved relative to this
    script's directory.  All errors are deliberately swallowed."""
    try:
        script_dir = os.path.abspath(os.path.dirname(__file__))
        os.remove(os.path.join(script_dir, path_file))
    except Exception:
        # Missing file / permission problems: nothing to do.
        pass
|
# The basic outline:
# 1. Get the X and y values from the table (double square brackets on the
#    DataFrame give 2-D arrays).
# 2. Add the dense layer (nodes, activation, input shape) and compile the
#    model (optimizer, loss, learning rate).
# 3. Fit the model over some epochs, then predict and plot the fitted line
#    on the scatter plot.
from keras.layers import Dense
from keras.models import Sequential
from keras.optimizers import Adam, SGD
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# NOTE(review): hard-coded absolute Windows path — breaks on other machines.
df = pd.read_csv("F://PycharmProjects//Zero_to_deep_learning//weight-height.csv")
print(df.head(5))
# Features and targets as (n, 1) arrays.
X = df[['Height']].values
y_true = df[['Weight']].values
'''
y_pred = model.predict(X)
df.plot(kind = 'scatter', x = 'Height', y = 'Weight')
plt.plot(X, y_pred, color = 'red')
print(y_pred)
plt.show()
W, B = model.get_weights()
print(W, B)
'''
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y_true, test_size=0.2)
# Single-neuron linear model: y = Wx + b.
model = Sequential()
model.add(Dense(1,input_shape = (1, )))
# NOTE(review): `lr` is the legacy Keras argument name (newer releases use
# `learning_rate`) — confirm against the installed Keras version.
model.compile(Adam(lr = 0.8), 'mean_squared_error')
model.fit(X_train, y_train, epochs= 50, verbose = 1)
y_train_pred = model.predict(X_train).ravel()
y_test_pred = model.predict(X_test).ravel()
from sklearn.metrics import mean_squared_error as mse
print("The mean squared error on the train set is {:0.3f}".format(mse(y_train, y_train_pred)))
print('The mean squared error on the test set is {:0.3f}'.format(mse(y_test, y_test_pred)))
|
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 19 16:40:18 2020

@author: pedro
"""
# NOTE(review): clears every global name — only meaningful in interactive
# (Spyder-style) sessions where state persists between runs.
globals().clear()
from pathlib import Path
import getpass
# Pick the data directory according to which machine we are logged into.
# NOTE(review): `caminho` is never used below — presumably shared
# boilerplate from other scripts.
if getpass.getuser() == "pedro":
    print('Logado de casa')
    caminho = Path(r'D:\Códigos, Dados, Documentação e Cheat Sheets')
elif getpass.getuser() == "pedro-salj":
    print('Logado da salj-alems')
    caminho = Path(r'C:\Users\pedro-salj\Desktop\Pedro Nakashima\Códigos, Dados, Documentação e Cheat Sheets')
from matplotlib import pyplot as plt
plt.style.use("fivethirtyeight")
# Language popularity counts (Stack Overflow survey-style data).
slices = [59219, 55466, 47544, 36443, 35917]
labels = ['JavaScript', 'HTML/CSS', 'SQL', 'Python', 'Java']
explode = [0, 0, 0, 0.1, 0]  # pull the Python wedge out slightly
plt.pie(slices, labels=labels, explode=explode, shadow=True,
        startangle=90, autopct='%1.1f%%',
        wedgeprops={'edgecolor': 'black'})
plt.title("My Awesome Pie Chart")
plt.tight_layout()
plt.show()
|
test = {
'name': 'q3',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
>>> y = data['tip'];
>>> x = data['total_bill'];
>>> np.isclose(minimize_average_loss(squared_loss, model, x, y), 0.14373189123158361)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> y = data['tip'];
>>> x = data['total_bill'];
>>> np.isclose(minimize_average_loss(abs_loss, model, x, y), 0.1495886219625012)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> np.isclose(minimize_average_loss(squared_loss, model, data['total_bill'], data['tip']), 0.14373189229218733)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> np.isclose(minimize_average_loss(squared_loss, model, data['total_bill'], data['tip']), 0.14373189229218733)
True
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'
}
]
}
|
import pygame as pg
import settings
import LoadImages
from Bomb import Bomb
from PowerUp import PowerUp
import importlib
class Player(pg.sprite.Sprite):
    """Bomberman-style player sprite: input handling, movement with wall
    and border collision, bomb dropping, power-up pickup and walk-cycle
    animation."""

    def __init__(self, game, xSpawn, ySpawn, id):
        # Membership in destructibleAndDontBlockExplosion makes the player
        # a valid explosion target that does not stop the blast.
        self.groups = game.allSprites, game.players, game.destructibleAndDontBlockExplosion
        pg.sprite.Sprite.__init__(self, self.groups)
        self.game = game
        self.image = LoadImages.PLAYER_IMAGE
        self.rect = self.image.get_rect()
        # Spawn coordinates are given in tile units; store pixel positions.
        self.x, self.y = xSpawn * settings.TILESIZE, ySpawn * settings.TILESIZE
        self.vx, self.vy = 0, 0
        self.id = id
        self.dropBomb = False  # set by getKeys(), consumed by attack()
        self.bomb = 1          # bombs currently available to drop
        self.bombPower = 1     # explosion power level
        self.roller = 0        # speed power-up level
        self.maxBombPower = settings.MAX_BOMB_POWER
        self.maxBomb = settings.MAX_BOMB
        self.maxRoller = settings.MAX_ROLLER
        self.choice = 1000  # for bots
        self.lastSavedPos = [self.x, self.y]  # for bots
        self.lastBotUpdate = 0  # for bots
        self.lastUpdate = 0          # time (ms) of last animation frame change
        self.lastDirection = (0, 0)  # last facing, used for the idle frame
        self.currentFrame = 0

    def update(self):
        """Run one frame of player logic (called once per game tick)."""
        self.getKeys()
        self.move()
        # Resolve collisions one axis at a time so sliding along a wall
        # on the other axis still works.
        self.updatePosition('x')
        self.collideWithWalls('x')
        self.updatePosition('y')
        self.collideWithWalls('y')
        self.collideWithBorder()
        self.attack()
        self.getPowerUp()
        self.animate()
        self.setSpeedToZero("both")

    def move(self):
        """Advance the float position by velocity.

        The roller bonus is divided by |v|, so its pixel contribution per
        frame is independent of the current speed's sign/magnitude."""
        if self.vx != 0:
            self.x += self.vx * (self.game.dt + (settings.ROLLER_SPEED * self.roller / abs(self.vx)))
        if self.vy != 0:
            self.y += self.vy * (self.game.dt + (settings.ROLLER_SPEED * self.roller / abs(self.vy)))

    def collideWithWalls(self, dir):
        """Push the player flush against the first block hit on axis *dir*."""
        hits = pg.sprite.spritecollide(self, self.game.blocks, False)
        if dir == 'x':
            if hits:
                if self.vx > 0:
                    self.x = hits[0].rect.left - self.rect.width
                elif self.vx < 0:
                    self.x = hits[0].rect.right
                self.updatePosition('x')
        if dir == 'y':
            if hits:
                if self.vy > 0:
                    self.y = hits[0].rect.top - self.rect.height
                elif self.vy < 0:
                    self.y = hits[0].rect.bottom
                self.updatePosition('y')

    def collideWithBorder(self):
        """Clamp the player inside the square display area."""
        if self.x > settings.DISPLAY_SIZE - self.rect.width:
            self.x = settings.DISPLAY_SIZE - self.rect.width
        if self.x < 0:
            self.x = 0
        if self.y > settings.DISPLAY_SIZE - self.rect.height:
            self.y = settings.DISPLAY_SIZE - self.rect.height
        if self.y < 0:
            self.y = 0
        self.updatePosition("both")

    def getPowerUp(self):
        """Consume (kill) every touched power-up and apply its effect."""
        hits = pg.sprite.spritecollide(self, self.game.powerUp, True)
        for powerUp in hits:
            powerUp.isTaken(self)

    def setSpeedToZero(self, dir):
        """Reset velocity on axis *dir* ('x', 'y' or 'both')."""
        if dir == 'x':
            self.vx = 0
        elif dir == 'y':
            self.vy = 0
        elif dir == 'both':
            self.vx, self.vy = 0, 0

    def updatePosition(self, dir):
        """Copy the float position into the integer rect on axis *dir*."""
        if dir == 'x':
            self.rect.x = self.x
        elif dir == 'y':
            self.rect.y = self.y
        elif dir == "both":
            self.rect.x, self.rect.y = self.x, self.y

    def attack(self):
        """Drop a bomb on the player's current tile when requested, a bomb
        is available, and the tile does not already hold one."""
        # Tile coordinates of the sprite's centre.
        bombx, bomby = self.rect.center[0]//settings.TILESIZE, self.rect.center[1]//settings.TILESIZE
        if self.dropBomb and self.bomb > 0 and [bombx, bomby] not in self.game.bombPos:
            Bomb(self, self.game, bombx, bomby)
            self.bomb -= 1
            self.dropBomb = False

    def animate(self):
        """Advance the walk-cycle frame for the current movement direction,
        or show the first frame of the last direction when standing."""
        now = pg.time.get_ticks()
        # Throttle frame changes to the configured animation interval.
        if now - self.lastUpdate > settings.ANIMATION_TIME_TO_WAIT:
            self.lastUpdate = now
            if self.vx > 0:
                self.currentFrame = (self.currentFrame + 1) % len(LoadImages.PLAYER_RIGHT)
                self.image = LoadImages.PLAYER_RIGHT[self.currentFrame]
                self.lastDirection = (1, 0)
            elif self.vx < 0:
                self.currentFrame = (self.currentFrame + 1) % len(LoadImages.PLAYER_LEFT)
                self.image = LoadImages.PLAYER_LEFT[self.currentFrame]
                self.lastDirection = (-1, 0)
            elif self.vy > 0:
                self.currentFrame = (self.currentFrame + 1) % len(LoadImages.PLAYER_FRONT)
                self.image = LoadImages.PLAYER_FRONT[self.currentFrame]
                self.lastDirection = (0, 1)
            elif self.vy < 0:
                self.currentFrame = (self.currentFrame + 1) % len(LoadImages.PLAYER_BACK)
                self.image = LoadImages.PLAYER_BACK[self.currentFrame]
                self.lastDirection = (0, -1)
            else:
                # Standing still: show the idle frame of the last direction.
                if self.lastDirection == (1, 0):
                    self.image = LoadImages.PLAYER_RIGHT[0]
                elif self.lastDirection == (-1, 0):
                    self.image = LoadImages.PLAYER_LEFT[0]
                elif self.lastDirection == (0, 1):
                    self.image = LoadImages.PLAYER_FRONT[0]
                elif self.lastDirection == (0, -1):
                    self.image = LoadImages.PLAYER_BACK[0]

    def getKeys(self):
        """Read the keyboard: WASD moves player 1, SPACE drops a bomb."""
        keys = pg.key.get_pressed()
        if self.id == 1:
            if keys[pg.K_w]:
                self.vy = -settings.PLAYER_SPEED
            if keys[pg.K_a]:
                self.vx = -settings.PLAYER_SPEED
            if keys[pg.K_s]:
                self.vy = settings.PLAYER_SPEED
            if keys[pg.K_d]:
                self.vx = settings.PLAYER_SPEED
            if self.vx != 0 and self.vy != 0:
                # 0.7071 ~= 1/sqrt(2): keep diagonal speed equal to axial.
                self.vx *= 0.7071
                self.vy *= 0.7071
            if keys[pg.K_SPACE]:
                self.dropBomb = True
        '''
        if self.id == 2:
            if keys[pg.K_UP]:
                self.vy = -settings.PLAYER_SPEED
            if keys[pg.K_LEFT]:
                self.vx = -settings.PLAYER_SPEED
            if keys[pg.K_DOWN]:
                self.vy = settings.PLAYER_SPEED
            if keys[pg.K_RIGHT]:
                self.vx = settings.PLAYER_SPEED
            if self.vx != 0 and self.vy != 0:
                self.vx *= 0.7071
                self.vy *= 0.7071
            if keys[pg.K_KP_ENTER]:
                self.dropBomb = True
        '''
        # Player's movement speed is calculated in pixel/ms, so if the tile
        # size rises but the screen size doesn't, the player must move
        # faster for his tile/ms speed to stay the same.
        if settings.TILESIZE == 60: self.vx, self.vy = self.vx * 1.5, self.vy * 1.5

    def refreshData(self):
        """Re-load settings and images so live config changes take effect."""
        importlib.reload(settings)
        importlib.reload(LoadImages)
|
# From two user-supplied points, print the equation of the straight line
# y = kx + b passing through them.
x_1 = int(input('Введите X1'))
y_1 = int(input('Введите Y1'))
x_2 = int(input('Введите X2'))
y_2 = int(input('Введите Y2'))
# BUGFIX: guard against a vertical line (x_1 == x_2), which previously
# crashed with ZeroDivisionError; such a line has no y = kx + b form.
if x_1 == x_2:
    print(f'x = {x_1}')
else:
    k = (y_1 - y_2) / (x_1 - x_2)
    b = y_2 - (k * x_2)
    print(f'y = {k}x + {b}')
n = str(input())
length = len(n)
# Count mismatched mirror pairs: the minimum number of single-character
# changes needed to turn the string into a palindrome.
ans = sum(1 for i in range(length // 2) if n[i] != n[-i - 1])
print(ans)
from __future__ import print_function
from oauth2client import tools
import urllib.parse as parser
# Parse an empty argv via oauth2client's shared argparser; fall back to
# None when argparse is unavailable.
try:
    import argparse
    flags = tools.argparser.parse_args([])
except ImportError:
    flags = None
# very much copied from the Google Calendar API Python Quickstart tutorial
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/calendar-python-quickstart.json
# NOTE: SCOPES is a tuple of three OAuth scope URLs.
SCOPES = 'https://www.googleapis.com/auth/calendar','https://www.googleapis.com/auth/calendar.readonly','https://www.googleapis.com/auth/plus.login'
CLIENT_SECRET_FILE = 'calendar_auth.json'
APPLICATION_NAME = 'ECSE428 - McBot'
# SECURITY NOTE(review): OAuth client id and secret are committed in
# source; they should live in configuration outside version control.
CLIENT_ID = '362386413877-7c39vktq1du448tnti5d5fr7qs8jfa3d.apps.googleusercontent.com'
CLIENT_SECRET = 'YUM0eM5AOAJZfCrLEL6YHMp2'
APP_NAME_SHORT = 'McBot'
class CalendarService:
    """Builds Google Calendar "event template" links for the McBot client.

    The class-level placeholders mirror the original implementation; they
    are populated elsewhere once a real API session exists.
    """
    http = ""
    service = ""
    credentials = ""

    def create_event_client(self, name, description, dates = '20170310T000000Z/20170310T010000Z', location = '800 Howard St., San Francisco, CA 94103'):
        """Return a pre-filled Google Calendar event-creation URL.

        All field values are percent-encoded with quote_plus before being
        appended as query parameters.
        """
        base_url = 'http://www.google.com/calendar/event?action=TEMPLATE'
        fields = (('text', name),
                  ('dates', dates),
                  ('details', description),
                  ('location', location))
        query = ''.join('&%s=%s' % (key, parser.quote_plus(value))
                        for key, value in fields)
        return base_url + query
# 导入相关的库
from absl import app, flags, logging
from absl.flags import FLAGS
import tensorflow as tf
import numpy as np
import cv2
from tensorflow.keras.callbacks import (
ReduceLROnPlateau,
EarlyStopping,
ModelCheckpoint,
TensorBoard
)
# 导入自定义的库
from yolov3_tf2.models import (
YoloV3, YoloV3Tiny, YoloLoss,
yolo_anchors, yolo_anchor_masks,
yolo_tiny_anchors, yolo_tiny_anchor_masks
)
from yolov3_tf2.utils import freeze_all
import yolov3_tf2.dataset as dataset
# 设置命令行传参的关键字
# Command-line flag definitions for the training script.
flags.DEFINE_string('dataset', '', 'path to dataset')  # training dataset
flags.DEFINE_string('val_dataset', '', 'path to validation dataset')  # validation dataset
flags.DEFINE_boolean('tiny', True, 'yolov3-tiny')  # whether to train the tiny model
flags.DEFINE_string('classes', './data/coco.names', 'path to classes file')  # class-name file
flags.DEFINE_integer('size', 416, 'image size')  # input image size
flags.DEFINE_integer('epochs', 2, 'number of epochs')  # number of training epochs
flags.DEFINE_integer('batch_size', 8, 'batch size')  # batch size
flags.DEFINE_float('learning_rate', 1e-3, 'learning rate')  # learning rate
flags.DEFINE_integer('num_classes', 80, 'number of classes in the model')  # number of classes
# 定义主函数
def main(_argv):
    """Train a YOLOv3 (full or tiny) model on TFRecord datasets selected
    via the module's absl flags."""
    # Enable on-demand GPU memory growth so TF does not grab all VRAM.
    physical_devices = tf.config.experimental.list_physical_devices('GPU')
    for physical_device in physical_devices:
        tf.config.experimental.set_memory_growth(physical_device, True)

    if FLAGS.tiny:  # tiny variant
        model = YoloV3Tiny(FLAGS.size, training=True,
                           classes=FLAGS.num_classes)
        anchors = yolo_tiny_anchors            # anchor boxes
        anchor_masks = yolo_tiny_anchor_masks  # anchor indices per output scale
        checkpoint_template = 'checkpoints/yolov3_tiny_train_{epoch}.tf'
    else:
        # BUGFIX: the original had no else-branch, so running with
        # --tiny=False crashed later with NameError on model/anchors.
        model = YoloV3(FLAGS.size, training=True,
                       classes=FLAGS.num_classes)
        anchors = yolo_anchors
        anchor_masks = yolo_anchor_masks
        checkpoint_template = 'checkpoints/yolov3_train_{epoch}.tf'

    if FLAGS.dataset:  # load the training dataset
        train_dataset = dataset.load_tfrecord_dataset(
            FLAGS.dataset, FLAGS.classes, FLAGS.size)
        # Shuffle within a 512-element buffer.
        train_dataset = train_dataset.shuffle(buffer_size=512)
        # Batch; the final batch may be smaller than batch_size.
        train_dataset = train_dataset.batch(FLAGS.batch_size)
        # Map to (image, targets) training pairs.
        train_dataset = train_dataset.map(lambda x, y: (
            dataset.transform_images(x, FLAGS.size),
            dataset.transform_targets(y, anchors, anchor_masks, FLAGS.size)))
        # Prefetch so preprocessing overlaps with training.
        train_dataset = train_dataset.prefetch(
            buffer_size=tf.data.experimental.AUTOTUNE)
    # NOTE(review): if --dataset is empty, train_dataset stays undefined and
    # model.fit below raises NameError — confirm the flag is mandatory.

    if FLAGS.val_dataset:  # load the validation dataset
        val_dataset = dataset.load_tfrecord_dataset(
            FLAGS.val_dataset, FLAGS.classes, FLAGS.size)
        val_dataset = val_dataset.batch(FLAGS.batch_size)
        val_dataset = val_dataset.map(lambda x, y: (
            dataset.transform_images(x, FLAGS.size),
            dataset.transform_targets(y, anchors, anchor_masks, FLAGS.size)))

    # Optimizer and one YOLO loss per output scale.
    optimizer = tf.keras.optimizers.Adam(lr=FLAGS.learning_rate)
    loss = [YoloLoss(anchors[mask], classes=FLAGS.num_classes)
            for mask in anchor_masks]

    # Compile in graph mode.
    model.compile(optimizer=optimizer, loss=loss,
                  run_eagerly=False)

    # Training callbacks: LR schedule, early stop, checkpoints, TensorBoard.
    callbacks = [
        ReduceLROnPlateau(verbose=1),
        EarlyStopping(patience=3, verbose=1),
        ModelCheckpoint(checkpoint_template,
                        verbose=1, save_weights_only=True),
        TensorBoard(log_dir='logs')
    ]

    # Train the model.
    history = model.fit(train_dataset,
                        epochs=FLAGS.epochs,
                        callbacks=callbacks,
                        validation_data=val_dataset)
if __name__ == '__main__':
    try:
        app.run(main)  # run the absl app: parse flags, then call main
    except SystemExit:
        # absl raises SystemExit on normal termination; swallow it.
        pass
|
# coding: utf-8
from abc import ABCMeta, abstractmethod
import os
from stinfo import *
##################################################
# 解析クラスの基底クラス
##################################################
class AbsAnalyzer(metaclass=ABCMeta):
"""データロードクラスの基底クラス
Attributes:
_base_dir (string) : ベースディレクトリ
_ouput_dirname (string) : 出力ディレクトリ名
_ouput_dir (string) : 出力ディレクトリパス
"""
##################################################
# コンストラクタ
##################################################
def __init__(self, base_dir, ouput_dirname):
""" コンストラクタ。
Args:
base_dir (string) : ベースディレクトリ
ouput_dirname (string) : 出力ディレクトリ名
Returns:
"""
self._base_dir = base_dir
self._ouput_dirname = ouput_dirname
# 出力用ディレクトリをセットする
self._ouput_dir = os.path.join(self._base_dir, self._ouput_dirname)
# 出力用ディレクトリを作成する
os.makedirs(self._ouput_dir, exist_ok=True)
##################################################
# 解析を実行する。
##################################################
def run(self):
""" 解析を実行する。
Args:
Returns:
"""
# 証券コードと名称のディクショナリを取得する
codes = self._get_codes()
# 各銘柄の基本情報を解析する。
self._analyze_basic_infos(codes)
# 各銘柄の決算情報を解析する。
self._analyze_financial_infos(codes)
# 各銘柄の株価を解析する。
self._analyze_stock_pricess(codes)
##################################################
# 各銘柄の基本情報を取得する。
##################################################
def _analyze_basic_infos(self, codes):
""" 各銘柄の基本情報を取得する。
Args:
codes (dict) : 証券コードと名称のディクショナリ
(ex){'JR東日本':9020, 'JR西日本': 9021}
Returns:
"""
# 各銘柄の基本情報を取得する。
df = get_basic_infos(codes)
# 各銘柄の基本情報を整形する。
df = reshape_basic_info(df)
# 平均値と標準偏差の列を削除する
df = df.drop(index='標準偏差')
df = df.drop(index='平均値')
# PER,を可視化する。
per_file = os.path.join(self._ouput_dir, 'per.png')
visualize_basic_info(df, ['PER(調整後)'], per_file)
# PBR, PSRを可視化する。
psr_pbr_file = os.path.join(self._ouput_dir, 'psr_pbr.png')
visualize_basic_info(df, ['PSR', 'PBR'], psr_pbr_file)
# 時価総額を可視化する。
market_cap_file = os.path.join(self._ouput_dir, 'market_cap.png')
visualize_basic_info(df, ['時価総額(兆円)'], market_cap_file)
# 配当利回りを可視化する
dividend_yield_file = os.path.join(self._ouput_dir, 'dividend_yield.png')
visualize_basic_info(df, ['配当利回り'], dividend_yield_file)
##################################################
# 各銘柄の決算情報を取得する。
##################################################
def _analyze_financial_infos(self, codes):
""" 各銘柄の決算情報を取得する。
Args:
codes (dict) : 証券コードと名称のディクショナリ
(ex){'JR東日本':9020, 'JR西日本': 9021}
Returns:
"""
# 指定した複数銘柄の基本情報を取得する。
df = get_financial_infos(codes)
# 複数銘柄の決算情報を整形する
df = reshape_financial_info(df)
# 各銘柄別に可視化する
for brand_name in codes.keys():
# 銘柄用のフォルダを作成
code = codes[brand_name]
dir_name = '{0:d}_{1:s}'.format(code, brand_name)
brand_dir = os.path.join(self._ouput_dir, dir_name)
os.makedirs(brand_dir, exist_ok=True)
# ROAとROEを可視化する
roa_roe_file = os.path.join(brand_dir, 'roa_roe.png')
visualize_financial_info_for_specified_brand(
df, brand_name, bar_datas=['ROA', 'ROE'], bar_label='ROA,ROE[%]',
filepath=roa_roe_file)
# 利益を可視化する
income_file = os.path.join(brand_dir, 'income.png')
visualize_financial_info_for_specified_brand(
df, brand_name,
bar_datas=['営業利益(十億円)', '経常利益(十億円)', '純利益(十億円)'],
bar_label='利益(十億円)',
line_datas=['売上高(十億円)'], line_label='売上高(十億円)',
filepath=income_file)
# 資産を可視化する
assets_file = os.path.join(brand_dir, 'assets.png')
visualize_financial_info_for_specified_brand(
df, brand_name,
bar_datas=['総資産(十億円)', '純資産(十億円)'], bar_label='資産(十億円)',
line_datas=['純利益(十億円)'], line_label='利益(十億円)',
filepath=assets_file)
# キャッシュフロー情報を可視化する
cf_file = os.path.join(brand_dir, 'cf.png')
visualize_financial_info_for_specified_brand(
df, brand_name,
bar_datas=['営業CF(十億円)', '投資CF(十億円)', '財務CF(十億円)', '現金期末残高(十億円)'],
bar_label='キャッシュ(十億円)',
filepath=cf_file)
##################################################
# 各銘柄の株価を解析する。
##################################################
def _analyze_stock_pricess(self, codes):
""" 各銘柄の株価を解析する。
Args:
codes (dict) : 証券コードと名称のディクショナリ
(ex){'JR東日本':9020, 'JR西日本': 9021}
Returns:
"""
# 株価取得の範囲(開始年, 終了年)を取得する
start_year, end_year = self._get_date_range_for_stock_price()
# 指定した複数銘柄の株価を取得する
df = get_stock_prices(codes, start_year, end_year)
# 株価を補正する
df = self._correct_stock_prices(df)
# 銘柄名を取得する
brand_names = list(df.index.unique('銘柄'))
# 複数銘柄の値上がり率を折れ線グラフで可視化する
ref_date = self._get_ref_date_for_price_rates()
price_rate_file = os.path.join(self._ouput_dir, 'stock_price_rate.png')
visualize_stock_price_rates_in_line(df, brand_names, ref_date=ref_date, filepath=price_rate_file)
# 複数銘柄の株価を折れ線グラフで可視化する
start_date = self._get_stock_chart_start_date()
df2 = df.loc[pd.IndexSlice[:, start_date:], :]
stock_chart_file = os.path.join(self._ouput_dir, 'stock_chart.png')
visualize_multi_stock_prices_in_line(df2, brand_names, show_average=True, filepath=stock_chart_file)
##################################################
# 証券コードと名称のディクショナリを返す。
##################################################
@abstractmethod
def _get_codes(self):
""" 証券コードと名称のディクショナリを返す。
Args:
Returns:
dict : 証券コードと名称のディクショナリ
(ex){'JR東日本':9020, 'JR西日本': 9021}
"""
raise NotImplementedError()
##################################################
# 株価取得の範囲(開始年, 終了年)を取得する
##################################################
@abstractmethod
def _get_date_range_for_stock_price(self):
""" 株価取得の範囲(開始年, 終了年)を取得する
Args:
Returns:
tuple : 開始年, 終了年
"""
raise NotImplementedError()
##################################################
# 値上がり率の基準とする日付を取得する。
##################################################
    @abstractmethod
    def _get_ref_date_for_price_rates(self):
        """ Return the reference date used for the price-increase rate.

        Subclasses must override this.

        Args:
        Returns:
            datetime : reference date for the price-increase rate
        """
        raise NotImplementedError()
##################################################
# 株価チャート表示開始日付を取得する。
##################################################
    @abstractmethod
    def _get_stock_chart_start_date(self):
        """ Return the start date of the stock-price chart.

        Subclasses must override this.

        Args:
        Returns:
            string : chart start date as a string in the form below
                (ex) 'YYYY-MM-DD'
        """
        raise NotImplementedError()
##################################################
# 株価を補正する。
##################################################
    @abstractmethod
    def _correct_stock_prices(self, df):
        """ Correct the stock prices.

        Hook for compensating stock splits and mergers. The default
        implementation returns the DataFrame unchanged, so overriding
        subclasses may call it when no correction is needed.

        Args:
            df(DataFrame) : DataFrame containing the price data
        Returns:
            DataFrame : the corrected DataFrame
        """
        return df
|
#!/usr/bin/env python
from frontend import app, init_application
from config import DebugConfiguration as config
# Start the Flask development server only when run directly (not on import).
if __name__ == "__main__":
    # Initialize the app with the debug configuration (see frontend.init_application).
    init_application(app, config)
    app.debug = config.DEBUG
    # Listen on all interfaces; the port comes from the configuration object.
    app.run(host='0.0.0.0', port=config.APP_PORT, threaded=True)
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class DoubanItem(scrapy.Item):
    """Item holding one scraped Douban movie entry."""
    # define the fields for your item here like:
    # name = scrapy.Field()
    _id = scrapy.Field()            # document id (e.g. for MongoDB storage)
    movie_name = scrapy.Field()     # movie title
    ranking = scrapy.Field()        # position in the chart
    score = scrapy.Field()          # rating score
    vote_count = scrapy.Field()     # number of votes
    types = scrapy.Field()          # genres
    regions = scrapy.Field()        # production regions/countries
    url = scrapy.Field()            # detail-page URL
    release_date = scrapy.Field()   # release date
    actors = scrapy.Field()         # cast list
|
import numpy as np
from sklearn import cross_validation
from sklearn.decomposition import PCA
from sklearn.grid_search import GridSearchCV
from sklearn.metrics import confusion_matrix, f1_score
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
import preprocess
import preprocess_original
__author__ = 'maeglin89273'
# Best model found by grid search; the trailing figure is the reported accuracy.
FINAL_CLASSIFIER = SVC(kernel="rbf", gamma=4.43, C=3.25) #99.7
# Feature-extraction function applied jointly to the train and test splits.
# NOTE(review): "EXTRACION" is a typo, kept because other code may reference it.
FEATURE_EXTRACION_FUNC = preprocess.compute_normalized_edges
# Dataset-name infix, e.g. "_resampled"; empty string selects the given dataset.
RESAMPLED = "" # or blank string "" for given dataset
def grid_search_opt(x, y, clf, param_grid, cv):
    """Run an exhaustive grid search for *clf* and print the results.

    Args:
        x, y: training features and labels.
        clf: estimator to tune.
        param_grid: dict mapping parameter names to candidate values.
        cv: number of cross-validation folds (or a CV splitter).
    """
    grid_search = GridSearchCV(clf, param_grid=param_grid, cv=cv)
    grid_search.fit(x, y)
    print("best score: %s%%" % (100 * grid_search.best_score_))
    # NOTE(review): grid_scores_ was removed in scikit-learn 0.20; this file
    # targets an old sklearn (consistent with the sklearn.grid_search import).
    print(grid_search.grid_scores_[0].cv_validation_scores)
    print("parameters: %s" % grid_search.best_params_)
def eval_test_set():
    """Train FINAL_CLASSIFIER on the train split and report accuracy,
    confusion matrix and weighted F1 on the held-out test split."""
    tr_x, tr_y = preprocess.parse_xy("./dataset/pendigits%s_train.csv" % RESAMPLED)
    ts_x, ts_y = preprocess.parse_xy("./dataset/pendigits%s_test.csv" % RESAMPLED)
    print("train started")
    # Feature extraction is applied to both splits in one call so any fitted
    # state comes from the training data only.
    tr_x, ts_x = FEATURE_EXTRACION_FUNC(tr_x, ts_x)
    # tr_x, ts_x = preprocess.min_max_normalize(tr_x, ts_x)
    FINAL_CLASSIFIER.fit(tr_x, tr_y)
    pd_y = FINAL_CLASSIFIER.predict(ts_x)
    print(100 * FINAL_CLASSIFIER.score(ts_x, ts_y))
    # Pass the digit labels 0-9 by keyword: positional use of `labels`
    # is deprecated/removed in modern scikit-learn.
    print(confusion_matrix(ts_y, pd_y, labels=np.arange(0, 10)))
    print(f1_score(ts_y, pd_y, average="weighted"))
if __name__ == "__main__":
    # Evaluate the final (already-tuned) classifier on the test set.
    eval_test_set()
    # The commented lines below were used for hyper-parameter grid search.
    # x, y = preprocess.parse_xy("./dataset/pendigits%s_train.csv" % RESAMPLED)
    # x = FEATURE_EXTRACION_FUNC(x)
    # clf = SVC()
    # clf = KNeighborsClassifier()
    # param_grid = {"kernel": ["rbf"], "gamma": np.linspace(1, 2, 8), "C": np.linspace(2, 8, 8)}
    # param_grid = {"n_neighbors": np.arange(1, 11), "weights": ["distance", "uniform"]}
    # grid_search_opt(x, y, clf, param_grid, 5)
|
#import threading
#from kivy.clock import mainthread
from connection import Connection
#from datetime import date
#import time
#from kivy.utils import strtotuple
from kivymd.uix.boxlayout import MDBoxLayout
from kivymd.uix.dialog import MDDialog
from kivymd.uix.menu import MDDropdownMenu
from kivymd.uix.picker import MDDatePicker, MDTimePicker
from kivy.metrics import dp
from kivy.lang import Builder
from kivymd.uix.screen import MDScreen
from kivy.core.window import Window
#from kivymd.uix.card import MDCard
from kivy.properties import StringProperty, NumericProperty, ObjectProperty
from kivymd.uix.button import MDFlatButton
#from threading import Thread
# Load the kv layout that defines the ids used throughout this module.
Builder.load_file('createnew.kv')
# Soft-keyboard animation and positioning tweaks (mobile targets).
Window.keyboard_anim_args = {'d': 0.2, 't': 'in_out_expo'}
Window.softinput_mode = 'below_target'
# Choices offered by the preacher / month dropdown menus.
PREACHERS = ['Ptr. William', 'Jayr', 'Lovely', 'Paul', 'Kim', 'Milca']
MONTHS = ['January', 'February', 'March', 'April',
          'May', 'June', 'July', 'August',
          'September', 'October', 'November', 'December', ]
# Number of schedules saved (but not yet sent) in this session.
sched_counter = 0
# Module-level staging lists shared by all *CreateCard classes: each card's
# on_save() appends its field values here, for_sending() snapshots index 0
# into TO_SEND, and clear_list() empties them again for the next card.
outreach_name_list = []
month_list = []
sched_num_list = []
date_list = []
time_list = []
year_list = []
preachers_list = []
sub_preacher_list = []
topic_list = []
topic_details_list = []
event_list = []
event_details_list = []
event_organizer_list = []
event_contact_list = []
opening_prayer_list = []
scripture_reading_list = []
offertory_list = []
closing_prayer_list = []
status = []
# Queue of completed schedule tuples to upload via Connection.
TO_SEND = []
# Main Schedule Creation
class MainServiceCreate(MDScreen):
    """Screen for composing main-service schedules."""
    # Running number shown on the next schedule card added to this screen.
    schedule_num = NumericProperty(1)
    def on_add_button_press(self):
        # Add a new card labelled with the current number, then advance it.
        self.ids.create.add_widget(MainServiceCreateCard(schedule_num_text=f'Schedule #{self.schedule_num}'))
        self.schedule_num += 1
    def on_send_button_press(self, button):
        # NOTE(review): this constructs a brand-new card just to open its
        # confirm dialog; the cards already on screen are not consulted —
        # verify this is intended.
        MainServiceCreateCard().send_dialog_open()
# Outreach Sched Creation
class OutreachServiceCreate(MDScreen):
    """Screen for composing outreach-service schedules."""
    # Running number shown on the next schedule card added to this screen.
    schedule_num = NumericProperty(1)
    def on_add_button_press(self):
        # Add a new card labelled with the current number, then advance it.
        self.ids.create.add_widget(OutreachServiceCreateCard(schedule_num_text=f'Schedule #{self.schedule_num}'))
        self.schedule_num += 1
    def on_send_button_press(self, button):
        # NOTE(review): constructs a throwaway card to open the confirm
        # dialog — see MainServiceCreate; verify intended.
        OutreachServiceCreateCard().send_dialog_open()
# Event Schedule Creation
class EventServiceCreate(MDScreen):
    """Screen for composing event schedules."""
    # Running number shown on the next schedule card added to this screen.
    schedule_num = NumericProperty(1)
    def on_add_button_press(self):
        # Add a new card labelled with the current number, then advance it.
        self.ids.create.add_widget(EventServiceCreateCard(schedule_num_text=f'Schedule #{self.schedule_num}'))
        self.schedule_num += 1
    def on_send_button_press(self, button):
        # NOTE(review): constructs a throwaway card to open the confirm
        # dialog — see MainServiceCreate; verify intended.
        EventServiceCreateCard().send_dialog_open()
# Create Cards
class MainServiceCreateCard(MDBoxLayout):
    """Card widget for composing one main-service schedule.

    Field values are staged in the module-level lists by on_save(),
    snapshotted into TO_SEND, and uploaded by on_send_press().
    """
    # Label shown at the top of the card, e.g. 'Schedule #1'.
    schedule_num_text = StringProperty('')

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Preachers dropdowns (main preacher and substitute share the choices)
        preachers_menu_items = [{
            'viewclass': 'OneLineListItem',
            'divider': None,
            'text': i,
            'height': dp(40),
            'on_release': lambda x=i: self.set_preachers_item(x),
        } for i in PREACHERS
        ]
        sub_preachers_menu_items = [{
            'viewclass': 'OneLineListItem',
            'divider': None,
            'text': i,
            'height': dp(40),
            'on_release': lambda x=i: self.set_sub_preachers_item(x),
        } for i in PREACHERS
        ]
        self.preachers_menu = MDDropdownMenu(
            caller=self.ids.preachers_drop_down,
            items=preachers_menu_items,
            position='auto',
            width_mult=2
        )
        self.sub_preachers_menu = MDDropdownMenu(
            caller=self.ids.sub_drop_down,
            items=sub_preachers_menu_items,
            position='auto',
            width_mult=2
        )
        # Months dropdown
        months_menu_items = [{
            'viewclass': 'OneLineListItem',
            'divider': None,
            'text': i,
            'size_hint_y': None,
            'height': dp(40),
            'on_release': lambda x=i: self.set_months_item(x),
        } for i in MONTHS
        ]
        self.months_menu = MDDropdownMenu(
            caller=self.ids.months_drop_down,
            items=months_menu_items,
            position='auto',
            width_mult=2
        )
        # Confirmation dialog for the toolbar send button.
        # NOTE(review): the f-string captures sched_counter at construction
        # time, so the count in the text may be stale — confirm intended.
        self.dialog = MDDialog(
            title='Are you sure ka na ba?',
            text=f'{sched_counter} schedule(s) to send.\nContinue?',
            type='simple',
            buttons=[
                MDFlatButton(text='Wait lang muna', font_style='Button', on_release=self.on_close_dialog),
                MDFlatButton(text='Send na dali', font_style='Button', on_release=self.on_send_press)
            ]
        )

    # --- dropdown selection handlers ---
    def set_preachers_item(self, text__item):
        self.preachers_menu.dismiss()
        self.ids.preachers.text = text__item

    def set_sub_preachers_item(self, text__item):
        self.sub_preachers_menu.dismiss()
        self.ids.sub_preachers.text = text__item

    def set_months_item(self, text__item):
        self.months_menu.dismiss()
        self.ids.months.text = text__item

    # --- date & time pickers ---
    def on_date_picker(self):
        self.date_dialog = MDDatePicker()
        self.date_dialog.bind(on_save=self.on_date_save, on_cancel=self.on_date_cancel)
        self.date_dialog.open()

    def on_date_save(self, instance, value, date_range):
        # Only update the text field; on_save() records the final value.
        # (Fix: also appending to date_list here left a stale first pick at
        # date_list[0] when the user chose a date more than once, and that
        # stale value is what for_sending() would have sent.)
        self.ids.date_text_field.text = str(value)

    def on_date_cancel(self, instance, value):
        self.date_dialog.dismiss()

    def show_time_picker(self):
        self.time_dialog = MDTimePicker()
        self.time_dialog.bind(on_save=self.get_time_save)
        self.time_dialog.open()

    def get_time_save(self, instance, value):
        self.ids.time_text_field.text = str(value)

    def on_time_save(self, instance, time):
        # Fix: set the widget's .text instead of overwriting the ids entry
        # itself with a plain string.
        self.ids.time_text_field.text = str(time)

    # textfield functions
    def get_year(self):
        # Year is the first four characters of the most recently saved date.
        year_list.append(date_list[-1][0:4])

    def on_schedule_save(self):
        """Ask for confirmation before freezing this card's values."""
        self.save_dialog = MDDialog(
            title='Save Schedule',
            text='You can no longer undo this.\nAre you sure?',
            type='simple',
            buttons=[
                MDFlatButton(
                    text='Save',
                    font_style='Button',
                    on_release=self.on_save
                ),
                MDFlatButton(
                    text='Cancel',
                    font_style='Button',
                    on_release=self.on_sched_dialog_dismiss
                )
            ]
        )
        # A disabled preachers field means this card was already saved.
        if not self.ids.preachers.disabled:
            self.save_dialog.open()
        else:
            self.save_dialog.disabled = True

    def on_check_text(self, text_input):
        # Enable the save button only once every required field is filled in.
        if self.ids.months.text != '' and self.ids.preachers.text != '' \
                and self.ids.date_text_field.text != '' and self.ids.time_text_field.text != '' \
                and self.ids.topic.text != '' and self.ids.sub_preachers.text != '' \
                and self.ids.opening_prayer.text != '' and self.ids.scripture_reading.text != '' \
                and self.ids.offertory.text != '' and self.ids.closing_prayer.text != '':
            self.ids.save_button.disabled = False

    def save_button_change(self):
        self.ids.save_button.text = 'Saved'
        self.ids.save_button.md_bg_color = 254 / 255, 215 / 255, 46 / 255, 1

    def on_sched_dialog_dismiss(self, button):
        self.save_dialog.dismiss()

    def for_sending(self):
        # Snapshot the staged values (index 0) into the outgoing queue.
        send_list = (month_list[0], sched_counter, date_list[0], time_list[0], year_list[0],
                     preachers_list[0], sub_preacher_list[0], topic_list[0], topic_details_list[0],
                     event_list[0], event_details_list[0], opening_prayer_list[0], scripture_reading_list[0],
                     offertory_list[0], closing_prayer_list[0], status[0])
        TO_SEND.append(send_list)

    def clear_list(self):
        # Empty the staging lists so the next card starts clean.
        month_list.clear()
        preachers_list.clear()
        date_list.clear()
        time_list.clear()
        year_list.clear()
        sub_preacher_list.clear()
        topic_list.clear()
        topic_details_list.clear()
        event_list.clear()
        event_details_list.clear()
        opening_prayer_list.clear()
        scripture_reading_list.clear()
        offertory_list.clear()
        closing_prayer_list.clear()
        status.clear()

    def disable_fields(self):
        # Freeze every input once the schedule has been saved.
        self.ids.months_drop_down.disabled = True
        self.ids.preachers.disabled = True
        self.ids.preachers_drop_down.disabled = True
        self.ids.date_text_button.disabled = True
        self.ids.time_text_button.disabled = True
        self.ids.topic.disabled = True
        self.ids.topic_details.disabled = True
        self.ids.sub_preachers.disabled = True
        self.ids.sub_drop_down.disabled = True
        self.ids.opening_prayer.disabled = True
        self.ids.scripture_reading.disabled = True
        self.ids.offertory.disabled = True
        self.ids.closing_prayer.disabled = True
        self.ids.event.disabled = True
        self.ids.event_details.disabled = True

    def on_save(self, button):
        """Stage this card's field values and queue them for sending."""
        global sched_counter
        self.save_dialog.dismiss()
        month_list.append(self.ids.months.text)
        preachers_list.append(self.ids.preachers.text)
        date_list.append(self.ids.date_text_field.text)
        time_list.append(self.ids.time_text_field.text)
        self.get_year()
        topic_list.append(self.ids.topic.text)
        sub_preacher_list.append(self.ids.sub_preachers.text)
        opening_prayer_list.append(self.ids.opening_prayer.text)
        scripture_reading_list.append(self.ids.scripture_reading.text)
        offertory_list.append(self.ids.offertory.text)
        closing_prayer_list.append(self.ids.closing_prayer.text)
        # Optional fields fall back to placeholder text.
        if self.ids.topic_details.text != '':
            topic_details_list.append(self.ids.topic_details.text)
        else:
            topic_details_list.append('No details provided.')
        if self.ids.event.text != '':
            event_list.append(self.ids.event.text)
        else:
            event_list.append('None')
        if self.ids.event_details.text != '':
            event_details_list.append(self.ids.event_details.text)
        else:
            event_details_list.append('None')
        sched_counter += 1
        sched_num_list.append(sched_counter)
        status.append('Pending')
        # Prepare to send
        self.for_sending()
        # clear list
        self.clear_list()
        # disable fields
        self.disable_fields()
        # change button
        self.save_button_change()

    # Toolbar button
    def send_dialog_open(self):
        if sched_counter != 0:
            self.dialog.open()
        else:
            self.dialog.disabled = True

    def on_close_dialog(self, button):
        self.dialog.dismiss()

    # SEND BUTTON
    def on_send_press(self, obj):
        self.on_close_dialog(self)
        Connection().on_main_send(TO_SEND)
        self.send_status()

    def send_status(self):
        # Confirmation dialog shown after the upload call returns.
        self.send_dialog = MDDialog(
            title='Sent',
            type='simple',
            size_hint=(.45, None),
            buttons=[
                MDFlatButton(
                    text='Okay',
                    font_style='Button',
                    on_release=self.send_spinner_close
                )
            ]
        )
        self.send_dialog.open()

    def send_spinner_close(self, button):
        self.send_dialog.dismiss()
# Outreach Card
class OutreachServiceCreateCard(MDBoxLayout):
    """Card widget for composing one outreach-service schedule.

    Same staging flow as MainServiceCreateCard, plus the outreach name field.
    """
    # Label shown at the top of the card, e.g. 'Schedule #1'.
    schedule_num_text = StringProperty('')

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Preachers dropdowns (main preacher and substitute share the choices)
        preachers_menu_items = [{
            'viewclass': 'OneLineListItem',
            'divider': None,
            'text': i,
            'height': dp(40),
            'on_release': lambda x=i: self.set_preachers_item(x),
        } for i in PREACHERS
        ]
        sub_preachers_menu_items = [{
            'viewclass': 'OneLineListItem',
            'divider': None,
            'text': i,
            'height': dp(40),
            'on_release': lambda x=i: self.set_sub_preachers_item(x),
        } for i in PREACHERS
        ]
        self.preachers_menu = MDDropdownMenu(
            caller=self.ids.preachers_drop_down,
            items=preachers_menu_items,
            position='auto',
            width_mult=2
        )
        self.sub_preachers_menu = MDDropdownMenu(
            caller=self.ids.sub_drop_down,
            items=sub_preachers_menu_items,
            position='auto',
            width_mult=2
        )
        # Months dropdown
        months_menu_items = [{
            'viewclass': 'OneLineListItem',
            'divider': None,
            'text': i,
            'size_hint_y': None,
            'height': dp(40),
            'on_release': lambda x=i: self.set_months_item(x),
        } for i in MONTHS
        ]
        self.months_menu = MDDropdownMenu(
            caller=self.ids.months_drop_down,
            items=months_menu_items,
            position='auto',
            width_mult=2
        )
        # Confirmation dialog for the toolbar send button.
        self.dialog = MDDialog(
            title='Are you sure ka na ba?',
            text=f'{sched_counter} schedule(s) to send.\nContinue?',
            type='simple',
            buttons=[
                MDFlatButton(text='Wait lang muna', font_style='Button', on_release=self.on_close_dialog),
                MDFlatButton(text='Send na dali', font_style='Button', on_release=self.on_send_press)
            ]
        )

    # --- dropdown selection handlers ---
    def set_preachers_item(self, text__item):
        self.preachers_menu.dismiss()
        self.ids.preachers.text = text__item

    def set_sub_preachers_item(self, text__item):
        self.sub_preachers_menu.dismiss()
        self.ids.sub_preachers.text = text__item

    def set_months_item(self, text__item):
        self.months_menu.dismiss()
        self.ids.months.text = text__item

    # --- date & time pickers ---
    def on_date_picker(self):
        self.date_dialog = MDDatePicker()
        self.date_dialog.bind(on_save=self.on_date_save, on_cancel=self.on_date_cancel)
        self.date_dialog.open()

    def on_date_save(self, instance, value, date_range):
        self.ids.date_text_field.text = str(value)

    def on_date_cancel(self, instance, value):
        self.date_dialog.dismiss()

    def show_time_picker(self):
        self.time_dialog = MDTimePicker()
        self.time_dialog.bind(on_save=self.get_time_save)
        self.time_dialog.open()

    def get_time_save(self, instance, value):
        self.ids.time_text_field.text = str(value)

    def on_time_save(self, instance, time):
        # Fix: set the widget's .text instead of overwriting the ids entry
        # itself with a plain string.
        self.ids.time_text_field.text = str(time)

    # textfield functions
    def get_year(self):
        # Year is the first four characters of the most recently saved date.
        year_list.append(date_list[-1][0:4])

    def on_schedule_save(self):
        """Ask for confirmation before freezing this card's values."""
        self.save_dialog = MDDialog(
            title='Save Schedule',
            text='Please check all the fields before sending. This can no longer be undone.\nAre you sure?',
            type='simple',
            buttons=[
                MDFlatButton(
                    text='Save',
                    font_style='Button',
                    on_release=self.on_save
                ),
                MDFlatButton(
                    text='Cancel',
                    font_style='Button',
                    on_release=self.on_sched_dialog_dismiss
                )
            ]
        )
        # A disabled preachers field means this card was already saved.
        if not self.ids.preachers.disabled:
            self.save_dialog.open()
        else:
            self.save_dialog.disabled = True

    def on_check_text(self, text_input):
        # Enable the save button only once every required field is filled in.
        # Fix: compare the outreach_name widget's .text (the original compared
        # the widget object itself to '', which is always true).
        if self.ids.outreach_name.text != '' and self.ids.months.text != '' and self.ids.preachers.text != '' \
                and self.ids.date_text_field.text != '' and self.ids.time_text_field.text != '' \
                and self.ids.topic.text != '' and self.ids.sub_preachers.text != '' \
                and self.ids.opening_prayer.text != '' and self.ids.scripture_reading.text != '' \
                and self.ids.offertory.text != '' and self.ids.closing_prayer.text != '':
            self.ids.save_button.disabled = False

    def save_button_change(self):
        self.ids.save_button.text = 'Saved'
        self.ids.save_button.md_bg_color = 254 / 255, 215 / 255, 46 / 255, 1

    def on_sched_dialog_dismiss(self, button):
        self.save_dialog.dismiss()

    def for_sending(self):
        # Snapshot the staged values (index 0) into the outgoing queue.
        send_list = (month_list[0], outreach_name_list[0], sched_counter, date_list[0], time_list[0], year_list[0],
                     preachers_list[0], sub_preacher_list[0], topic_list[0], topic_details_list[0],
                     event_list[0], event_details_list[0], opening_prayer_list[0], scripture_reading_list[0],
                     offertory_list[0], closing_prayer_list[0], status[0])
        TO_SEND.append(send_list)

    def clear_list(self):
        # Empty the staging lists so the next card starts clean.
        month_list.clear()
        outreach_name_list.clear()
        preachers_list.clear()
        date_list.clear()
        time_list.clear()
        year_list.clear()
        sub_preacher_list.clear()
        topic_list.clear()
        topic_details_list.clear()
        event_list.clear()
        event_details_list.clear()
        opening_prayer_list.clear()
        scripture_reading_list.clear()
        offertory_list.clear()
        closing_prayer_list.clear()
        status.clear()

    def disable_fields(self):
        # Freeze every input once the schedule has been saved.
        self.ids.months_drop_down.disabled = True
        self.ids.outreach_name.disabled = True
        self.ids.preachers.disabled = True
        self.ids.preachers_drop_down.disabled = True
        self.ids.date_text_button.disabled = True
        self.ids.time_text_button.disabled = True
        self.ids.topic.disabled = True
        self.ids.topic_details.disabled = True
        self.ids.sub_preachers.disabled = True
        self.ids.sub_drop_down.disabled = True
        self.ids.opening_prayer.disabled = True
        self.ids.scripture_reading.disabled = True
        self.ids.offertory.disabled = True
        self.ids.closing_prayer.disabled = True
        self.ids.event.disabled = True
        self.ids.event_details.disabled = True

    def on_save(self, button):
        """Stage this card's field values and queue them for sending."""
        global sched_counter
        self.save_dialog.dismiss()
        month_list.append(self.ids.months.text)
        outreach_name_list.append(self.ids.outreach_name.text)
        preachers_list.append(self.ids.preachers.text)
        date_list.append(self.ids.date_text_field.text)
        time_list.append(self.ids.time_text_field.text)
        self.get_year()
        topic_list.append(self.ids.topic.text)
        sub_preacher_list.append(self.ids.sub_preachers.text)
        opening_prayer_list.append(self.ids.opening_prayer.text)
        scripture_reading_list.append(self.ids.scripture_reading.text)
        offertory_list.append(self.ids.offertory.text)
        closing_prayer_list.append(self.ids.closing_prayer.text)
        # Optional fields fall back to placeholder text.
        if self.ids.topic_details.text != '':
            topic_details_list.append(self.ids.topic_details.text)
        else:
            topic_details_list.append('No details provided.')
        if self.ids.event.text != '':
            event_list.append(self.ids.event.text)
        else:
            event_list.append('None')
        if self.ids.event_details.text != '':
            event_details_list.append(self.ids.event_details.text)
        else:
            event_details_list.append('None')
        sched_counter += 1
        sched_num_list.append(sched_counter)
        status.append('Pending')
        # Prepare to send
        self.for_sending()
        # clear list
        self.clear_list()
        # disable fields
        self.disable_fields()
        # change button
        self.save_button_change()

    # Toolbar button
    def send_dialog_open(self):
        if sched_counter != 0:
            self.dialog.open()
        else:
            self.dialog.disabled = True

    def on_close_dialog(self, button):
        self.dialog.dismiss()

    # SEND BUTTON
    def on_send_press(self, obj):
        self.on_close_dialog(self)
        Connection().on_outreach_send(TO_SEND)
        self.send_status()

    def send_status(self):
        # Confirmation dialog shown after the upload call returns.
        self.send_dialog = MDDialog(
            title='Sent',
            type='simple',
            size_hint=(.45, None),
            buttons=[
                MDFlatButton(
                    text='Okay',
                    font_style='Button',
                    on_release=self.send_spinner_close
                )
            ]
        )
        self.send_dialog.open()

    def send_spinner_close(self, button):
        self.send_dialog.dismiss()
# Events Card
class EventServiceCreateCard(MDBoxLayout):
    """Card widget for composing one event schedule.

    Same staging flow as MainServiceCreateCard, plus event organizer/contact
    fields; the event name itself is required here.
    """
    # Label shown at the top of the card, e.g. 'Schedule #1'.
    schedule_num_text = StringProperty('')

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Preachers dropdowns (main preacher and substitute share the choices)
        preachers_menu_items = [{
            'viewclass': 'OneLineListItem',
            'divider': None,
            'text': i,
            'height': dp(40),
            'on_release': lambda x=i: self.set_preachers_item(x),
        } for i in PREACHERS
        ]
        sub_preachers_menu_items = [{
            'viewclass': 'OneLineListItem',
            'divider': None,
            'text': i,
            'height': dp(40),
            'on_release': lambda x=i: self.set_sub_preachers_item(x),
        } for i in PREACHERS
        ]
        self.preachers_menu = MDDropdownMenu(
            caller=self.ids.preachers_drop_down,
            items=preachers_menu_items,
            position='auto',
            width_mult=2
        )
        self.sub_preachers_menu = MDDropdownMenu(
            caller=self.ids.sub_drop_down,
            items=sub_preachers_menu_items,
            position='auto',
            width_mult=2
        )
        # Months dropdown
        months_menu_items = [{
            'viewclass': 'OneLineListItem',
            'divider': None,
            'text': i,
            'size_hint_y': None,
            'height': dp(40),
            'on_release': lambda x=i: self.set_months_item(x),
        } for i in MONTHS
        ]
        self.months_menu = MDDropdownMenu(
            caller=self.ids.months_drop_down,
            items=months_menu_items,
            position='auto',
            width_mult=2
        )
        # Confirmation dialog for the toolbar send button.
        self.dialog = MDDialog(
            title='Are you sure ka na ba?',
            text=f'{sched_counter} schedule(s) to send.\nContinue?',
            type='simple',
            buttons=[
                MDFlatButton(text='Wait lang muna', font_style='Button', on_release=self.on_close_dialog),
                MDFlatButton(text='Send na dali', font_style='Button', on_release=self.on_send_press)
            ]
        )

    # --- dropdown selection handlers ---
    def set_preachers_item(self, text__item):
        self.preachers_menu.dismiss()
        self.ids.preachers.text = text__item

    def set_sub_preachers_item(self, text__item):
        self.sub_preachers_menu.dismiss()
        self.ids.sub_preachers.text = text__item

    def set_months_item(self, text__item):
        self.months_menu.dismiss()
        self.ids.months.text = text__item

    # --- date & time pickers ---
    def on_date_picker(self):
        self.date_dialog = MDDatePicker()
        self.date_dialog.bind(on_save=self.on_date_save, on_cancel=self.on_date_cancel)
        self.date_dialog.open()

    def on_date_save(self, instance, value, date_range):
        self.ids.date_text_field.text = str(value)

    def on_date_cancel(self, instance, value):
        self.date_dialog.dismiss()

    def show_time_picker(self):
        self.time_dialog = MDTimePicker()
        self.time_dialog.bind(on_save=self.get_time_save)
        self.time_dialog.open()

    def get_time_save(self, instance, value):
        self.ids.time_text_field.text = str(value)

    def on_time_save(self, instance, time):
        # Fix: set the widget's .text instead of overwriting the ids entry
        # itself with a plain string.
        self.ids.time_text_field.text = str(time)

    # textfield functions
    def get_year(self):
        # Year is the first four characters of the most recently saved date.
        year_list.append(date_list[-1][0:4])

    def on_schedule_save(self):
        """Ask for confirmation before freezing this card's values."""
        self.save_dialog = MDDialog(
            title='Save Schedule',
            text='Please check all the fields before sending. This can no longer be undone.\nAre you sure?',
            type='simple',
            buttons=[
                MDFlatButton(
                    text='Save',
                    font_style='Button',
                    on_release=self.on_save
                ),
                MDFlatButton(
                    text='Cancel',
                    font_style='Button',
                    on_release=self.on_sched_dialog_dismiss
                )
            ]
        )
        # A disabled preachers field means this card was already saved.
        if not self.ids.preachers.disabled:
            self.save_dialog.open()
        else:
            self.save_dialog.disabled = True

    def on_check_text(self, text_input):
        # Enable the save button only once every required field is filled in.
        if self.ids.event.text != '' and self.ids.months.text != '' and self.ids.preachers.text != '' \
                and self.ids.date_text_field.text != '' and self.ids.time_text_field.text != '' \
                and self.ids.topic.text != '' and self.ids.sub_preachers.text != '' \
                and self.ids.opening_prayer.text != '' and self.ids.scripture_reading.text != '' \
                and self.ids.offertory.text != '' and self.ids.closing_prayer.text != '':
            self.ids.save_button.disabled = False

    def save_button_change(self):
        self.ids.save_button.text = 'Saved'
        self.ids.save_button.md_bg_color = 254 / 255, 215 / 255, 46 / 255, 1

    def on_sched_dialog_dismiss(self, button):
        self.save_dialog.dismiss()

    def for_sending(self):
        # Snapshot the staged values (index 0) into the outgoing queue.
        # (Removed the stray line-continuation backslashes — unnecessary
        # inside parentheses.)
        send_list = (month_list[0], event_list[0], sched_counter, date_list[0], time_list[0], year_list[0],
                     preachers_list[0], sub_preacher_list[0], topic_list[0], topic_details_list[0],
                     event_details_list[0], opening_prayer_list[0], scripture_reading_list[0], offertory_list[0],
                     closing_prayer_list[0], event_organizer_list[0], event_contact_list[0], status[0])
        TO_SEND.append(send_list)

    def clear_list(self):
        # Empty the staging lists so the next card starts clean.
        month_list.clear()
        event_list.clear()
        preachers_list.clear()
        date_list.clear()
        time_list.clear()
        year_list.clear()
        sub_preacher_list.clear()
        topic_list.clear()
        topic_details_list.clear()
        event_details_list.clear()
        opening_prayer_list.clear()
        scripture_reading_list.clear()
        offertory_list.clear()
        closing_prayer_list.clear()
        event_organizer_list.clear()
        event_contact_list.clear()
        status.clear()

    def disable_fields(self):
        # Freeze every input once the schedule has been saved.
        self.ids.months_drop_down.disabled = True
        self.ids.event.disabled = True
        self.ids.preachers.disabled = True
        self.ids.preachers_drop_down.disabled = True
        self.ids.date_text_button.disabled = True
        self.ids.time_text_button.disabled = True
        self.ids.topic.disabled = True
        self.ids.topic_details.disabled = True
        self.ids.sub_preachers.disabled = True
        self.ids.sub_drop_down.disabled = True
        self.ids.opening_prayer.disabled = True
        self.ids.scripture_reading.disabled = True
        self.ids.offertory.disabled = True
        self.ids.closing_prayer.disabled = True
        self.ids.event_details.disabled = True
        self.ids.event_organizer.disabled = True
        self.ids.event_contact.disabled = True

    def on_save(self, button):
        """Stage this card's field values and queue them for sending."""
        global sched_counter
        self.save_dialog.dismiss()
        month_list.append(self.ids.months.text)
        event_list.append(self.ids.event.text)
        preachers_list.append(self.ids.preachers.text)
        date_list.append(self.ids.date_text_field.text)
        time_list.append(self.ids.time_text_field.text)
        self.get_year()
        topic_list.append(self.ids.topic.text)
        sub_preacher_list.append(self.ids.sub_preachers.text)
        opening_prayer_list.append(self.ids.opening_prayer.text)
        scripture_reading_list.append(self.ids.scripture_reading.text)
        offertory_list.append(self.ids.offertory.text)
        closing_prayer_list.append(self.ids.closing_prayer.text)
        # Optional fields fall back to placeholder text.
        if self.ids.topic_details.text != '':
            topic_details_list.append(self.ids.topic_details.text)
        else:
            topic_details_list.append('No details provided.')
        if self.ids.event_details.text != '':
            event_details_list.append(self.ids.event_details.text)
        else:
            event_details_list.append('No details provided.')
        event_organizer_list.append(self.ids.event_organizer.text)
        event_contact_list.append(self.ids.event_contact.text)
        sched_counter += 1
        sched_num_list.append(sched_counter)
        status.append('Pending')
        # Prepare to send
        self.for_sending()
        # clear list
        self.clear_list()
        # disable fields
        self.disable_fields()
        # change button
        self.save_button_change()

    # Toolbar button
    def send_dialog_open(self):
        if sched_counter != 0:
            self.dialog.open()
        else:
            self.dialog.disabled = True

    def on_close_dialog(self, button):
        self.dialog.dismiss()

    # SEND BUTTON
    def on_send_press(self, obj):
        self.on_close_dialog(self)
        Connection().on_events_send(TO_SEND)
        self.send_status()

    def send_status(self):
        # Confirmation dialog shown after the upload call returns.
        self.send_dialog = MDDialog(
            title='Sent',
            type='simple',
            size_hint=(.45, None),
            buttons=[
                MDFlatButton(
                    text='Okay',
                    font_style='Button',
                    on_release=self.send_spinner_close
                )
            ]
        )
        self.send_dialog.open()

    def send_spinner_close(self, button):
        self.send_dialog.dismiss()
import random
# Load every review line from reviews.txt, printing progress every 100k lines.
data = []
count = 0
with open('reviews.txt', 'r') as f:  # `with` closes the file automatically
    for line in f:
        data.append(line)
        count += 1
        if count % 100000 == 0:  # use % to take the remainder
            print(len(data))
print('檔案讀取完ㄌ,總共有', len(data), '筆資料')
# Average review length in characters (includes the trailing newline).
sum_len = sum(len(d) for d in data)
#print(sum_len)
print('留言的平均長度為', sum_len/len(data))
# Keep only the reviews shorter than 100 characters.
new = [d for d in data if len(d) < 100]
# Fix: pick the random index from the *filtered* list. The original used a
# hard-coded randint(1, 21741), which can exceed len(new) and raise
# IndexError on new[z].
z = random.randint(0, len(new) - 1)
print(z)
print('一共有', len(new), '筆留言長度小於100')
print('第', z, '筆資料為: ')
print(new[z])
#!/usr/bin/env python
# encoding: utf-8
# @author: liusir
# @file: run_all_cases.py
# @time: 2020/10/11 5:14 下午
import unittest
import os
from itsDemoTest.comm import HTMLTestReportCN
from itsDemoTest.comm.email_utils import EmailUtils
import time
from itsDemoTest.comm.ReadConfig import config
from itsDemoTest.comm.log_utils import logger
# Discover every test_*.py case under ./testcases.
case_path = os.path.join( os.path.dirname(__file__),'testcases' )
print(case_path)
discover = unittest.defaultTestLoader.discover(start_dir=case_path,
                                               pattern='test_*.py',
                                               top_level_dir=case_path)
all_case_suite = unittest.TestSuite()
all_case_suite.addTest( discover )
# Create the timestamped report directory and open the HTML report file.
report_path = os.path.join(os.path.dirname(__file__),'reports/')
report_dir = HTMLTestReportCN.ReportDirectory(report_path)  # report-directory helper
report_dir.create_dir('API_TEST_')  # create the dated report directory
report_html_path = HTMLTestReportCN.GlobalMsg.get_value('report_path')  # path of the report file
report_html_file = open( report_html_path,'wb' )
try:
    html_runner = HTMLTestReportCN.HTMLTestRunner(stream=report_html_file,
                                                  title='工时2.0接口测试报告',
                                                  description='接口框架测试实战使用',
                                                  tester='P3P4')
    html_runner.run(all_case_suite)
finally:
    # Fix: close (and flush) the report file before attaching it to the
    # e-mail; otherwise the attachment can be truncated or locked.
    report_html_file.close()
log_path = os.path.join(os.path.dirname(__file__),
                        config.LOG_PATH + '/'+time.strftime('%Y%m%d%H%M_', time.localtime(time.time())) + 'API_TEST_LOG.log')
email_u = EmailUtils('智能工时2.0接口测试报告',report_html_file.name,log_path)
print(report_html_file)
email_u.send_mail()
|
import torch
from segan.discriminator import Discriminator
from segan.generator import Generator
class SEGANModule(torch.nn.Module):
    """ Container for both generator and discriminator.

    Builds a shared channel progression (1, init_channels,
    init_channels*stride, ..., init_channels*stride**n_layers); the generator
    uses all but the last entry, the discriminator uses the full list.
    Calling the module forwards through the generator only.
    """
    def __init__(
        self,
        n_layers: int = 10,        # number of stride-scaled channel steps
        init_channels: int = 2,    # channel count of the first conv layer
        kernel_size: int = 31,     # conv kernel size for both networks
        stride: int = 2,           # conv stride / channel growth factor
        d_linear_units: int = 8,   # width of the discriminator's linear head
        g_norm: str = None,        # generator normalization name (None = none)
        d_norm: str = None,        # discriminator normalization name (None = none)
    ):
        super(SEGANModule, self).__init__()
        # Channel progression shared by both networks: 1 input channel, then
        # init_channels scaled by stride**i for i in 0..n_layers.
        feature_maps = [1] + [init_channels * stride ** i for i in range(n_layers + 1)]
        self.generator = Generator(
            kernel_size=kernel_size,
            stride=stride,
            norm=g_norm,
            feature_maps=feature_maps[:-1],
        )
        self.discriminator = Discriminator(
            kernel_size=kernel_size,
            stride=stride,
            norm=d_norm,
            feature_maps=feature_maps,
            linear_units=d_linear_units,
        )
    def forward(self, x: torch.Tensor):
        # Only the generator runs on forward(); the discriminator is used
        # separately by the training loop.
        return self.generator(x)
|
import re
import time
import os
import logging
from io import BytesIO
from typing import Optional, List, Set
from dateutil.parser import parse as parse_date
from datetime import date, timedelta
from pdfminer.high_level import extract_pages
from pdfminer.layout import LTTextContainer
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.support.select import Select
from datafeeds import config, db
from datafeeds.common.alert import post_slack_message
from datafeeds.common.batch import run_datafeed
from datafeeds.common.captcha import recaptcha_v2
from datafeeds.common.support import Results
from datafeeds.common.base import BaseWebScraper, CSSSelectorBasePageObject
from datafeeds.common.support import Configuration
from datafeeds.common.typing import Status, BillingDatum
from datafeeds.common.upload import upload_bill_to_s3, hash_bill
from datafeeds.models import (
SnapmeterAccount,
Meter,
SnapmeterMeterDataSource as MeterDataSource,
UtilityService,
SnapmeterAccountMeter,
)
from datafeeds.parsers.pacific_power import extract_pdf_text
log = logging.getLogger(__name__)
def notify_rebill(meter_number: str, statement: date):
    """Post a Slack alert that a statement with corrections (re-bill) was found.

    Looks up the meter and its Snapmeter account by utility service id to
    build a human-friendly message; falls back to the raw meter number when
    either lookup comes back empty.

    Args:
        meter_number: utility service id printed on the bill.
        statement: statement date of the corrected bill.
    """
    # Meter whose utility service matches the billed meter number.
    meter = (
        db.session.query(Meter)
        .filter(
            UtilityService.service_id == meter_number,
            Meter.service == UtilityService.oid,
        )
        .first()
    )
    # Account owning that meter (joined through SnapmeterAccountMeter).
    account = (
        db.session.query(SnapmeterAccount)
        .filter(
            UtilityService.service_id == meter_number,
            Meter.service == UtilityService.oid,
            Meter.oid == SnapmeterAccountMeter.meter,
            SnapmeterAccountMeter.account == SnapmeterAccount.oid,
        )
        .first()
    )
    if meter and account:
        message = "%s (%s) in %s, statement date %s" % (
            meter.name,
            meter_number,
            account.name,
            statement.strftime("%m/%d/%y"),
        )
    else:
        # Unknown meter: report the raw number so the alert is still useful.
        message = "meter number %s, statement date %s" % (
            meter_number,
            statement.strftime("%m/%d/%y"),
        )
    post_slack_message(
        "LAWDP PDF scraper found a bill with corrections: %s" % message,
        "#scrapers",
        ":exclamation:",
        username="Scraper monitor",
    )
def get_pdf_text(filename) -> str:
    """Extract the text of a PDF bill, one line per text element.

    METER NUMBER lines are normalized by joining the two number parts (the
    space between them is removed) to make later regex searches easier.
    The extracted text is also written next to the PDF as a .txt file.

    Args:
        filename: path to the PDF to extract.
    Returns:
        The full extracted text, lines joined with newlines.
    """
    # Compile once and match once per line (the original ran the same
    # pattern through re.match and then re.search again).
    meter_re = re.compile(r"METER NUMBER (\w+-\d+) (\d+)")
    lines: List[str] = []
    for page in extract_pages(filename):  # no need to materialize a list first
        for element in page:
            if not isinstance(element, LTTextContainer):
                continue
            line = element.get_text().replace("\n", " ").strip()
            # modify METER NUMBER line to make searching easier ( remove space in between )
            match = meter_re.match(line)
            if match:
                line = "METER NUMBER {}{}".format(match.group(1), match.group(2))
            lines.append(line)
    pdf_text = "\n".join(lines)
    text_filename = filename.replace(r".pdf", r".txt")
    with open(text_filename, "w") as f:
        f.write(pdf_text)
    log.info("wrote text to %s" % text_filename)
    return pdf_text
def kw_regexes(meter_number: str):
    """Return the regexes used to parse electric (kW/kWh) bill text.

    Several patterns embed the meter number, so the dict is rebuilt per meter.
    Keys prefixed ``alt`` are fallbacks used when the primary ``billing_section``
    pattern does not match the extracted pdf text.
    """
    return {
        "meter_number": r"METER NUMBER (.+) \d+",
        "billing_period": r"BILLING PERIOD (\d+/\d+/\d+) - (\d+/\d+/\d+)",
        "billing_section": (
            r"(?:[\s\S]*)"  # This ensures that we only match the BILLING PERIOD occurrence that is closest to METER NUMBER
            r"(BILLING PERIOD (?:\d+\/\d+\/\d+) - (?:\d+\/\d+\/\d+)[\s\S]+"
            fr"METER NUMBER {meter_number}[\s\S]+?"
            r"Total Electric Charges \$ [\d,.]+"
            r"\n.+)"  # Match an extra line ( this line sometimes contain bill usage data (see ladwp-multi.txt) )
        ),
        "sub_billing_period": (
            r"BILLING PERIOD (?:\d+\/\d+\/\d+) - (?:\d+\/\d+\/\d+)[\s\S]+?"
            r"State Energy Surcharge - \d+ days\n(.+?kWh)[\s\S]+?"
            r"Electric Charges (\d+/\d+/\d+) - (\d+/\d+/\d+) \(\d+ Days\)[\s\S]+?"
            r"\$([\d,\.]+)[\s\S]+?"
            r"\$([\d,\.]+)"
        ),
        "sub_bill_data": (
            r"State Energy Surcharge - \d+ days\n(?P<used>.+?)kWh[\s\S]+?"
            r"Electric Charges (?P<start_date>\d+/\d+/\d+) - (?P<end_date>\d+/\d+/\d+) \(\d+ Days\)[\s\S]+?"
            r"\$(?:[\d,\.]+)[\s\S]+?"
            r"\$(?P<cost>[\d,\.]+)"
        ),
        "peaks": (
            r"(?:High Peak|Low Peak|Base) High Season Demand - .+days"
            r"[\s\S]+?(?P<peak>[\d,\.]+) kW x \$[\d\.]+\/kW"
        ),
        "peaks_2": r"\n([\d\.]+) kW +([\d\.]+) kW +([\d\.]+) kW\n",
        # Demand kW Energy kWh
        "usage_type_1": r"([\d\.]+) kW ([\d\.]+) kWh",
        # from box above line items; may have a variable number of kW / kWh fields
        # 39.47 kW 38.28 kW 24.59 kW 1556 kWh 2064 kWh 5247 kWh
        "usage_box_1": r"\n([\d\.]+ +kW .*? kWh)\n",
        # usage box with kVarh
        # 878.4 kW 0 kW 892.8 kW 0 kW 619.2 kW 0 kW 234720 kWh 270720 kWh 567360 kWh 145440 kVarh 165600 kVarh 348480 kVarh
        # 0 kW 0 kW 0 kW 0 kWh 0 kWh 96 kWh 0 kVarh 96 kVarh 96 kVarh
        "usage_box_2": r"\n([\d\.]+ +kW .*? kVarh)\n",
        # usage box at the very end of the file
        "usage_box_3": r"([\d\.]+ .*?kWh)$",
        "bill_data": (
            r"State Energy Surcharge[\s\S]+?"
            r"(?P<used>.+?)kWh x \$([\d\.,]+)\/kWh[\s\S]+?"
            r"Total Electric Charges \$ (?P<cost>[\d,\.]+)"
        ),
        "cost": r"Total Electric Charges \$ ([\d,.]+)",
        # if billing_section not found, try these
        "alt1_date_usage": r"Electric Charges\s+(\d+/\d+/\d+) - (\d+/\d+/\d+)\s+([\d\.,]+) kWh",
        # dates but no usage
        "alt2_date_usage": r"Electric Charges\s+(\d+/\d+/\d+) - (\d+/\d+/\d+)\s+",
        "alt1_cost": r"Total \w+ Charges \$ ([\d,.]+)",
        "cost_subtotal": r"Subtotal Electric Charges\n.*?Total Electric Charges\s+\$\s+(?P<cost>[\d\.,]+)",
        # requires re.DOTALL
        "alt1_peak": r"Total kWh used.*?([\d\.,]+) kW\s+([\d\.,]+) kWh",
        "alt_3_multi": r"Electric Charges\s+(\d+/\d+/\d+) - (\d+/\d+/\d+)\s+\(\d+ Days\) \$([\d,]*\.\d\d)",
    }
def _alternate_section(
    filename: str, bill_date: date, meter_number: str, pdf_text: str
) -> List[BillingDatum]:
    """Fallback parsing when the main billing-section regex doesn't match.

    First tries the multi-bill layout (PyPDF2-extracted text), then a
    single-bill layout with alternate regexes.  Raises if nothing matches.
    """
    regexes = kw_regexes(meter_number)
    # try multiple bills option first:
    with open(filename, "rb") as f:
        pdf_data = f.read()
    # Use PyPDF2 here to extract the individual bill costs beside their bill dates.
    alt_pdf_text = extract_pdf_text(BytesIO(pdf_data))
    sub_bills = re.findall(regexes["alt_3_multi"], alt_pdf_text)
    if sub_bills:
        billing_data = []
        for bill in sub_bills:
            datum = BillingDatum(
                start=parse_date(bill[0]).date(),
                # printed end dates overlap the next start; back up one day
                end=parse_date(bill[1]).date() - timedelta(days=1),
                statement=bill_date,
                cost=str_to_float(bill[2]),
                used=None,
                peak=None,
                attachments=None,
                utility_code=None,
                items=None,
            )
            billing_data.append(datum)
            log.info("alternate regex 3: data=%s", datum)
        return billing_data
    date_usage = re.search(regexes["alt1_date_usage"], pdf_text)
    if date_usage:
        used = str_to_float(date_usage.group(3))
    else:
        # dates but no usage value
        date_usage = re.search(regexes["alt2_date_usage"], pdf_text)
        used = 0
    # guard against a missing cost line instead of crashing on .group() of None
    cost_match = re.search(regexes["alt1_cost"], pdf_text)
    cost = str_to_float(cost_match.group(1)) if cost_match else None
    peak_match = re.search(regexes["alt1_peak"], pdf_text, re.DOTALL)
    if date_usage and cost:
        datum = BillingDatum(
            start=parse_date(date_usage.group(1)).date(),
            end=parse_date(date_usage.group(2)).date() - timedelta(days=1),
            statement=bill_date,
            cost=cost,
            used=used,
            peak=str_to_float(peak_match.group(1)) if peak_match else None,
            attachments=None,
            utility_code=None,
            items=None,
        )
        log.info("alternate regex 1: data=%s", datum)
        return [datum]
    # %-format the message: Exception() does not apply printf-style args,
    # so the original raised with an unformatted tuple
    raise Exception(
        "Error parsing pdf %s for %s: no billing section found"
        % (filename, meter_number)
    )
def _multi_period(
    bill_date: date, meter_number: str, bill_data_section: str
) -> List[BillingDatum]:
    """Parse a bill whose billing section contains multiple billing periods.

    Returns one BillingDatum per sub-period found in *bill_data_section*.
    Sub-periods that don't match the detailed ``sub_bill_data`` regex are
    silently skipped.
    """
    bills: List[BillingDatum] = []
    regexes = kw_regexes(meter_number)
    # We're only interested in the "sub" billing periods; delete the first billing
    # period line from the text, so that regexes["sub_billing_period"] doesn't match it.
    bill_data_section = re.sub(
        regexes["billing_period"],
        "",
        bill_data_section,
        count=1,
    )
    bill_data_subsections = re.finditer(
        regexes["sub_billing_period"], bill_data_section
    )
    for bill_data_subsection_match in bill_data_subsections:
        # Make a BillingDatum for each billing period
        bill_data_subsection = bill_data_subsection_match.group()
        bill_data_match = re.search(regexes["sub_bill_data"], bill_data_subsection)
        peaks_match = re.findall(regexes["peaks"], bill_data_subsection)
        log.debug("bill_data=%s peaks=%s", bill_data_match, peaks_match)
        if bill_data_match:
            datum = BillingDatum(
                start=parse_date(bill_data_match.group("start_date")).date(),
                # printed end date overlaps the next period's start; back up a day
                end=parse_date(bill_data_match.group("end_date")).date()
                - timedelta(days=1),
                statement=bill_date,
                cost=str_to_float(bill_data_match.group("cost")),
                used=str_to_float(bill_data_match.group("used")),
                # peak = largest demand value across the seasonal demand lines
                peak=max([str_to_float(x) for x in peaks_match])
                if peaks_match
                else None,
                attachments=None,
                utility_code=None,
                items=None,
            )
            log.info("multiple billing periods: data=%s", datum)
            bills.append(datum)
    return bills
def _single_period(
    bill_date: date,
    filename: str,
    meter_number: str,
    billing_period_match: str,
    bill_data_section: str,
    pdf_text: str,
) -> List[BillingDatum]:
    """Parse a regular bill with only one billing period.

    Tries, in order: a usage box (three layouts), a single "kW / kWh" line,
    and finally the detailed bill-data regex.  Returns an empty list if
    cost or usage could not be determined.
    """
    regexes = kw_regexes(meter_number)
    cost = used = peak = None
    # search once (the original ran the same regex twice: once to test,
    # once to extract the group)
    cost_match = re.search(regexes["cost"], bill_data_section)
    if cost_match:
        cost = str_to_float(cost_match.group(1))
    else:
        # %-format the message: Exception() does not apply printf-style args
        raise Exception(
            "Error parsing pdf %s for %s: couldn't extract cost"
            % (filename, meter_number)
        )
    # There are multiple different ways the bill data is represented in the pdf...
    for idx in [1, 2, 3]:
        usage_match = re.search(regexes["usage_box_%s" % idx], bill_data_section)
        if usage_match:
            break
    # lazy logging args instead of eager string formatting
    log.debug("usage_match=%s", idx)
    if usage_match:
        bill_data_match = re.search(regexes["bill_data"], bill_data_section)
        if not bill_data_match:
            bill_data_match = re.search(
                regexes["cost_subtotal"], bill_data_section, re.DOTALL
            )
        if bill_data_match:
            cost = str_to_float(bill_data_match.group("cost"))
        else:
            cost = str_to_float(re.search(regexes["alt1_cost"], pdf_text).group(1))
        # usage box looks like:
        # ['878.4 kW ', '0 kW ', ..., '234720 kWh ', ...] — sum the kWh
        # entries for usage, take the max kW entry for peak
        used = 0.0
        peak = None
        for val in re.findall(r"([\d\.]+ +kWh?)", usage_match.group(1)):
            float_val = str_to_float(val)
            if "kWh" in val:
                used += float_val
            else:
                peak = max(peak, float_val) if peak is not None else float_val
    elif re.search(regexes["usage_type_1"], bill_data_section):
        peak_str, used_str = re.search(
            regexes["usage_type_1"], bill_data_section
        ).group(1, 2)
        peak = str_to_float(peak_str)
        used = str_to_float(used_str)
        log.debug("usage_type_1: peak=%s used=%s", peak, used)
    else:
        bill_data_match = re.search(regexes["bill_data"], bill_data_section)
        used = str_to_float(bill_data_match.group("used"))
        cost = str_to_float(bill_data_match.group("cost"))
        peak_matches = re.findall(regexes["peaks"], bill_data_section)
        peak = None
        if peak_matches:
            peak = max([str_to_float(x) for x in peak_matches])
        else:
            # fall back to alternate peak layouts
            for exp in ["peaks_2", "usage_box_1"]:
                match = re.search(regexes[exp], bill_data_section)
                if match:
                    peak = max([str_to_float(x) for x in match.groups()])
                    break
        log.debug("other: bill_data_match=%s", bill_data_match)
    if cost is not None and used is not None:
        datum = BillingDatum(
            start=parse_date(billing_period_match[0]).date(),
            end=parse_date(billing_period_match[1]).date() - timedelta(days=1),
            statement=bill_date,
            cost=cost,
            used=used,
            peak=peak,
            attachments=None,
            utility_code=None,
            items=None,
        )
        log.info("single billing period: data=%s", datum)
        return [datum]
    return []
def parse_kw_bill(
    filename: str, bill_date: date, meter_number: str, pdf_text: str
) -> List[BillingDatum]:
    """Parse an electric bill: dispatch to the multi-period, single-period,
    or alternate-layout parser depending on what the regexes find."""
    regexes = kw_regexes(meter_number)
    bill_data_section_match = re.search(regexes["billing_section"], pdf_text)
    if not bill_data_section_match:
        return _alternate_section(filename, bill_date, meter_number, pdf_text)
    bill_data_section: str = bill_data_section_match.group(1)
    billing_periods = re.findall(regexes["billing_period"], bill_data_section)
    if not billing_periods:
        # %-format the message (Exception() does not apply printf-style args);
        # also restores the missing space after "pdf"
        raise Exception(
            "Error parsing pdf %s for %s: no Billing Periods found"
            % (filename, meter_number)
        )
    log.debug("billing_periods=%s", billing_periods)
    # Check if we have multiple billing periods in a bill
    if len(billing_periods) > 1:
        return _multi_period(bill_date, meter_number, bill_data_section)
    return _single_period(
        bill_date,
        filename,
        meter_number,
        billing_periods[0],
        bill_data_section,
        pdf_text,
    )
def parse_ccf_bill(meter_number: str, pdf_text: str) -> List[BillingDatum]:
    """Parse Water and Fire Service bills (commodity "ccf") from pdf text.

    Flow: find the water billing section for this meter; if absent, try the
    three Fire Service layouts.  A water section may contain one or many
    billing periods.  Adjacent bills separated by a 2-day gap get their end
    date bumped by one day at the end.
    """
    bills: List[BillingDatum] = []
    regexes = {
        "bill_date": r"BILL DATE (.+)",
        "meter_number": r"METER NUMBER (.+) \d+",
        "billing_period": r"BILLING PERIOD (?:\d+/\d+/\d+) - (?:\d+/\d+/\d+)",
        "water_billing_section": fr"SA # : {meter_number}[\s\S]+?Total Water Charges",
        "sub_billing_period": (
            r"BILLING PERIOD (?:\d+\/\d+\/\d+) - (?:\d+\/\d+\/\d+)[\s\S]+?"
            r"State Energy Surcharge - \d+ days\n(.+?kWh)[\s\S]+?"
            r"Electric Charges (\d+/\d+/\d+) - (\d+/\d+/\d+) \(\d+ Days\)[\s\S]+?"
            r"\$([\d,\.]+)[\s\S]+?"
            r"\$([\d,\.]+)"
        ),
        "sub_bill_data": (
            r"State Energy Surcharge - \d+ days\n(?P<used>.+?)kWh[\s\S]+?"
            r"Electric Charges (?P<start_date>\d+/\d+/\d+) - (?P<end_date>\d+/\d+/\d+) \(\d+ Days\)[\s\S]+?"
            r"\$(?:[\d,\.]+)[\s\S]+?"
            r"\$(?P<cost>[\d,\.]+)"
        ),
        "fire_service_data_1": (
            r"Fire Service Charges\n"
            rf"SA # : {meter_number}[\s\S]+?"
            r"BILLING PERIOD (?P<start_date>[\d\/]+) - (?P<end_date>[\d\/]+)[\s\S]+?"
            r"^(?P<used>[\d\.]+) HCF$[\s\S]+?"
            r"Total Fire Service Charges \$ (?P<cost>[\d\.]+)"
        ),
        "fire_service_data_2": (
            rf"SA # : {meter_number}.*?"
            r"BILLING PERIOD +(?P<start_date>[\d\/]+) - (?P<end_date>[\d\/]+)[\s\S].*?"
            r"\n(?P<used>[\d\.,]+) HCF.*?"
            r"Total Fire Service Charges .*?(?P<cost>[\d\.,]+)"
        ),
        # without HCF value
        "fire_service_data_3": (
            rf"SA # : {meter_number}.*?"
            r"BILLING PERIOD +(?P<start_date>[\d\/]+) - (?P<end_date>[\d\/]+)[\s\S].*?"
            r"\n(?P<used>[\d\.,]+)\n.*?"
            r"Total Fire Service Charges .*?(?P<cost>[\d\.,]+)"
        ),
        "single_line_water": (
            r"Water Charges +(?P<start_date>\d+\/\d+\/\d+) - (?P<end_date>\d+\/\d+\/\d+) +"
            r"(?P<used>[\d,\.]+) HCF\n\$(?P<cost>[\d,\.]+)"
        ),
        "multi_line_water": (
            rf"SA # : {meter_number}.*?"
            r"BILLING PERIOD +(?P<start_date>[\d\/]+) - (?P<end_date>[\d\/]+)[\s\S].*?"
            r"\n(.*?)"
            r"Total Water Charges .*?(?P<cost>[\d\.,]+)"
        ),
        "multi_line_water_use": r"([\d\.,]+) HCF x \$[\d\.,]+/HCF",
    }
    bill_date_str = re.search(regexes["bill_date"], pdf_text).group(1)
    bill_date = parse_date(bill_date_str).date()
    bill_data_section_match = re.search(regexes["water_billing_section"], pdf_text)
    if not bill_data_section_match:
        # check if we have a Fire Service bill and parse that
        # (three layouts; take the first that matches)
        for idx in [1, 2, 3]:
            fire_data_match = re.search(
                regexes[f"fire_service_data_{idx}"], pdf_text, re.MULTILINE | re.DOTALL
            )
            if fire_data_match:
                break
        if fire_data_match:
            bills.append(
                BillingDatum(
                    start=parse_date(fire_data_match.group("start_date")).date(),
                    # end dates overlap the next start; back up one day
                    end=parse_date(fire_data_match.group("end_date")).date()
                    - timedelta(days=1),
                    statement=bill_date,
                    cost=str_to_float(fire_data_match.group("cost")),
                    used=str_to_float(fire_data_match.group("used")),
                    peak=None,
                    attachments=None,
                    utility_code=None,
                    items=None,
                )
            )
        else:
            log.warning("couldn't find Water or Fire Service Charges in pdf_text")
    else:
        # Parse water bill
        bill_data_section_text = bill_data_section_match.group()
        billing_periods = re.findall(regexes["billing_period"], bill_data_section_text)
        water_match = re.search(regexes["single_line_water"], pdf_text)
        if not water_match:
            water_match = re.search(regexes["multi_line_water"], pdf_text, re.DOTALL)
        # Check if we have multiple billing periods in a bill
        if len(billing_periods) > 1:
            # We're only interested in the "sub" billing periods, delete the first billing
            # period line from the text, so that regexes["billing_period"] doesn't match it.
            bill_data_section_text = re.sub(
                regexes["billing_period"],
                "",
                bill_data_section_text,
                count=1,
            )
            bill_data_subsections = re.split(
                regexes["billing_period"], bill_data_section_text
            )
            # pop irrelevant lines above the first billing_period match
            bill_data_subsections.pop(0)
            for bill_data_subsection in bill_data_subsections:
                dates_match = re.search(
                    r"Water Charges (\d+\/\d+\/\d+) - (\d+\/\d+\/\d+) \(\d+ Days\)",
                    bill_data_subsection,
                )
                # one (quantity, rate) pair per tier line
                cost_match = re.findall(
                    r"([\d\.]+) HCF x \$([\d\.]+)\/HCF", bill_data_subsection
                )
                if cost_match and dates_match:
                    bills.append(
                        BillingDatum(
                            start=parse_date(dates_match.group(1)).date(),
                            end=parse_date(dates_match.group(2)).date()
                            - timedelta(days=1),
                            statement=bill_date,
                            # cost = sum over tiers of quantity * rate
                            cost=round(
                                sum(
                                    [
                                        str_to_float(x[0]) * str_to_float(x[1])
                                        for x in cost_match
                                    ]
                                ),
                                2,
                            ),
                            # usage = sum of tier quantities (HCF)
                            used=round(
                                sum([str_to_float(x[0]) for x in cost_match]), 5
                            ),
                            peak=None,
                            attachments=None,
                            utility_code=None,
                            items=None,
                        )
                    )
                else:
                    log.warning(
                        "Couldn't extract cost or start/end dates from water bill"
                    )
        elif water_match:
            # single billing period: usage is either a named group
            # (single_line_water) or summed from the tier lines (multi_line_water)
            if "used" in water_match.groupdict():
                used = str_to_float(water_match.group("used"))
            else:
                used = sum(
                    [
                        str_to_float(u)
                        for u in re.findall(
                            regexes["multi_line_water_use"], water_match.group(3)
                        )
                    ]
                )
            bills.append(
                BillingDatum(
                    start=parse_date(water_match.group("start_date")).date(),
                    end=parse_date(water_match.group("end_date")).date()
                    - timedelta(days=1),
                    statement=bill_date,
                    cost=str_to_float(water_match.group("cost")),
                    used=used,
                    peak=None,
                    attachments=None,
                    utility_code=None,
                    items=None,
                )
            )
    # close up one day gaps; sometimes bill end dates don't need to be adjusted
    final_bills: List[BillingDatum] = []
    sorted_bills = sorted(bills, key=lambda b: b.start)
    for idx, bill in enumerate(sorted_bills):
        curr_bill = bill
        next_bill = sorted_bills[idx + 1] if idx + 1 < len(sorted_bills) else None
        if next_bill and (next_bill.start - bill.end).days == 2:
            curr_bill = bill._replace(end=bill.end + timedelta(days=1))
        final_bills.append(curr_bill)
    return final_bills
def str_to_float(val: str) -> float:
    """Parse a numeric string, discarding every character that is not a
    digit, decimal point, or minus sign (thousands separators, '$', units)."""
    cleaned = re.sub(r"[^\d\.-]", "", val)
    return float(cleaned)
def parse_pdf(filename: str, meter_number: str, commodity: str) -> List[BillingDatum]:
    """Parse a PDF and return a list of BillingDatum objects, sorted by start date."""
    text = get_pdf_text(filename)
    # "Bill Date:" is the same on every page of the pdf
    date_match = re.search(r"BILL DATE (.+)", text)
    if date_match is None:
        log.warning("Not a bill")
        return []
    statement_date = parse_date(date_match.group(1)).date()
    if "Corrections" in text:
        # a corrected (re-billed) statement: alert the scrapers channel
        notify_rebill(meter_number, statement_date)
    if commodity == "ccf":
        # Water/Fire bills
        parsed = parse_ccf_bill(meter_number, text)
    else:
        parsed = parse_kw_bill(filename, statement_date, meter_number, text)
    return sorted(parsed, key=lambda b: b.start)
class LADWPBillPdfConfiguration(Configuration):
    """Scraper configuration: which meter, account, and commodity to collect
    bill PDFs for on ladwp.com."""

    def __init__(
        self,
        meter_number: str,
        utility_account_id: str,
        commodity: str,
        account_name: str,
    ):
        # this scraper only collects bills
        super().__init__(scrape_bills=True)
        self.commodity = commodity
        self.meter_number = meter_number
        self.account_name = account_name
        self.utility_account_id = utility_account_id
class LoginPage(CSSSelectorBasePageObject):
    """The ladwp.com home page, with username and password fields."""

    # ADF-generated element ids contain colons, which must be escaped in CSS
    UsernameFieldSelector = r"#LoginForm\:pt_sf1\:username\:\:content"
    PasswordFieldSelector = r"#LoginForm\:pt_sf1\:password\:\:content"
    LoginButtonSelector = r"#LoginForm\:pt_sf1\:lgnbtn"
    SplashScreenSelector = ".af_document_splash-screen-cell"

    def wait_until_ready(self):
        """Block until the credential fields and login button exist and the
        splash-screen overlay has disappeared."""
        log.info("Waiting for Login page to be ready")
        self._driver.wait().until(
            EC.presence_of_element_located(
                (By.CSS_SELECTOR, self.UsernameFieldSelector)
            )
        )
        self._driver.wait().until(
            EC.presence_of_element_located(
                (By.CSS_SELECTOR, self.PasswordFieldSelector)
            )
        )
        self._driver.wait().until(
            EC.presence_of_element_located((By.CSS_SELECTOR, self.LoginButtonSelector))
        )
        # the splash screen overlays the form; wait for it to go away
        self._driver.wait().until(
            EC.invisibility_of_element_located(
                (By.CSS_SELECTOR, self.SplashScreenSelector)
            )
        )

    def login(self, username: str, password: str):
        """Authenticate with the web page.
        Fill in the username, password, then click "Log In"
        """
        log.info("Inserting credentials on login page.")
        self._driver.fill(self.UsernameFieldSelector, username)
        self._driver.fill(self.PasswordFieldSelector, password)
        self.find_element(self.LoginButtonSelector).click()
class MyAccountPage(CSSSelectorBasePageObject):
    """My Account page contains a captcha, but we only care about the left navbar buttons."""

    BillHistorySelector = 'a[title="Bill & Notification History"]'

    def wait_until_ready(self):
        """Block until the Bill History navbar link exists, then pause to let
        the page settle."""
        log.debug("Waiting for Login page to be ready")
        self._driver.wait().until(
            EC.presence_of_element_located((By.CSS_SELECTOR, self.BillHistorySelector))
        )
        self._driver.sleep(5)

    def navigate_to_bill_history(self):
        # click through to the Bill & Notification History page
        log.info("clicking Bill & Notification History")
        self.find_element(self.BillHistorySelector).click()
class BillHistoryPage(CSSSelectorBasePageObject):
    """Bill & Notification History page: captcha, account selector, and the
    table of downloadable bill PDFs."""

    BillHistoryHeaderXpath = (
        "//span[contains(.,'Bill & Notification History') and @class='hdng2']/.."
    )
    ReCaptchaIframeParentXpath = "//div[@class='g-recaptcha']//iframe[@height]/.."
    BillPdfsTableSelector = "table.paymentHistoryMidTitle.af_panelGroupLayout"

    def too_many_sessions(self) -> bool:
        """Return True if the site is showing its 'too many sessions' error."""
        self._driver.wait().until(
            EC.presence_of_element_located((By.CSS_SELECTOR, ".rightPanelMyAcct"))
        )
        text = self._driver.find_element_by_css_selector(".rightPanelMyAcct").text
        if "This web user has reached too many sessions" in text:
            return True
        return False

    def wait_until_ready(self):
        """Block until the page header and the ReCaptcha iframe are present."""
        log.info("Waiting for Bill History Page to be ready")
        self._driver.wait().until(
            EC.presence_of_element_located((By.XPATH, self.BillHistoryHeaderXpath))
        )
        log.info(
            "Waiting for ReCaptcha to Appear"
        )  # should we add a special case for when a captcha isn't present?
        self._driver.wait().until(
            EC.presence_of_element_located((By.XPATH, self.ReCaptchaIframeParentXpath))
        )

    def wait_until_bills_ready(self):
        """Block until the table of bill PDF links is present."""
        log.info("Waiting for Bills Pdf Table to be ready")
        self._driver.wait().until(
            EC.presence_of_element_located(
                (By.CSS_SELECTOR, self.BillPdfsTableSelector)
            )
        )

    def select_account(self, account_id: str, account_name: str):
        """Pick the account in the account dropdown, by id then by name.

        No-op when there is no dropdown (single-account logins).
        """
        log.info("selecting account %s" % account_id)
        try:
            select = Select(
                self._driver.find_element_by_css_selector(".rightPanelMyAcct select")
            )
        except NoSuchElementException:
            log.info("no account select; single account")
            return
        try:
            select.select_by_visible_text(account_id)
        except NoSuchElementException as exc:
            # try account name if there is one
            if not account_name:
                raise exc
            log.debug("trying account name %s", account_name)
            select.select_by_visible_text(account_name)
        log.debug("waiting for loading spinner to appear")
        self._driver.sleep(5)
        log.debug("waiting for loading spinner to disappear")
        self._driver.wait().until(
            EC.invisibility_of_element_located(
                (By.CSS_SELECTOR, ".AFBlockingGlassPane")
            )
        )
        time.sleep(5)

    def solve_captcha(self) -> bool:
        """Solve the ReCaptcha and click Next; False if solving failed."""
        iframe_parent = self._driver.find_element_by_xpath(
            self.ReCaptchaIframeParentXpath
        )
        page_url = self._driver.current_url
        if not recaptcha_v2(self._driver, iframe_parent, page_url):
            log.warning("failed captcha solving")
            return False
        self.find_element('a[title="Next"]').click()
        return True

    def download_bills(self, start: date, end: date):
        """Click every bill link whose date falls in [start, end]; the browser
        saves the PDFs to its download directory."""
        for link in self._driver.find_elements_by_css_selector(".af_commandImageLink"):
            bill_date_str = link.text.strip()
            log.debug("found bill link %s", bill_date_str)
            try:
                bill_date = parse_date(bill_date_str).date()
            except Exception:
                # Probably not a date
                continue
            log.debug("found bill date %s", bill_date)
            if start <= bill_date <= end:
                log.info(f"Downloading Bill for date: {bill_date_str}")
                link.click()

    def logout(self):
        # try to avoid This web user has reached too many sessions
        self._driver.find_element_by_xpath("//a[contains(text(), 'Log out')]").click()
class LADWPBillPdfScraper(BaseWebScraper):
    """Scrape bill PDFs from ladwp.com and parse them into BillingDatum records."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.name = "LADWP bill PDF"
        self.login_url = "https://ladwp.com/"
        self.bill_components = ["start", "end", "statement", "cost", "used", "peak"]

    @property
    def meter_number(self):
        return self._configuration.meter_number

    @property
    def commodity(self):
        return self._configuration.commodity

    def _execute(self):
        """Log in, download bill PDFs for the date range, parse them, and
        return Results.  Raises on too-many-sessions or a failed captcha."""
        # Direct the driver to the login page
        self._driver.get(self.login_url)
        # Create page helpers
        login_page = LoginPage(self._driver)
        my_account_page = MyAccountPage(self._driver)
        bill_history_page = BillHistoryPage(self._driver)
        try:
            login_page.wait_until_ready()
        except Exception:
            self.screenshot("initial page load failed")
            # try one more time
            self._driver.get(self.login_url)
            login_page.wait_until_ready()
        login_page.login(self.username, self.password)
        self.screenshot("after login")
        my_account_page.wait_until_ready()
        my_account_page.navigate_to_bill_history()
        self.screenshot("bill history")
        if bill_history_page.too_many_sessions():
            # waiting 5 minutes doesn't seem to help
            bill_history_page.logout()
            raise Exception("too many sessions")
        bill_history_page.wait_until_ready()
        self.screenshot("after captcha")
        if not bill_history_page.solve_captcha():
            bill_history_page.logout()
            raise Exception("captcha failed")
        bill_history_page.wait_until_bills_ready()
        bill_history_page.select_account(
            self._configuration.utility_account_id, self._configuration.account_name
        )
        bill_history_page.wait_until_bills_ready()
        bill_history_page.download_bills(self.start_date, self.end_date)
        bill_history_page.logout()
        # get bills from download directory and parse
        bills: List[BillingDatum] = []
        prefix = f"{config.WORKING_DIRECTORY}/current"
        log.info("Waiting for downloads to finish")
        while any(".pdf.crdownload" in f for f in os.listdir(prefix)):
            # Chrome keeps a .crdownload suffix until the download completes
            time.sleep(1)
        start_dates: Set[date] = set()
        for filename in sorted(os.listdir(prefix)):
            if ".pdf" not in filename:
                continue
            log.info("parsing file %s" % filename)
            # fixed: the path and the two log strings below contained a literal
            # "(unknown)" placeholder where {filename} must be interpolated
            parsed_bills = parse_pdf(
                f"{prefix}/{filename}", self.meter_number, self.commodity
            )
            log.info(f"filename {filename} bills={parsed_bills}")
            if not parsed_bills:
                log.warning(f"no billing datum: filename={filename}")
                continue
            with open(prefix + "/" + filename, "rb") as pdf_data:
                # one attachment (the PDF) per file, keyed off the first bill
                bill = parsed_bills[0]
                key = hash_bill(
                    self._configuration.utility_account_id,
                    bill.start,
                    bill.end,
                    bill.cost,
                    bill.peak,
                    bill.used,
                )
                attachment_entry = upload_bill_to_s3(
                    BytesIO(pdf_data.read()),
                    key,
                    source="www.ladwp.com",
                    statement=bill.end,
                    utility="utility:ladwp",
                    utility_account_id=self._configuration.utility_account_id,
                )
            for bill in parsed_bills:
                attachments = [attachment_entry]
                if bill.start in start_dates:
                    # if we already have a bill with this start date, replace it
                    prev_bill = [b for b in bills if b.start == bill.start][0]
                    log.info(
                        "duplicate bill start: prev_bill = %s, bill = %s",
                        prev_bill,
                        bill,
                    )
                    bills.remove(prev_bill)
                    # copy the attachment
                    attachments += prev_bill.attachments
                bills.append(bill._replace(attachments=attachments))
                start_dates.add(bill.start)
        return Results(bills=bills)
def datafeed(
    account: SnapmeterAccount,
    meter: Meter,
    datasource: MeterDataSource,
    params: dict,
    task_id: Optional[str] = None,
) -> Status:
    """Entry point: run the LADWP bill-PDF scraper for one meter.

    Skips the run entirely when the meter already has a bill closing within
    the last 21 days, since ladwp.com is fragile.
    """
    configuration = LADWPBillPdfConfiguration(
        meter_number=meter.service_id,
        utility_account_id=meter.utility_service.utility_account_id,
        commodity=meter.commodity,
        account_name=(datasource.meta or {}).get("accountName"),
    )
    # If meter has a recent bill, don't go to website since ladwp.com is fragile.
    # last_closing is last element of tuple
    latest_closing = meter.bills_range[-1]
    if latest_closing and latest_closing >= date.today() - timedelta(days=21):
        log.info("latest bill is fresh (%s); stopping now", latest_closing)
        return Status.COMPLETED
    return run_datafeed(
        LADWPBillPdfScraper,
        account,
        meter,
        datasource,
        params,
        configuration=configuration,
        task_id=task_id,
    )
|
import csv
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from model import (Base,
Surname,
FemaleFirstName,
MaleFirstName)
# Create the schema, then load the three census name reference CSVs into sqlite.
engine = create_engine('sqlite:///census_data.db')
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()
for fname, _class in [('ref_census_surnames.csv', Surname),
                      ('ref_census_firstnames_female.csv', FemaleFirstName),
                      ('ref_census_firstnames_male.csv', MaleFirstName)]:
    names = []
    # Python 3's csv module requires a text-mode file ('rb' feeds it bytes
    # and raises); newline='' is the documented way to open csv files.
    with open(fname, 'r', newline='') as csvfile:
        rdr = csv.reader(csvfile, delimiter=',')
        # CSV columns: name, frequency, cumulative frequency, rank
        for row in rdr:
            names.append(_class(name=row[0],
                                freq=float(row[1]),
                                cum_freq=float(row[2]),
                                rank=int(row[3])))
    session.add_all(names)
    session.commit()
print('Number of surnames: {}'.format(session.query(Surname).count()))
print('Number of female first names: {}'.format(session.query(FemaleFirstName).count()))
print('Number of male first names: {}'.format(session.query(MaleFirstName).count()))
|
"""
The team.py file is where you should write all your code!
Write the __init__ and the step functions. Further explanations
about these functions are detailed in the wiki.
List your Andrew ID's up here!
gkasha
mdunaevs
aecos
"""
import random
from awap2019 import Tile, Direction, State
class Team(object):
    """AWAP 2019 team controller: precompute in __init__, act each turn in step()."""

    def __init__(self, initial_board, team_size, company_info):
        """Store the initial board, team size (always 4 per the wiki), and
        company info.  Precompute anything useful from the initial board here;
        feel free to add more instance variables.
        """
        self.team_name = "We don't deserve a team name"
        self.board = initial_board
        self.company_info = company_info
        self.team_size = team_size

    def step(self, visible_board, states, score):
        """Return a list of four Directions, one per team member.

        See the wiki for the shapes of visible_board, states, and score.
        Not implemented yet.
        """
        pass
def testCase1():
    """Find the smallest nonzero booth value and cross-check it against the
    threshold grid; returns (value, row, col)."""
    threshHoldArr = [[5, 1, 6, 10, 124, 20, 18, 4],
                     [13, 21, 12, 12, 124, 24, 11, 1],
                     [12, 10, 4, 10, 14, 20, 11, 4],
                     [11, 15, 14, 10, 12, 20, 12, 4],
                     [10, 12, 56, 7, 124, 20, 13, 4],
                     [9, 9, 6, 11, 124, 23, 4, 4],
                     [8, 6, 6, 3, 122, 2, 14, 4],
                     [7, 3, 6, 11, 1, 21, 13, 7]]
    boothArr = [[0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 12, 0, 0, 0, 1],
                [0, 10, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0],
                [0, 0, 0, 7, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 4, 0],
                [0, 0, 0, 3, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0, 0, 0]]
    best = 1000  # sentinel larger than any booth value
    best_row, best_col = 0, 0
    for row_idx, row in enumerate(boothArr):
        for col_idx, value in enumerate(row):
            # zero means "no booth here"; track the smallest nonzero entry
            if value != 0 and value < best:
                best, best_row, best_col = value, row_idx, col_idx
    assert best == threshHoldArr[best_row][best_col]
    return (best, best_row, best_col)


print(testCase1())
|
from setuptools import setup, find_packages
# Read the long description up front with a context manager: the original
# bare open('README.md').read() inside setup() leaked the file handle
# (it stayed open until garbage collection, raising ResourceWarning).
with open('README.md') as readme:
    long_description = readme.read()

setup(
    name='torrentleech_monitor',
    version='1.0',
    packages=find_packages(),
    long_description=long_description,
    install_requires=['logbook', 'requests', 'beautifulsoup4', 'ujson', 'tvdb_api', 'guessit'],
    entry_points={
        'console_scripts': [
            # `torrentleech_monitor` command runs monitor.main()
            'torrentleech_monitor = monitor:main',
        ]
    }
)
|
#!/usr/bin/env python3.7
#Scapy_Graph_Of_IPs.py - Version 1.0 - By Joe McManus - Modified By MMC - 31st March 2019
#import section - scapy, prettytable, collections and plotly.
#step 1: Imports.
from scapy.all import *
from prettytable import PrettyTable
from collections import Counter
import plotly
#Step 2: Read and append.
# Step 2: Read the capture and collect the source IP of every IP packet.
packets = rdpcap('/home/mark/coding/Python/Packet Capturing/2ndCapture-full.pcapng')
srcIP = []
for pkt in packets:
    if IP in pkt:
        try:
            srcIP.append(pkt[IP].src)
        except Exception:
            # best-effort: skip malformed packets, but don't swallow
            # KeyboardInterrupt/SystemExit like the original bare `except:` did
            pass

# Step 3: Count up the totals of each source IP.
cnt = Counter(srcIP)  # Counter consumes the iterable directly; no manual loop

# Step 4: Put results in a table and print.
#table = PrettyTable(["IP","Count"])
#for ip, count in cnt.most_common():
#    table.add_row([ip,count])
#print(table)

# Step 4 Version 2.0: Add lists for x and y and graph it.
xData = []
yData = []
for ip, count in cnt.most_common():
    xData.append(ip)
    yData.append(count)

# Step 5: Plot graph in local web browser.
plotly.offline.plot({
    "data": [plotly.graph_objs.Bar(x=xData, y=yData)]
})
|
# Time Complexity : O(mn)
# Space Complexity :O(mn)
# Did this code successfully run on Leetcode : Yes
# Any problem you faced while coding this : No
# Your code here along with comments explaining your approach
class Solution:
    def minDistance(self, word1: str, word2: str) -> int:
        """Levenshtein edit distance between word1 and word2.

        Classic DP where dp[i][j] is the minimum number of insert/delete/
        replace operations to turn word1[:j] into word2[:i].
        Time and space: O(len(word1) * len(word2)).
        """
        cols = len(word1) + 1
        rows = len(word2) + 1
        dp = [[0] * cols for _ in range(rows)]
        # converting to/from the empty string costs one edit per character
        for j in range(cols):
            dp[0][j] = j
        for i in range(rows):
            dp[i][0] = i
        for i in range(1, rows):
            for j in range(1, cols):
                if word2[i - 1] == word1[j - 1]:
                    dp[i][j] = dp[i - 1][j - 1]  # characters match: no edit
                else:
                    # cheapest of replace, insert, delete
                    dp[i][j] = 1 + min(dp[i - 1][j - 1], dp[i][j - 1], dp[i - 1][j])
        return dp[rows - 1][cols - 1]
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Date : Nov-20-20 16:27
# @Author : Kelly Hwong (dianhuangkan@gmail.com)
# @Link : http://example.org
import os
from datetime import datetime
import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, Dense, MaxPool2D, Flatten
from lenet import LeNet5
from model_config import get_confusion_matrix_metrics
def main():
    """Train LeNet-5 on MNIST (or CIFAR-10) with TensorBoard batch logging."""
    # Experiment configs
    model_type = "LeNet5-tf"
    # Paths: one timestamped TensorBoard log dir per run
    current_time = datetime.now().strftime("%Y%m%d-%H%M%S")
    log_dir = os.path.join("logs", model_type, current_time)
    # Prepare data
    dataset_name = "mnist"
    if dataset_name == "cifar10":
        dataset = tf.keras.datasets.cifar10
        input_shape = (32, 32, 3)
    elif dataset_name == "mnist":
        dataset = tf.keras.datasets.mnist
        input_shape = (28, 28, 1)
    else:
        dataset = tf.keras.datasets.mnist
    num_classes = 10
    # NOTE(review): test_images/test_labels are loaded but never used below —
    # there is no evaluation step in this script; confirm that is intentional.
    (train_images, train_labels), \
        (test_images, test_labels) = dataset.load_data()
    train_labels = tf.keras.utils.to_categorical(train_labels)  # to one-hot
    if dataset_name == "mnist":
        # MNIST arrays are (N, 28, 28); add the trailing channel axis
        train_images = np.expand_dims(train_images, -1)
    # derive the model input shape from the actual training data
    input_shape = train_images.shape[1:]
    model = LeNet5(input_shape=input_shape, num_classes=num_classes)
    # track confusion-matrix metrics for a single target class
    target_class_id = 1
    metrics = get_confusion_matrix_metrics(class_id=target_class_id)
    from tensorflow.keras.losses import CategoricalCrossentropy
    from tensorflow.keras.optimizers import Adam
    model.compile(loss=CategoricalCrossentropy(),
                  optimizer=Adam(),
                  metrics=metrics)
    from tensorflow.keras.callbacks import TensorBoard
    # update_freq="batch" logs metrics after every batch, not just per epoch
    tensorboard_callback = TensorBoard(log_dir=log_dir, update_freq="batch")
    callbacks = [tensorboard_callback]
    model.fit(
        train_images,
        train_labels,
        batch_size=32,
        epochs=10,
        callbacks=callbacks,
        verbose=1)


if __name__ == "__main__":
    main()
|
import os
import cx_Oracle # 导入数据库
import pandas as pd #导入操作数据集工具
#from sqlalchemy import create_engine #导入 sqlalchemy 库,然后建立数据库连接
import time #导入时间模块
import numpy as np #导入numpy数值计算扩展
class OpOracle(object):
    """Thin convenience wrapper around a cx_Oracle connection.

    Opens the connection and a shared cursor in __init__ and closes them
    in __del__.  SELECT results are returned as pandas DataFrames.
    """

    def __init__(self, host='172.30.10.180', port='1521', sid='bpmtest', user='ecology', password='bpmtest01'):
        """Connect to Oracle; on failure print the error and exit."""
        try:
            dsn = cx_Oracle.makedsn(host, port, sid)
            # default credentials for the BPM test database
            self.conn = cx_Oracle.connect(user, password, dsn)
        except Exception as e:
            print('数据库连接异常!%s' % e)
            quit()
        else:
            # no exception: create the shared cursor
            self.cur = self.conn.cursor()

    def execute(self, sql):
        """Execute any SQL statement.

        SELECTs return a DataFrame; other statements are committed and return
        'ok'.  On error the exception object is returned (kept for backward
        compatibility with existing callers).
        """
        try:
            self.cur.execute(sql)
        except Exception as e:
            print('sql语句有错误!%s' % e)
            return e
        if sql[:6].upper() == 'SELECT':
            # build a DataFrame with column names taken from the cursor metadata
            des = self.cur.description
            columns = [x[0] for x in des]
            rows = self.cur.fetchall()
            return pd.DataFrame(rows, columns=columns)
        else:
            # non-SELECT statements: commit the change
            self.conn.commit()
            return 'ok'

    def query(self, sql):
        """Run a SELECT via pandas.read_sql_query; returns a DataFrame, or the
        exception object on error."""
        try:
            data = pd.read_sql_query(sql, self.conn)
        except Exception as e:
            print('sql语句有错误!%s' % e)
            return e
        else:
            return data

    def __del__(self):
        # __init__ may have failed before cur/conn were assigned (quit()
        # raises SystemExit before self.cur exists); guard so interpreter
        # shutdown doesn't raise AttributeError from the destructor.
        for attr in ('cur', 'conn'):
            handle = getattr(self, attr, None)
            if handle is not None:
                try:
                    handle.close()
                except Exception:
                    pass
#bpm_Op = OpOracle('172.30.10.180', '1521', 'bpmtest', 'ecology', 'bpmtest01') # 实例化
#print(bpm_Op.query('select * from hrmresource'))
#print('ok!')
#print(bpm_Op.execute('select * from hrmdepartment'))
#print('ok!!')
#bpm_Op.__del__
# def query(table):
#
# host = "172.30.10.180" #数据库ip
# port = "1521" #端口
# sid = "bpmtest" #数据库名称
# dsn = cx_Oracle.makedsn(host, port, sid)
#
# #scott是数据用户名,tiger是登录密码(默认用户名和密码)
# conn = cx_Oracle.connect("ecology", "bpmtest01", dsn)
#
# #SQL语句,可以定制,实现灵活查询
# sql = 'select * from '+ table
#
# # 使用pandas 的read_sql函数,可以直接将数据存放在dataframe中
# results = pd.read_sql(sql,conn)
#
# conn.close
# return results
#
# test_data = query('hrmresource') # 可以得到结果集
# print(test_data)
|
# Create (programmatically) a text file and write a set of space-separated
# numbers into it. The program must then compute the sum of the numbers
# stored in the file and print it.
user_input = input(
    "Please input numbers separated by spaces> ")
user_words = user_input.split(" ")
numbers = []
for word in user_words:
    # Skip tokens that are not valid numbers, reporting each one.
    try:
        numbers.append(float(word))
    except ValueError:
        print(f"There is not a number: {word}")
file_path = "Task5_05_Result.txt"
with open(file_path, 'w') as stream:
    print(*numbers, file=stream)
# Re-read the file so the sum is genuinely computed from the file contents,
# as the task statement requires (previously the in-memory list was summed
# and the file was write-only).
with open(file_path) as stream:
    file_numbers = [float(token) for token in stream.read().split()]
print(f"Numbers data in the following file: {file_path}")
print(f"Numbers sum: {sum(file_numbers)}")
|
'''
@Author: your name
@Date: 2020-03-31 21:57:19
@LastEditTime: 2020-03-31 22:29:04
@LastEditors: Please set LastEditors
@Description: In User Settings Edit
@FilePath: /Algrithm/LeetCode/26.删除排序数组中的重复项.py
'''
#
# @lc app=leetcode.cn id=26 lang=python3
#
# [26] 删除排序数组中的重复项
#
# @lc code=start
class Solution:
    def removeDuplicates(self, nums):
        """LeetCode 26: drop consecutive duplicates from a sorted list.

        Compacts the unique values into the front of ``nums`` in place and
        returns how many unique values were kept.
        """
        kept = 0
        for value in nums:
            # Keep the value unless it repeats the last value we kept.
            if kept == 0 or nums[kept - 1] != value:
                nums[kept] = value
                kept += 1
        return kept
class Solution:
    def maxProfit(self, prices: "List[int]") -> int:
        """LeetCode 121: max profit from a single buy followed by one sell.

        The annotation is quoted because ``List`` is never imported in this
        file; the original unquoted ``List[int]`` raised NameError when the
        class body was evaluated.
        """
        # Robustness: no prices means no transaction and zero profit
        # (previously dp[-1] raised IndexError on an empty list).
        if not prices:
            return 0
        # dp[i][0]: best profit on day i while holding no stock.
        # dp[i][1]: best profit on day i while holding the (single) stock.
        dp = [[0, 0] for _ in range(len(prices))]
        for i in range(len(prices)):
            if i == 0:
                dp[i][0] = 0
                dp[i][1] = -prices[0]
            else:
                # Not holding today: either sold today or stayed flat.
                dp[i][0] = max(dp[i - 1][1] + prices[i], dp[i - 1][0])
                # Holding today: either bought today (only one buy allowed,
                # so the buy transition starts from 0 profit, not
                # dp[i-1][0] - prices[i]) or kept yesterday's position.
                dp[i][1] = max(-prices[i], dp[i - 1][1])
        return dp[-1][0]
# @lc code=end
if __name__ == "__main__":
    so = Solution()
    # NOTE(review): at this point ``Solution`` is the second class defined
    # above (which only has maxProfit) and shadows the earlier class that
    # defined removeDuplicates — this call raises AttributeError at runtime.
    so.removeDuplicates([1,1,2])
#!/usr/bin/env python
"""Cron flows."""
# pylint: disable=unused-import
# These imports populate the Flow registry
from grr.lib.flows.cron import compactors
from grr.lib.flows.cron import filestore_stats
from grr.lib.flows.cron import system
|
# coding:utf-8
import argparse
import re
import time
from math import *
import numpy as np
from my_functions_2 import *
# Wall-clock timer for the "required time" report printed at the end.
startTime = time.time()
# Command-line interface; every option can alternatively come from the
# config file given with -c (see getConfig calls below).
parser = argparse.ArgumentParser(
    description='analyse BG and output BG stats parameter(number of sample ,sum, , mean, std, kurtosis, max, min, middle, sum(2, 3, 4)')
parser.add_argument(
    '-c', '--config_file', help='config file path (init : None)')
parser.add_argument(
    '-i', '--input_directory', default='./',
    help='input directory path (init : ./)')
parser.add_argument(
    '-m', '--mean_BG_file', help='mean BG file path (init : None)')
parser.add_argument(
    '-std', '--std_BG_file', help='std BG file path (init : None)')
parser.add_argument(
    '-kurtosis', '--kurtosis_BG_file',
    help='kurtosis BG file path (init : None)')
parser.add_argument(
    '--valid_pixel', help='valid pixel condition (init : None)')
parser.add_argument(
    '--event_list_file', help='event list file path (init : None)')
parser.add_argument(
    '--exclude_rim', default='True',
    help='exclude event mixing rim of frame (init : True)')
parser.add_argument(
    '--limit_frame_num', help='limit frame number in analysis (init : None)')
parser.add_argument(
    '--match_file_name', default='.+\.fits',
    help='file name as regular expression (init : .+\\.fits)')
parser.add_argument(
    '--HDU_index', default='0',
    help='HDU index containing frame data (init : 0)')
parser.add_argument(
    '--valid_frame_shape',
    help='valid frame shape (init : None)')
parser.add_argument(
    '--invalid_shape_process', default='first',
    help='invalid shape process (init : first)')
parser.add_argument(
    '-o', '--BG_stats_file', help='BG stats file path(output) (init : None)')
args = parser.parse_args()
# Load the optional INI config; getConfig (from my_functions_2) merges each
# command-line value with the corresponding config-file entry.
if args.config_file is not None:
    dicConfig = getDicIni(args.config_file, message=True)
else:
    dicConfig = None
strInputDirPath = getConfig(
    args.input_directory, ['-i','--input_directory'], dicConfig, 'input',
    'directory_path')
strMeanBGFilePath = getConfig(
    args.mean_BG_file, ['-m','--mean_BG_file'], dicConfig, 'input',
    'mean_BG_file_path')
strStdBGFilePath = getConfig(
    args.std_BG_file, ['-std','--std_BG_file'], dicConfig, 'input',
    'std_BG_file_path')
strKurtosisBGFilePath = getConfig(
    args.kurtosis_BG_file, ['-kurtosis','--kurtosis_BG_file'], dicConfig, 'input',
    'kurtosis_BG_file_path')
strValidPixelCondition = getConfig(
    args.valid_pixel, ['--valid_pixel'], dicConfig, 'input',
    'valid_pixel')
strEvlistFilePath = getConfig(
    args.event_list_file, ['--event_list_file'], dicConfig,
    'input', 'event_list_file_path')
strExcludeRim = getConfig(
    args.exclude_rim, ['--exclude_rim'], dicConfig,
    'input', 'exclude_rim')
strLimitFrameNum = getConfig(
    args.limit_frame_num, ['--limit_frame_num'], dicConfig, 'input',
    'limit_frame_num')
strMatchFileName = getConfig(
    args.match_file_name, ['--match_file_name'], dicConfig, 'input',
    'match_file_name')
strHDUIndex = getConfig(
    args.HDU_index, ['--HDU_index'], dicConfig, 'input', 'HDU_index')
strValidFrameShape = getConfig(
    args.valid_frame_shape, ['--valid_frame_shape'], dicConfig, 'input',
    'valid_frame_shape')
strInvalidShapeProcess = getConfig(
    args.invalid_shape_process, ['--invalid_shape_process'], dicConfig, 'input',
    'invalid_shape_process')
strOutputBGStatsFilePath = getConfig(
    args.BG_stats_file, ['-o','--BG_stats_file'], dicConfig, 'output',
    'BG_stats_file_path')
# Normalize the input directory and collect the FITS frame files to analyse.
strInputDirPath = getStrAbsPath(strInputDirPath)
if strInputDirPath[-1] != '/':
    strInputDirPath += '/'
lsStrFileName = sorted(
    getLsStrFileName(strInputDirPath, match=strMatchFileName))
# Optional reference maps: each is loaded only when a path was supplied.
if strMeanBGFilePath is not None:
    strMeanBGFilePath = getStrAbsPath(strMeanBGFilePath)
    arrMeanBG = getArrFits(strMeanBGFilePath, message=True)
else:
    arrMeanBG = None
if strStdBGFilePath is not None:
    strStdBGFilePath = getStrAbsPath(strStdBGFilePath)
    arrStdBG = getArrFits(strStdBGFilePath, message=True)
else:
    arrStdBG = None
if strKurtosisBGFilePath is not None:
    strKurtosisBGFilePath = getStrAbsPath(strKurtosisBGFilePath)
    arrKurtosisBG = getArrFits(strKurtosisBGFilePath, message=True)
else:
    arrKurtosisBG = None
if strEvlistFilePath is not None:
    strEvlistFilePath = getStrAbsPath(strEvlistFilePath)
    dicEvlistData = getArrFits(strEvlistFilePath, header=True, message=True)
else:
    dicEvlistData = None
# String flags from CLI/config converted to native Python values.
if strExcludeRim != 'True':
    excludeRim = False
else:
    excludeRim = True
if strLimitFrameNum is not None:
    limitFrameNum = int(strLimitFrameNum)
else:
    limitFrameNum = None
HDUIndex = int(strHDUIndex)
tpValidFrameShape = getTpValidFrameShape(
    strValidFrameShape, strInputDirPath, lsStrFileName, HDUIndex, arrMeanBG)
# NOTE(review): ``os`` is not imported in this file's visible import block;
# presumably it arrives via ``from my_functions_2 import *`` — confirm.
strThisScriptFileDir = os.path.dirname(os.path.abspath(__file__)) + '/'
if strValidPixelCondition not in [None, 'True']:
    # Generate valid_pixel_function.py from the template by replacing the
    # template's last line with the user-supplied condition, then import it.
    lsStrValidPixelTxtLine = getLsStrTxtLine(strThisScriptFileDir + 'valid_pixel_function_0.py')
    lsStrValidPixelTxtLine[-1] = '    return ' + strValidPixelCondition
    saveAsTxt(lsStrValidPixelTxtLine, strThisScriptFileDir+'valid_pixel_function.py')
    from valid_pixel_function import *
    arrIsValidPixel = genArrIsValidPixel(tpValidFrameShape, arrMeanBG, arrStdBG, arrKurtosisBG)
else:
    # No condition: every pixel is valid (boolean array of True).
    arrIsValidPixel = np.zeros(tpValidFrameShape) == 0
# Compute the BG statistics, with or without the event list as supplied.
if dicEvlistData is None:
    dicStrBGStatsParam = genDicStrBGStatsParamWithoutEvlist(
        strInputDirPath, lsStrFileName, arrMeanBG, arrStdBG, arrKurtosisBG,
        arrIsValidPixel, limitFrameNum, strMatchFileName, HDUIndex,
        tpValidFrameShape, strInvalidShapeProcess)
else:
    dicStrBGStatsParam = genDicStrBGStatsParamWithEvlist(
        strInputDirPath, lsStrFileName, arrMeanBG, arrStdBG, arrKurtosisBG,
        arrIsValidPixel, dicEvlistData, excludeRim, limitFrameNum,
        strMatchFileName, HDUIndex, tpValidFrameShape, strInvalidShapeProcess)
# If no config file was given, reconstruct one from the effective settings
# so the output INI records how it was produced.
# NOTE(review): 'exclude_rim' is missing from this reconstructed section
# although every other option is recorded — confirm whether intentional.
if dicConfig is None:
    dicConfig = {'input' : {
        'directory_path' : setHeader(strInputDirPath),
        'mean_BG_file_path' : setHeader(strMeanBGFilePath),
        'std_BG_file_path' : setHeader(strStdBGFilePath),
        'kurtosis_BG_file_path' : setHeader(strKurtosisBGFilePath),
        'valid_pixel' : setHeader(strValidPixelCondition),
        'event_list_file_path' : setHeader(strEvlistFilePath),
        'limit_frame_num' : setHeader(strLimitFrameNum),
        'match_file_name' : setHeader(strMatchFileName),
        'HDU_index' : setHeader(strHDUIndex),
        'valid_frame_shape' : setHeader(strValidFrameShape),
        'invalid_shape_process' : setHeader(strInvalidShapeProcess)
    }}
dicOutput = {
    'config' : dicConfig['input'],
    'stats_data' : dicStrBGStatsParam
}
saveAsIni(dicOutput, strOutputBGStatsFilePath, message=True)
print('required time : ' + str(time.time() - startTime) + ' sec')
|
import os
import re
from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from tempfile import mkdtemp
from werkzeug.exceptions import default_exceptions
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Ensure templates are auto-reloaded
app.config["TEMPLATES_AUTO_RELOAD"] = True
# Ensure responses aren't cached
@app.after_request
def after_request(response):
    """Disable client and proxy caching on every outgoing response."""
    response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
    response.headers["Expires"] = 0
    response.headers["Pragma"] = "no-cache"
    return response
# Custom filter: format numbers as US dollars inside Jinja templates
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_FILE_DIR"] = mkdtemp()
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
@app.route("/")
@login_required
def index():
    """Show portfolio of stocks

    Builds one row per symbol the user still holds (net shares > 0, where
    transaction ``type`` is +1 for buys and -1 for sells, as inserted by
    buy()/sell() below), re-prices each holding via lookup(), and renders
    the table with the cash balance and grand total.
    """
    try:
        # Fetch how many shares of what companies the user has
        rows = db.execute(("SELECT SUM(shares * type) as shares, company, symbol "
                           "FROM transactions "
                           "WHERE user_id = :user_id GROUP BY symbol"), user_id=session["user_id"])
        # Fetch user's current balance
        balance = db.execute("SELECT cash FROM users WHERE id = :id", id=session["user_id"])
        # Check for errors
        if not balance or len(balance) != 1:
            flash("User not found, please (re)log in")
            return redirect("/logout")
        # Save the current balance
        balance = balance[0]["cash"]
        # Grand total (balance + shares * cost)
        total = balance
        # Table rows are passed to construct the table in the index page
        t_rows = []
        # Iterate over rows
        # Fetch the actual prices for each stock
        # Recalculate the total variable
        # Save the stocks needed to be displayed (if number of shares is >0)
        for row in rows:
            if row["shares"] > 0:
                result = lookup(row["symbol"])
                if not result:
                    flash("Invalid symbol in your transactions list, please try again")
                    return apology("ERROR")
                row["price"] = result["price"]
                total += result["price"] * row["shares"]
                t_rows.append(row)
        return render_template("index.html", rows=t_rows, balance=balance, total=total, usd=usd)
    except RuntimeError:
        flash("Some error occurred, please try again")
        return apology("ERROR")
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
    """Buy shares of stock

    POST: validate the symbol and share count, check the user's cash,
    then record a type=+1 transaction and debit the price from cash.
    GET: render the buy form.
    """
    # Process form on POST and show the form on GET requests
    if request.method == "POST":
        # Get symbol and number of shares
        symbol = request.form.get("symbol")
        try:
            shares = int(request.form.get("shares"))
        except ValueError:
            flash("Please provide a valid symbol (e.g. GOOGL) and positive number of shares to buy")
            return apology("ERROR")
        # Check for errors
        if not symbol or not shares or shares < 1:
            flash("Please provide a valid symbol (e.g. GOOGL) and positive number of shares to buy")
            return apology("ERROR")
        # Look up for actual data
        result = lookup(symbol)
        if not result:
            flash("Invalid symbol, please try again")
            return apology("ERROR")
        else:
            try:
                # Get user's cash
                row = db.execute("SELECT cash FROM users WHERE id = :id", id=session["user_id"])
                # Check for errors
                if not row or len(row) != 1:
                    flash("User not found, please (re)log in")
                    return redirect("/logout")
                balance = row[0]["cash"]
                # Check if user has enough money and proceed the operations if so
                if balance >= shares * result["price"]:
                    db.execute("INSERT INTO transactions (user_id, company, symbol, shares, cost, type) VALUES (:user_id, :company, :symbol, :shares, :cost, :type)",
                               user_id=session["user_id"], company=result["name"], symbol=result["symbol"], shares=shares, cost=result["price"], type=1)
                    db.execute("UPDATE users SET cash = cash - :price WHERE id = :id",
                               price=result["price"] * shares, id=session["user_id"])
                    flash("Successful operation")
                    return redirect("/")
                else:
                    flash("No sufficient funds")
                    return apology("ERROR")
            except RuntimeError:
                flash("Some error occurred, please try again")
                return apology("ERROR")
    else:
        return render_template("buy.html")
@app.route("/history")
@login_required
def history():
    """Show history of transactions"""
    # Every transaction row for the logged-in user, rendered as-is.
    try:
        past_transactions = db.execute(
            "SELECT * FROM transactions WHERE user_id = :user_id",
            user_id=session["user_id"])
        return render_template("history.html", rows=past_transactions, usd=usd)
    except RuntimeError:
        flash("Some error occurred, please try again")
        return apology("ERROR")
@app.route("/login", methods=["GET", "POST"])
def login():
    """Log user in

    POST: validate credentials against the users table and store the
    user's id in the session. GET: render the login form.
    """
    # Forget any user_id
    session.clear()
    # User reached route via POST (as by submitting a form via POST)
    if request.method == "POST":
        # Ensure username was submitted
        if not request.form.get("username"):
            return apology("must provide username", 403)
        # Ensure password was submitted
        elif not request.form.get("password"):
            return apology("must provide password", 403)
        # Query database for username
        rows = db.execute("SELECT * FROM users WHERE username = :username",
                          username=request.form.get("username"))
        # Ensure username exists and password is correct
        if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
            return apology("invalid username and/or password", 403)
        # Remember which user has logged in
        session["user_id"] = rows[0]["id"]
        # Redirect user to home page
        return redirect("/")
    # User reached route via GET (as by clicking a link or via redirect)
    else:
        return render_template("login.html")
@app.route("/logout")
def logout():
    """Log user out"""
    # Drop all session data, then send the user back to the login screen.
    session.clear()
    return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
    """Get stock quote."""
    # Guard clause: anything but POST just shows the quote form.
    if request.method != "POST":
        return render_template("quote.html")
    symbol = request.form.get("symbol")
    if not symbol:
        flash("Please provide a symbol (e.g. GOOGL) to quote for")
        return apology("ERROR")
    # Fetch the live quote, bailing out on unknown symbols.
    result = lookup(symbol)
    if not result:
        flash("Invalid symbol, please try again")
        return apology("ERROR")
    return render_template("quoted.html", result=result, usd=usd)
@app.route("/register", methods=["GET", "POST"])
def register():
    """Register user

    POST: validate the username/password fields, enforce the password
    policy, create the account and log the new user in. GET: render the
    registration form.
    """
    # Process form on POST, show the form on GET
    if request.method == "POST":
        # Get user's data
        username = request.form.get("username")
        password = request.form.get("password")
        confirmPassword = request.form.get("confirmation")
        # Check for errors
        # Passwords don't match
        if password != confirmPassword:
            flash("Password and password confirmation fields don't match")
            return apology("ERROR")
        # One or more of the required fields is not present
        if not username or not password or not confirmPassword:
            flash("Please provide a username and password")
            return apology("ERROR")
        # Username too short or too long
        if len(username) < 3 or len(username) > 16:
            flash("Username must be at least 3 characters but not more than 16")
            return apology("ERROR")
        # Password don't contain small Latin letter/capital Latin letter/digit or less than 8 symbols
        if not re.fullmatch(r'(?=.*\d)(?=.*[a-z])(?=.*[A-Z]).{8,}', password):
            flash("Your password must contain at least one number and one uppercase and lowercase letter, and at least 8 characters")
            return apology("ERROR")
        try:
            # Create new user
            result = db.execute("INSERT INTO users (username, hash) VALUES (:username, :hash)",
                                username=username, hash=generate_password_hash(password))
            # Check if username is not already taken
            if not result:
                flash("The username is already in use, please try another one")
                return apology("ERROR")
            # Log in new user
            session["user_id"] = result
            return redirect("/")
        except RuntimeError:
            flash("Some error occurred, please try again")
            return apology("ERROR")
    else:
        return render_template("register.html")
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
    """Sell shares of stock

    POST: verify the user actually holds enough net shares of the symbol,
    then record a type=-1 transaction and credit the proceeds to cash.
    GET: render the sell form listing currently-held symbols.
    """
    # Process form on POST, show the form on GET requests
    if request.method == "POST":
        # Get symbol and number of shares
        symbol = request.form.get("symbol")
        try:
            shares = int(request.form.get("shares"))
        except ValueError:
            flash("Please provide a valid symbol (e.g. GOOGL) along with positive number of shares")
            return apology("ERROR")
        # Check for errors
        if not symbol or not shares or shares < 1:
            flash("Please provide a valid symbol (e.g. GOOGL) along with positive number of shares")
            return apology("ERROR")
        try:
            # Fetch user's shares for appropriate symbol
            rows = db.execute(("SELECT SUM(shares * type) as shares, symbol "
                               "FROM transactions "
                               "WHERE user_id = :user_id AND symbol = :symbol GROUP BY symbol"), symbol=symbol, user_id=session["user_id"])
            # Check for errros
            if not rows or len(rows) != 1:
                flash("Quotes not found")
                return apology("ERROR")
            if shares > rows[0]["shares"]:
                flash("Too many shares")
                return apology("ERROR")
            # Fetch actual prices for selling
            result = lookup(rows[0]["symbol"])
            if not result:
                flash("Invalid symbol in your transactions list, please try again")
                return apology("ERROR")
            # Proceed the operation
            db.execute("INSERT INTO transactions (user_id, company, symbol, shares, cost, type) VALUES (:user_id, :company, :symbol, :shares, :cost, :type)",
                       user_id=session["user_id"], company=result["name"], symbol=result["symbol"], shares=shares, cost=result["price"], type=-1)
            db.execute("UPDATE users SET cash = cash + :profit WHERE id = :id",
                       profit=result["price"] * shares, id=session["user_id"])
            flash("Successful operation")
            return redirect("/")
        except RuntimeError:
            flash("Some error occurred, please try again")
            return apology("ERROR")
    else:
        try:
            # Fetch all user's shares for any company
            rows = db.execute(("SELECT SUM(shares * type) as shares, symbol FROM transactions "
                               "WHERE user_id = :user_id GROUP BY symbol"), user_id=session["user_id"])
            # Table rows that will be passed to the template
            t_rows = []
            for row in rows:
                if row["shares"] > 0:
                    # Remember only companies with at least one share
                    t_rows.append(row)
            return render_template("sell.html", rows=t_rows)
        except RuntimeError:
            flash("Some error occurred, please try again")
            return apology("ERROR")
@app.route("/changePassword", methods=["GET", "POST"])
@login_required
def changePassword():
    """Change user's password

    POST: validate the new password against the policy and store its
    hash. GET (fall-through at the bottom): render the form.
    """
    # Process form on POST, show the form on GET requests
    if request.method == "POST":
        # Get user's new password and confirmation
        password = request.form.get("password")
        confirmPassword = request.form.get("confirmation")
        # Check for errors
        # Password and confirmation fields don't match
        if password != confirmPassword:
            flash("Password and password confirmation fields don't match")
            return apology("ERROR")
        # One or more fields is not present
        if not password or not confirmPassword:
            flash("Please provide a password")
            return apology("ERROR")
        # Password don't contain small Latin letter/capital Latin letter/digit or less than 8 symbols
        if not re.fullmatch(r'(?=.*\d)(?=.*[a-z])(?=.*[A-Z]).{8,}', password):
            flash("Your password must contain at least one number and one uppercase and lowercase letter, and at least 8 characters")
            return apology("ERROR")
        try:
            # Update user's password
            db.execute("UPDATE users SET hash = :hash WHERE id = :id", hash=generate_password_hash(password), id=session["user_id"])
            flash("Successful operation")
            return redirect("/")
        except RuntimeError:
            flash("Some error occurred, please try again")
            return apology("ERROR")
    return render_template("changePassword.html")
def errorhandler(e):
    """Handle error

    Renders the apology page carrying the HTTP error's name and code.
    """
    return apology(e.name, e.code)
# listen for errors
# Register the handler above for every standard HTTP error code.
for code in default_exceptions:
    app.errorhandler(code)(errorhandler)
|
import gym
import torch.nn as nn
from .resnet18_nav_base import Resnet18NavBaseConfig
from .pointnav_base import PointNavBaseConfig, PointNavTask
from projects.pointnav_baselines.models.point_nav_models import (
ResnetTensorPointNavActorCritic,
)
class Resnet18PointNavExperimentConfig(PointNavBaseConfig, Resnet18NavBaseConfig):
    """A Point Navigation experiment configuration."""
    # Observations fed to the agent: the ResNet-18 preprocessor output
    # plus the point-goal target sensor.
    OBSERVATIONS = [
        Resnet18NavBaseConfig.RESNET_OUTPUT_UUID,
        PointNavBaseConfig.TARGET_UUID,
    ]
    @classmethod
    def tag(cls):
        """Short experiment name used for logging/checkpoint naming."""
        return "Resnet18PointNav"
    @classmethod
    def create_model(cls, **kwargs) -> nn.Module:
        """Build the actor-critic network for this experiment."""
        return ResnetTensorPointNavActorCritic(
            action_space=gym.spaces.Discrete(len(PointNavTask.class_action_names())),
            observation_space=kwargs["observation_set"].observation_spaces,
            goal_sensor_uuid=PointNavBaseConfig.TARGET_UUID,
            depth_resnet_preprocessor_uuid=Resnet18NavBaseConfig.RESNET_OUTPUT_UUID,
        )
|
# https://github.com/mhagiwara/realworldnlp/blob/master/examples/generation/lm.py
import torch
from typing import Tuple, List
from allennlp.models import Model
from allennlp.modules.seq2seq_encoders import Seq2SeqEncoder
from allennlp.modules import TextFieldEmbedder
from allennlp.data.vocabulary import Vocabulary, DEFAULT_PADDING_TOKEN
from allennlp.data.tokenizers import Token
from allennlp.nn.util import get_text_field_mask, sequence_cross_entropy_with_logits
from allennlp.common.util import START_SYMBOL, END_SYMBOL
class GenerativeSeqModel(Model):
    """RNN language model: trains with teacher forcing (forward) and
    samples token sequences autoregressively (generate)."""
    def __init__(self,
                 word_embeddings: TextFieldEmbedder,
                 hidden_size: int,
                 max_len: int,
                 vocab: Vocabulary,
                 encoder: Seq2SeqEncoder = None,
                 **kwargs) -> None:
        super().__init__(vocab)
        self.embeddings = word_embeddings
        # NOTE(review): although ``encoder`` defaults to None, it is
        # dereferenced unconditionally two lines below — a None encoder
        # raises AttributeError here.
        self.encoder = encoder
        # Projects encoder hidden states onto vocabulary logits.
        self.hidden2out = torch.nn.Linear(in_features=encoder.get_output_dim(),
                                          out_features=vocab.get_vocab_size('tokens'))
        self.hidden_size = hidden_size
        self.max_len = max_len
        self.vocab = vocab
    def forward(self, input_tokens, output_tokens):
        """Masked cross-entropy loss for next-token prediction."""
        mask = get_text_field_mask(input_tokens)
        embeddings = self.embeddings(input_tokens)
        hidden_states = self.encoder(embeddings, mask)
        out_logits = self.hidden2out(hidden_states)
        loss = sequence_cross_entropy_with_logits(out_logits, output_tokens['tokens'], mask)
        return {'loss': loss}
    def generate(self, device, state) -> Tuple[List[Token], torch.Tensor]:
        """Sample up to ``max_len`` tokens one step at a time.

        ``state`` is the recurrent state threaded through the wrapped
        PyTorch module (presumably an (h, c) tuple for an LSTM — TODO
        confirm against the encoder actually used). Returns the sampled
        tokens (START/padding/END excluded) and the accumulated
        log-likelihood of the sampled indices.
        """
        start_symbol_index = self.vocab.get_token_index(START_SYMBOL, 'tokens')
        end_symbol_index = self.vocab.get_token_index(END_SYMBOL, 'tokens')
        padding_symbol_index = self.vocab.get_token_index(DEFAULT_PADDING_TOKEN, 'tokens')
        word_idx = start_symbol_index
        # TODO: many ways to init state.
        log_likihood = 0.
        words = []
        for i in range(self.max_len):
            tokens = torch.tensor([[word_idx]]).to(device)
            embeddings = self.embeddings({'tokens': tokens})
            # Bypass the AllenNLP wrapper so the recurrent state can be
            # passed in and returned explicitly.
            output, state = self.encoder._module(embeddings, state)
            output = self.hidden2out(output)
            log_prob = torch.log_softmax(output[0,0], dim=0)
            dist = torch.exp(log_prob)
            # Resample until we draw something other than START/padding.
            word_idx = start_symbol_index
            while word_idx in {start_symbol_index, padding_symbol_index}:
                word_idx = torch.multinomial(dist, num_samples=1, replacement=False).item()
            log_likihood += log_prob[word_idx]
            if word_idx == end_symbol_index:
                break
            token = Token(text=self.vocab.get_token_from_index(word_idx, 'tokens'))
            words.append(token)
        return words, log_likihood
#!/user/bin/env python3
# -*- coding: utf-8 -*-
import requests
from dao.es_dao import es_connect
import re
import json
def transformer_data(data_source):
    """Generic extraction: pull the JSON literal assigned inside a
    ``try{... = <json>}catch`` script block and parse it."""
    matches = re.findall(' = (.*)}catch', data_source)
    payload = matches[0]
    return json.loads(payload)
def run_spider():
    """Fetch the DXY outbreak page and return the decoded HTML string."""
    url = 'http://3g.dxy.cn/newh5/view/pneumonia?from=singlemessage&isappinstalled=0'
    # Browser-like headers so the site serves the normal mobile page.
    headers = {
        "accept":"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
        "accept-encoding":"gzip, deflate, br",
        "accept-language":"zh-CN,zh;q=0.9",
        "cache-control":"max-age=0",
        "if-modified-since":"Thu, 23 Jan 2020 01:56:04 GMT",
        "sec-fetch-mode":"navigate",
        "sec-fetch-site":"none",
        "sec-fetch-user":"?1",
        "upgrade-insecure-requests":"1",
        "user-agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.117 Safari/537.36",}
    data_source = requests.get(url, headers=headers).content.decode('utf-8')
    return data_source
def transformer(content):
    """Parse the scraped page and index news/country/summary data into ES.

    NOTE(review): relies on module-level names injected by the
    ``__main__`` block below (``BeautifulSoup``, ``helpers``, ``ct``,
    ``es``) — calling this from an importing module raises NameError.
    """
    # extract news
    Timeline_news = []
    base_info = BeautifulSoup(content, 'lxml')
    getTimelineService = base_info.find_all(id="getTimelineService")
    for action_news1 in transformer_data(str(getTimelineService)):
        source_news = {}
        source_news['_index'] = 'outbreak_data_news'
        source_news['_type'] = 'outbreak_data_news'
        # Stamp each document with the crawl timestamp.
        action_news1['ct'] = ct
        source_news['_source'] = action_news1
        Timeline_news.append(source_news)
    new_status = helpers.bulk(es, Timeline_news, ignore=[400, 404])
    print(new_status)
    # extract country
    CountryTypeService = []
    getListByCountryTypeService1 = base_info.find_all(id="getListByCountryTypeService1")
    for action_Country in transformer_data(str(getListByCountryTypeService1)):
        source_country = {}
        source_country['_index'] = 'outbreak_data_country'
        source_country['_type'] = 'outbreak_data_country'
        action_Country['ct'] = ct
        source_country['_source'] = action_Country
        CountryTypeService.append(source_country)
    new_status = helpers.bulk(es, CountryTypeService, ignore=[400, 404])
    print(new_status)
    # extract summary
    getStatisticsService = base_info.find_all(id="getStatisticsService")
    es_data = transformer_data(str(getStatisticsService))
    index = 'outbreak_data_summary'
    doc_type = 'outbreak_data_summary'
    body_source = dict({"body": es_data, 'ct': ct})
    status = es.index(index=index, doc_type=doc_type, body=body_source)
    print(status)
if __name__ == '__main__':
    # Imports placed here become module globals, which is what lets
    # transformer() above resolve BeautifulSoup/helpers when the file
    # runs as a script.
    import time
    from datetime import datetime
    from bs4 import BeautifulSoup
    from elasticsearch import helpers
    ct = datetime.now()  # crawl timestamp attached to every document
    es = es_connect()
    st = time.time()
    data_source = run_spider()
    transformer(data_source)
    print(ct)
    print('time used:{}'.format(time.time()-st))
import numpy as np
from sklearn.svm import LinearSVC
### Functions for you to fill in ###
def one_vs_rest_svm(train_x, train_y, test_x):
    """
    Trains a linear SVM for binary classification

    Args:
        train_x - (n, d) NumPy array (n datapoints each with d features)
        train_y - (n, ) NumPy array containing the labels (0 or 1) for each training data point
        test_x - (m, d) NumPy array (m datapoints each with d features)
    Returns:
        pred_test_y - (m,) NumPy array containing the labels (0 or 1) for each test data point
    """
    classifier = LinearSVC(random_state=0, C=0.1)
    classifier.fit(train_x, train_y)
    return classifier.predict(test_x)
def multi_class_svm(train_x, train_y, test_x):
    """
    Trains a linear SVM for multiclass classification using a one-vs-rest strategy

    Args:
        train_x - (n, d) NumPy array (n datapoints each with d features)
        train_y - (n, ) NumPy array containing the labels (int) for each training data point
        test_x - (m, d) NumPy array (m datapoints each with d features)
    Returns:
        pred_test_y - (m,) NumPy array containing the labels (int) for each test data point
    """
    classifier = LinearSVC(random_state=0, C=0.1)
    classifier.fit(train_x, train_y)
    return classifier.predict(test_x)
### You can easily copy the last line of "compute_test_error_linear" function and change the name of the variables.
### or
### Hello, implement zero-one loss in the function.
### You can also import the function from the sklearn lib: from sklearn.metrics import zero_one_loss
def compute_test_error_svm(test_y, pred_test_y):
    """Zero-one loss: the fraction of test labels predicted incorrectly."""
    accuracy = np.mean(test_y == pred_test_y)
    return 1 - accuracy
|
from django.test import TestCase
from .forms import AddUrlForm
from django_webtest import WebTest
# Create your tests here.
class InputUrlTests(TestCase):
    """Tests for the URL-input page and its form."""

    def test_homepage(self):
        """The home page responds with HTTP 200."""
        response = self.client.get('/')
        self.assertEqual(response.status_code, 200)

    def test_add_url_form_label(self):
        """The Add_URL field has no explicit label.

        The original body used the Python-2 ``print`` statement, which is
        a SyntaxError under Python 3; it is now a function call, and the
        ``assertTrue(x == None)`` comparison is replaced with the clearer
        ``assertIsNone``.
        """
        form = AddUrlForm()
        print(form.fields["Add_URL"].label)  # debug output kept from original
        self.assertIsNone(form.fields['Add_URL'].label)
class EntryViewTest(WebTest):
    """Functional test of the entry page using django-webtest."""
    def test_view_page(self):
        # The home page should expose exactly one form (the add-URL form).
        page = self.app.get('/')
        self.assertEqual(len(page.forms), 1)
"""
This file is part of Linspector (https://linspector.org/)
Copyright (c) 2013-2023 Johannes Findeisen <you@hanez.org>. All Rights Reserved.
See LICENSE.
"""
import configparser
import importlib
import time
class Monitor:
def __init__(self, configuration, environment, identifier, log, monitor_configuration,
notifications, services, tasks, kwargs):
self._args = kwargs
self._configuration = configuration
self._enabled = True
self._environment = environment
self._error_count = 0
self._host = monitor_configuration.get('monitor', 'host')
try:
self._hostgroups = monitor_configuration.get('monitor', 'hostgroups')
except configparser.NoOptionError as err:
self._hostgroups = "None"
self._identifier = identifier
try:
self._interval = int(monitor_configuration.get('monitor', 'interval'))
except Exception as err:
log.warning('no interval set in identifier ' + identifier +
', trying to get a monitor configuration setting. error: ' + str(err))
try:
self._interval = int(configuration.get_option('linspector', 'default_interval'))
log.warning('set default_interval as per core configuration with '
'identifier: ' + identifier + ' to: ' + str(self._interval))
except Exception as err:
log.warning('no default_interval found in core configuration for identifier ' +
identifier +
', set to default interval 300 seconds. error: ' + str(err))
# default interval is 300 seconds (5 minutes) if not set in the monitor
# configuration args or a default_interval in the core configuration.
self._interval = 300
self._log = log
self._monitor_configuration = monitor_configuration
self._notification_list = []
self._notifications = notifications
self._result = None
self._scheduler_job = None
try:
self._service = monitor_configuration.get('monitor', 'service')
except Exception as err:
# if no service is set in the monitor configuration, the service is set to misc.dummy
# instead. just to make Linspector run but with no real result.
log.debug('no service set for identifier: ' + identifier + ' setting to '
'misc.dummy as '
'default to ensure '
'Linspector will run. '
'error: ' + str(err))
self._service = 'misc.dummy'
self._services = services
self._tasks = tasks
"""
NONE job was not executed: -1
OK when everything is fine: 0
WARNING when a job has errors but the threshold is not overridden: 1
RECOVER when a job recovers e.g. the error count decrements: 2
ERROR when a jobs error threshold is overridden: 3
UNKNOWN when a job throws an exception which is not handled by the job itself: 4
"""
try:
notification_list = None
if configuration.get_option('linspector', 'notifications') or \
monitor_configuration.get('monitor', 'notifications'):
if configuration.get_option('linspector', 'notifications') and \
monitor_configuration.get('monitor', 'notifications'):
notification_list = \
configuration.get_option('linspector', 'notifications') + ',' + \
monitor_configuration.get('monitor', 'notifications')
elif configuration.get_option('linspector', 'notifications'):
notification_list = configuration.get_option('linspector', 'notifications')
elif monitor_configuration.get('monitor', 'notifications'):
notification_list = monitor_configuration.get('monitor', 'notifications')
self._notification_list = notification_list.split(',')
for notification_option in notification_list.split(','):
if notification_option not in notifications:
notification_package = 'linspector.notifications.' + notification_option.lower()
notification_module = importlib.import_module(notification_package)
notification = notification_module.create(configuration, environment, log)
notifications[notification_option.lower()] = notification
except configparser.NoOptionError as err:
self._notifications = notifications
if self._monitor_configuration.get('monitor', 'service'):
if monitor_configuration.get('monitor', 'service') not in services:
service_package = 'linspector.services.' + \
monitor_configuration.get('monitor', 'service').lower()
service_module = importlib.import_module(service_package)
self._service = monitor_configuration.get('monitor', 'service').lower()
service = service_module.create(configuration, environment, log)
self._services[monitor_configuration.get('monitor', 'service').lower()] = service
def execute(self):
self._log.debug('identifier=' + self._identifier + ' object=' + str(self))
self._log.debug('identifier=' + self._identifier + ' kwargs=' + str(self._args) +
' message=handle call to service')
if self._enabled:
try:
self._result = self._services[self._service].execute(self._identifier, self,
self._service, **self._args)
if self._result['status'] == 'ERROR':
self._error_count += 1
self._log.debug('error count: ' + str(self._error_count))
self._log.debug(self._result)
self._log.debug(self._tasks)
for task in self._tasks:
self._tasks[task].execute(self._error_count,
self._result['host'],
self._identifier,
self._result,
self._result['log'],
self._result['service'],
self._result['status'],
int(time.time()))
self._log.info(self._result['log'])
except Exception as err:
self._log.error(err)
else:
self._log.info('identifier=' + self._identifier + ' message=is disabled')
    def get_host(self):
        """Return the host configured for this job."""
        return self._host
    def get_hostgroups(self):
        """Return the hostgroups this job belongs to."""
        return self._hostgroups
    def get_identifier(self):
        """Return the unique identifier of this job."""
        return self._identifier
    def get_interval(self):
        """Return the configured execution interval."""
        return self._interval
    def get_monitor_configuration(self):
        """Return the raw monitor configuration object."""
        return self._monitor_configuration
def get_monitor_configuration_option(self, section, option):
if self._monitor_configuration.has_option(section, option):
return self._monitor_configuration.get(section, option)
else:
return None
    def get_service(self):
        """Return the (lower-cased) name of the service this job runs."""
        return self._service
    def set_enabled(self, enabled=True):
        """Enable or disable this job; execute() checks this flag."""
        self._enabled = enabled
    def set_job(self, scheduler_job):
        """Store the scheduler job handle associated with this job."""
        self._scheduler_job = scheduler_job
    def __str__(self):
        """Return a string dump of all instance attributes (for logging)."""
        return str(self.__dict__)
|
class Solution(object):
    def find132pattern(self, nums):
        """Return True if nums contains a "132" pattern.

        A 132 pattern is a triple of indexes i < j < k such that
        nums[i] < nums[k] < nums[j]. Scans from the right while keeping a
        decreasing stack of candidate "2" (k) values, and uses a prefix
        minimum table for the "1" (i) values.

        :type nums: List[int]
        :rtype: bool
        """
        if len(nums) < 3:
            return False
        prefix_min = self.buildMinReocrd(nums)
        candidates = []
        for idx in reversed(range(len(nums))):
            if nums[idx] <= prefix_min[idx]:
                continue  # cannot be the peak "3": nothing smaller before it
            if not candidates or candidates[-1] > nums[idx]:
                candidates.append(nums[idx])
            else:
                # Drop candidates too small to beat the prefix minimum.
                while candidates and candidates[-1] <= prefix_min[idx]:
                    candidates.pop()
                if candidates and prefix_min[idx] < candidates[-1] < nums[idx]:
                    return True
        return False

    def buildMinReocrd(self, nums):
        """Return a list whose entry i is min(nums[0..i])."""
        prefix_min = []
        running = nums[0]
        for value in nums:
            running = min(running, value)
            prefix_min.append(running)
        return prefix_min
|
#PyBank Challenge - calculate
#The total number of months included in the dataset
#The net total amount of "Profit/Losses" over the entire period
#The average of the changes in "Profit/Losses" over the entire period
#The greatest increase in profits (date and amount) over the entire period
#The greatest decrease in losses (date and amount) over the entire period
import os
import csv

#setting path to the initial file
path = os.path.join("Resources", "budget_data.csv")

#creating function to define average
def average(numbers):
    """Return the arithmetic mean of a non-empty sequence of numbers."""
    return sum(numbers) / len(numbers)

#setting up lists
date = []
profitloss = []

#read the file once and collect the date and profit/loss columns
#(the original read the file twice and left debug prints behind)
with open(path, "r", newline='') as initialfile:
    initialread = csv.reader(initialfile, delimiter=',')
    header = next(initialread)  # skip header row
    for row in initialread:
        date.append(row[0])
        profitloss.append(int(row[1]))

#month-over-month change: change[i] = profitloss[i+1] - profitloss[i],
#so the change at index i belongs to the month date[i + 1]
change = [profitloss[i + 1] - profitloss[i] for i in range(len(profitloss) - 1)]

#sum all amounts in "Profit/Losses" column
sumtotal = sum(profitloss)
mean = round(average(change), 2)  #average of the change in the profit/loss column
maxprofit = max(change)  #max value in the list of change in the profit/loss column
maxindex = change.index(maxprofit)  #index of the max change (+1 when indexing date)
maxloss = min(change)  #min value in the list of change in the profit/loss column
lossindex = change.index(maxloss)

print("Financial Analysis")
print("------------------------")
print("Total Months: " + str(len(date))) #count the number of elements in the date list
print("Total: $" + str(sumtotal))
print("Average Change: $" + str(mean))
print (f'Greatest Increase in Profits: {date[maxindex + 1]} (${change[maxindex]})')
print (f'Greatest Decrease in Profits: {date[lossindex + 1]} (${change[lossindex]})')

#setting path for outputfile
summaryfilepath = os.path.join("analysis", "budgetdata_summary.txt")
#writing output to file
with open(summaryfilepath, "w", newline='') as summaryfile:
    summaryfile.write("Financial Analysis\n")
    summaryfile.write("----------------------------\n")
    summaryfile.write("Total Months: " + str(len(date))+"\n")
    summaryfile.write("Total: $" + str(sumtotal)+"\n")
    summaryfile.write("Average Change: $" + str(mean)+"\n")
    summaryfile.write(f'Greatest Increase in Profits: {date[maxindex + 1]} (${change[maxindex]})\n')
    summaryfile.write(f'Greatest Decrease in Profits: {date[lossindex + 1]} (${change[lossindex]})\n')
|
import math
import statistics
import numpy as np
import scipy.stats
import pandas as pd
# Sample data, with and without a NaN, held in three container types.
x = [8.0, 1, 2.5, 4, 28.0]
x_with_nan = [8.0, 1, 2.5, math.nan, 4, 28.0]
# print(x)
# print(x_with_nan)
y = np.array(x)  # numpy array
y_with_nan = np.array(x_with_nan)  # numpy array
z = pd.Series(x)  # 1D object
z_with_nan = pd.Series(x_with_nan)  # 1D Object
# print(y)
# print(y_with_nan)
# print(z)
# print(z_with_nan)
# np.mean propagates NaN: the mean of data containing NaN is NaN.
mean_ = np.mean(y)
mean2_ = np.mean(y_with_nan)
print(mean_, mean2_, sep="; ")
mean_ignore_nan = np.nanmean(y_with_nan)  # ignores any NaN
print(mean2_, mean_ignore_nan, sep=' ---> ')
# pandas Series.mean skips NaN by default (skipna=True).
mean_ = z.mean()
print(mean_)
mean_ = z_with_nan.mean()  # ignores NaN defaultly, parameter 'skipna' is on
print(mean_)
# Weighted mean, computed two equivalent ways:
x = [8.0, 1, 2.5, 4, 28.0]
w = [0.1, 0.2, 0.3, 0.25, 0.15]
w_mean = sum(w[i] * x[i] for i in range(len(x))) / sum(w)
print(w_mean)
w_mean = sum(x_ * w_ for (x_, w_) in zip(x, w)) / sum(w)
print(w_mean)
|
# -*- coding: utf-8 -*-
import subprocess

import requests

# Get project ID from gcloud config.
# BUG FIX: check_output returns *bytes*; without decoding, the formatted
# URL would contain the literal text "b'...'" around the project id.
project_id = subprocess.check_output(
    "gcloud config list project --format 'value(core.project)'",
    shell=True
).decode().rstrip()

# Add pull queues to App Engine
url = "https://{}.appspot.com/pw/add-pull-queues".format(project_id)
payload = {"tasks": [{"file": "file{}".format(i)} for i in range(3)]}
res = requests.post(url, json=payload)
|
# -*- coding: utf-8 -*-
import scrapy


class VolSpider(scrapy.Spider):
    """Scrapes the table of USSR/CIS/Russia men's volleyball champions."""
    name = 'volleyball'
    start_urls = [
        'http://www.funtable.ru/table/sport/vse-chempiony-sssr-sng-i-rossii-po-voleybolu-muzhchiny.html'
    ]

    def parse(self, response):
        # Rows of the results table inside the article body.
        row_selector = '//*[@class="catalog-item-desc-float itemFullText"]//table/tbody/tr'

        def clean(cell):
            # First text node of the cell, with carriage returns,
            # newlines and tabs removed.
            text = cell.extract_first(default='')
            return text.replace('\r', '').replace('\n', '').replace('\t', '')

        for row in response.xpath(row_selector):
            yield {
                'date': clean(row.xpath('td[1]//text()')),
                'first': clean(row.xpath('td[2]//text()')),
                'second': clean(row.xpath('td[3]//text()')),
                'third': clean(row.xpath('td[4]//text()')),
            }
# Generated by Django 3.1.2 on 2021-05-21 21:48
from django.db import migrations, models


class Migration(migrations.Migration):
    """Add a nullable 'token' CharField (max 80 chars) to api.Ticket."""

    dependencies = [
        ('api', '0006_saildetail_ticket'),
    ]

    operations = [
        migrations.AddField(
            model_name='ticket',
            name='token',
            field=models.CharField(max_length=80, null=True),
        ),
    ]
|
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import sys
import json
from datetime import datetime
from elasticsearch import Elasticsearch
from geopy.geocoders import Nominatim
#from requests_aws4auth import AWS4Auth
#Variables that contains the user credentials to access Twitter API
# SECURITY NOTE(review): real-looking API credentials are hard-coded and
# committed to source control. They should be revoked and loaded from
# environment variables or a secrets store instead.
access_token = "919581457982640128-ullOyY52aA057rB3bBp0P0j7xBrEJFg"
access_token_secret = "7qnHxbaEE3NnG5XwAxC1IKGBZnPcuAWIWG6Gpl7LiOdpQ"
consumer_key = "ZSh6PfXZzVZl2iSrwpMXeiNkW"
consumer_secret = "sc7RXbO7LXocrdnuWfaube6VinvWx6HWKc0IxTvyEoLMnCf2xZ"
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
# Elasticsearch client on the default local endpoint.
es = Elasticsearch()
class listener(StreamListener):
def on_data(self, data):
tweet = json.loads(data)
try:
print tweet["coordinates"]["coordinates"]
except:
pass
else:
json_data = json.loads(data)
id = str(json_data['id'])
lat = json_data["coordinates"]["coordinates"][0]
lon = json_data["coordinates"]["coordinates"][1]
es.index(index="index1", id=id, doc_type="tweet", body={"tweets": tweets, "location" : {"lat":lat, "lon":lon}})
print lat
print lon
return (True)
def on_error(self, status):
print status
def get_twitter_stream():
try:
twitterStream = Stream(auth, listener())
twitterStream.filter(locations=[-180, -90, 180, 90])
except:
pass
if __name__ == "__main__":
get_twitter_stream() |
#EVEN THIS CHANGE
from Class import myPoint
import math
import matplotlib.pyplot as plt
def validNumber(x):
    '''
    Forces the user to insert a valid integer and returns it.

    BUG FIX: the original accepted any string whose tail was numeric
    (e.g. "a5", since "a5"[1:] is numeric) and then crashed in int().
    Using int() itself as the validator accepts exactly what int() can
    parse, including signed numbers.
    '''
    while True:
        try:
            return int(x)
        except ValueError:
            x = input("please insert a number: ")
def validIndex(l, i):
    '''
    Forces the user to input a valid index "i" for the list "l".
    Re-prompts until "i" is numeric and within range, then returns it.
    '''
    while not (i.isnumeric() and 0 <= int(i) < len(l)):
        i = input("invalid index, please insert a valid one: ")
    return int(i)
def addPoint(l, x, y, c):
    '''
    Appends a new myPoint(x, y, c) to the repository list "l".
    Prints the validation error message instead of raising.
    '''
    try:
        l.append(myPoint(x, y, c))
    except ValueError as msg:
        print(str(msg))
def getAllPoints(l):
    '''
    Prints the string representation of every point in the list "l"
    '''
    for point in l:
        print(str(point))
def getPointGivenIndex(l, i):
    '''
    Prints the string representation of the point at index "i" in "l"
    '''
    print(str(l[i]))
def isInSquare(x, y, a, b, length):
    '''
    Checks whether the point (a,b) lies inside the axis-aligned square
    whose top-left corner is (x,y) and whose side length is "length".
    '''
    return x <= a <= x + length and y - length <= b <= y
def getPointsSquare(l, x, y, length):
    '''
    Prints every point of "l" inside the square with top-left corner (x,y)
    and side "length". x and y are converted to int before testing.
    '''
    left, top = int(x), int(y)
    for point in l:
        if isInSquare(left, top, point.getX(), point.getY(), length):
            print(str(point))
def getMinDistance(l):
    '''
    Returns the minimum distance between any two points of the list "l",
    or -1 when the list holds fewer than two points.
    '''
    mini = -1
    for first in range(len(l) - 1):
        for second in range(first + 1, len(l)):
            dist = getDistance(l, first, second)
            if mini == -1 or dist < mini:
                mini = dist
    return mini
def getDistance(l, a, b):
    '''
    Returns the Euclidean distance between the points at indexes "a" and
    "b" of the list of points "l".
    '''
    dx = l[b].getX() - l[a].getX()
    dy = l[b].getY() - l[a].getY()
    return math.sqrt(dx * dx + dy * dy)
def updatePoint(l, i, x, y, c):
    '''
    Overwrites the point at index "i" of "l" in place.
    x - new x coordinate; y - new y coordinate; c - new colour
    '''
    point = l[i]
    point.setX(x)
    point.setY(y)
    point.setColour(c)
def deletePoint(l, i):
    '''
    Removes the point at index "i" from the list "l".
    '''
    l.pop(i)
def deleteByCoord(l, x, y):
    '''
    Deletes the first point of "l" whose coordinates equal (x, y).
    Leaves the list unchanged when no point matches.
    '''
    for idx, point in enumerate(l):
        if point.getX() == x and point.getY() == y:
            del l[idx]
            break
def deleteFromSquare(l, x, y, length):
    '''
    Deletes every point of "l" inside the square with top-left vertex
    (x,y) and side "length". Iterates backwards so removals do not shift
    the indexes still to be visited.
    '''
    for idx in reversed(range(len(l))):
        if isInSquare(x, y, l[idx].getX(), l[idx].getY(), length):
            deletePoint(l, idx)
def plotPoints(l):
    '''
    Draws a scatter plot of all points in "l", coloured per point.
    '''
    xs = [point.getX() for point in l]
    ys = [point.getY() for point in l]
    colours = [point.getColour() for point in l]
    plt.scatter(xs, ys, c=colours)
    plt.show()
def getAllColour(l, c):
    '''
    Prints every point of "l" whose colour equals "c".
    '''
    for point in l:
        if point.getColour() == c:
            print(str(point))
#ITERATION 2 STARTS FROM HERE!
def getPointsRectangle(l, x, y, length, width):
    '''
    Prints every point of "l" inside the rectangle with top-left corner
    (x,y), horizontal side "length" and vertical side "width".
    '''
    left, top = int(x), int(y)
    for point in l:
        px = int(point.getX())
        py = int(point.getY())
        if left <= px <= left + length and top - width <= py <= top:
            print(str(point))
def isInCircle(x, y, r, a, b):
    '''
    Checks whether the point (a,b) lies inside (or on) the circle
    centred at (x,y) with radius "r".
    '''
    return math.sqrt((a - x) ** 2 + (b - y) ** 2) <= r
def getPointsCircle(l, x, y, r):
    '''
    Prints every point of "l" inside the circle centred at (x,y)
    with radius "r".
    '''
    for point in l:
        if isInCircle(x, y, r, point.getX(), point.getY()):
            print(str(point))
def getMaxDistance(l):
    '''
    Returns the maximum distance between any two points of the list "l",
    or 0 when the list holds fewer than two points.
    '''
    maxi = 0
    for first in range(len(l) - 1):
        for second in range(first + 1, len(l)):
            maxi = max(maxi, getDistance(l, first, second))
    return maxi
def getColourNumber(l, c):
    '''
    Returns how many points of "l" have the colour "c" (given as string).
    '''
    return sum(1 for point in l if point.getColour() == c)
def updateColour(l, x, y, c):
    '''
    Sets the colour of the point with coordinates (x,y) in "l" to "c",
    or prints a message when no such point exists.
    '''
    x = int(x)
    y = int(y)
    for point in l:
        if point.getX() == x and point.getY() == y:
            point.setColour(c)
            break
    else:
        print("There is no such point")
def shiftOnXRight(l):
    '''Moves every point of "l" one unit to the right (x + 1).'''
    for point in l:
        point.setX(point.getX() + 1)
def shiftOnXLeft(l):
    '''Moves every point of "l" one unit to the left (x - 1).'''
    for point in l:
        point.setX(point.getX() - 1)
def shiftOnYUp(l):
    '''Moves every point of "l" one unit up (y + 1).'''
    for point in l:
        point.setY(point.getY() + 1)
def shiftOnYDown(l):
    '''Moves every point of "l" one unit down (y - 1).'''
    for point in l:
        point.setY(point.getY() - 1)
def deleteInCircle(l, x, y, r):
    '''
    Deletes every point of "l" inside the circle centred at (x,y) with
    radius "r". Iterates backwards so removals do not shift the indexes
    still to be visited.
    '''
    for idx in reversed(range(len(l))):
        if isInCircle(x, y, r, l[idx].getX(), l[idx].getY()):
            deletePoint(l, idx)
def deleteWithinDistance(l, x, y, d):
    '''
    Deletes every point of "l" lying within distance "d" of the point
    (x,y).

    BUG FIX: the original compared the distance with == d (exact float
    equality), which almost never matches and contradicts "within";
    points at distance <= d are now removed. Iterates backwards so
    removals do not shift the indexes still to be visited.
    '''
    for idx in reversed(range(len(l))):
        dx = l[idx].getX() - x
        dy = l[idx].getY() - y
        if math.sqrt(dx * dx + dy * dy) <= d:
            deletePoint(l, idx)
# Modified from: https://github.com/sachin-chhabra/Pytorch-cGAN-conditional-GAN
from torch import optim
import os
import torchvision.utils as vutils
from torch.utils.data import DataLoader
import numpy as np
from torchvision import datasets
from torchvision import transforms
from usps_data import gan_trans, CustomTensorDataset
import torch
import torch.nn as nn
import torch.nn.functional as F
# Arguments
BATCH_SIZE = 256
Z_DIM = 10  # latent noise dimension
LABEL_EMBED_SIZE = 5
NUM_CLASSES = 10
IMGS_TO_DISPLAY_PER_CLASS = 20
LOAD_MODEL = False
GEN_IMAGES = 10000  # number of images to generate after training
DB = 'USPS' # SVHN | MNIST | FashionMNIST | USPS

# Per-dataset image channel count and training length.
if DB == 'MNIST' or DB == 'FashionMNIST':
    CHANNELS = 1
    EPOCHS = 50
elif DB == 'USPS':
    CHANNELS = 1
    EPOCHS = 100
elif DB == 'SVHN':
    CHANNELS = 3
    EPOCHS = 100
else:
    print("Incorrect dataset")
    exit(0)

# Directories for storing data, model and output samples
db_path = './data'
if not os.path.exists(db_path):
    os.makedirs(db_path)
model_path = os.path.join('./models', DB)
if not os.path.exists(model_path):
    os.makedirs(model_path)
samples_path = os.path.join('./samples', DB)
if not os.path.exists(samples_path):
    os.makedirs(samples_path)

# Data loader
# Resize to 32x32 and scale pixel values to [-1, 1] (Normalize(0.5, 0.5)).
transform = transforms.Compose([transforms.Resize([32, 32]),
                                transforms.ToTensor(),
                                transforms.Normalize([0.5], [0.5])])
if DB == 'MNIST':
    dataset = datasets.MNIST(db_path, train=True, download=True, transform=transform)
elif DB == 'FashionMNIST':
    dataset = datasets.FashionMNIST(db_path, train=True, download=True, transform=transform)
elif DB == 'USPS':
    dataset = datasets.USPS(db_path, train=True, download=True, transform=transform)
elif DB == 'SVHN':
    dataset = datasets.SVHN(db_path, split='train', download=True, transform=transform)
else:
    print("Incorrect DB")
    exit(0)
data_loader = DataLoader(dataset=dataset, batch_size=BATCH_SIZE, shuffle=True, drop_last=True)
# Method for storing generated images
def generate_imgs(z, fixed_label, epoch=0):
    """Render a labelled sample grid with the global generator and save it.

    Args:
        z: batch of latent vectors.
        fixed_label: class label for each latent vector.
        epoch: epoch number embedded in the output file name (0 = final).
    """
    gen.eval()
    fake_imgs = gen(z, fixed_label)
    # Generator outputs come from tanh in [-1, 1]; rescale to [0, 1].
    fake_imgs = (fake_imgs + 1) / 2
    fake_imgs_ = vutils.make_grid(fake_imgs, normalize=False, nrow=IMGS_TO_DISPLAY_PER_CLASS)
    vutils.save_image(fake_imgs_, os.path.join(samples_path, 'sample_' + str(epoch) + '.png'))
# Networks
def conv_block(c_in, c_out, k_size=4, stride=2, pad=1, use_bn=True, transpose=False):
    """Build a (transposed) convolution, optionally followed by BatchNorm.

    The conv layer drops its bias when BatchNorm follows, since the
    norm's own shift parameter makes a bias redundant.
    """
    conv_cls = nn.ConvTranspose2d if transpose else nn.Conv2d
    layers = [conv_cls(c_in, c_out, k_size, stride, pad, bias=not use_bn)]
    if use_bn:
        layers.append(nn.BatchNorm2d(c_out))
    return nn.Sequential(*layers)
class Generator(nn.Module):
    """Conditional DCGAN generator: (noise, label) -> 32x32 image in [-1, 1]."""

    def __init__(self, z_dim=10, num_classes=10, label_embed_size=5, channels=3, conv_dim=64):
        super(Generator, self).__init__()
        self.label_embedding = nn.Embedding(num_classes, label_embed_size)
        # Four transposed convolutions upsample the 1x1 input to 32x32.
        self.tconv1 = conv_block(z_dim + label_embed_size, conv_dim * 4, pad=0, transpose=True)
        self.tconv2 = conv_block(conv_dim * 4, conv_dim * 2, transpose=True)
        self.tconv3 = conv_block(conv_dim * 2, conv_dim, transpose=True)
        self.tconv4 = conv_block(conv_dim, channels, transpose=True, use_bn=False)
        # DCGAN-style init: N(0, 0.02) conv weights, unit BatchNorm scale.
        for module in self.modules():
            if isinstance(module, (nn.Conv2d, nn.ConvTranspose2d)):
                nn.init.normal_(module.weight, 0.0, 0.02)
            if isinstance(module, nn.BatchNorm2d):
                nn.init.constant_(module.weight, 1)
                nn.init.constant_(module.bias, 0)

    def forward(self, x, label):
        noise = x.reshape([x.shape[0], -1, 1, 1])
        embed = self.label_embedding(label)
        embed = embed.reshape([embed.shape[0], -1, 1, 1])
        # Condition by concatenating the label embedding onto the noise.
        out = torch.cat((noise, embed), dim=1)
        out = F.relu(self.tconv1(out))
        out = F.relu(self.tconv2(out))
        out = F.relu(self.tconv3(out))
        return torch.tanh(self.tconv4(out))
class Discriminator(nn.Module):
    """Conditional DCGAN discriminator: (image, label) -> real/fake score."""

    def __init__(self, num_classes=10, channels=3, conv_dim=64):
        super(Discriminator, self).__init__()
        self.image_size = 32
        # Each label is embedded as a full-resolution plane and stacked as
        # an extra input channel next to the image.
        self.label_embedding = nn.Embedding(num_classes, self.image_size * self.image_size)
        self.conv1 = conv_block(channels + 1, conv_dim, use_bn=False)
        self.conv2 = conv_block(conv_dim, conv_dim * 2)
        self.conv3 = conv_block(conv_dim * 2, conv_dim * 4)
        self.conv4 = conv_block(conv_dim * 4, 1, k_size=4, stride=1, pad=0, use_bn=False)
        # DCGAN-style init: N(0, 0.02) conv weights, unit BatchNorm scale.
        for module in self.modules():
            if isinstance(module, nn.Conv2d):
                nn.init.normal_(module.weight, 0.0, 0.02)
            if isinstance(module, nn.BatchNorm2d):
                nn.init.constant_(module.weight, 1)
                nn.init.constant_(module.bias, 0)

    def forward(self, x, label):
        slope = 0.2
        plane = self.label_embedding(label)
        plane = plane.reshape([plane.shape[0], 1, self.image_size, self.image_size])
        out = torch.cat((x, plane), dim=1)
        out = F.leaky_relu(self.conv1(out), slope)
        out = F.leaky_relu(self.conv2(out), slope)
        out = F.leaky_relu(self.conv3(out), slope)
        return torch.sigmoid(self.conv4(out)).squeeze()
gen = Generator(z_dim=Z_DIM, num_classes=NUM_CLASSES, label_embed_size=LABEL_EMBED_SIZE, channels=CHANNELS)
dis = Discriminator(num_classes=NUM_CLASSES, channels=CHANNELS)

# Load previous model
if LOAD_MODEL:
    gen.load_state_dict(torch.load(os.path.join(model_path, 'gen.pkl')))
    dis.load_state_dict(torch.load(os.path.join(model_path, 'dis.pkl')))

# Model Summary
print("------------------Generator------------------")
print(gen)
print("------------------Discriminator------------------")
print(dis)

# Define Optimizers
g_opt = optim.Adam(gen.parameters(), lr=0.0002, betas=(0.5, 0.999), weight_decay=2e-5)
d_opt = optim.Adam(dis.parameters(), lr=0.0002, betas=(0.5, 0.999), weight_decay=2e-5)

# Loss functions
loss_fn = nn.BCELoss()

# Fix images for viz
# One noise vector per display column, repeated for every class, so the
# saved sample grid shows each class rendered from the same noise set.
fixed_z = torch.randn(1, IMGS_TO_DISPLAY_PER_CLASS, Z_DIM)
fixed_z = torch.repeat_interleave(fixed_z, NUM_CLASSES, 0).reshape(-1, Z_DIM)
fixed_label = torch.arange(0, NUM_CLASSES)
fixed_label = torch.repeat_interleave(fixed_label, IMGS_TO_DISPLAY_PER_CLASS)

# Labels
real_label = torch.ones(BATCH_SIZE)
fake_label = torch.zeros(BATCH_SIZE)

# GPU Compatibility
is_cuda = torch.cuda.is_available()
if is_cuda:
    gen, dis = gen.cuda(), dis.cuda()
    real_label, fake_label = real_label.cuda(), fake_label.cuda()
    fixed_z, fixed_label = fixed_z.cuda(), fixed_label.cuda()

total_iters = 0
max_iter = len(data_loader)

# Training
print('Epoch\tD loss\tG loss')
for epoch in range(EPOCHS):
    gen.train()
    dis.train()
    g_loss_list, d_loss_list, size = [], [], []
    for i, data in enumerate(data_loader):
        total_iters += 1
        # Loading data
        x_real, x_label = data
        z_fake = torch.randn(BATCH_SIZE, Z_DIM)
        if is_cuda:
            x_real = x_real.cuda()
            x_label = x_label.cuda()
            z_fake = z_fake.cuda()
        # Generate fake data
        x_fake = gen(z_fake, x_label)
        # Train Discriminator
        # NOTE(review): detaching x_fake keeps the generator out of the
        # discriminator update; the .detach() on x_real appears to be a
        # no-op (loader tensors carry no grad) — confirm.
        fake_out = dis(x_fake.detach(), x_label)
        real_out = dis(x_real.detach(), x_label)
        d_loss = (loss_fn(fake_out, fake_label) + loss_fn(real_out, real_label)) / 2
        d_opt.zero_grad()
        d_loss.backward()
        d_opt.step()
        # Train Generator
        fake_out = dis(x_fake, x_label)
        g_loss = loss_fn(fake_out, real_label)
        g_opt.zero_grad()
        g_loss.backward()
        g_opt.step()
        g_loss_list.append(g_loss.item())
        d_loss_list.append(d_loss.item())
        size.append(x_real.shape[0])
    # Per-epoch losses, weighted by actual batch sizes.
    print("%d\t%f\t%f" % (epoch, np.average(d_loss_list, weights=size), np.average(g_loss_list, weights=size)))
    # Checkpoint and save a sample grid every 5 epochs.
    if (epoch + 1) % 5 == 0:
        torch.save(gen.state_dict(), os.path.join(model_path, 'gen.pkl'))
        torch.save(dis.state_dict(), os.path.join(model_path, 'dis.pkl'))
        generate_imgs(fixed_z, fixed_label, epoch=epoch + 1)
generate_imgs(fixed_z, fixed_label)

# Generate Images
# Sample noise and labels as generator input
z_gen = torch.randn(GEN_IMAGES, Z_DIM)
gen_labels = torch.randint(0, NUM_CLASSES, (GEN_IMAGES,))
# Generate a batch of images
# NOTE(review): this forward pass over GEN_IMAGES inputs runs with autograd
# enabled; wrapping it in torch.no_grad() would cut memory — confirm.
gen_imgs = gen(z_gen, gen_labels)
dataset = CustomTensorDataset(tensors=(gen_imgs, gen_labels), transform=gan_trans)
torch.save(dataset, './data/cgan_usps.pt')
|
"""Cloud object compatibles standard library 'io' equivalent functions."""
from contextlib import contextmanager
from io import open as io_open, TextIOWrapper
from airfs._core.storage_manager import get_instance
from airfs._core.functions_core import format_and_is_storage
@contextmanager
def cos_open(
    file,
    mode="r",
    buffering=-1,
    encoding=None,
    errors=None,
    newline=None,
    closefd=True,
    opener=None,
    storage=None,
    storage_parameters=None,
    unsecure=None,
    **kwargs
):
    """Open file and return a corresponding file object.
    Equivalent to "io.open" or builtin "open".
    File can also be binary opened file-like object.
    .. versionadded:: 1.0.0
    Args:
        file (path-like object or file-like object or int): File path, object URL,
            opened file-like object, or file descriptor.
        mode (str): mode in which the file is opened (default to 'rb').
            see "io.open" for all possible modes. Note that all modes may
            not be supported by all kinds of file and storage.
        buffering (int): Set the buffering policy.
            -1 to use default behavior,
            0 to switch buffering off,
            1 to select line buffering (only usable in text mode),
            and an integer > 1 to indicate the size in bytes of a
            fixed-size chunk buffer.
            See "io.open" for more information.
        encoding (str): The name of the encoding used to
            decode or encode the file. This should only be used in text mode.
            See "io.open" for more information.
        errors (str): Specifies how encoding and decoding errors
            are to be handled.
            This should only be used in text mode.
            See "io.open" for more information.
        newline (str): Controls how universal newlines mode works.
            This should only be used in text mode.
            See "io.open" for more information.
        closefd (bool): If closefd is False and a file descriptor rather than a filename
            was given, the underlying file descriptor will be kept open when the file
            is closed. Must be True elsewhere (the default) otherwise an error will be
            raised.
        opener: A callable used as custom opener.
            see the standard library "open()" documentation for more information.
            Not supported on storage objects.
        storage (str): Storage name.
        storage_parameters (dict): Storage configuration parameters.
            Generally, client configuration and credentials.
        unsecure (bool): If True, disables TLS/SSL to improve
            transfer performance. But makes connection unsecure.
            Default to False.
        kwargs: Other arguments to pass to the opened object.
            Note that these arguments may not be compatible with all kinds of files and
            storage.
    Returns:
        file-like object: opened file.
    Raises:
        OSError: If the file cannot be opened.
        FileExistsError: File open in 'x' mode already exists.
    """
    # Case 1: already-opened binary file-like object — only add a text
    # wrapper when text mode was requested, then hand it back.
    if hasattr(file, "read"):
        with _text_io_wrapper(file, mode, encoding, errors, newline) as wrapped:
            yield wrapped
        return
    # Normalize the path/URL and detect whether it targets a storage backend.
    file, file_is_storage = format_and_is_storage(file, storage=storage)
    if file_is_storage:
        # Case 2: storage object — get the backend's raw (unbuffered, when
        # buffering == 0) or buffered stream, then wrap for text mode.
        if not closefd:
            raise NotImplementedError("Cannot use closefd=False with a storage")
        with get_instance(
            name=file,
            cls="raw" if buffering == 0 else "buffered",
            storage=storage,
            storage_parameters=storage_parameters,
            mode=mode,
            unsecure=unsecure,
            **kwargs
        ) as stream:
            with _text_io_wrapper(
                stream, mode=mode, encoding=encoding, errors=errors, newline=newline
            ) as wrapped:
                yield wrapped
    else:
        # Case 3: local file — delegate entirely to the standard io.open.
        with io_open(
            file, mode, buffering, encoding, errors, newline, closefd, opener
        ) as stream:
            yield stream
@contextmanager
def _text_io_wrapper(stream, mode, encoding, errors, newline):
    """Wrap a binary stream in a TextIOWrapper when text mode is requested.

    Streams that already expose an "encoding" attribute (i.e. are already
    text streams), or binary-mode requests, are passed through unchanged.

    Args:
        stream (file-like object): binary stream.
        mode (str): Open mode.
        encoding (str): Stream encoding.
        errors (str): Decoding error handling.
        newline (str): Universal newlines
    """
    needs_wrapping = "t" in mode and not hasattr(stream, "encoding")
    if not needs_wrapping:
        yield stream
        return
    wrapper = TextIOWrapper(stream, encoding=encoding, errors=errors, newline=newline)
    yield wrapper
    # Push any buffered text through to the underlying binary stream.
    wrapper.flush()
|
# List/String/Array is somehow equal in Python
class Solution(object):
    def plusOneLong(self, digits):
        """Add one to the number `digits` represents, digit by digit.

        :type digits: List[int]
        :rtype: List[int]
        """
        carry = 0
        length = len(digits)
        digits[length - 1] += 1  # add one to the least-significant digit
        for i in range(length):
            digits[length - 1 - i] += carry
            if digits[length - 1 - i] >= 10:
                digits[length - 1 - i] = digits[length - 1 - i] % 10
                carry = 1
            else:
                carry = 0
        if carry == 1:
            # Carry past the most-significant digit, e.g. 999 -> 1000.
            digits = [1] + digits
        return digits

    def plusOne(self, digits):
        """Add one by converting the digit list to an int and back.

        :type digits: List[int]
        :rtype: List[int]
        """
        num = 0
        for d in digits:
            num = num * 10 + d
        return [int(ch) for ch in str(num + 1)]

    def plusOneReduce(self, digits):
        """Add one using functools.reduce for the digits-to-int step.

        :type digits: List[int]
        :rtype: List[int]
        """
        # BUG FIX: `reduce` is not a builtin in Python 3; import it from
        # functools so this method no longer raises NameError.
        from functools import reduce
        num = reduce(lambda x, y: x * 10 + y, digits) + 1
        return [int(ch) for ch in str(num)]
|
"""
data_gen.py
File containing the functionality to generate test files and folders
for testing bellerophon.
"""
import os
import random
import time
def dir_gen(directory_name):
    """
    Create a directory (and any missing parents).

    BUG FIX: the original docstring said "create files" — it was swapped
    with file_gen's.

    :param string directory_name: Fully qualified directory name which is
     to be created
    :return: True when the directory was created, False when it already existed
    """
    if not os.path.exists(directory_name):
        os.makedirs(directory_name)
        return True
    return False
def file_gen(file_name):
    """
    Create a small test file with fixed contents.

    BUG FIXES: the original docstring said "create folders" (swapped with
    dir_gen's), and the file handle was closed manually instead of using
    a context manager.

    :param string file_name: Fully qualified file name which is to be created
    :return: True when the file was created, False when it already existed
    """
    if not os.path.exists(file_name):
        with open(file_name, 'wb+') as file_handler:
            file_handler.write(b'This is a test file')
        return True
    return False
def millis_time():
    """
    Return the current Unix time in milliseconds as a string.

    BUG FIX: despite its name and docstring, the original returned whole
    seconds (round(time.time())); multiplying by 1000 yields actual
    milliseconds, so generated file names collide far less often.
    """
    return str(int(round(time.time() * 1000)))
def main():
    """Interactively generate a random tree of test files and directories.

    Prompts for a root path, then for `epochs` iterations either creates a
    randomly named directory (possibly descending into it, or backing out
    a level when the current one looks full) or creates a randomly named,
    timestamped file in the current directory, and finally reports how
    many of each were created.
    """
    parent_directory = input('Enter the path for test data: ')
    epochs = 10000  # Increase the epochs to create more files and dirs
    f_count = 10  # Number of files per directory
    max_path_length = 200  # Number of characters allowed in the path name
    is_directory = [True, False]
    go_inside = [True, False]
    name_list = ['amal', 'hurry', 'test', 'random', 'orange', 'goat', 'normal']
    extension = ['txt', 'mp3', 'cpp', 'hpp', 'mkv', 'wav', 'mp4', 'lst', 'cap']
    dir_count = 0
    file_count = 0
    dir_gen(parent_directory)
    # dir_list holds the path components of the directory currently being
    # filled; os.path.join(*dir_list) is the current target directory.
    dir_list = [parent_directory]
    for _ in range(epochs):
        if random.choice(is_directory):
            choice = random.choice(name_list)
            dir_name = os.path.join(*dir_list, choice)
            check = dir_gen(dir_name)
            if check:
                dir_count += 1
            # Randomly descend into the new directory while the path stays
            # short enough; otherwise back out one level when the current
            # directory already holds >= epochs/f_count entries.
            if random.choice(go_inside) and len(dir_name) < max_path_length:
                dir_list.append(choice)
            elif (
                not(os.path.join(*dir_list) == parent_directory) and
                len([name for name in os.listdir(dir_name)]) >=
                (epochs/f_count)
            ):
                del dir_list[-1]
        else:
            extn = random.choice(extension)
            name_part = random.choice(name_list)
            # Timestamp suffix keeps generated names mostly unique.
            full_name = name_part + '_' + millis_time() + '.' + extn
            file_name = os.path.join(*dir_list, full_name)
            check = file_gen(file_name)
            if check:
                file_count += 1
    print('Created {} files and {} directories in {}'
          .format(file_count, dir_count, parent_directory))


if __name__ == '__main__':
    main()
|
import sys, os
from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer

# Make local Sphinx extensions in _exts importable.
sys.path.append(os.path.abspath('_exts'))

# Sphinx configuration for the PrestoPHP documentation.
extensions = []
master_doc = 'index'
highlight_language = 'php'
project = u'PrestoPHP'
copyright = u'2010-2021 Fabien Potencier, Gunnar Beushausen'
html_theme = "bizstyle"
version = '2.4'
release = '2.4.2'
# Highlight PHP snippets without requiring an opening <?php tag.
lexers['php'] = PhpLexer(startinline=True)
|
import wiringpi

# NOTE(review): everything below was C example code for reading an MPU6050
# over I2C (electronicwings.com) pasted verbatim into this Python module,
# which made the whole file a SyntaxError. It is preserved unchanged inside
# a string so the module can at least be imported; it should be ported to
# Python (e.g. via wiringpi's I2C wrappers) or moved to a .c file.
MPU6050_C_REFERENCE = r'''
/*
MPU6050 Interfacing with Raspberry Pi
http://www.electronicwings.com
*/
#include <wiringPiI2C.h>
#include <stdlib.h>
#include <stdio.h>
#include <wiringPi.h>

#define Device_Address 0x68 /*Device Address/Identifier for MPU6050*/
#define PWR_MGMT_1 0x6B
#define SMPLRT_DIV 0x19
#define CONFIG 0x1A
#define GYRO_CONFIG 0x1B
#define INT_ENABLE 0x38
#define ACCEL_XOUT_H 0x3B
#define ACCEL_YOUT_H 0x3D
#define ACCEL_ZOUT_H 0x3F
#define GYRO_XOUT_H 0x43
#define GYRO_YOUT_H 0x45
#define GYRO_ZOUT_H 0x47

int fd;

void MPU6050_Init(){
    wiringPiI2CWriteReg8 (fd, SMPLRT_DIV, 0x07); /* Write to sample rate register */
    wiringPiI2CWriteReg8 (fd, PWR_MGMT_1, 0x01); /* Write to power management register */
    wiringPiI2CWriteReg8 (fd, CONFIG, 0); /* Write to Configuration register */
    wiringPiI2CWriteReg8 (fd, GYRO_CONFIG, 24); /* Write to Gyro Configuration register */
    wiringPiI2CWriteReg8 (fd, INT_ENABLE, 0x01); /*Write to interrupt enable register */
}

short read_raw_data(int addr){
    short high_byte,low_byte,value;
    high_byte = wiringPiI2CReadReg8(fd, addr);
    low_byte = wiringPiI2CReadReg8(fd, addr+1);
    value = (high_byte << 8) | low_byte;
    return value;
}

void ms_delay(int val){
    int i,j;
    for(i=0;i<=val;i++)
        for(j=0;j<1200;j++);
}

int main(){
    float Acc_x,Acc_y,Acc_z;
    float Gyro_x,Gyro_y,Gyro_z;
    float Ax=0, Ay=0, Az=0;
    float Gx=0, Gy=0, Gz=0;
    fd = wiringPiI2CSetup(Device_Address); /*Initializes I2C with device Address*/
    MPU6050_Init(); /* Initializes MPU6050 */
    while(1)
    {
        /*Read raw value of Accelerometer and gyroscope from MPU6050*/
        Acc_x = read_raw_data(ACCEL_XOUT_H);
        Acc_y = read_raw_data(ACCEL_YOUT_H);
        Acc_z = read_raw_data(ACCEL_ZOUT_H);
        Gyro_x = read_raw_data(GYRO_XOUT_H);
        Gyro_y = read_raw_data(GYRO_YOUT_H);
        Gyro_z = read_raw_data(GYRO_ZOUT_H);
        /* Divide raw value by sensitivity scale factor */
        Ax = Acc_x/16384.0;
        Ay = Acc_y/16384.0;
        Az = Acc_z/16384.0;
        Gx = Gyro_x/131;
        Gy = Gyro_y/131;
        Gz = Gyro_z/131;
        printf("\n Gx=%.3f deg/s\tGy=%.3f deg/s\tGz=%.3f deg/s\tAx=%.3f g\tAy=%.3f g\tAz=%.3f g\n",Gx,Gy,Gz,Ax,Ay,Az);
        delay(500);
    }
    return 0;
}
'''
|
# pytorch
# -*- coding: utf-8 -*-
# @Author : Tangzhao
# @Blog:https://blog.csdn.net/tangzhaotz
# 加载数据
import torch
import torchvision
import torchvision.transforms as transforms
import torch.utils.data
# Map RGB images to tensors with values normalized to [-1, 1].
transform = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((0.5,0.5,0.5),(0.5,0.5,0.5))
])
trainset = torchvision.datasets.CIFAR10(root='./data',train=True,download=True,transform=transform)
trainloader = torch.utils.data.DataLoader(trainset,batch_size=4,shuffle=True,num_workers=0)
testset = torchvision.datasets.CIFAR10(root='./data',train=False,download=True,transform=transform)
testloader = torch.utils.data.DataLoader(testset,batch_size=4,shuffle=False,num_workers=0)
classes = ('plane','car','bird','cat','deer','dog','frog','horse','ship','truck')
# Randomly inspect some of the data
import matplotlib.pyplot as plt
import numpy as np
# Display an image
def imshow(img):
    """De-normalize a CIFAR-10 image tensor and display it with matplotlib."""
    unnormalized = img / 2 + 0.5  # invert Normalize((0.5,...), (0.5,...))
    plt.imshow(np.transpose(unnormalized.numpy(), (1, 2, 0)))  # CHW -> HWC
    plt.show()
# 获取部分随机数据
# Show one random training batch together with its labels.
dataiter = iter(trainloader)
# Fix: iterator.next() is Python 2 / old-DataLoader API; next() is the
# portable built-in form.
images, labels = next(dataiter)
imshow(torchvision.utils.make_grid(images))
print(' '.join('%5s' % classes[labels[j]] for j in range(4)))
# 构建网络
import torch.nn as nn
import torch.nn.functional as F
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class CNNNet(nn.Module):
    """Small CNN for CIFAR-10: two conv+pool stages followed by two FC layers.

    Input: (N, 3, 32, 32) images; output: (N, 10) raw class logits.
    """

    def __init__(self):
        super(CNNNet, self).__init__()
        self.conv1 = nn.Conv2d(in_channels=3, out_channels=16, kernel_size=5, stride=1)
        self.pool1 = nn.MaxPool2d(kernel_size=2, stride=2)
        self.conv2 = nn.Conv2d(in_channels=16, out_channels=36, kernel_size=3, stride=1)
        self.pool2 = nn.MaxPool2d(kernel_size=2, stride=2)
        self.fc1 = nn.Linear(1296, 128)  # 36 channels * 6 * 6 spatial = 1296
        self.fc2 = nn.Linear(128, 10)

    def forward(self, x):
        x = self.pool1(F.relu(self.conv1(x)))  # 32 -> 28 -> 14
        x = self.pool2(F.relu(self.conv2(x)))  # 14 -> 12 -> 6
        x = x.view(-1, 36 * 6 * 6)
        # Fix: do NOT apply ReLU to the final layer's output — CrossEntropyLoss
        # expects unnormalized logits, and clamping them to >= 0 hurts training.
        x = self.fc2(F.relu(self.fc1(x)))
        return x
net = CNNNet()
net = net.to(device)
# 查看网络结构
print(net)
"""
CNNNet(
(conv1): Conv2d(3, 16, kernel_size=(5, 5), stride=(1, 1))
(pool1): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
(conv2): Conv2d(16, 36, kernel_size=(3, 3), stride=(1, 1))
(pool2): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
(fc1): Linear(in_features=1296, out_features=128, bias=True)
(fc2): Linear(in_features=128, out_features=10, bias=True)
)
"""
# 查看网络中的前几层
# 查看前四层
# nn.Sequential(*list(net.children())[:4])
# 初始化参数
# Parameter initialization.
# Fix: the original called normal_, xavier_normal_ and kaiming_normal_ in
# sequence on the same conv weight — only the last call took effect, so the
# first two were dead work. Keep the (effective) Kaiming init only.
for m in net.modules():
    if isinstance(m, nn.Conv2d):
        nn.init.kaiming_normal_(m.weight)  # conv-layer weight init
        nn.init.constant_(m.bias, 0)
    elif isinstance(m, nn.Linear):
        nn.init.normal_(m.weight)  # fully-connected layer weight init
# 训练模型
# 选择优化器
import torch.optim as optim
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(net.parameters(),lr=0.001,momentum=0.9)
# 训练
# Train for 10 epochs, logging the average loss every 2000 mini-batches.
for epoch in range(10):
    net.train()
    running_loss = 0.0
    for i, data in enumerate(trainloader, 0):
        # fetch one mini-batch and move it to the training device
        inputs, labels = data
        inputs, labels = inputs.to(device), labels.to(device)
        # zero accumulated gradients before the new backward pass
        optimizer.zero_grad()
        # forward + backward + parameter update
        outputs = net(inputs)
        loss = criterion(outputs, labels)
        loss.backward()
        optimizer.step()
        running_loss += loss.item()
        if i % 2000 == 1999:
            # Fix: running_loss is reset every 2000 batches, so the window
            # average must divide by 2000, not by the global batch index (i+1).
            print('[%d %5d] loss: %.3f' % (epoch + 1, i + 1, running_loss / 2000))
            running_loss = 0.0
print("Finished training")
# 测试模型
# Measure overall accuracy on the held-out test set.
correct = 0
total = 0
with torch.no_grad():  # inference only: no gradient tracking needed
    net.eval()  # disable dropout / use running batch-norm statistics
    for data in testloader:
        images,labels = data
        images,labels = images.to(device),labels.to(device)
        outputs = net(images)
        _,predicted = torch.max(outputs.data,1)  # index of the highest logit = predicted class
        total += labels.size(0)
        correct += (predicted == labels).sum().item()
print('Accuracy of the network on the 10000 test images: %d %%' % (100 * correct / total))
# 计算各种类别的准确率
# Break accuracy down by class.
class_correct = list(0. for i in range(10))
class_total = list(0. for i in range(10))
with torch.no_grad():
    for data in testloader:
        images,labels = data
        images,labels = images.to(device),labels.to(device)
        outputs = net(images)
        _,predicted = torch.max(outputs,1)
        c = (predicted == labels).squeeze()  # per-sample correctness flags
        # NOTE(review): range(4) assumes batch_size == 4 and that the test-set
        # size is divisible by 4 (true for CIFAR-10's 10000 with this loader).
        for i in range(4):
            label = labels[i]
            class_correct[label] += c[i].item()
            class_total[label] += 1
for i in range(10):
    # assumes every class occurs at least once in the test set (true for CIFAR-10)
    print("Accuracy of %5s: %2d %% " % (classes[i],100 * class_correct[i] / class_total[i]))
|
class Solution:
    def divisibilityArray(self, word: str, m: int) -> List[int]:
        """Return a 0/1 list where entry i is 1 iff the numeric prefix
        word[:i+1] is divisible by m (computed with a running remainder)."""
        remainder = 0
        answer = []
        for digit in word:
            remainder = (remainder * 10 + int(digit)) % m
            answer.append(1 if remainder == 0 else 0)
        return answer
|
import functools
import pickle
import scipy.sparse as sp
from flask import (
Blueprint, flash, g, redirect, render_template, request, session, url_for
)
bp = Blueprint('user', __name__, url_prefix='/user')
# Pre-trained implicit-feedback recommender plus its lookup tables, loaded
# once at import time.
# NOTE(review): 'confi.npz' looks like a truncated 'config/conf.npz' — confirm
# the artifact name is correct.
sparse_user_item = sp.load_npz('confi.npz')
model = pickle.load(open('model.pkl','rb'))
problem_name = pickle.load(open('problem_name.pkl','rb'))  # item index -> "contestProblem" string
user_id = pickle.load(open('user_id.pkl','rb'))            # handle -> internal user index
@bp.route('/preds', methods=('GET', 'POST'))
def preds():
    """Render Codeforces problem links recommended for the POSTed username.

    GET renders an empty form; POST looks the user up in the model and builds
    one contest/problem URL per recommendation.
    """
    links = []
    username = ""
    if request.method == 'POST':
        username = request.form['username']
        error = None
        if not username:
            error = 'Username is required.'
        if error is None:
            # NOTE(review): unknown usernames raise KeyError on user_id[...] —
            # confirm whether that should surface as a flashed error instead.
            rec = model.recommend(user_id[username], sparse_user_item)
            print(rec)
            for r in rec:
                # problem_name entries look like "<contest digits><problem letters>",
                # e.g. "1325B" -> contest "1325", problem "B".
                name = problem_name[r[0]]
                contest = ""
                problem = ""
                for x in range(len(name)):
                    if name[x].isalpha():
                        problem = name[x:]
                        contest = name[:x]
                        # Fix: stop at the FIRST letter; without the break the
                        # loop kept reassigning and the LAST letter won, which
                        # mangles names whose problem index contains letters later on.
                        break
                links.append("https://codeforces.com/contest/" + str(contest) + "/problem/" + str(problem))
        else:
            flash(error)
    return render_template('user/preds.html', links = links, username=username)
@bp.route('/test', methods=('GET', 'POST'))
def test():
    """Debug endpoint: show raw recommendations for a fixed user index (24593)."""
    posts = model.recommend(24593, sparse_user_item)
    return render_template('user/test.html',hdr = "yoyoyo",posts=posts)
#Coded by R Praveen Ram
# Read one line and collapse every run of identical consecutive characters
# into a single character (e.g. "aabba" -> "aba").
string = input()
result = string[0]
for previous, current in zip(string, string[1:]):
    if current != previous:
        result += current
print(result)
|
import random
import math
def brute():
    """Exhaustively find the num in [l, r] maximizing (x & num) * (y & num).

    Reads the globals l, r, x, y; returns the best num (first one on ties).
    """
    best_num = 0
    best_product = float("-inf")
    for candidate in range(l, r + 1):
        product = (x & candidate) * (y & candidate)
        if product > best_product:
            best_product = product
            best_num = candidate
    return best_num
def toBinary(num):
    """Return the binary string of a non-negative int ('' for 0, no prefix)."""
    bits = []
    while num > 0:
        bits.append(str(num & 1))
        num >>= 1
    return ''.join(reversed(bits))
def toDeci(num, length):
    """Interpret bit-string `num` as the HIGH-order bits of a `length`-bit
    number: the bit at index pos contributes 2**(length - pos - 1)."""
    total = 0
    for pos, bit in enumerate(num):
        total += (1 << (length - pos - 1)) * int(bit)
    return total
def powerOf2(num):
    """True iff num is a positive power of two (clearing the lowest set bit
    of a power of two leaves zero)."""
    return num != 0 and (num & (num - 1)) == 0
def getSecond(small, big, res = 0):
    """OR the bits of `small` with the right-aligned low bits of `big`,
    read the result as an integer, and add it onto `res`."""
    offset = len(big) - len(small)
    width = len(small)
    for idx, bit in enumerate(small):
        if bit == '1' or big[idx + offset] == '1':
            res += 1 << (width - idx - 1)
    return res
# def computeFarther2():
def computeFurther(binNum):
    """Greedily keep the highest set bits of bit-string `binNum` while the
    accumulated value stays <= r (module-level global)."""
    value = 0
    for shift, bit in zip(range(len(binNum) - 1, -1, -1), binNum):
        candidate = value + (1 << shift)
        if bit == '1' and candidate <= r:
            value = candidate
    return value
# Driver: for each test case, compute a fast heuristic answer and cross-check
# it against the brute-force search, printing diagnostics on any mismatch.
t = int(input())
for _ in range(t):
    # x, y = random.randrange(0,100), random.randrange(0,100)
    # l = 0
    # r = random.randrange(random.randrange(0,100))
    # print(x, y, l, r)
    x, y, l, r = map(int, input().rstrip().split())
    finalResult = 0
    # trivial cases first
    if x == 0 or y == 0:
        finalResult = 0
    elif l == r:
        finalResult = l
    elif r == 0:
        finalResult = r
    else:
        maxPro = float("-inf")
        # ensure x <= y so binaryX is the shorter (or equal-length) bit string
        temp = x
        x = min(x, y)
        y = max(temp, y)
        binaryX = toBinary(x)
        binaryY = toBinary(y)
        if len(binaryY) == len(binaryX):
            # same width: candidate is the bitwise OR of x and y
            secondPart = getSecond(binaryX, binaryY)
            finalResult = secondPart
        else:
            # different widths: keep y's leading bits, OR the overlapping tail
            val = toDeci(binaryY[0:len(binaryY) - len(binaryX)], len(binaryY))
            secondPart = getSecond(binaryX, binaryY, val)
            finalResult = secondPart
        if finalResult > r:
            # candidate exceeds the allowed range: greedily drop bits to fit
            binaryForm = toBinary(finalResult)
            maxProduct = (x&finalResult)*(y&finalResult)
            finalResult = computeFurther(binaryForm)
    # brute force is treated as ground truth; mismatches are flagged loudly
    temp = brute()
    print(temp)
    if finalResult != temp:
        print("FAILED HERE")
        print(finalResult, (x&finalResult)*(y&finalResult))
        print(temp, (x&temp)*(y&temp))
        # break
"""
23 72 0 20
23 5 0 >23
6 52 0 33 my is 31 right is 22
11 28 0 29 my is 29 right is 27
1 86 0 5
80 46 0 9
72 90 0 7
30 11 0 1
30 98 0 13
""" |
import requests

# Smoke-test client: query the locally running prediction API with a sample
# `experience` value and print the decoded JSON response.
experience = 2
response = requests.get("http://127.0.0.1:8000/predict?experience={}".format(experience))
output = response.json()
print(output)
import sys
from collections import defaultdict
import json
class relation_linker(object):
    """Links relation mentions in training data against a pool of relation phrases.

    Scans the token span between each relation mention's entity boundaries with
    a sliding window and emits matched phrase tokens (via a token-id mapping)
    together with their POS tags.
    """

    def __init__(self, fp_path, train_path, lf_path):
        """Store input paths: frequent-phrase file, training JSON, logic-form file."""
        self.relation_pool = defaultdict(int)
        self.file_path = fp_path
        self.train_path = train_path
        self.lf_path = lf_path

    def generatePool(self, min_sup):
        """Load phrases with support >= min_sup from the tab-separated phrase file.

        Expected columns: <_> \t <count> \t <phrase>.
        """
        with open(self.file_path, 'r') as IN:
            for line in IN:
                tmp = line.strip().split('\t')
                if int(tmp[1]) >= min_sup:
                    self.relation_pool[tmp[2]] = int(tmp[1])

    def sliding_window(self, min_len, max_len):
        """Scan between-entity token spans with windows of min_len..max_len tokens.

        For every window matching a phrase in self.relationlist, write the
        mapped token ids to relation_token.txt and the POS tags to
        pos_relation_token.txt. Requires readLF() and loadMapping() first.
        """
        with open(self.train_path, 'r') as IN, open('./tmp_remine/relation_token.txt', 'w') as rt, open('./tmp_remine/pos_relation_token.txt', 'w') as rp:
            cnt = 0
            for line in IN:
                cnt += 1
                tmp = json.loads(line.strip())
                for rm in tmp['relationMentions']:
                    if rm[2] == 'None':
                        continue
                    # span between the first entity's end and the second's start
                    start = rm[0][1]
                    end = rm[1][0]
                    assert (len(tmp['tokens']) == len(tmp['pos']))
                    sub = tmp['tokens'][start:end]
                    sub_pos = tmp['pos'][start:end]
                    for i in range(min_len, max_len + 1):
                        # Fix: was xrange (Python 2 only) while the sibling loop
                        # used range — NameError on Python 3.
                        for j in range(end - start + 1 - i):
                            candidate = ' '.join(sub[j:j + i])
                            if candidate in self.relationlist:
                                for w in sub[j:j + i]:
                                    rt.write(self.token_mapping[w] + ' ')
                                rp.write(' '.join(sub_pos[j:j + i]) + '\n')
                                rt.write('\n')

    def readLF(self):
        """Build self.relationlist from the logic-form file ('Texture' field,
        adjective markers stripped)."""
        self.relationlist = set()
        with open(self.lf_path, 'r') as IN:
            for line in IN:
                tmp = json.loads(line.strip())
                candidate = tmp['Texture'].replace('<JJ> ', '')
                # can be simplified by filtering on relation_pool membership
                self.relationlist.add(candidate)

    def loadMapping(self, token_mapping):
        """Load the word -> token-id mapping from a tab-separated file
        (<id> \t <word> per line)."""
        self.token_mapping = dict()
        with open(token_mapping, 'r') as IN:
            for line in IN:
                tmp = line.strip().split('\t')
                self.token_mapping[tmp[1]] = tmp[0]
if __name__ == '__main__':
    # argv: [1] phrase-frequency file, [2] training JSON, [3] logic-form file
    tmp=relation_linker(sys.argv[1],sys.argv[2],sys.argv[3])
    #tmp.generatePool(int(sys.argv[4]))
    tmp.readLF()
    tmp.loadMapping('./tmp_remine/token_mapping.txt')
    tmp.sliding_window(2,4)
#!/usr/bin/env python
#coding=utf8
import httplib
import md5
import urllib
import random
import json
import time
import re
debug=0
if debug == 1:
fh = open("a.log","w")
from pandocfilters import toJSONFilter, Emph, Para, Str, stringify, Header , Strong, Plain, Link
def rep(v):
    """Wrap string v in a pandoc 'Str' inline node (as a one-element list),
    replacing the CJK full stop with a space."""
    return [{'t': "Str", 'c': re.sub(u"。", " ", v)}]
def transPara(key, value, format, meta):
    """Pandoc JSON-filter action: demote headers by one level.

    H2->H1, H3->H2, H4->H3; H5 and deeper become a bold paragraph. All other
    node types pass through unchanged (returning None keeps the original node).
    """
    if key == 'Header':
        if debug == 1:
            fh.write("Header\n")
        lev = value[0]
        newlev = 0
        if lev == 2 :
            newlev = 1
        elif lev == 3:
            newlev = 2
        elif lev == 4:
            newlev = 3
        elif lev >= 5:
            newlev = 5
        if newlev == 5:
            # deep headers are rendered as bold paragraphs; dots are replaced
            # with dashes in the header text
            v2=rep(stringify(value[2]))
            v2[0]['c']=re.sub(u"\.","-",v2[0]['c'])
            return Para([Strong(v2)])
        else:
            tt=("",[],[])  # empty (identifier, classes, key-values) attribute triple
            v2=rep(stringify(value[2]))
            return Header(newlev,tt,v2)
    #elif key == 'Link':
    #    #print "Link"
    #    if debug == 1:
    #        fh.write("Link\n")
    #    hstr = stringify(value)
    #    trStr=""
    #    if len(hstr) < 1:
    #        trStr=""
    #        return []
    #    else:
    #        trStr=transWrapper(hstr,"Link")
    #    return Link(value[0],[Str(trStr)],value[2])
    else:
        return None
if __name__ == "__main__":
toJSONFilter(transPara)
|
import time
import xml.etree.ElementTree as ET
from StringIO import StringIO
from pyspark import SparkConf, SparkContext, HiveContext
from multiprocessing.pool import ThreadPool
from pyspark.sql.types import StructType, StructField, StringType
__author__ = 'Pirghie Dimitrie'
conf = SparkConf().setAppName("Preprocessing")
sc = SparkContext(conf=conf)
hc = HiveContext(sc)
# Read Files Content
files = sc.wholeTextFiles('gs://bda-p2-ddc/pmc_oa_2/1000/*/records*.xml.gz', use_unicode=True, minPartitions=256);
def build_xml_object(string):
    """Parse an XML string and return its root element with namespace prefixes
    stripped from every tag."""
    it = ET.iterparse(StringIO(string))
    for _, el in it:
        if '}' in el.tag:
            el.tag = el.tag.split('}', 1)[1]  # strip all namespaces
    # NOTE(review): `it.root` is an undocumented attribute of iterparse —
    # confirm it exists on the targeted Python version.
    return it.root
def extract_test_from_p_element(p_element):
    """Collect the full text of a <p> node, appending '. ' when the paragraph
    does not already end with sentence punctuation; always newline-terminated."""
    text = ''.join(p_element.itertext()).strip()
    if text and text[-1] not in ('.', ';', '!', '?'):
        text += '. '
    return text + '\n'
def extract_test_from_sec_element(sec_element):
    """Recursively concatenate the text of a <sec> node: direct <p> children
    plus nested <sec> sections, with a trailing newline per section."""
    pieces = []
    for child in sec_element:
        if child.tag == 'p':
            pieces.append(extract_test_from_p_element(child))
        elif child.tag == 'sec':
            pieces.append(extract_test_from_sec_element(child))
    return ''.join(pieces) + '\n'
def extract_text_from_body_element(body_element):
    """Concatenate the text of every top-level <sec> child of an article <body>.

    Best-effort: returns '' on any failure (e.g. body_element is None when the
    article has no <body>).
    """
    try:
        root_secs = body_element.findall('sec')
        text = ''
        for sec in root_secs:
            text += extract_test_from_sec_element(sec)
        return text
    except Exception as e:
        # deliberate broad catch: a malformed article must not kill the Spark job
        return ''
def extract_information_from_article(article):
    """Flatten one (filename, article-XML-root) pair into a row tuple.

    Returns (filename, pmcid, abstract, body_text, '') with newlines collapsed
    to spaces in abstract and text.
    """
    filename = article[0]
    article_xml_root = article[1]
    # PMC id lives in the front-matter metadata
    id = article_xml_root.find('front/article-meta/article-id[@pub-id-type="pmcid"]').text
    abstract = ''
    try:
        abstract = ''.join(list(article_xml_root.find('front/article-meta/abstract').itertext()))
    except:
        # article without an abstract (find() returned None)
        abstract = ''
    text = extract_text_from_body_element(article_xml_root.find('body'))
    return filename, id, abstract.strip().replace('\n', ' '), text.strip().replace('\n', ' '), ''
def extract_arcticles(xml_file):
    """Extract row tuples for every article in one OAI records file, using a
    local thread pool per file. (Name misspelling kept for compatibility.)"""
    article_roots = xml_file[1].findall('ListRecords/record/metadata/article')
    args = [(xml_file[0], articleRoot) for articleRoot in article_roots]
    pool = ThreadPool(8)
    results = [row for row in pool.map(extract_information_from_article, args)]
    pool.close()
    return results
start = time.time()
# Parse as XML objects
xmlFiles = files.map(lambda file: (file[0], build_xml_object(file[1].encode('utf-8'))))
# Extract article XML nodes
articles = xmlFiles.flatMap(lambda xml_file: [(xml_file[0], articleRoot) for articleRoot in xml_file[1].findall('ListRecords/record/metadata/article')])
# Extract id, abstract, text from articles
rows = articles.map(extract_information_from_article)
# NOTE(review): extract_information_from_article returns 5-tuples
# (filename, id, abstract, text, '') but this schema declares only 4 fields —
# confirm createDataFrame tolerates the extra element or add the fifth column.
schema = StructType([
    StructField('File', StringType(), True),
    StructField('ArticleId', StringType(), True),
    StructField('ArticleAbstract', StringType(), True),
    StructField('ArticleText', StringType(), True)
])
hc.createDataFrame(rows, schema).write.mode("append").saveAsTable('articles')
end = time.time()
print('Elapsed time:')
print(end - start)
sc.stop()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
import bcrypt, re # bcrypt + regex
from django.contrib import messages # flash messages
from datetime import datetime, date, timedelta # datetime
import pytz # for time comparison
from ..login_registration.models import User # from login_registration app
class MessageReplyManager(models.Manager):
    """Manager shared by Message and Reply: validation, creation, and timeline
    queries annotated with human-readable 'time ago' strings."""

    def validate(self, request):
        """Validate POSTed content length for both messages and replies.

        Returns True when content is 1..500 characters; otherwise flashes a
        message and returns False.
        """
        post_data = request.POST
        data_check = True
        if len(post_data['content']) < 1:
            data_check = False
            messages.info(request, 'message and reply content must be more than zero characters')
        if len(post_data['content']) > 500:
            data_check = False
            messages.info(request, 'message and reply content may not be longer than 500 characters')
        return data_check

    def create_message(self, request, rid, aid):
        """Create a Message to recipient id `rid` from author id `aid` (best-effort)."""
        post_data = request.POST
        try:
            recipient = User.objects.get_user(rid)[1]
            author = User.objects.get_user(aid)[1]
            Message.objects.create(
                content = post_data['content'],
                author = author,
                recipient = recipient,
            )
        except:
            pass  # deliberately silent: invalid ids simply create nothing
        return self

    def create_reply(self, request, mid, aid):
        """Create a Reply to message id `mid` by author id `aid` (best-effort)."""
        post_data = request.POST
        try:
            message = Message.objects.get(id=mid)
            author = User.objects.get_user(aid)[1]
            Reply.objects.create(
                content = post_data['content'],
                author = author,
                message = message,
            )
        except:
            pass  # deliberately silent: invalid ids simply create nothing
        return self

    def _annotate_time(self, obj, now):
        """Attach obj.time_past / obj.time ('N days ago' style) to a message or reply.

        Fix: the original used timedelta.seconds — which silently drops whole
        days — and computed 'days' as seconds/((60*60)*7), a 7-hour unit. Use
        total_seconds() with real unit sizes. Later (finer-grained) branches
        deliberately overwrite earlier ones, as before.
        """
        created_at = obj.created_at
        elapsed = (now - created_at).total_seconds()
        # default: absolute date string
        obj.time_past = str(created_at.strftime('%B %d, %Y'))
        obj.time = " "
        # less than one week
        if elapsed / (60 * 60 * 24) < 7:
            obj.time_past = int(elapsed // (60 * 60 * 24))
            obj.time = " days ago"
            if obj.time_past == 1:
                obj.time = " day ago"
        # less than one day
        if elapsed / (60 * 60) < 24:
            obj.time_past = int(elapsed // (60 * 60))
            obj.time = " hours ago"
            if obj.time_past == 1:
                obj.time = " hour ago"
        # less than one hour
        if elapsed / 60 < 60:
            obj.time_past = int(elapsed // 60)
            obj.time = " minutes ago"
            if obj.time_past == 1:
                obj.time = " minute ago"

    def get_messages_by_recipient(self, rid):
        """Return the recipient's messages (newest first), each annotated with
        'time ago' data and its replies (oldest first), replies annotated too."""
        recipient = User.objects.get_user(rid)[1]
        messages = Message.objects.filter(recipient = recipient).order_by('-created_at')
        if len(messages) > 0:
            now = datetime.now(pytz.timezone('America/Chicago'))
            for message in messages:
                self._annotate_time(message, now)
                message.reply_messages = Reply.objects.filter(message = message).order_by('created_at')
                for reply in message.reply_messages:
                    self._annotate_time(reply, now)
        return messages

    # specific for finding recipient id (create_reply() POST handling)
    def recipient_id(self, mid):
        """Look up the recipient id of message `mid`; returns (found, rid)
        with rid == 0 when the message does not exist."""
        data_check = False
        try:
            message = Message.objects.get(id=mid)
            rid = message.recipient.id
            data_check = True
        except:
            rid = 0
        return data_check, rid
class Message(models.Model):
    """A direct message from one user (author) to another (recipient)."""
    content = models.CharField(max_length=500)  # validated to 1..500 chars by the manager
    author = models.ForeignKey(User, on_delete=models.CASCADE, related_name='messages_sent')
    recipient = models.ForeignKey(User, on_delete=models.CASCADE, related_name='messages_received')
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    objects = MessageReplyManager()  # shared manager: validation / creation / queries
class Reply(models.Model):
    """A reply by a user to an existing Message."""
    content = models.CharField(max_length=500)  # validated to 1..500 chars by the manager
    author = models.ForeignKey(User, on_delete=models.CASCADE, related_name='replies')
    message = models.ForeignKey(Message, on_delete=models.CASCADE, related_name='replies')
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    objects = MessageReplyManager()  # shared manager: validation / creation / queries
from collections import *
from itertools import *
from random import *
from time import *
from functools import *
'''
Considering quadratics of the form:
n2+an+b, where |a|<1000 and |b|≤1000
where |n| is the modulus/absolute value of n
e.g. |11|=11 and |−4|=4
Find the product of the coefficients, a and b, for the quadratic expression that
produces the maximum number of primes for consecutive values of n, starting with n=0.
'''
def main():
    """Project Euler 27: print a*b for the quadratic n^2 + a*n + b
    (|a| < 1000, |b| <= 1000) producing the longest run of primes from n = 0."""
    def prime_set(limit):
        """Sieve of Eratosthenes: set of all primes below `limit`."""
        is_composite = bytearray(limit)
        found = set()
        for cand in range(2, limit):
            if not is_composite[cand]:
                found.add(cand)
                for mult in range(cand * cand, limit, cand):
                    is_composite[mult] = 1
        return found

    primes = prime_set(2000000)
    best_run = 0
    best_product = 0
    for a in range(-999, 1000):
        for b in range(-1000, 1001):
            n = 0
            # count consecutive n (from 0) whose quadratic value is prime;
            # negative values are simply absent from the set
            while n * n + a * n + b in primes:
                n += 1
            if n > best_run:
                best_run = n
                best_product = a * b
    print(best_product)
# Time the whole search and report elapsed wall-clock seconds.
start = time()
main()
print('Program took %.02f seconds' % (time()-start))
# Generated by Django 3.0.6 on 2020-07-17 09:55
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the ClasseForfait and
    ReglagesGeneral tables. Generated code — avoid hand-editing."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='ClasseForfait',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('designation', models.IntegerField(choices=[(0, 'Classe 1'), (1, 'classe 2'), (2, 'classe 3')], default=0)),
                ('puissance_max', models.IntegerField(default=75)),
            ],
        ),
        migrations.CreateModel(
            name='ReglagesGeneral',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('prix_par_watt', models.DecimalField(decimal_places=2, max_digits=11, verbose_name='Prix par watt')),
                ('alert_level', models.IntegerField(verbose_name="Niveau d'alerte minimal des transfos")),
            ],
        ),
    ]
|
import wiringpi as wpi
from settings import LCDAddr
fd_1602 = wpi.wiringPiI2CSetup(LCDAddr) # I2C初始化
def send_bit(comm, rs=1):
    """Write one byte to the 1602 LCD via the I2C backpack in 4-bit mode.

    comm: byte to send; rs=1 writes data, rs=0 writes an instruction.
    Each nibble is latched by pulsing the EN line high then low.
    """
    # send the high 4 bits first
    buf = comm & 0xF0
    buf = buf | 0x04 | rs  # rs=1: data register, rs=0: instruction register; 0x04 raises EN
    wpi.wiringPiI2CWrite(fd_1602, buf)
    wpi.delay(2)
    buf &= 0xFB  # EN 1 -> 0 (falling edge latches the nibble)
    wpi.wiringPiI2CWrite(fd_1602, buf)
    # then send the low 4 bits
    buf = (comm & 0x0F) << 4
    buf = buf | 0x04 | rs
    wpi.wiringPiI2CWrite(fd_1602, buf)
    wpi.delay(2)
    buf &= 0xFB
    wpi.wiringPiI2CWrite(fd_1602, buf)
def init_1602():
    """Standard HD44780 init: switch to 4-bit bus, 2-line 5x7 mode,
    display on without cursor, then clear."""
    send_bit(0x33, 0)  # initialize in 8-bit mode first
    wpi.delay(5)
    send_bit(0x32, 0)  # then switch to 4-bit mode
    wpi.delay(5)
    send_bit(0x28, 0)  # 2 Lines & 5*7 dots
    wpi.delay(5)
    send_bit(0x0C, 0)  # Enable display without cursor
    wpi.delay(5)
    send_bit(0x01, 0)  # Clear Screen
def clear():  # clear the LCD screen
    """Send the HD44780 clear-display command (0x01)."""
    send_bit(0x01, 0)
|
from enthought.mayavi.core.registry import registry
from enthought.mayavi.core.metadata import SourceMetadata
from enthought.mayavi.core.pipeline_info import PipelineInfo
def mat_reader(fname, engine):
    """Reader for .zzz files.

    Parameters:
    -----------
    fname -- Filename to be read.
    engine -- The engine the source will be associated with.
    """
    import os.path
    # Fix: removed the unused `dir`/`phparams` locals (and `dir` shadowed the
    # builtin); they were computed but never read.
    # NOTE: JesseMatLabCoordSource is a project-local module, imported lazily
    # so mayavi's registry scan does not pay the cost.
    from JesseMatLabCoordSource import JesseMatLabCoordSource
    src = JesseMatLabCoordSource(file_name=fname, engine=engine)
    src.scalar_name = fname
    src.name = os.path.split(fname)[-1]
    return src
# Metadata describing the reader to mayavi's open-file machinery.
mat_reader_info = SourceMetadata(
    id = "Matlab File Reader",
    factory = 'mat_reader.mat_reader',
    tooltip = "Load a mat file",
    desc = "Load a mat file",
    help = "Load a mat file",
    menu_name = "&mat file",
    # NOTE(review): extensions ('rec','REC','mat') and the wildcard ('*.rec'
    # only) disagree — confirm which formats should actually be offered.
    extensions = ['rec','REC','mat'],
    wildcard = 'Rec files (*.rec)|*.Rec',
    output_info = PipelineInfo(datasets=['image_data','structured_grid'],
                               attribute_types=['any'],
                               attributes=['any'])
    )
# Inject this information in the mayavi registry
registry.sources.append(mat_reader_info)
if __name__=='__main__':
    # NOTE(review): Sp4ArrayFileSource is neither imported nor defined in this
    # module, and the bare `print` below is Python 2 syntax — this demo block
    # appears stale and will not run as-is on Python 3.
    source=Sp4ArrayFileSource(file_name='/Users/rharder/PhasingProjects/diffmap-moyuAu/Au708-81-Imask400rs/BD-data.sp4')
    source.update()
    print source._scalar_data
    source.configure_traits()
|
"""Module implements various notification methods."""
import os
import smtplib
import logging
from socket import gaierror
from email.message import EmailMessage
from requests import post
from .helpers import env_exists
class Notifications:
    """Handles notifications requests.

    Attributes:
        message: str, plain text msg that will be sent
    """

    def __init__(self, message: str) -> None:
        """Initializes class Notifications and fires every configured channel.

        Args:
            message: str, plain text body that will be sent
        """
        self.message = message
        self.send_email_notification()
        self.send_telegram_notification()

    def send_telegram_notification(self) -> bool:
        """Handler for Telegram notifications.

        Returns:
            True: if env variables exist and the message was sent
            False: if env variables are missing or sending failed
        """
        telegram_chat_id = os.getenv('TELEGRAM_CHAT_ID')
        telegram_token = os.getenv('TELEGRAM_TOKEN')
        if env_exists(telegram_chat_id) and env_exists(telegram_token):
            telegram_send_message = f'https://api.telegram.org/bot{telegram_token}/sendMessage'
            text_body = {'chat_id': telegram_chat_id, 'text': self.message}
            try:
                post(telegram_send_message, text_body).json()
                logging.info('Telegram notification sent to "%s".',
                             telegram_chat_id)
                return True
            except ConnectionError as error_response:
                # NOTE(review): requests raises requests.exceptions.ConnectionError,
                # which is NOT a subclass of the builtin ConnectionError caught
                # here — confirm and consider catching
                # requests.exceptions.RequestException instead.
                details = str(error_response).split(':')[0]
                logging.error(details)
        # Fix: previously fell through returning None when the env variables
        # were missing; the documented contract is a bool.
        return False

    def send_email_notification(self) -> bool:
        """Handler for Email notifications.

        Returns:
            True: if EMAIL is configured and the message was handed to the relay
            False: if EMAIL is missing or sending failed
        """
        email = os.getenv('EMAIL')
        smtp_relay = os.getenv('SMTP_RELAY')
        if env_exists(email):
            if not env_exists(smtp_relay):
                smtp_relay = 'localhost'  # default relay when none configured
            try:
                msg = EmailMessage()
                msg['Subject'] = '\U0001F4D1 [mongo-dump] status report'
                msg['From'] = 'mongo-dump@service.io'
                msg['To'] = email
                msg.set_content(self.message)
                with smtplib.SMTP(smtp_relay) as smtp:
                    smtp.send_message(msg)
                logging.info('Email was sent to "%s" via smtp relay "%s".',
                             email, smtp_relay)
                return True
            except gaierror:
                logging.error(
                    'smtp relay server "%s" is not available. Please check.',
                    smtp_relay)
            except OSError:
                logging.error(
                    'smtp relay server name "%s" could not be resolved over DNS. Please check.',
                    smtp_relay)
        # Fix: also return False explicitly when EMAIL is unset (was implicit None).
        return False
|
def cal(A, B):
    """Solve the linear system A @ x = B and return x."""
    return np.linalg.solve(A, B)
import numpy as np
import math
def ct(n):
    """Map n in [0, 100] onto [0, pi]: one unit equals pi/100 radians."""
    return math.pi / 100 * n
x11 = [ct(0), ct(7), ct(20), ct(29), ct(32), ct(50), ct(64), ct(70), ct(82), ct(90), ct(100)]
x21 = [ct(0), ct(4), ct(10), ct(12), ct(14), ct(20), ct(23), ct(27), ct(35), ct(40), ct(43), ct(55), ct(61), ct(65),
ct(70), ct(83),
ct(87), ct(91), ct(94), ct(98), ct(100)]
def initial(x0=0.0, xn=math.pi, n=10, x=x11):
    """Sample sin at the n+1 knots in x.

    Returns (a, h, y): knot values as a numpy array, interval widths h
    (length n), and the raw value list y.
    """
    y = [math.sin(x[i]) for i in range(n + 1)]
    a = np.array(y)
    h = [x[i + 1] - x[i] for i in range(n)]
    return a, h, y
def createAb(h, a, choice=0, n=10):
    """Build the (n+1)x(n+1) system A·c = b for cubic-spline c-coefficients.

    h: interval widths (length n), a: knot values (length n+1).
    `choice` selects the boundary condition applied to rows 0 and n:
      0 -- natural spline (c0 = cn = 0)
      1 -- end rows scaled by the first/last interval widths
      2 -- periodic-style closure
           (NOTE(review): the row-0 / row-n entries here are unusual —
           confirm against the intended boundary formulas)
      3 -- c0 = c1 and c(n-1) = cn (parabolic run-out)
    Interior rows 1..n-1 and the RHS use the standard spline relations in
    every case.
    """
    if choice == 0:
        A = np.zeros((n + 1, n + 1))
        A[0][0] = 1
        A[n][n] = 1
        for i in range(1, n):
            A[i][i - 1] = h[i - 1]
            A[i][i] = 2 * (h[i - 1] + h[i])
            A[i][i + 1] = h[i]
        b = np.zeros(n + 1)
        for i in range(1, n):
            b[i] = 3 * (a[i + 1] - a[i]) / h[i] - 3 * (a[i] - a[i - 1]) / h[i - 1]
        return A, b
    if choice == 1:
        A = np.zeros((n + 1, n + 1))
        A[0][0] = 2 * h[0]
        A[0][1] = h[0]
        A[n][n - 1] = 2 * h[n - 1]
        A[n][n] = h[n - 1]
        for i in range(1, n):
            A[i][i - 1] = h[i - 1]
            A[i][i] = 2 * (h[i - 1] + h[i])
            A[i][i + 1] = h[i]
        b = np.zeros(n + 1)
        for i in range(1, n):
            b[i] = 3 * (a[i + 1] - a[i]) / h[i] - 3 * (a[i] - a[i - 1]) / h[i - 1]
        return A, b
    if choice == 2:
        A = np.zeros((n + 1, n + 1))
        A[0][0] = h[0] * 2 / 3
        A[0][1] = h[0] * 2
        A[0][n - 1] = h[n - 1] * 2 / 3
        A[0][n] = h[n - 1] / 3
        A[n][0] = 1
        A[n][n - 1] = -1
        for i in range(1, n):
            A[i][i - 1] = h[i - 1]
            A[i][i] = 2 * (h[i - 1] + h[i])
            A[i][i + 1] = h[i]
        b = np.zeros(n + 1)
        b[0] = (a[1] - a[0]) / h[0] - (a[n] - a[n - 1]) / h[n - 1]
        for i in range(1, n):
            b[i] = 3 * (a[i + 1] - a[i]) / h[i] - 3 * (a[i] - a[i - 1]) / h[i - 1]
        return A, b
    if choice == 3:
        A = np.zeros((n + 1, n + 1))
        A[0][0] = 1
        A[0][1] = -1
        A[n][n] = 1
        A[n][n - 1] = -1
        for i in range(1, n):
            A[i][i - 1] = h[i - 1]
            A[i][i] = 2 * (h[i - 1] + h[i])
            A[i][i + 1] = h[i]
        b = np.zeros(n + 1)
        for i in range(1, n):
            b[i] = 3 * (a[i + 1] - a[i]) / h[i] - 3 * (a[i] - a[i - 1]) / h[i - 1]
        return A, b
def calcd(a, c, h, n=10):
    """Derive the spline b- and d-coefficients for each of the n pieces from
    the knot values a, c-coefficients c, and interval widths h."""
    b = np.zeros(n)
    d = np.zeros(n)
    for i in range(n):
        slope = (a[i + 1] - a[i]) / h[i]
        b[i] = slope - h[i] * (c[i + 1] + 2 * c[i]) / 3
        d[i] = (c[i + 1] - c[i]) / (3 * h[i])
    return b, d
from matplotlib import pyplot as plt
def everysection(a, b, c, d, x, i):
    """Plot spline piece i, i.e. d*(t-x_i)^3 + c*(t-x_i)^2 + b*(t-x_i) + a
    on the interval [x_i, x_{i+1}]."""
    xt = np.linspace(x[i], x[i + 1], 100)
    y = d[i] * (xt - x[i]) ** 3 + c[i] * (xt - x[i]) ** 2 + b[i] * (xt - x[i]) + a[i]
    plt.plot(xt, y, color="green", linewidth=0.5)
def inplot():
    """Overlay the exact sin curve on [0, pi] for visual comparison."""
    x = np.linspace(0, np.pi, 100, endpoint=True)
    y = np.sin(x)
    plt.plot(x, y,color='pink',linewidth=3)
fk=0  # running counter so each saved figure gets a unique filename
def pAll(a, b, c, d, x, N):
    """Plot all N spline pieces plus the knots, save the figure as
    '<fk>_<knot count>.png', and show it."""
    for i in range(N):
        everysection(a, b, c, d, x, i)
        print(i)  # progress indicator
    plt.scatter(x, a, color='red', marker='o')  # mark the interpolation knots
    global fk
    plt.savefig(str(fk)+'_'+str(len(x))+'.png')
    fk+=1
    plt.show()
def run(N=10,X=x11,ch=2):
    """Fit and plot a cubic spline through N+1 sin samples at knots X,
    using boundary-condition variant `ch` (see createAb)."""
    a, h, y = initial(n=N, x=X)
    A, B = createAb(h, a, choice=ch, n=N)
    c = cal(A, B)
    b, d = calcd(a, c, h, N)
    # debug output: coefficient array lengths (a/c have N+1, b/d have N entries)
    print(len(a))
    print(len(b))
    print(len(c))
    print(len(d))
    inplot()
    pAll(a, b, c, d, x=X, N=N)
run(10,x11,0)
run(10,x11,1)
run(10,x11,2)
run(10,x11,3)
|
# Generated by Django 2.2 on 2020-05-18 14:35
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated migration: adds create_time / modified_time timestamp
    fields to the message model. Generated code — avoid hand-editing."""

    dependencies = [
        ('app1', '0004_message_seen'),
    ]

    operations = [
        migrations.AddField(
            model_name='message',
            name='create_time',
            # one-off default needed for existing rows; preserve_default=False
            # drops it again after the migration runs
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='创建时间'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='message',
            name='modified_time',
            field=models.DateTimeField(auto_now=True, verbose_name='最新修改时间'),
        ),
    ]
|
import os
import pandas as pd
# Maps raw iViewX fixation-export column names to short internal names.
column_name_mapping = {
    "Start_iViewX_micros": "start",
    "End_iViewX_micros": "end",
    "Location_X": "x",
    "Location_Y": "y",
    "Duration_micros": "duration",
}

def rewrite_df(df):
    """Rename iViewX columns to internal names and scale the three time
    columns by 1000; prints the offending frame before re-raising on error."""
    try:
        df = df.rename(columns=column_name_mapping)
        for column in ("duration", "start", "end"):
            df[column] = df[column] * 1000
    except Exception as e:
        print(df)
        raise e
    return df
basepath = "../datasets/fractions/data"
# Convert every fixation CSV under basepath.
# NOTE(review): the output path equals the input path, so source files are
# overwritten in place, and to_csv writes the pandas index as an extra
# unnamed column — confirm both are intended.
for file in os.listdir(basepath):
    with open(os.path.join(basepath, file)) as f:
        df = pd.read_csv(f, sep=",")
        rewritten_df = rewrite_df(df)
        rewritten_df.to_csv(f"../datasets/fractions/data/{file}")
|
# -*- coding: cp949 -*-
# Galaxy Explorer
# By Park Changhwi
#레벨이 올라갈수록 행성과 셔틀이 작아진다
#회전할수록 연료가 닳는다
#빨간 행성을 먹으면 점수업
#검은 행성을 먹으면 연료 증가
#밖으로 나가면 다음 레벨
#연료가 다하면 게임 끝
#파란 행성에서는 연료 닳지 않음
import pygame, random, time, math, sys, copy
from pygame.locals import *
try:
import android
except ImportError:
android = None
class Planet():  # planet field for one level
    """Generates and draws the planet field: planet count grows and sprite
    size shrinks as the level increases."""

    def __init__(self, level):
        self.level = level
        self.numberOfPlanets = int(self.level**0.4) * 4  # planets on the whole map
        self.size = int(120 / int(self.level ** 0.4)) + 10  # grid-cell / sprite size
        self.planetList = []  # dicts describing each generated planet
        self.count = 0
        self.blueplanet = pygame.transform.scale(BLUEPLANET, (self.size, self.size))
        # candidate grid cells for planet placement
        self.planetXList = list(range(0, int(SCREENSIZE[0] / self.size)))
        self.planetYList = list(range(0, int(SCREENSIZE[1] / self.size)))
        # keep the centre cells free (the player starts there)
        # NOTE(review): two of these use SCREENSIZE[1] on the X list — confirm intended.
        self.planetXList.remove(int(SCREENSIZE[0] / (self.size * 2)))
        self.planetXList.remove(int(SCREENSIZE[1] / (self.size * 2)))
        self.planetXList.remove(int(1 + SCREENSIZE[0] / (self.size * 2)))
        self.planetXList.remove(int(1 + SCREENSIZE[1] / (self.size * 2)))
        while self.count < self.numberOfPlanets:
            self.x = random.choice(self.planetXList)
            self.y = random.choice(self.planetYList)
            self.planetXList.remove(self.x)  # at most one planet per grid row/column
            self.planetYList.remove(self.y)
            planetDict = {'type': random.choice([BLUEPLANET, BLACKPLANET, REDPLANET]),
                          'x': self.size * self.x,
                          'y': self.size * self.y,
                          # Fix: integer division — random.randint rejects a float
                          # bound on Python 3 (self.size / 2 is a float there).
                          'size': self.size - random.randint(0, self.size // 2),
                          'degree': random.randint(0, 360)}  # per-planet description
            self.planetList.append(planetDict)
            self.count += 1

    def update(self, draw = True):
        """Rebuild (and optionally draw) the scaled/rotated planet sprites.

        Returns a list of (rect, x, y, type, size) tuples for collision checks.
        """
        self.planetObjList = []
        self.draw = draw
        for p in self.planetList:
            self.randomFact = random.randint(0, 10)
            self.planetObj = pygame.transform.scale(p['type'], (p['size'], p['size']))
            self.planetObj = pygame.transform.rotate(self.planetObj, p['degree'])
            self.planetRect = self.planetObj.get_rect(center = (p['x'], p['y']))
            if draw == True:
                DISPLAYSURF.blit(self.planetObj, self.planetRect)
                # home (centre) planet is drawn alongside the field
                DISPLAYSURF.blit(self.blueplanet, (SCREENSIZE[0] / 2 - self.size / 2, SCREENSIZE[1] / 2 - self.size / 2))
            self.planetObjList.append((self.planetRect, p['x'], p['y'], p['type'], p['size']))
        return self.planetObjList

    def flag(self, flagData):
        """Draw a flag marker on every visited planet plus the home planet."""
        self.flagData = flagData
        for f in self.flagData:
            flagrect = FLAG.get_rect(center = (f[0], f[1]))
            DISPLAYSURF.blit(FLAG, flagrect)
        self.defaultFlagRect = FLAG.get_rect(center = (SCREENSIZE[0] / 2, SCREENSIZE[1] / 2))
        DISPLAYSURF.blit(FLAG, self.defaultFlagRect)
class Shuttle():  # player shuttle
    """Player shuttle: rotates in place until launched, then flies along its
    current heading with increasing speed."""

    def update(self, x, y, angle, launchMode):  # angle: shuttle heading in degrees
        """Store position/heading and pick the idle or launched sprite."""
        self.shuttleX = x
        self.shuttleY = y
        self.angle = angle
        self.launchMode = launchMode
        self.distance = 5  # base speed of the shuttle per frame
        if self.launchMode == False:
            self.rotatedShuttle = pygame.transform.rotate(SHUTTLE, self.angle)
        else:
            self.rotatedShuttle = pygame.transform.rotate(LAUNCHEDSHUTTLE, self.angle)

    def launch(self, launchFact, count):  # move the launched shuttle
        """Advance along the heading; speed accelerates with count**1.6."""
        self.launchFact = launchFact
        self.count = count
        if self.launchFact == True:
            self.shuttleX -= self.count**1.6 * self.distance * math.sin(math.radians(self.angle))
            self.shuttleY -= self.count**1.6 * self.distance * math.cos(math.radians(self.angle))

    def draw(self, launchMode, count):
        """Move the shuttle (if launched) and blit it at its current position."""
        self.launchMode = launchMode
        self.count = count
        self.launch(self.launchMode, self.count)
        self.shuttleRect = self.rotatedShuttle.get_rect(center = (self.shuttleX, self.shuttleY))
        DISPLAYSURF.blit(self.rotatedShuttle, self.shuttleRect)

    def returnRect(self):
        """Return the last blitted rect (used for collision checks)."""
        return self.shuttleRect
class Display():
    """HUD rendering: level, score and the analogue fuel gauge."""

    def level(self, currentLevel):
        """Draw the current level number."""
        self.currentLevel = currentLevel
        self.levelText = BASICFONT.render('Level: %s'%self.currentLevel, True, WHITE, BACKGROUNDCOLOR)
        DISPLAYSURF.blit(self.levelText, (400, 600))

    def score(self, currentScore):
        """Draw the current score."""
        self.currentScore = currentScore
        self.scoreText = BASICFONT.render('Score: %s'%self.currentScore, True, WHITE, BACKGROUNDCOLOR)
        DISPLAYSURF.blit(self.scoreText, (400, 640))

    def fuel(self, currentFuel):
        """Draw the fuel gauge; needle angle maps fuel 0..100 to +45..-45 degrees."""
        self.currentFuel = currentFuel
        self.gaugeAngle = - 0.9 * self.currentFuel + 45
        self.gauge1Rect = GAUGE1.get_rect(center = (640, 620))
        self.rotatedGauge2 = pygame.transform.rotate(GAUGE2, self.gaugeAngle)
        self.gauge2Rect = self.rotatedGauge2.get_rect(center = (640, 620))
        self.fuelText = BASICFONT.render('FUEL GAUGE', True, WHITE, BACKGROUNDCOLOR)
        DISPLAYSURF.blit(GAUGE1, self.gauge1Rect)
        DISPLAYSURF.blit(self.rotatedGauge2, self.gauge2Rect)
        DISPLAYSURF.blit(self.fuelText, (573, 655))
class Control():
    """Per-level game controller: owns the planet field, the shuttle and
    the HUD, handles input, and runs the frame loop until the shuttle
    leaves the screen (level complete)."""
    def __init__(self, level, score, fuel):
        self.done = False
        self.currentLevel = level
        self.currentScore = score
        self.currentFuel = fuel
        self.planet = Planet(self.currentLevel)
        self.shuttle = Shuttle()
        self.display = Display()
        self.x = SCREENSIZE[0] / 2
        self.y = SCREENSIZE[1] / 2
        self.angle = 0 # initial shuttle angle
        self.degree = 3# shuttle rotation per frame (degrees)
        self.speed = 5 # shuttle travel distance per frame
        self.count = 0 # number of frames the shuttle has been moving
        self.launchMode = False # whether the shuttle has been launched
        self.flagData = []
        self.fuelMode = 'blue'
        #self.startMenu()
        self.planetData = self.planet.update(False)
        # NOTE(review): mainloop() loops forever internally, so this outer
        # loop is only re-entered if mainloop ever returns - confirm intent.
        while not self.done:
            self.mainloop()
    def mainloop(self): # main frame loop
        self.FPSCLOCK = pygame.time.Clock()
        while True:
            DISPLAYSURF.fill(BLACK)
            # NOTE(review): called without the boolean passed in __init__
            # (planet.update(False)) - confirm Planet.update's default.
            self.planet.update() # draw/update the planets
            self.planet.flag(self.flagData)
            self.shuttle.update(self.x, self.y, self.angle, self.launchMode) # rebuild shuttle at the current angle
            self.shuttle.draw(self.launchMode , self.count) # move (if launched) and draw
            self.event() # process input events
            self.angle += self.degree # spin the shuttle
            if self.launchMode == True:
                self.count += 1 # advance the travel counter
                self.checkForCollide()
            # Fuel burn rate depends on the last planet type visited.
            if self.fuelMode == 'blue':
                self.currentFuel -= 0
            elif self.fuelMode == 'black':
                self.currentFuel -= 0.2
            elif self.fuelMode == 'red':
                self.currentFuel -= 0.35
            elif self.fuelMode == 'launch':
                self.currentFuel -= 0.45
            self.display.level(self.currentLevel)
            self.display.score(self.currentScore)
            self.display.fuel(self.currentFuel)
            pygame.display.update()
            self.FPSCLOCK.tick(FPS)
    def event(self):
        """Quit on close/ESC; any mouse press launches the shuttle."""
        for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                sys.exit()
            if event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    pygame.quit()
                    sys.exit()
            if event.type == MOUSEBUTTONDOWN:
                self.degree = 0 # stop rotating the shuttle
                self.launchMode = True # mark the shuttle as launched
                self.fuelMode = 'launch'
        if android:
            if android.check_pause():
                android.wait_for_resume()
    def checkForCollide(self): # collision detection
        for p in self.planetData:
            if p[0].colliderect(self.shuttle.returnRect()):
                self.launchMode = False # landed: back to rotating mode
                self.x = p[1] # snap shuttle x to the planet's x
                self.y = p[2] # snap shuttle y to the planet's y
                # Score/fuel rewards depend on the planet type.
                if p[3] == BLUEPLANET:
                    self.currentScore += p[4] / 9
                    self.fuelMode = 'blue'
                elif p[3] == BLACKPLANET:
                    self.currentScore += p[4] / 6
                    self.currentFuel += p[4] / 2
                    self.fuelMode = 'black'
                elif p[3] == REDPLANET:
                    self.currentScore += p[4] / 3
                    self.fuelMode = 'red'
                self.degree = 3
                self.count = 0
                self.flagData.append((p[1], p[2]))
                self.planetData.remove(p)
        if not DISPLAYRECT.colliderect(self.shuttle.returnRect()):
            # Shuttle flew off-screen: advance to the next level.
            # NOTE(review): starts a new Control recursively from inside the
            # old one's loop, so stack depth grows with each level.
            self.done = True
            self.currentLevel += 1
            self.fuelMode = 'blue'
            run = Control(self.currentLevel, self.currentScore, self.currentFuel )
def main():
    """Initialise pygame, load and scale every art asset into module-level
    globals, then start the game at level 1 / score 0 / fuel 100."""
    global BASICFONT, FPS, SCREENSIZE, BACKGROUNDCOLOR, BLACK, WHITE, DISPLAYSURF, DISPLAYRECT, BLUEPLANET, BLACKPLANET, REDPLANET, SHUTTLE, LAUNCHEDSHUTTLE, FLAG, GAUGE1, GAUGE2
    pygame.init()
    pygame.font.init()
    if android:
        android.init()
        # Map the Android back button onto ESC so Control.event() can quit.
        android.map_key(android.KEYCODE_BACK, pygame.K_ESCAPE)
    BASICFONT = pygame.font.SysFont("comicsansms", 20)
    FPS = 30
    SCREENSIZE = (1280, 720)
    BACKGROUNDCOLOR = (0, 0, 0)
    BLACK = (0, 0, 0)
    WHITE = (255, 255, 255)
    pygame.display.set_caption("galaxyExplorer")
    DISPLAYSURF = pygame.display.set_mode(SCREENSIZE)
    DISPLAYRECT = DISPLAYSURF.get_rect()
    BLUEPLANET = pygame.image.load("blueplanet.png")
    BLACKPLANET = pygame.image.load("blackplanet.png")
    REDPLANET = pygame.image.load("redplanet.png")
    SHUTTLE = pygame.image.load("shuttle.png")
    LAUNCHEDSHUTTLE = pygame.image.load("launchedshuttle.png")
    FLAG = pygame.image.load("flag.png")
    FLAG = pygame.transform.scale(FLAG, (30, 30))
    SHUTTLE = pygame.transform.scale(SHUTTLE, (50, 75))
    LAUNCHEDSHUTTLE = pygame.transform.scale(LAUNCHEDSHUTTLE, (55, 78))
    GAUGE1 = pygame.image.load("gauge1.png")
    GAUGE2 = pygame.image.load("gauge2.png")
    GAUGE1 = pygame.transform.scale(GAUGE1, (150, 150))
    GAUGE2 = pygame.transform.scale(GAUGE2, (150, 150))
    run = Control(1, 0, 100) # initial level 1, score 0, fuel 100
if __name__ == "__main__":
    main()
|
import logging.config
import os
import sys
import traceback
import log_setting
# Configure logging from the project-wide dict config and fetch this
# script's named logger.
logging.config.dictConfig(log_setting.LOGGING)
logger = logging.getLogger('alarm_combine')
def detail(msg):
    """Return *msg* prefixed with this script's file name, for log output."""
    return "File Name:" + os.path.basename(__file__) + "\t output:" + msg
def func():
    """Deliberately divide by zero to exercise the traceback logging below."""
    numerator, denominator = 3, 0
    return numerator / denominator
# Demo: trigger the ZeroDivisionError and log the full traceback through
# the configured 'alarm_combine' logger.
try:
    a = func()
except Exception as e:
    logger.error(traceback.format_exc())
|
from numpy import array
from helpers import draw_line
from lab0.polygon import Polygon
def draw_point(point: array):
    """Mark *point* with a small X: two crossed diagonal segments."""
    diagonals = (((-3, -3), (3, 3)), ((-3, 3), (3, -3)))
    for start, end in diagonals:
        draw_line(point + start, point + end)
def get_object(origin):
    """Build the lab test shape as a Polygon anchored at *origin*.

    Vertices are given as absolute offsets from the origin: an outer
    quad (0-3) plus two open vertex chains (4-7 and 8-11).
    """
    x, y = origin[0], origin[1]
    offsets = [
        (0, 0), (70, 0), (60, 20), (10, 20),      # outer quad
        (15, 20), (15, 75), (55, 75), (55, 20),   # first chain
        (20, 20), (20, 60), (50, 60), (50, 20),   # second chain
    ]
    edges = [array((x + dx, y + dy)) for dx, dy in offsets]
    sides = [
        (0, 1), (1, 2), (2, 3), (3, 0),
        (4, 5), (5, 6), (6, 7),
        (8, 9), (9, 10), (10, 11),
    ]
    return Polygon(edges, sides)
|
import hashlib
from Crypto.Hash import SHA
def findCollision(target=764686465868945, mask=0x3ffffffffffff, progress_every=50000):
    """Search for a truncated-SHA-1 preimage or internal collision.

    Hashes the decimal strings "0", "1", ... with SHA-1 and truncates each
    digest to the low 50 bits (``mask``).  Stops when either:

    * the truncated hash equals ``target`` -- prints and returns the input
      integer found, or
    * two different inputs produce the same truncated hash -- prints the
      collision details and returns None.

    Args:
        target: truncated hash value to search for (default is the original
            hard-coded value, so ``findCollision()`` behaves as before).
        mask: truncation mask applied to each digest.
        progress_every: how often to print a progress counter.
    """
    seen = set()
    i = 0
    while True:
        if i % progress_every == 0:
            print(i)
        # hashlib.sha1 replaces the unused Crypto.Hash SHA dependency (same
        # algorithm) and fixes the Python 3 str-vs-bytes mismatch in
        # SHA.new(str(i)).
        digest = hashlib.sha1(str(i).encode()).hexdigest()
        key = int(digest, 16) & mask
        seen.add(key)
        if key == target:
            print(str(i))
            return i
        # Without duplicates len(seen) == i + 1 here, so equality means an
        # earlier input already mapped to this truncated hash.
        if i == len(seen):
            print("Collision!")
            print("Key (Hash): " + str(key))
            print("Val 1: " + str(i))
            return None
        i += 1
findCollision()
|
import re
import glob, os
# Working directory (currently unused; kept from an earlier version that
# chdir'ed and listed the .srt files here).
cwd = os.getcwd()
# os.chdir(cwd)
# for file in glob.glob("*.srt"):
#     print(file)
def main():
    """Convert every *.srt file in the current directory to plain text.

    Strips cue numbers, "HH:MM:SS..." timing lines and blank lines, joins
    the remaining subtitle text into one line, and writes the result to
    "<name>.srt.txt".
    """
    for srt_name in glob.glob("*.srt"):
        # Bug fix: the old code did `lines = print(file.read())`, which
        # binds None (print returns None), and reused `file` for both the
        # name and the open handle.
        with open(srt_name, "r") as fh:
            lines = fh.readlines()
        text = ''
        for line in lines:
            # Keep only real subtitle text: drop bare cue numbers,
            # timestamp lines and empty lines.
            if re.search('^[0-9]+$', line) is None and re.search('^[0-9]{2}:[0-9]{2}:[0-9]{2}', line) is None and re.search('^$', line) is None:
                text += ' ' + line.rstrip('\n')
        text = text.lstrip()
        # Write the output file directly instead of hijacking sys.stdout
        # (the old code also crashed here: `file` was a closed file object,
        # not a string, and `sys` was never imported).
        with open(srt_name + ".txt", "w") as out:
            out.write(text + "\n")
main() |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
'''
Plain simulation plus dynamic programming times out badly.
Since all values are positive integers, the running distance from point 0
is strictly increasing, so binary search can be used.
S[i] is the accumulated sum from point 0 to point i, hence
S[j] - S[i] + l[i] is the sum over the segment i..j.
'''
N, M = map(int, input().split())
l = list(map(int, input().split()))
# Prefix sums: s[e] = l[0] + ... + l[e].
s = [0] * N
s[0] = l[0]
e = 1
while e < N:
    s[e] = s[e - 1] + l[e]
    e += 1
# print(s)
def cal(i, j):
    """Search for a sub-segment starting at i (ending at most at j) whose
    sum equals M, using binary search over the increasing prefix sums s[].

    Prints the 1-based range and returns 1 on an exact match; returns 0
    otherwise.  While the module-level `flag` is 0, also records in the
    globals xm/p/q the segments whose sum exceeds M by the least amount
    (the fallback answer printed by the driver below).
    """
    global xm, p, q
    ss = s[j] - s[i] + l[i]
    if ss == M:
        print('{}-{}'.format(i + 1, j + 1))
        return 1
    elif ss < M:
        # Even the whole suffix starting at i is too small: no match.
        return 0
    else:
        head, tail = i, j
        while head <= j and tail >= i and head <= tail:
            mid = (head + tail) // 2
            x = s[mid] - s[i] + l[i]
            if x == M:
                print('{}-{}'.format(i + 1, mid + 1))
                return 1
            elif x > M:
                tail = mid - 1
                # `flag` is only read here, so no global declaration is
                # needed; near-misses are collected only while no exact
                # match has been found anywhere yet.
                if flag == 0:
                    if x < xm:
                        xm = x
                        p, q = [i], [mid]
                    elif x == xm:
                        p.append(i)
                        q.append(mid)
            else:
                head = mid + 1
        return 0
# xm: smallest segment sum exceeding M seen so far; p/q hold the matching
# start/end indices for every segment achieving that minimum.
xm = float('inf')
p, q = [], []
flag = 0
for i in range(N):
    # flag stays 0 until some cal() call reports an exact match.
    flag = cal(i, N - 1) if flag == 0 else flag+cal(i, N-1)
if flag == 0:
    # No exact match anywhere: print the closest-from-above segments.
    for a, b in zip(p, q):
        print('{}-{}'.format(a + 1, b + 1))
|
import random
import numpy as np
from functions import *
from warehouse import warehouse
import time
# Sample order lists kept from earlier experiments; the commented variants
# document cases used while debugging the solver.
#small_order_list = [[104,104],[166,266,625],[920,1182,999],[1182,1319]]
small_order_list = [[104,1182,357,206,453,1123]]
#small_order_list = [[104],[104]]
#small_order_list = [[104],[106],[108],[110],[112],[210],[110,110,110,110],[112,110,110,110],[210,692,694,939],[1190,1344]]
#big_order_list = [[[104,1182,357,206,453,1123]]]
#big_temp = [[[206,357]]] # works in under 1 sec
big_temp = [[[202,204]]] # have on paper
#big_temp = [[[839], [204, 178], [357], [106, 176]]] ## 8020
#big_temp = [[[839],[357], [106]]] # gives 103 sol too many
#big_temp = [[[839],[204],[357], [106]]] # 675 solutions
#big_temp = [[[1092, 206, 1190, 700], [202, 1188, 104, 1186, 369, 1190], [1188], [1084], [847], [104, 600, 451], [210], [235, 1190, 598, 104, 453, 1182]]]
# Pickup ids on the "popular" (left) side of the warehouse, used for the
# 80/20 order distribution below.
left_pickups = [104,106,108,110,112,
                202,204,206,208,210,
                349,351,353,355,357,
                447,449,451,453,455,
                594,596,598,600,602,
                692,694,696,698,700,
                839,841,843,845,847,
                937,939,941,943,945,
                1084,1086,1088,1090,1092,
                1182,1184,1186,1188,1190]
# Every pickup node in the A* graph that is not on the left is "right".
_, temp, _ = create_Astar_graph(warehouse)
right_pickups = [x.id for x in temp if x.id not in left_pickups]
all_pickups = left_pickups + right_pickups
def simulate_8020_orders(num_orders, average_item_per_order=1.0):
    """Simulate orders with an 80/20 item-popularity split.

    Each order contains int(Exponential(scale)) + 1 items; each item is a
    popular (left-side) pickup with probability 0.8, otherwise a
    right-side pickup.

    Bug fix: simulate_big_order_list() and control_check_order_sim() call
    this with a second argument, which previously raised TypeError.  The
    new parameter defaults to 1.0, matching the old np.random.exponential()
    call, so existing one-argument callers are unaffected.

    Args:
        num_orders: number of orders to generate.
        average_item_per_order: scale of the exponential order-size draw.

    Returns:
        A list of orders, each a list of pickup ids.
    """
    orders = []
    for _ in range(num_orders):
        num_items = int(np.random.exponential(average_item_per_order)) + 1
        current_order = []
        for _ in range(num_items):
            # 80% of picks come from the popular (left) aisle set.
            if random.random() > 0.2:
                current_order.append(left_pickups[random.randint(0, len(left_pickups)-1)])
            else:
                current_order.append(right_pickups[random.randint(0, len(right_pickups)-1)])
        orders.append(current_order)
    return orders
def simulate_uniform_orders(num_orders, average_item_per_order=1.0):
    """Simulate orders whose items are drawn uniformly from all pickups.

    Bug fix: simulate_big_order_list() and control_check_order_sim() call
    this with a second argument, which previously raised TypeError.  The
    new parameter defaults to 1.0, matching the old np.random.exponential()
    call, so existing one-argument callers are unaffected.

    Args:
        num_orders: number of orders to generate.
        average_item_per_order: scale of the exponential order-size draw.

    Returns:
        A list of orders, each a list of pickup ids.
    """
    orders = []
    for _ in range(num_orders):
        num_items = int(np.random.exponential(average_item_per_order)) + 1
        current_order = []
        for _ in range(num_items):
            current_order.append(all_pickups[random.randint(0, len(all_pickups)-1)])
        orders.append(current_order)
    return orders
def simulate_big_order_list(uniform, num_simulations, num_orders, average_item_per_order):
    """Run several order simulations and collect the resulting order lists.

    `uniform` selects the item distribution: uniform over all pickups, or
    the 80/20 popularity split.
    """
    simulate = simulate_uniform_orders if uniform else simulate_8020_orders
    return [simulate(num_orders, average_item_per_order)
            for _ in range(num_simulations)]
def control_check_order_sim():
    """Sanity-check the simulator: print a sample batch, its average order
    size, and the share of items drawn from the popular (left) pickups."""
    sample = simulate_uniform_orders(20, 3)
    print(sample)
    total_size = 0
    for order in sample:
        total_size += len(order)
    print("average order size is: %.2f" % (total_size / len(sample)))
    popular = 0
    total_items = 0
    for order in sample:
        for item in order:
            total_items += 1
            if item in left_pickups:
                popular += 1
    print("%d %% of the items belong to popular ones" % ((popular / total_items) * 100))
|
class Racer:
    """A race participant: identity, assigned track, and recorded laps."""

    def __init__(self, name, track_number):
        # Identity and track assignment supplied by the caller.
        self.name = name
        self.track_number = track_number
        # Lap times get appended here as the race progresses.
        self.lap_times = []
|
# -*- coding: utf-8 -*-
"""
pdverify.py: verify TDDA constraints for feather dataset
"""
from __future__ import division
from __future__ import print_function
import os
import sys
import pandas as pd
import numpy as np
# Command-line usage text; typos fixed ("constaining" -> "containing",
# and the garbled final sentence about the .tdda fallback).
USAGE = """Usage:
    pdverify df.feather [constraints.tdda]
where df.feather is a feather file containing a dataframe.
If constraints.tdda is provided, this is a JSON .tdda file
containing constraints. If no constraints file is provided,
a file with the same path as the feather file, but with a .tdda
extension, will be tried.
"""
# pmmif is optional; when present, its feather wrapper (which carries
# extra metadata) is preferred by load_df().
try:
    from pmmif import featherpmm
except ImportError:
    featherpmm = None
# feather itself is required: fail loudly with an install hint.
try:
    import feather as feather
except ImportError:
    # Message fix: "required" -> "requires" (user-facing error text).
    print('pdverify requires feather to be available.\n'
          'Use:\n pip install feather\nto add capability.\n',
          file=sys.stderr)
    raise
from tdda.constraints.base import DatasetConstraints
from tdda.constraints.pdconstraints import PandasConstraintVerifier, verify_df
def verify_feather_df(df_path, constraints_path, **kwargs):
    """Load the feather dataframe and print its TDDA verification report."""
    frame = load_df(df_path)
    report = verify_df(frame, constraints_path, **kwargs)
    print(report)
def load_df(path):
    """Read a feather file into a DataFrame, preferring pmmif when available."""
    if featherpmm is None:
        return feather.read_dataframe(path)
    return featherpmm.read_dataframe(path).df
def get_params(args):
    """Parse pdverify command-line arguments.

    Flags: -a/--all, -f/--fields, -c/--constraints select the report
    detail; -1/--oneperline prints one failure per line.  Positionals are
    the dataframe path followed by an optional constraints path.

    Returns a params dict; calls usage_error() (which exits) on anything
    unrecognised or on extra positionals.
    """
    params = {
        'df_path': None,
        'constraints_path': None,
        'report': 'all',
        'one_per_line': False,
    }
    for a in args:
        if a.startswith('-'):
            if a in ('-a', '--all'):
                params['report'] = 'all'
            elif a in ('-f', '--fields'):
                params['report'] = 'fields'
            elif a in ('-c', '--constraints'):
                params['report'] = 'constraints'
            # Bug fix: this test was ('1', 'oneperline'), which can never
            # match inside the startswith('-') branch above.
            elif a in ('-1', '--oneperline'):
                params['one_per_line'] = True
            else:
                usage_error()
        elif params['df_path'] is None:
            params['df_path'] = a
        elif params['constraints_path'] is None:
            params['constraints_path'] = a
        else:
            usage_error()
    return params
def usage_error():
    """Print the usage text to stderr and exit with status 1."""
    print(USAGE, file=sys.stderr)
    sys.exit(1)
if __name__ == '__main__':
    params = get_params(sys.argv[1:])
    # NOTE(review): USAGE says the constraints file is optional (with a
    # same-path .tdda fallback), but this check makes it mandatory and the
    # fallback is never attempted - confirm intended behaviour.
    if not(params['constraints_path']):
        print(USAGE, file=sys.stderr)
        sys.exit(1)
    verify_feather_df(**params)
|
from PyQt5 import QtCore, QtGui, QtWidgets
import sys
import sqlite3
i=-1
conn = sqlite3.connect('FeedB.db')
cur = conn.cursor()
cur.execute("""CREATE TABLE IF NOT EXISTS FeedTable
(
username varchar(15),
Faculty varchar(15),
Feed TEXT,
Review varchar(15),
PRIMARY KEY(username, Faculty)
)""")
cur.close()
conn.commit()
conn.close()
class Ui_MainWindow(object):
    """Qt UI for the feedback analyzer: a Feedback tab that stores
    RNN-classified feedback in SQLite, and a Table tab for browsing it."""

    def storeFeedback(self, user, text, fac):
        """Classify *text* with the RNN and insert one feedback row for
        (user, fac); a duplicate submission is reported on the status label."""
        from rnn import fbsrnn
        conn = sqlite3.connect('FeedB.db')
        cur = conn.cursor()
        try:
            # Run the sentiment model once and reuse the result (the old
            # code invoked fbsrnn twice per submission).
            review = fbsrnn(text)
            # Parameterized insert: the previous %-formatted SQL was open
            # to injection through the feedback text.
            cur.execute('insert into FeedTable values (?, ?, ?, ?)',
                        (user, str(fac), text, review))
            self.status.setText('Feedback is Submitted')
            print(review)
        except sqlite3.IntegrityError:
            # (username, Faculty) is the primary key, so re-submitting for
            # the same faculty violates it.
            self.status.setText('You Have Already Given Feedback to The Faculty')
        finally:
            cur.close()
            conn.commit()
            conn.close()

    def nextDB(self, fac):
        """Show the next stored feedback row for *fac* (advances the
        module-level cursor index i)."""
        global i
        conn = sqlite3.connect('FeedB.db')
        cur = conn.cursor()
        # Parameterized query (was %-formatted / injectable).
        cur.execute('select username, Feed, Review from FeedTable where Faculty = ?', (str(fac),))
        res = cur.fetchall()
        if i < len(res)-1:
            i += 1
            self.feedgiver.setText(res[i][0])
            self.FeedText.setText(res[i][1])
            self.reviewans.setText(res[i][2])
        cur.close()
        conn.close()

    def prevDB(self, fac):
        """Show the previous stored feedback row for *fac*."""
        global i
        conn = sqlite3.connect('FeedB.db')
        cur = conn.cursor()
        cur.execute('select username, Feed, Review from FeedTable where Faculty = ?', (str(fac),))
        res = cur.fetchall()
        if i > 0:
            i -= 1
            self.feedgiver.setText(res[i][0])
            self.FeedText.setText(res[i][1])
            self.reviewans.setText(res[i][2])
        cur.close()
        conn.close()

    def delDB(self, fac, usr):
        """Delete the feedback row for (usr, fac), then show the previous row."""
        conn = sqlite3.connect('FeedB.db')
        cur = conn.cursor()
        cur.execute('delete from FeedTable where Faculty = ? and username = ?', (str(fac), usr))
        cur.close()
        conn.commit()
        conn.close()
        self.prevDB(str(fac))

    def setupUi(self, MainWindow):
        """Build all widgets: Homepage, Feedback and Table tabs."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(491, 265)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.tabWidget = QtWidgets.QTabWidget(self.centralwidget)
        self.tabWidget.setGeometry(QtCore.QRect(0, 0, 491, 261))
        self.tabWidget.setObjectName("tabWidget")
        # --- Homepage tab: static title text ---
        self.Homepage = QtWidgets.QWidget()
        self.Homepage.setObjectName("Homepage")
        self.textBrowser = QtWidgets.QTextBrowser(self.Homepage)
        self.textBrowser.setGeometry(QtCore.QRect(10, 60, 450, 120))
        self.textBrowser.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.textBrowser.setObjectName("textBrowser")
        self.tabWidget.addTab(self.Homepage, "")
        # --- Feedback tab: username, faculty picker, text box, submit ---
        self.Feedback = QtWidgets.QWidget()
        self.Feedback.setObjectName("Feedback")
        self.Userid = QtWidgets.QLineEdit(self.Feedback)
        self.Userid.setGeometry(QtCore.QRect(10, 40, 131, 20))
        self.Userid.setStyleSheet("background-color: rgb(255, 255, 255);")
        self.Userid.setInputMask("")
        self.Userid.setObjectName("Userid")
        self.textEdit = QtWidgets.QTextEdit(self.Feedback)
        self.textEdit.setGeometry(QtCore.QRect(160, 10, 311, 201))
        self.textEdit.setObjectName("textEdit")
        self.status = QtWidgets.QLabel(self.Feedback)
        self.status.setGeometry(QtCore.QRect(170,210,320,20))
        self.status.setObjectName("Status")
        self.label_4 = QtWidgets.QLabel(self.Feedback)
        self.label_4.setGeometry(QtCore.QRect(10, 20, 64, 13))
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.label_4.setFont(font)
        self.label_4.setObjectName("label_4")
        self.Faculty = QtWidgets.QComboBox(self.Feedback)
        self.Faculty.setGeometry(QtCore.QRect(10, 100, 131, 22))
        self.Faculty.setStyleSheet("background-color: rgb(51, 51, 51);\n"
                                   "color: rgb(255, 255, 255);\n"
                                   "border-color: rgb(0, 0, 0);")
        self.Faculty.setObjectName("Faculty")
        self.Faculty.addItem("")
        self.Faculty.addItem("")
        self.Faculty.addItem("")
        self.Faculty.addItem("")
        self.FeedB = QtWidgets.QPushButton(self.Feedback)
        self.FeedB.setGeometry(QtCore.QRect(10, 180, 131, 23))
        self.FeedB.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        self.FeedB.setStyleSheet("background-color: rgb(51, 51, 51);\n"
                                 "color: rgb(255, 255, 255);\n"
                                 "border-color: rgb(0, 0, 0);")
        self.FeedB.setDefault(False)
        self.FeedB.setFlat(False)
        self.FeedB.setObjectName("FeedB")
        self.FeedB.clicked.connect(lambda: self.storeFeedback(self.Userid.text(), self.textEdit.toPlainText(), self.Faculty.currentText()))
        self.label_5 = QtWidgets.QLabel(self.Feedback)
        self.label_5.setGeometry(QtCore.QRect(10, 80, 91, 16))
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.label_5.setFont(font)
        self.label_5.setObjectName("label_5")
        self.tabWidget.addTab(self.Feedback, "")
        # --- Table tab: browse/delete stored feedback per faculty ---
        self.Table = QtWidgets.QWidget()
        self.Table.setObjectName("Table")
        self.FacultyDB = QtWidgets.QComboBox(self.Table)
        self.FacultyDB.setGeometry(QtCore.QRect(10, 70, 131, 22))
        self.FacultyDB.setStyleSheet("background-color: rgb(51, 51, 51);\n"
                                     "color: rgb(255, 255, 255);\n"
                                     "border-color: rgb(0, 0, 0);")
        self.FacultyDB.setObjectName("FacultyDB")
        self.FacultyDB.addItem("")
        self.FacultyDB.addItem("")
        self.FacultyDB.addItem("")
        self.FacultyDB.addItem("")
        self.review = QtWidgets.QLabel(self.Table)
        self.review.setGeometry(QtCore.QRect(10, 110, 161, 22))
        self.review.setObjectName("Review")
        self.reviewans = QtWidgets.QLabel(self.Table)
        self.reviewans.setGeometry(QtCore.QRect(70, 110, 131, 22))
        self.reviewans.setObjectName("Reviewans")
        self.label_7 = QtWidgets.QLabel(self.Table)
        self.label_7.setGeometry(QtCore.QRect(10, 50, 91, 16))
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.label_7.setFont(font)
        self.label_7.setObjectName("label_7")
        self.label_8 = QtWidgets.QLabel(self.Table)
        self.label_8.setGeometry(QtCore.QRect(160, 10, 101, 21))
        self.label_8.setObjectName("label_8")
        self.feedgiver = QtWidgets.QLabel(self.Table)
        self.feedgiver.setGeometry(QtCore.QRect(280, 10, 171, 16))
        self.feedgiver.setObjectName("feedgiver")
        self.FeedText = QtWidgets.QTextBrowser(self.Table)
        self.FeedText.setGeometry(QtCore.QRect(160, 40, 311, 151))
        self.FeedText.setObjectName("FeedText")
        self.prevB = QtWidgets.QPushButton(self.Table)
        self.prevB.setGeometry(QtCore.QRect(170, 200, 75, 23))
        self.prevB.setObjectName("prevB")
        self.prevB.clicked.connect(lambda: self.prevDB(self.FacultyDB.currentText()))
        self.delB = QtWidgets.QPushButton(self.Table)
        self.delB.setGeometry(QtCore.QRect(390, 200, 75, 23))
        self.delB.setObjectName("delB")
        self.delB.clicked.connect(lambda: self.delDB(self.FacultyDB.currentText(), self.feedgiver.text()))
        self.nextB = QtWidgets.QPushButton(self.Table)
        self.nextB.setGeometry(QtCore.QRect(280, 200, 75, 23))
        self.nextB.setObjectName("nextB")
        self.nextB.clicked.connect(lambda: self.nextDB(self.FacultyDB.currentText()))
        self.tabWidget.addTab(self.Table, "")
        MainWindow.setCentralWidget(self.centralwidget)
        self.retranslateUi(MainWindow)
        self.tabWidget.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Assign all user-visible strings (Qt Designer convention)."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "Feedback Analyzing System"))
        self.textBrowser.setHtml(_translate("MainWindow", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:28pt; font-weight:600;\">Feedback Analyzing</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:28pt; font-weight:600;\">System</span></p></body></html>"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.Homepage), _translate("MainWindow", "Homepage"))
        self.label_4.setText(_translate("MainWindow", "Username :"))
        self.Faculty.setItemText(0, _translate("MainWindow", "Fac1"))
        self.Faculty.setItemText(1, _translate("MainWindow", "Fac2"))
        self.Faculty.setItemText(2, _translate("MainWindow", "Fac3"))
        self.Faculty.setItemText(3, _translate("MainWindow", "Fac4"))
        self.FeedB.setText(_translate("MainWindow", "Submit Feedback"))
        self.label_5.setText(_translate("MainWindow", "Select Faculty:"))
        self.status.setText(_translate("Ui_MainWindow","Status"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.Feedback), _translate("MainWindow", "Feedback"))
        self.prevB.setText(_translate("MainWindow", "Prev"))
        self.nextB.setText(_translate("MainWindow", "Next"))
        self.delB.setText(_translate("MainWindow", "Delete"))
        self.FacultyDB.setItemText(0, _translate("MainWindow", "Fac1"))
        self.FacultyDB.setItemText(1, _translate("MainWindow", "Fac2"))
        self.FacultyDB.setItemText(2, _translate("MainWindow", "Fac3"))
        self.FacultyDB.setItemText(3, _translate("MainWindow", "Fac4"))
        self.review.setText(_translate("MainWindow", "Review: "))
        self.reviewans.setText(_translate("MainWindow", "Answer "))
        self.label_7.setText(_translate("MainWindow", "Select Faculty:"))
        self.label_8.setText(_translate("MainWindow", "Feedback Given By: "))
        self.feedgiver.setText(_translate("MainWindow", "Username"))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.Table), _translate("MainWindow", "Table"))
if __name__ == "__main__":
    # Standard Qt bootstrap: create the application, attach the generated
    # UI to a main window and show it.
    app = QtWidgets.QApplication(sys.argv)
    FrameMain = QtWidgets.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(FrameMain)
    FrameMain.show()
sys.exit(app.exec_()) |
import logging.config
import os
from mycrypt import encrypt, decrypt
from mydatabase import conn, query_conn
from coinpayments import CoinPaymentsAPI
import time
import datetime
from blockchain import blockexplorer
from icq.bot import ICQBot
from icq.filter import MessageFilter
from icq.handler import (
MessageHandler, UserAddedToBuddyListHandler, FeedbackCommandHandler, UnknownCommandHandler, CommandHandler
)
# Path to a logging.ini next to this script (loaded elsewhere, if at all).
cd = os.path.dirname(os.path.abspath(__file__))
aaa = os.path.join(cd, "logging.ini")
NAME = "PLUG_BOT"
VERSION = "0.0.19"
# TOKEN = "001.3907104052.2241892471:747224570"
# SECURITY NOTE(review): bot token hard-coded in source - move to config/env.
TOKEN = "001.1440389349.0257997562:747582474"
OWNER = "000000000"
LOG_FILES = True       # append events to daily text log files
LOG_DATABASE = True    # mirror events into the `chat` table
LOG_PATTERNS = {}      # id -> {'content', 'params'}, loaded by log_pattern()
FLAG_USER_BAN = False  # set per-request by process_user()
TEXT_RESPONSE_LIMIT = 10000
FORMAT_INDENT = 4
# Per-user conversation state, all keyed by the sender's aimId (uin).
START_FLAG = {}
STOP_FLAG = {}
# Product prices in GBP.
PP_PRICE = 5
DEBIT_PRICE = 30
CREDIT_PRICE = 50
DEAD_FULLZ_PRICE = 3
DEPOSIT_ADDRESS = {}
DEPOSIT_AMOUNT = {}
GBP_PAY_AMOUNT = {}
BIN_TYPE = {}
FULLZ_FLAG = {}
DEAD_FULLZ_FLAG = {}
PP_FLAG = {}
BUY_FLAG = {}
FULLZ_CONFIRM = {}
DEAD_FULLZ_CONFIRM = {}
PP_CONFIRM = {}
FULLZ_NAME = {}
FULLZ_NUM = {}
FULLZ_TYPE = {}
DEAD_FULLZ_NAME = {}
DEAD_FULLZ_NUM = {}
DEAD_FULLZ_TYPE = {}
PP_NUM = {}
bins_list = []
pp_amount = {}
fullz_list = {}
dead_fullz_list = {}
# log patterns
def log_pattern():
    """(Re)load canned message templates from the `log` table into the
    LOG_PATTERNS global: id -> {'content': text, 'params': params}."""
    global LOG_PATTERNS
    LOG_PATTERNS = {}
    try:
        query_conn.execute("select id, content, params from log where 1 ")
        l_list = query_conn.fetchall()
        for ptn in l_list:
            # Stored rows escape newlines as a literal backslash-n; unescape.
            LOG_PATTERNS[ptn[0]] = {'content': '\n'.join(ptn[1].split('\\n')), 'params': ptn[2]}
    except Exception as e:
        # Bug fix: was a bare `except:` (also swallowed SystemExit /
        # KeyboardInterrupt and hid the actual error).
        print('log pattern error occurs.')
        print(e)
        return
# chat & log
def log_output(index, command, aimId, flag, result, chats=[], insert_command=True):
    """Append one log record to today's log file and/or the `chat` table.

    `chats` is a list of [log_id, params] pairs to mirror into the table.
    NOTE(review): `chats=[]` is a mutable default argument (not mutated
    here, but fragile), and the string-formatted SQL below is injectable
    if aimId/command ever contain quotes - parameterize once the
    mydatabase driver's paramstyle is confirmed.
    """
    global LOG_FILES, LOG_DATABASE
    time_now = int((datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds())
    try:
        if LOG_FILES == True:
            sample_text = "index=>%s,time=>%s,command=>%s,aimId=>%s,flag=>%s,result=>%s@#@"
            # NOTE(review): the next assignment is dead - immediately
            # overwritten by the "logs/" path on the following line.
            file_name = "../../var/www/html/logs/" + datetime.datetime.now().strftime("%Y-%m-%d") + ".txt"
            file_name = "logs/" + datetime.datetime.now().strftime("%Y-%m-%d") + ".txt"
            result_text = sample_text % (index, datetime.datetime.now().strftime("%H:%M:%S"), command, aimId, flag, result)
            with open(file_name, "a") as log_file:
                log_file.write(result_text)
        if LOG_DATABASE == True:
            if insert_command == True:
                query = "insert into `chat` (`uin`, `time`, `log_id`, `log_params`, `command`) values ('%s', '%s', '0', '', '/%s')" % (aimId, time_now, command)
                query_conn.execute(query)
                conn.commit()
            if len(chats) > 0:
                for chat in chats:
                    if len(chat) > 1:
                        query = 'insert into `chat` (`uin`, `time`, `log_id`, `log_params`, `command`) values ("%s", "%s", "%s", "%s", "")' % (aimId, time_now, chat[0], chat[1])
                        query_conn.execute(query)
                        conn.commit()
    except Exception as e:
        print('log output error occurs.')
        print(e)
def get_coin_info():
    """Fetch a fresh BTC deposit address and the GBP->BTC rate from
    CoinPayments.

    Returns:
        (coin_address, coin_rate) on success.

    Raises:
        Propagates any API/network error after logging it.  Bug fix: the
        old version fell through to `return coin_address, coin_rate` after
        the except, raising UnboundLocalError and masking the real failure.
    """
    try:
        api = CoinPaymentsAPI(public_key='d5c643530e2c861fd66c1ba935eff906af6c09c653e10ec0a3e6f8d9090ca2be',
                              private_key='51C995de635f484210ada1Cc9b6cC300313309f52eA1279BD44b2fac1b6fc1F9')
        coin_address = api.get_callback_address(currency='BTC')['result']['address']
        coin_rate = api.rates()['result']['GBP']['rate_btc']
    except Exception:
        print('get coin info error occurs.')
        raise
    return coin_address, coin_rate
# calcualte payment
def calc_payment(source, num=0, pay_amount=-1.0):
    """Compute BTC payment details for *source*'s pending purchase.

    When pay_amount < 0 it is derived from the per-user purchase flags
    (PP / fullz / dead fullz) and configured GBP prices; otherwise the
    given GBP amount is used directly.

    Returns (deposit_address, pay_amount_btc, fee_btc, total_btc, gbp_amount),
    or None (implicit) if anything raises - see the NOTE at the bottom.
    """
    global FULLZ_NAME, FULLZ_NUM, FULLZ_TYPE, DEAD_FULLZ_NAME, DEAD_FULLZ_NUM, DEAD_FULLZ_TYPE, PP_NUM, PP_FLAG, FULLZ_FLAG, DEAD_FULLZ_FLAG, PP_PRICE, CREDIT_PRICE, DEBIT_PRICE, DEAD_FULLZ_PRICE
    try:
        if pay_amount < 0.0:
            if PP_FLAG[source]:
                pay_amount = PP_PRICE * int(num)
            elif FULLZ_FLAG[source]:
                if FULLZ_TYPE[source] == "credit":
                    pay_amount = CREDIT_PRICE * int(FULLZ_NUM[source])
                elif FULLZ_TYPE[source] == "debit":
                    pay_amount = DEBIT_PRICE * int(FULLZ_NUM[source])
            elif DEAD_FULLZ_FLAG[source]:
                pay_amount = DEAD_FULLZ_PRICE * int(DEAD_FULLZ_NUM[source])
        gbp_pay_amount = pay_amount
        deposit_address, coin_rate = get_coin_info()
        # Rate padded up by 1e-5 after rounding so we never undercharge.
        coin_rate = float("{:.5f}".format(float(coin_rate))) + 0.00001
        pay_amount = "{:.5f}".format(float(coin_rate) * pay_amount)
        # 0.5% processing fee, also padded upward by 1e-5.
        fee = "{:.5f}".format(float("{:.5f}".format(float(pay_amount) * 0.5 / 100)) + 0.00001)
        total_amount = "{:.5f}".format(float(pay_amount) + float(fee))
        return deposit_address, pay_amount, fee, total_amount, gbp_pay_amount
    except:
        # NOTE(review): bare except returns None implicitly, so callers
        # that unpack the 5-tuple will then raise TypeError.
        print('calcualte payment error occurs.')
# insert or get user
def process_user(source):
    """Upsert the `user` row for uin *source*: refresh chat_time and set
    the FLAG_USER_BAN global from the stored `ban` column.

    NOTE(review): string-formatted SQL throughout - injectable unless
    `source` is guaranteed to be a numeric uin; parameterize once the
    mydatabase driver's paramstyle is confirmed.
    """
    global FLAG_USER_BAN
    try:
        time_now = int((datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds())
        # select the most recent user row
        query = "select id, uin, buy_time, ban, chat_time from `user` where uin=%s order by id desc" % source
        query_conn.execute(query)
        row = query_conn.fetchone()
        if row is not None and len(row) > 0:
            if row[3] == 'Y':
                FLAG_USER_BAN = True
            else:
                FLAG_USER_BAN = False
            # existing user: refresh the last-chat timestamp
            query = "update `user` set chat_time='%s' where id=%s" % (time_now, row[0])
            query_conn.execute(query)
            conn.commit()
            update_count = query_conn.rowcount
            log_output('174', 'user', source, 'success', ('%d update user' % (update_count)), [], False)
        else:
            FLAG_USER_BAN = False
            # first contact: create the user row, unbanned
            query = "insert into `user` (`uin`, `buy_time`, `chat_time`, `ban`) values ('%s', '%s', '%s', 'N')" % (source, time_now, time_now)
            query_conn.execute(query)
            conn.commit()
            insert_count = query_conn.rowcount
            log_output('173', 'user', source, 'success', ('%d insert user' % (insert_count)), [], False)
    except Exception as e:
        print('user process error occurs.')
        print(e)
# insert order
def insert_order(source):
    """Record a new pending order for *source*: collects the product rows
    currently locked to the user and inserts one `order` row referencing
    them plus the latest chat id.

    NOTE(review): string-formatted SQL - injectable; parameterize the
    value fields once the driver's paramstyle is confirmed (the table
    name comes from a fixed whitelist below, so it is safe as-is).
    """
    global BIN_TYPE, DEPOSIT_AMOUNT, GBP_PAY_AMOUNT
    try:
        time_now = int((datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds())
        # latest `chat` row id for this user
        query = "select id from `chat` where uin=%s order by id desc" % source
        query_conn.execute(query)
        row = query_conn.fetchone()
        current_chat_id = row[0]
        # product type -> table whitelist
        if BIN_TYPE[source] == 'Fullz':
            table_name = 'fullz'
        elif BIN_TYPE[source] == 'dead_fullz':
            table_name = 'dead_fullz'
        elif BIN_TYPE[source] == 'PP':
            table_name = 'pp'
        else:
            return
        # collect the product rows the user has locked
        query = "select id from %s where lock_customer=%s and status='lock'" % (table_name, source)
        query_conn.execute(query)
        f_list = query_conn.fetchall()
        temp_ids = ''
        for f in f_list:
            temp_ids += str(f[0]) + ','
        query = "insert into `order` (`uin`, `time`, `product_type`, `product_id`, `btc`, `gbp`, `success`, `ongoing`, `chat_id`, `canceled`) values ('%s', '%s', '%s', '%s', '%s', '%s', 'N', 'Y', '%s', 'N')" % (source, time_now, table_name, temp_ids, DEPOSIT_AMOUNT[source], GBP_PAY_AMOUNT[source], current_chat_id)
        query_conn.execute(query)
        conn.commit()
    except Exception as e:
        print('insert order error occurs.')
        print(e)
def start_cb(bot, event):
    """Handle /start: register/refresh the user and log pattern #37."""
    sender = event.data["source"]["aimId"]
    process_user(sender)  # upsert the user row / refresh chat time
    log_output('170', 'start', sender, 'success', '', [[37, '']])
def stop_cb(bot, event):
    """Handle /stop: register/refresh the user and log pattern #37."""
    sender = event.data["source"]["aimId"]
    process_user(sender)  # upsert the user row / refresh chat time
    log_output('171', 'stop', sender, 'success', '', [[37, '']])
def help_cb(bot, event):
    """Handle /help: log the request and reply with template #38."""
    sender = event.data["source"]["aimId"]
    process_user(sender)  # upsert the user row / refresh chat time
    log_output('172', 'help', sender, 'success', '', [[38, '']])
    bot.send_im(target=sender, message=LOG_PATTERNS[38]['content'])  # 38
def support_uin_cb(bot, event):
    """Handle the support command: reply with the support-contact text (#53)."""
    sender = event.data["source"]["aimId"]
    process_user(sender)  # upsert the user row / refresh chat time
    bot.send_im(target=sender, message=LOG_PATTERNS[53]['content'])  # 53
def command_cb(bot, event):
    """Fallback handler for slash-commands: enforces bans and pending
    purchase confirmations, otherwise sends the generic command reply (#2)."""
    global START_FLAG, FULLZ_CONFIRM, DEAD_FULLZ_CONFIRM, PP_CONFIRM, LOG_PATTERNS, FLAG_USER_BAN
    source = event.data["source"]["aimId"]
    START_FLAG[source] = True
    try:
        # upsert the user row; also sets FLAG_USER_BAN
        process_user(source)
        if FLAG_USER_BAN == True:
            log_output('181', 'command', source, 'fail', 'ban', [[43, '']])
            message = LOG_PATTERNS[43]['content'] #43
            bot.send_im(target=source, message=message)
            return
        # A pending /confirm blocks every other command.
        if (source in FULLZ_CONFIRM and FULLZ_CONFIRM[source]) or (source in DEAD_FULLZ_CONFIRM and DEAD_FULLZ_CONFIRM[source]) or (source in PP_CONFIRM and PP_CONFIRM[source]):
            log_output('168', 'command', source, 'fail', 'not CONFIRM', [[1, '']])
            bot.send_im(target=source, message=LOG_PATTERNS[1]['content']) #1
            return
        log_output('169', 'command', source, 'success', 'command', [[2, '']])
        bot.send_im(target=source, message=LOG_PATTERNS[2]['content']) #2
    except Exception as e:
        # Bug fix: was a bare `except:` which also swallowed SystemExit /
        # KeyboardInterrupt and hid the error details.
        print('command cb error occurs.')
        print(e)
def chat_cb(bot, event):
    """Default handler for plain (non-command) chat messages: initialises
    per-user state on first contact, enforces bans/pending confirmations,
    and otherwise replies with the generic chat template (#4)."""
    global bins_list, BUY_FLAG, FULLZ_CONFIRM, DEAD_FULLZ_CONFIRM, PP_CONFIRM, FULLZ_FLAG, DEAD_FULLZ_FLAG, PP_FLAG, fullz_list, dead_fullz_list, pp_amount, DEPOSIT_ADDRESS, DEPOSIT_AMOUNT, BIN_TYPE, PP_NUM, FLAG_USER_BAN
    source = event.data["source"]["aimId"]
    # First contact from this uin: initialise every per-user state dict.
    if source not in START_FLAG:
        START_FLAG[source] = True
        BUY_FLAG[source] = False
        FULLZ_CONFIRM[source] = False
        DEAD_FULLZ_CONFIRM[source] = False
        PP_CONFIRM[source] = False
        FULLZ_FLAG[source] = False
        DEAD_FULLZ_FLAG[source] = False
        PP_FLAG[source] = False
        fullz_list[source] = []
        dead_fullz_list[source] = []
        pp_amount[source] = ""
        # NOTE(review): bins_list is module-global (not per-user) and is
        # cleared whenever any new user appears - confirm this is intended.
        bins_list = []
        DEPOSIT_ADDRESS[source] = ""
        DEPOSIT_AMOUNT[source] = ""
        BIN_TYPE[source] = ""
        PP_NUM[source] = ""
    # upsert the user row; also sets FLAG_USER_BAN
    process_user(source)
    command = event.data["message"].strip()
    if FLAG_USER_BAN == True:
        log_output('182', command, source, 'fail', 'ban', [[43, '']])
        message = LOG_PATTERNS[43]['content'] #43
        bot.send_im(target=source, message=message)
        return
    # A pending /confirm blocks plain chat too.
    if (source in FULLZ_CONFIRM and FULLZ_CONFIRM[source]) or (source in DEAD_FULLZ_CONFIRM and DEAD_FULLZ_CONFIRM[source]) or (source in PP_CONFIRM and PP_CONFIRM[source]):
        log_output('165', command, source, 'fail', 'not CONFIRM', [[1, '']])
        message = LOG_PATTERNS[1]['content'] #1
    elif FULLZ_FLAG[source] or DEAD_FULLZ_FLAG[source] or PP_FLAG[source] or BUY_FLAG[source]:
        # A purchase flow was in progress; plain chat aborts it.
        BUY_FLAG[source], FULLZ_FLAG[source], DEAD_FULLZ_FLAG[source], PP_FLAG[source] = False, False, False, False
        log_output('166', command, source, 'fail', 'BUY_FLAG', [[3, '']])
        message = LOG_PATTERNS[3]['content'] #3
    else:
        log_output('167', command, source, 'success', 'chat', [[4, '']])
        message = LOG_PATTERNS[4]['content'] #4
    bot.send_im(target=source, message=message)
def bins_cb(bot, event):
global bins_list, BUY_FLAG, FULLZ_CONFIRM, DEAD_FULLZ_CONFIRM, PP_CONFIRM, FULLZ_FLAG, DEAD_FULLZ_FLAG, PP_FLAG, fullz_list, dead_fullz_list, pp_amount, DEPOSIT_ADDRESS, DEPOSIT_AMOUNT, BIN_TYPE, PP_NUM, FLAG_USER_BAN
result_list = []
result_text = ""
source = event.data["source"]["aimId"]
# process user
process_user(source)
if FLAG_USER_BAN == True:
log_output('180', 'bins', source, 'fail', 'ban', [[43, '']])
message = LOG_PATTERNS[43]['content'] #43
bot.send_im(target=source, message=message)
return
if (source in FULLZ_CONFIRM and FULLZ_CONFIRM[source]) or (source in DEAD_FULLZ_CONFIRM and DEAD_FULLZ_CONFIRM[source]) or (source in PP_CONFIRM and PP_CONFIRM[source]):
log_output('162', 'bins', source, 'fail', 'not CONFIRM', [[1, '']])
message = LOG_PATTERNS[1]['content'] #1
bot.send_im(target=source, message=message)
return
START_FLAG[source] = True
BUY_FLAG[source] = False
#FULLZ_CONFIRM[source] = False
PP_CONFIRM[source] = False
#FULLZ_FLAG[source] = False
PP_FLAG[source] = False
fullz_list[source] = []
dead_fullz_list[source] = []
pp_amount[source] = ""
bins_list = []
DEPOSIT_ADDRESS[source] = ""
DEPOSIT_AMOUNT[source] = ""
BIN_TYPE[source] = ""
PP_NUM[source] = ""
#if FULLZ_CONFIRM[source] or PP_CONFIRM[source]:
# message = "(INVALID COMMAND) Please only use commands /confirm and /cancel please note that if you have already paid and you cancel you will lose your product.\nPlease type /cancel if you want to cancel your current purchase."#1
# bot.send_im(target=source, message=message)
# return
try:
query_conn.execute("select card_bin, type from fullz where status='on'")
bin_list = query_conn.fetchall()
for index, bin in enumerate(bin_list):
bin1 = decrypt(bin[0]).replace("| Card BIN : ", "")
result_text += bin1.replace("\n", "") +"(" + decrypt(bin[1]) + ")\n"
result_list.append(bin1.replace("\n", ""))
if result_text == "":
log_output('164', 'bins', source, 'fail', 'none fullz', [[41, '']])
result_text = LOG_PATTERNS[41]['content'] #41
else:
log_output('163', 'bins', source, 'success', ('bin=%s' % bin1.replace("\n", "")), [[0, result_text]])
bot.send_im(target=source, message=result_text)
except:
print('bin cb error occurs.')
def buy_cb(bot, event):
global DEPOSIT_AMOUNT, DEPOSIT_ADDRESS, BIN_TYPE, BUY_FLAG, FULLZ_CONFIRM, DEAD_FULLZ_CONFIRM, PP_CONFIRM, FULLZ_FLAG, DEAD_FULLZ_FLAG, PP_FLAG, PP_NUM, fullz_list, dead_fullz_list, pp_amount, bins_list, FLAG_USER_BAN
source = event.data["source"]["aimId"]
time_now = int((datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds())
if (source in FULLZ_CONFIRM and FULLZ_CONFIRM[source]) or (source in DEAD_FULLZ_CONFIRM and DEAD_FULLZ_CONFIRM[source]) or (source in PP_CONFIRM and PP_CONFIRM[source]):
log_output('160', 'buy', source, 'fail', 'not CONFIRM', [[1, '']])
message = LOG_PATTERNS[1]['content'] #1
bot.send_im(target=source, message=message)
return
START_FLAG[source] = True
BUY_FLAG[source] = True
FULLZ_CONFIRM[source] = False
DEAD_FULLZ_CONFIRM[source] = False
PP_CONFIRM[source] = False
FULLZ_FLAG[source] = False
DEAD_FULLZ_FLAG[source] = False
PP_FLAG[source] = False
fullz_list[source] = []
dead_fullz_list[source] = []
pp_amount[source] = ""
bins_list = []
DEPOSIT_ADDRESS[source] = ""
DEPOSIT_AMOUNT[source] = ""
BIN_TYPE[source] = ""
PP_NUM[source] = ""
try:
# process user
process_user(source)
if FLAG_USER_BAN == True:
log_output('179', 'buy', source, 'fail', 'ban', [[43, '']])
message = LOG_PATTERNS[43]['content'] #43
bot.send_im(target=source, message=message)
return
query = "update fullz set status='on', lock_time=0, lock_customer='' where status='lock' and lock_customer=%s" % source
query_conn.execute(query)
conn.commit()
update_count1 = query_conn.rowcount
query = "update dead_fullz set status='on', lock_time=0, lock_customer='' where status='lock' and lock_customer=%s" % source
query_conn.execute(query)
conn.commit()
update_count2 = query_conn.rowcount
query = "update pp set status='on', lock_time=0, lock_customer='' where status='lock' and lock_customer=%s" % source
query_conn.execute(query)
conn.commit()
update_count3 = query_conn.rowcount
log_output('161', 'buy', source, 'success', ('%d update fullz %d update dead_fullz %d update pp' % (update_count1, update_count2, update_count3)), [[42, str(update_count1)+'###'+str(update_count2)+'###'+str(update_count3)]])
message = LOG_PATTERNS[42]['content'] #42
bot.send_im(target=source, message=message)
except:
print('buy cb error occurs.')
def pp_cb(bot, event):
global BUY_FLAG, PP_FLAG, PP_CONFIRM, PP_NUM, FULLZ_FLAG, DEAD_FULLZ_FLAG, FULLZ_CONFIRM, DEAD_FULLZ_CONFIRM, FLAG_USER_BAN
source = event.data["source"]["aimId"]
PP_NUM[source] = "0"
try:
# process user
process_user(source)
if FLAG_USER_BAN == True:
log_output('178', 'pp', source, 'fail', 'ban', [[43, '']])
message = LOG_PATTERNS[43]['content'] #43
bot.send_im(target=source, message=message)
return
if (source in FULLZ_CONFIRM and FULLZ_CONFIRM[source]) or (source in DEAD_FULLZ_CONFIRM and DEAD_FULLZ_CONFIRM[source]) or (source in PP_CONFIRM and PP_CONFIRM[source]):
log_output('153', 'pp', source, 'fail', 'not CONFIRM', [[1, '']])
message = LOG_PATTERNS[1]['content'] #1
bot.send_im(target=source, message=message)
return
elif FULLZ_FLAG[source]:
log_output('154', 'pp', source, 'fail', 'FULLZ_FLAG', [[5, '']])
message = LOG_PATTERNS[5]['content'] #5
bot.send_im(target=source, message=message)
elif DEAD_FULLZ_FLAG[source]:
log_output('155', 'pp', source, 'fail', 'DEAD_FULLZ_FLAG', [[6, '']])
message = LOG_PATTERNS[6]['content'] #6
bot.send_im(target=source, message=message)
else:
if source in BUY_FLAG:
if BUY_FLAG[source]:
PP_FLAG[source] = True
query_conn.execute("select email, password from pp where status='on'")
pp_list = query_conn.fetchall()
if pp_list == []:
log_output('156', 'pp', source, 'fail', 'none pp', [[7, '']])
message = LOG_PATTERNS[7]['content'] #7
else:
PP_NUM[source] = str(len(pp_list))
log_output('157', 'pp', source, 'success', 'pp_list', [[8, str(PP_NUM[source])]])
message = LOG_PATTERNS[8]['content'] % PP_NUM[source] #8
BUY_FLAG[source] = False
bot.send_im(target=source, message=message)
else:
log_output('158', 'pp', source, 'fail', 'not BUY_FLAG', [[9, '']])
message = LOG_PATTERNS[9]['content'] #9
bot.send_im(target=source, message=message)
else:
log_output('159', 'pp', source, 'fail', 'not BUY_FLAG', [[9, '']])
message = LOG_PATTERNS[9]['content'] #9
bot.send_im(target=source, message=message)
except Exception as e:
print('pp cb error occurs.')
print(e)
def fullz_cb(bot, event):
global BUY_FLAG, PP_FLAG, PP_CONFIRM, PP_NUM, FULLZ_FLAG, DEAD_FULLZ_FLAG, FULLZ_CONFIRM, DEAD_FULLZ_CONFIRM, bins_list, FLAG_USER_BAN, FULLZ_NUM
source = event.data["source"]["aimId"]
try:
# process user
process_user(source)
if FLAG_USER_BAN == True:
log_output('177', 'fullz', source, 'fail', 'ban', [[43, '']])
message = LOG_PATTERNS[43]['content'] #43
bot.send_im(target=source, message=message)
return
if (source in FULLZ_CONFIRM and FULLZ_CONFIRM[source]) or (source in DEAD_FULLZ_CONFIRM and DEAD_FULLZ_CONFIRM[source]) or (source in PP_CONFIRM and PP_CONFIRM[source]):
log_output('146', 'fullz', source, 'fail', 'not CONFIRM', [[1, '']])
message = LOG_PATTERNS[1]['content'] #1
bot.send_im(target=source, message=message)
return
if PP_FLAG[source]:
log_output('147', 'fullz', source, 'fail', 'PP_FLAG', [[10, '']])
message = LOG_PATTERNS[10]['content'] #10
bot.send_im(target=source, message=message)
elif DEAD_FULLZ_FLAG[source]:
log_output('148', 'fullz', source, 'fail', 'DEAD_FULLZ_FLAG', [[11, '']])
message = LOG_PATTERNS[11]['content'] #11
bot.send_im(target=source, message=message)
else:
if source in BUY_FLAG:
if BUY_FLAG[source]:
# customer cannot buy any more after buy a fullz.
if source in FULLZ_NUM and FULLZ_NUM[source]:
log_output('186', 'fullz', source, 'fail', 'FULLZ_NUM > 0', [[54, '']])
message = LOG_PATTERNS[54]['content'] #54
bot.send_im(target=source, message=message)
else:
FULLZ_FLAG[source] = True
bins_list = []
result_list = []
result_text = LOG_PATTERNS[12]['content'] #12
query_conn.execute("select card_bin, type from fullz where status='on'")
bin_list = query_conn.fetchall()
if bin_list == []:
log_output('149', 'fullz', source, 'fail', 'none fullz', [[13, '']])
message = LOG_PATTERNS[13]['content'] #13
bot.send_im(target=source, message=message)
else:
for index, bin in enumerate(bin_list):
bin_name = decrypt(bin[0]).replace("| Card BIN : ", "")
bin_type = decrypt(bin[1])
result_list.append([bin_name.replace("\n", ""), bin_type])
for bin in result_list:
temp1 = bin[0] + "X" + str(result_list.count(bin))
# temp2 = [bin, result_list.count(bin)]
temp2 = [bin[0], bin[1], result_list.count(bin)]
if temp2 not in bins_list:
bins_list.append(temp2)
result_text += temp1 + "\n"
BUY_FLAG[source] = False
log_output('150', 'fullz', source, 'success', result_text, [[0, result_text]])
bot.send_im(target=source, message=result_text)
else:
log_output('151', 'fullz', source, 'fail', 'not BUY_FLAG', [[14, '']])
message = LOG_PATTERNS[14]['content'] #14
bot.send_im(target=source, message=message)
else:
log_output('152', 'fullz', source, 'fail', 'not BUY_FLAG', [[14, '']])
message = LOG_PATTERNS[14]['content'] #14
bot.send_im(target=source, message=message)
except Exception as e:
print('fullz cb error occurs.')
print(e)
def dead_fullz_cb(bot, event):
global BUY_FLAG, PP_FLAG, PP_CONFIRM, PP_NUM, FULLZ_FLAG, DEAD_FULLZ_FLAG, FULLZ_CONFIRM, DEAD_FULLZ_CONFIRM, bins_list, FLAG_USER_BAN, DEAD_FULLZ_NUM
source = event.data["source"]["aimId"]
try:
# process user
process_user(source)
if FLAG_USER_BAN == True:
log_output('176', 'dead_fullz', source, 'fail', 'ban', [[43, '']])
message = LOG_PATTERNS[43]['content'] #43
bot.send_im(target=source, message=message)
return
if (source in FULLZ_CONFIRM and FULLZ_CONFIRM[source]) or (source in DEAD_FULLZ_CONFIRM and DEAD_FULLZ_CONFIRM[source]) or (source in PP_CONFIRM and PP_CONFIRM[source]):
log_output('141', 'dead_fullz', source, 'fail', 'not CONFIRM', [[1, '']])
message = LOG_PATTERNS[1]['content'] #1
bot.send_im(target=source, message=message)
return
if PP_FLAG[source]:
log_output('142', 'dead_fullz', source, 'fail', 'PP_FLAG', [[15, '']])
message = LOG_PATTERNS[15]['content'] #15
bot.send_im(target=source, message=message)
elif FULLZ_FLAG[source]:
log_output('143', 'dead_fullz', source, 'fail', 'FULLZ_FLAG', [[16, '']])
message = LOG_PATTERNS[16]['content'] #16
bot.send_im(target=source, message=message)
else:
if source in BUY_FLAG:
if BUY_FLAG[source]:
# customer cannot buy any more after buy a dead_fullz.
if source in DEAD_FULLZ_NUM and DEAD_FULLZ_NUM[source]:
log_output('187', 'dead_fullz', source, 'fail', 'DEAD_FULLZ_NUM > 0', [[55, '']])
message = LOG_PATTERNS[55]['content'] #55
bot.send_im(target=source, message=message)
else:
DEAD_FULLZ_FLAG[source] = True
bins_list = []
result_list = []
result_text = LOG_PATTERNS[17]['content'] #17
query_conn.execute("select card_bin, type from dead_fullz where status='on'")
bin_list = query_conn.fetchall()
if bin_list == []:
log_output('144', 'dead_fullz', source, 'fail', 'none dead_fullz', [[18, '']])
message = LOG_PATTERNS[18]['content'] #18
bot.send_im(target=source, message=message)
else:
for index, bin in enumerate(bin_list):
bin_name = decrypt(bin[0]).replace("| Card BIN : ", "")
bin_type = decrypt(bin[1])
result_list.append([bin_name.replace("\n", ""), bin_type])
for bin in result_list:
temp1 = bin[0] + "X" + str(result_list.count(bin))
# temp2 = [bin, result_list.count(bin)]
temp2 = [bin[0], bin[1], result_list.count(bin)]
if temp2 not in bins_list:
bins_list.append(temp2)
result_text += temp1 + "\n"
BUY_FLAG[source] = False
log_output('145', 'dead_fullz', source, 'success', result_text, [[0, result_text]])
bot.send_im(target=source, message=result_text)
else:
log_output('140', 'dead_fullz', source, 'fail', 'not BUY_FLAG', [[19, '']])
message = LOG_PATTERNS[19]['content'] #19
bot.send_im(target=source, message=message)
else:
log_output('139', 'dead_fullz', source, 'fail', 'not BUY_FLAG', [[19, '']])
message = LOG_PATTERNS[19]['content'] #19
bot.send_im(target=source, message=message)
except Exception as e:
print('dead fullz cb error occurs.')
print(e)
def unknown_cb(bot, event):
global bins_list, fullz_list, dead_fullz_list, pp_amount, FULLZ_NAME, FULLZ_NUM, FULLZ_TYPE, DEAD_FULLZ_NAME, DEAD_FULLZ_NUM, DEAD_FULLZ_TYPE, PP_NUM, PP_FLAG, FULLZ_FLAG, DEAD_FULLZ_FLAG, FULLZ_CONFIRM, DEAD_FULLZ_CONFIRM, PP_CONFIRM, GBP_PAY_AMOUNT, PP_PRICE, CREDIT_PRICE, DEBIT_PRICE, DEAD_FULLZ_PRICE, FLAG_USER_BAN
source = event.data["source"]["aimId"]
fullz_list[source] = []
dead_fullz_list[source] = []
FULLZ_NAME[source] = ""
FULLZ_NUM[source] = ""
FULLZ_TYPE[source] = ""
DEAD_FULLZ_NAME[source] = ""
DEAD_FULLZ_NUM[source] = ""
DEAD_FULLZ_TYPE[source] = ""
message = ""
note_text = LOG_PATTERNS[20]['content'] #20
confirm_text = LOG_PATTERNS[21]['content'] #21
command = event.data["message"].strip()
command_string = command[1:]
try:
# process user
process_user(source)
if FLAG_USER_BAN == True:
log_output('175', command_string, source, 'fail', 'ban', [[43, '']])
message = LOG_PATTERNS[43]['content'] #43
bot.send_im(target=source, message=message)
return
if (source in FULLZ_CONFIRM and FULLZ_CONFIRM[source]) or (source in DEAD_FULLZ_CONFIRM and DEAD_FULLZ_CONFIRM[source]) or (source in PP_CONFIRM and PP_CONFIRM[source]):
log_output('101', command_string, source, 'fail', 'not CONFIRM', [[1, '']])
message = LOG_PATTERNS[1]['content'] #1
bot.send_im(target=source, message=message)
return
if source not in START_FLAG:
START_FLAG[source] = True
BUY_FLAG[source] = False
FULLZ_CONFIRM[source] = False
DEAD_FULLZ_CONFIRM[source] = False
PP_CONFIRM[source] = False
FULLZ_FLAG[source] = False
DEAD_FULLZ_FLAG[source] = False
PP_FLAG[source] = False
fullz_list[source] = []
dead_fullz_list[source] = []
pp_amount[source] = ""
bins_list = []
DEPOSIT_ADDRESS[source] = ""
DEPOSIT_AMOUNT[source] = ""
GBP_PAY_AMOUNT[source] = ""
BIN_TYPE[source] = ""
PP_NUM[source] = ""
message = LOG_PATTERNS[22]['content'] #22
else:
if PP_FLAG[source]:
num = command[1:]
try:
if num == "0" or num == "":
log_output('102', command_string, source, 'fail', 'pp num=0', [[23, '']])
message = LOG_PATTERNS[23]['content'] #23
elif int(num) > int(PP_NUM[source]):
log_output('103', command_string, source, 'fail', 'pp num>PP_NUM', [[23, '']])
message = LOG_PATTERNS[23]['content'] #23
elif int(num) <= int(PP_NUM[source]):
query_conn.execute("select id from pp where status='on'")
a_list = query_conn.fetchall()
for i in range(int(num)):
p_id = a_list[i][0]
lock_time = int((datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds())
query = "update pp set status='lock', lock_time=%s, lock_customer=%s where id=%s" % (
lock_time, source, p_id)
query_conn.execute(query)
conn.commit()
update_count = query_conn.rowcount
log_output('104', command_string, source, 'success', ('%d update pp status=lock where id=%s' % (update_count, p_id)), [], False)
deposit_address, pay_amount, fee, total_amount, gbp_pay_amount = calc_payment(source, num)
# message = "Deposit Address: %s\nDeposit Amount: %s" % (deposit_address, str(pay_amount))
# message = LOG_PATTERNS[24]['content'] % (deposit_address, str(pay_amount), str(fee), str(total_amount), confirm_text, note_text) #24
message = LOG_PATTERNS[24]['content'] % (deposit_address, str(total_amount), confirm_text, note_text) #24
log_output('105', command_string, source, 'success', message, [[0, message]])
pp_amount[source] = num
PP_FLAG[source] = False
BIN_TYPE[source] = "PP"
DEPOSIT_ADDRESS[source] = deposit_address
DEPOSIT_AMOUNT[source] = total_amount
GBP_PAY_AMOUNT[source] = gbp_pay_amount
PP_CONFIRM[source] = True
insert_order(source)
else:
log_output('106', command_string, source, 'fail', 'not pp', [[25, '']])
message = LOG_PATTERNS[25]['content'] #25
except:
log_output('107', command_string, source, 'fail', 'not pp', [[25, '']])
message = LOG_PATTERNS[25]['content'] #25
elif FULLZ_FLAG[source]:
bin = ""
num = ""
if "\n" in command or command.count("/") > 1 or command.count("x") > 0 or command.count("X") > 0:
log_output('122', command_string, source, 'fail', 'not fullz', [[30, '']])
message = LOG_PATTERNS[30]['content'] #30
else:
# customer can buy only one fullz.
bin = command[1:]
num = "1"
for item in bins_list:
if item[0] == bin:
if num == "0":
log_output('123', command_string, source, 'fail', 'fullz num=0', [[26, '']])
message = LOG_PATTERNS[26]['content'] #26
elif int(num) > int(item[2]):
log_output('124', command_string, source, 'fail', 'fullz num>BIN_amount', [[27, '']])
message = LOG_PATTERNS[27]['content'] #27
else:
FULLZ_NAME[source] = bin
FULLZ_NUM[source] = num
FULLZ_TYPE[source] = item[1]
if FULLZ_NAME[source] == "" or FULLZ_NUM[source] == "" or FULLZ_TYPE[source] == "":
if message == "":
log_output('125', command_string, source, 'fail', 'not fullz', [[22, '']])
message = LOG_PATTERNS[22]['content'] #22
else:
lock_num = 0
query_conn.execute("select id, card_bin from fullz where status='on'")
f_list = query_conn.fetchall()
for f in f_list:
bin_name = decrypt(f[1]).replace("| Card BIN : ", "").replace("\n", "")
bin_id = f[0]
if bin_name == FULLZ_NAME[source]:
lock_time = int(
(datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds())
query = "update fullz set status='lock', lock_time=%s, lock_customer=%s where id=%s" % (
lock_time, source, bin_id)
query_conn.execute(query)
conn.commit()
update_count = query_conn.rowcount
log_output('126', command_string, source, 'success', ('%d update fullz status=lock where id=%s' % (update_count, bin_id)), [], False)
lock_num += 1
if lock_num == int(FULLZ_NUM[source]):
break
deposit_address, pay_amount, fee, total_amount, gbp_pay_amount = calc_payment(source)
# message = LOG_PATTERNS[24]['content'] % (deposit_address, str(pay_amount), str(fee), str(total_amount), confirm_text, note_text) #24
message = LOG_PATTERNS[24]['content'] % (deposit_address, str(total_amount), confirm_text, note_text) #24
log_output('127', command_string, source, 'success', message, [[0, message]])
fullz_list[source].append([FULLZ_NAME[source], FULLZ_NUM[source]])
FULLZ_FLAG[source] = False
FULLZ_CONFIRM[source] = True
BIN_TYPE[source] = "Fullz"
DEPOSIT_ADDRESS[source] = deposit_address
DEPOSIT_AMOUNT[source] = total_amount
GBP_PAY_AMOUNT[source] = gbp_pay_amount
insert_order(source)
elif DEAD_FULLZ_FLAG[source]:
bin = ""
num = ""
if "\n" in command or command.count("/") > 1 or command.count("x") > 0 or command.count("X") > 0:
log_output('133', command_string, source, 'fail', 'not dead_fullz', [[30, '']])
message = LOG_PATTERNS[30]['content'] #30
else:
# customer can buy only one dead_fullz.
bin = command[1:]
num = "1"
for item in bins_list:
if item[0] == bin:
if num == "0":
log_output('134', command_string, source, 'fail', 'dead_fullz num=0', [[31, '']])
message = LOG_PATTERNS[31]['content'] #31
elif int(num) > int(item[2]):
log_output('135', command_string, source, 'fail', 'dead_fullz num>BIN_amount', [[32, '']])
message = LOG_PATTERNS[32]['content'] #32
else:
DEAD_FULLZ_NAME[source] = bin
DEAD_FULLZ_NUM[source] = num
DEAD_FULLZ_TYPE[source] = item[1]
if DEAD_FULLZ_NAME[source] == "" or DEAD_FULLZ_NUM[source] == "" or DEAD_FULLZ_TYPE[source] == "":
if message == "":
log_output('136', command_string, source, 'fail', 'not dead_fullz', [[25, '']])
message = LOG_PATTERNS[25]['content'] #25
else:
lock_num = 0
query_conn.execute("select id, card_bin from dead_fullz where status='on'")
f_list = query_conn.fetchall()
for f in f_list:
bin_name = decrypt(f[1]).replace("| Card BIN : ", "").replace("\n", "")
bin_id = f[0]
if bin_name == DEAD_FULLZ_NAME[source]:
lock_time = int(
(datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds())
query = "update dead_fullz set status='lock', lock_time=%s, lock_customer=%s where id=%s" % (
lock_time, source, bin_id)
query_conn.execute(query)
conn.commit()
update_count = query_conn.rowcount
log_output('137', command_string, source, 'success', ('%d update dead_fullz status=lock where id=%s' % (update_count, bin_id)), [], False)
lock_num += 1
if lock_num == int(DEAD_FULLZ_NUM[source]):
break
deposit_address, pay_amount, fee, total_amount, gbp_pay_amount = calc_payment(source)
# message = LOG_PATTERNS[24]['content'] % (deposit_address, str(pay_amount), str(fee), str(total_amount), confirm_text, note_text) #24
message = LOG_PATTERNS[24]['content'] % (deposit_address, str(total_amount), confirm_text, note_text) #24
log_output('138', command_string, source, 'success', message, [[0, message]])
dead_fullz_list[source].append([DEAD_FULLZ_NAME[source], DEAD_FULLZ_NUM[source]])
DEAD_FULLZ_FLAG[source] = False
DEAD_FULLZ_CONFIRM[source] = True
BIN_TYPE[source] = "dead_fullz"
DEPOSIT_ADDRESS[source] = deposit_address
DEPOSIT_AMOUNT[source] = total_amount
GBP_PAY_AMOUNT[source] = gbp_pay_amount
insert_order(source)
else:
log_output('111', command_string, source, 'fail', 'not START_FLAG not FLAG', [[22, '']])
message = LOG_PATTERNS[22]['content'] #22
except:
print('unknown cb error occurs.')
bot.send_im(target=source, message=message)
def confirm_cb(bot, event):
global FULLZ_CONFIRM, DEAD_FULLZ_CONFIRM, PP_CONFIRM, fullz_list, dead_fullz_list, pp_amount, DEPOSIT_ADDRESS, DEPOSIT_AMOUNT, GBP_PAY_AMOUNT, BIN_TYPE, FLAG_USER_BAN
source = event.data["source"]["aimId"]
result_text = ""
amount = 0
time_now = int((datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds())
try:
# process user
process_user(source)
if FLAG_USER_BAN == True:
log_output('185', 'confirm', source, 'fail', 'ban', [[43, '']])
message = LOG_PATTERNS[43]['content'] #43
bot.send_im(target=source, message=message)
return
if source in FULLZ_CONFIRM and FULLZ_CONFIRM[source]:
confirm_time = int((datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds())
query = "update fullz set status='confirm', lock_time=%s, deposit_address='%s' where lock_customer=%s and status='lock'" % (confirm_time, DEPOSIT_ADDRESS[source], source)
query_conn.execute(query)
conn.commit()
update_count = query_conn.rowcount
query = "insert into `deposit` (`bin_type`,`deposit_address`, `deposit_amount`,`uin`, `start_time`) values ('%s', '%s', '%s', '%s', '%s')" % (BIN_TYPE[source], DEPOSIT_ADDRESS[source], DEPOSIT_AMOUNT[source], source, time_now)
query_conn.execute(query)
conn.commit()
insert_count = query_conn.rowcount
FULLZ_FLAG[source], FULLZ_CONFIRM[source] = False, False
log_output('112', 'confirm', source, 'success', ('%d update fullz %d insert deposit' % (update_count, insert_count)), [[33, '']])
result_text = LOG_PATTERNS[33]['content'] #33
bot.send_im(target=source, message=result_text)
elif source in DEAD_FULLZ_CONFIRM and DEAD_FULLZ_CONFIRM[source]:
confirm_time = int((datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds())
query = "update dead_fullz set status='confirm', lock_time=%s, deposit_address='%s' where lock_customer=%s and status='lock'" % (confirm_time, DEPOSIT_ADDRESS[source], source)
query_conn.execute(query)
conn.commit()
update_count = query_conn.rowcount
query = "insert into `deposit` (`bin_type`,`deposit_address`, `deposit_amount`,`uin`, `start_time`) values ('%s', '%s', '%s', '%s', '%s')" % (BIN_TYPE[source], DEPOSIT_ADDRESS[source], DEPOSIT_AMOUNT[source], source, time_now)
query_conn.execute(query)
conn.commit()
insert_count = query_conn.rowcount
DEAD_FULLZ_FLAG[source], DEAD_FULLZ_CONFIRM[source] = False, False
log_output('113', 'confirm', source, 'success', ('%d update dead_fullz %d insert deposit' % (update_count, insert_count)), [[33, '']])
result_text = LOG_PATTERNS[33]['content'] #33
bot.send_im(target=source, message=result_text)
elif source in PP_CONFIRM and PP_CONFIRM[source]:
confirm_time = int((datetime.datetime.now() - datetime.datetime(1970, 1, 1)).total_seconds())
query = "update pp set status='confirm', lock_time=%s, deposit_address='%s' where lock_customer=%s and status='lock'" % (confirm_time, DEPOSIT_ADDRESS[source], source)
query_conn.execute(query)
conn.commit()
update_count = query_conn.rowcount
query = "insert into `deposit` (`bin_type`, `deposit_address`, `deposit_amount`,`uin`, `start_time`) values ('%s', '%s', '%s', '%s', '%s')" % (BIN_TYPE[source], DEPOSIT_ADDRESS[source], DEPOSIT_AMOUNT[source], source, time_now)
query_conn.execute(query)
conn.commit()
insert_count = query_conn.rowcount
PP_FLAG[source], PP_CONFIRM[source] = False, False
log_output('114', 'confirm', source, 'success', ('%d update pp %d insert deposit' % (update_count, insert_count)), [[33, '']])
result_text = LOG_PATTERNS[33]['content'] #33
bot.send_im(target=source, message=result_text)
else:
log_output('115', 'confirm', source, 'fail', 'not CONFIRM', [[34, '']])
message = LOG_PATTERNS[34]['content'] #34
bot.send_im(target=source, message=message)
except:
print('confirm cb error occurs.')
def cancel_cb(bot, event):
global bins_list, fullz_list, dead_fullz_list, pp_amount, FULLZ_NAME, FULLZ_NUM, FULLZ_TYPE, DEAD_FULLZ_NAME, DEAD_FULLZ_NUM, DEAD_FULLZ_TYPE, PP_NUM, PP_FLAG, FULLZ_FLAG, DEAD_FULLZ_FLAG, FULLZ_CONFIRM, DEAD_FULLZ_CONFIRM, PP_CONFIRM, PP_PRICE, CREDIT_PRICE, DEBIT_PRICE, FLAG_USER_BAN
source = event.data["source"]["aimId"]
message = ""
# print(source)
try:
if source in FULLZ_CONFIRM and FULLZ_CONFIRM[source]:
query = "update fullz set status='on', lock_time=0, lock_customer='', deposit_address='' where status='lock' and lock_customer=%s" % source
query_conn.execute(query)
conn.commit()
update_count = query_conn.rowcount
if source in FULLZ_NUM:
FULLZ_NAME[source], FULLZ_NUM[source], FULLZ_TYPE[source] = ("", "", "")
query = "update `order` set `canceled`='Y' where uin=%s and product_type='fullz' and ongoing='Y'" % source
query_conn.execute(query)
conn.commit()
log_output('116', 'cancel', source, 'success', ('%d update fullz status=on' % update_count), [[35, '']])
message = LOG_PATTERNS[35]['content'] #35
FULLZ_CONFIRM[source] = False
DEPOSIT_AMOUNT[source], DEPOSIT_ADDRESS[source], BIN_TYPE[source] = ("", "", "")
elif source in DEAD_FULLZ_CONFIRM and DEAD_FULLZ_CONFIRM[source]:
query = "update dead_fullz set status='on', lock_time=0, lock_customer='', deposit_address='' where status='lock' and lock_customer=%s" % source
query_conn.execute(query)
conn.commit()
update_count = query_conn.rowcount
if source in DEAD_FULLZ_NUM:
DEAD_FULLZ_NAME[source], DEAD_FULLZ_NUM[source], DEAD_FULLZ_TYPE[source] = ("", "", "")
query = "update `order` set `canceled`='Y' where uin=%s and product_type='dead_fullz' and ongoing='Y'" % source
query_conn.execute(query)
conn.commit()
log_output('117', 'cancel', source, 'success', ('%d update dead_fullz status=on' % update_count), [[35, '']])
message = LOG_PATTERNS[35]['content'] #35
DEAD_FULLZ_CONFIRM[source] = False
DEPOSIT_AMOUNT[source], DEPOSIT_ADDRESS[source], BIN_TYPE[source] = ("", "", "")
elif source in PP_CONFIRM and PP_CONFIRM[source]:
query = "update pp set status='on', lock_time=0, lock_customer='', deposit_address='' where status='lock' and lock_customer=%s" % source
query_conn.execute(query)
conn.commit()
update_count = query_conn.rowcount
if source in PP_NUM:
PP_NUM[source] = ""
query = "update `order` set `canceled`='Y' where uin=%s and product_type='pp' and ongoing='Y'" % source
query_conn.execute(query)
conn.commit()
log_output('118', 'cancel', source, 'success', ('%d update pp status=on' % update_count), [[35, '']])
message = LOG_PATTERNS[35]['content'] #35
PP_CONFIRM[source] = False
PP_NUM[source] = ""
else:
log_output('119', 'cancel', source, 'fail', 'bitcoin', [[36, '']])
message = LOG_PATTERNS[36]['content'] #36
except:
print('cancel cb error occurs.')
print(e)
bot.send_im(target=source, message=message)
def main():
# get log patterns
log_pattern()
# Creating a new bot instance.
bot = ICQBot(token=TOKEN, name=NAME, version=VERSION)
# Registering handlers.
bot.dispatcher.add_handler(UserAddedToBuddyListHandler(chat_cb))
bot.dispatcher.add_handler(UnknownCommandHandler(callback=unknown_cb))
bot.dispatcher.add_handler(MessageHandler(filters=MessageFilter.text, callback=chat_cb))
bot.dispatcher.add_handler(MessageHandler(filters=MessageFilter.chat, callback=chat_cb))
bot.dispatcher.add_handler(CommandHandler(command="command", callback=command_cb))
bot.dispatcher.add_handler(CommandHandler(command="bins", callback=bins_cb))
bot.dispatcher.add_handler(CommandHandler(command="support_uin", callback=support_uin_cb))
bot.dispatcher.add_handler(CommandHandler(command="buy", callback=buy_cb))
bot.dispatcher.add_handler(CommandHandler(command="PP", callback=pp_cb))
bot.dispatcher.add_handler(CommandHandler(command="Fullz", callback=fullz_cb))
bot.dispatcher.add_handler(CommandHandler(command="dead_fullz", callback=dead_fullz_cb))
bot.dispatcher.add_handler(CommandHandler(command="confirm", callback=confirm_cb))
bot.dispatcher.add_handler(CommandHandler(command="start", callback=start_cb))
bot.dispatcher.add_handler(CommandHandler(command="stop", callback=stop_cb))
bot.dispatcher.add_handler(CommandHandler(command="help", callback=help_cb))
bot.dispatcher.add_handler(CommandHandler(command="cancel", callback=cancel_cb))
bot.dispatcher.add_handler(FeedbackCommandHandler(target=OWNER))
# Starting a polling thread watching for new events from server. This is a non-blocking call.
bot.start_polling()
# Blocking the current thread while the bot is working until SIGINT, SIGTERM or SIGABRT is received.
bot.idle()
if __name__ == "__main__":
main()
|
import numpy as np  # numerical arrays for the adjacency matrix
import matplotlib.pyplot as plt  # plotting the control-flow graph
import networkx as nx  # graph construction and path enumeration
# Open the target program whose control-flow graph (CFG) is approximated.
my_obj = open("test1.py","r")  # NOTE(review): handle is closed only at the end; a `with` block would be safer
cif = 0  # running count of "if" occurrences
cfor = 0  # running count of "for" occurrences
k = 0  # total number of CFG nodes (3 nodes added per decision point)
for line in my_obj:
    subif = "if"  # substring searched to detect conditionals
    subfor = "for"  # substring searched to detect loops
    # NOTE(review): str.count matches raw substrings, so "elif", identifiers
    # containing "if"/"for", and occurrences inside strings or comments are
    # all counted too — confirm this over-counting is acceptable.
    cif = line.count(subif) + cif  # accumulate number of if(s)
    cfor = line.count(subfor) + cfor  # accumulate number of for(s)
c = cfor + cif  # total decision points found
k = k + 3*cif  # each "if" contributes 3 nodes (entry, exit, branch)
k = k + 3*cfor  # each "for" likewise contributes 3 nodes
mat = np.zeros((k,k))  # adjacency matrix of the CFG, initially all zeros
cur = 0  # entry node of the decision currently being wired
while (c>0):
    ex = cur + 1  # exit node of this decision
    nxt = ex + 1  # branch/body node of this decision
    mat[cur][ex] = 1  # entry -> exit edge
    mat[cur][nxt] = 1  # entry -> branch edge
    mat[nxt][ex] = 1  # branch -> exit edge
    cur = nxt +1  # entry node of the next decision
    c = c-1
    if (c>0):
        mat[ex][cur] = 1  # chain this decision's exit to the next decision's entry
print("adjacency matrix")
print(mat)  # printing adjacency matrix
ed = []  # list of (source, target) edge tuples extracted from the matrix
for i in range(k):
    for j in range(k):
        if(mat[i][j] ==1):
            x = i,j
            ed.append(x)
print("Edges are")
print(ed)  # printing the edge list
G = nx.Graph()  # undirected graph built from the extracted edges
G.add_edges_from(ed)  # adding edges (nodes are created implicitly)
num = 0  # count of independent paths found
print("Independent paths are :")
# Enumerate simple paths from the first node (0) to the last node (k-1).
# NOTE(review): assumes k >= 1 and node k-1 is reachable — an input file with
# no if/for gives k == 0 and an empty graph; confirm intended handling.
for path in nx.all_simple_paths(G, source=0, target=k-1):
    print(path)  # printing paths one by one
    num = num+1
print("number of independent paths are :",num)
Bounded_regions = num - 1  # planar-graph style estimate: regions = paths - 1
print("Number of bounded regions are :",Bounded_regions)  # number of bounded regions
plt.subplot(121)
nx.draw(G, with_labels=True, font_weight='bold')
plt.show()  # plotting the CFG
print("Program Over...")
my_obj.close()  # closing the input file
# Copyright 2019 Arie Bregman
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import crayons
def missing_reqs(inst, hosts, failure):
    """Build a colored, user-facing message about missing requirements.

    ``inst`` is a list of suggested instructions, ``hosts`` an optional list
    of host names the problem occurred on, and ``failure`` an optional list
    of failure lines to display in red.  Returns the assembled message text.
    """
    # Where the problem happened: the joined host list, or a generic phrase.
    location = "on {}".format(' '.join(hosts)) if hosts else "on this host"
    # Optional failure details, rendered in red when present.
    if failure:
        failure_text = "The following failure happened:\n\n{}".format(
            crayons.red("\n".join(failure)))
    else:
        failure_text = ""
    template = """
There seems to be a problem {0}
{1}
Perhaps try the following:
{2}
"""
    return template.format(crayons.red(location), failure_text,
                           crayons.cyan("\n".join(inst)))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.