text stringlengths 38 1.54M |
|---|
"""print "Hello World!"
x = "Hello Python"
print x
y = 42
print y """
"""# define a function that says hello to the name provided
# this starts a new block
def say_hello(name):
#these lines are indented therefore part of the function
if name:
print 'Hello, ' + name + 'from inside the function'
else:
print 'No name'
# now we're unindented and have ended the previous block
print 'Outside of the function' """
# name = "Zen"
# print "My name is", name
# print "My name is " + name
# age = 20
# print "My age is", age
# print "My age is " + age
first_name = "Zen"
last_name = "Coder"
middle_name = "m"
print "My name is {} {} {}".format(first_name, last_name, middle_name)
hw = "hello %s" % 'world'
print hw
# the output would be:
# hello world
x = "Hello World"
print x.upper()
# output:
"HELLO WORLD"
U = "happy"
print U.count("p")
print U.endswith("p")
print U.endswith("y")
print U.endswith("y")
fruits = ['apple', 'banana', 'orange', 'strwaberry']
vegetables = ['lettuce, cucumber', 'carrots']
fruits_and_vegetables = fruits + vegetables
print fruits_and_vegetables
salad = 3 * vegetables
print salad
x = [99,4,2,5,-3]
print x[:]
#the output would be [99,4,2,5,-3]
print x[1:]
#the output would be [4,2,5,-3];
print x[:4]
#the output would be [99,4,2,5]
print x[2:4]
#the output would be [2,5];
my_list = [1, 'Zen', 'hi']
print len(my_list)
"""enumerate(sequence) used in a for loop context to return two-item-tuple for each item in the list indicating the index followed by the value at that index.
map(function, sequence) applies the function to every item in the sequence you pass in. Returns a list of the results.
min(sequence) returns the lowest value in a sequence.
sorted(sequence) returns a sorted sequence
list.extend(list2) adds all values from a second sequence to the end of the original sequence.
list.pop(index) remove a value at given position. if no parameter is passed, defaults to final value in the list.
list.index(value) returns the index position in a list for the given parameter."""
#conditions
age = 15
if age >= 18:
print 'Legal age'
else:
print 'You are so young!'
#loops
for count in range(0, 5):
print "looping ", count |
#!/usr/bin/env python3
import argparse
import sys
from pathlib import Path
import htcondor
import classad
def parse_args(args=None):
    """Define and parse the command-line arguments for the token-request tool.

    Args:
        args: Optional list of argument strings; ``None`` means ``sys.argv[1:]``.

    Returns:
        The parsed ``argparse.Namespace``.
    """
    # Defaults gathered up front so the help strings can reference them.
    default_pool = "cm.chtc.wisc.edu"
    default_token_dir = Path.home() / ".condor" / "tokens.d"
    default_bounding_set = ["READ", "WRITE"]

    cli = argparse.ArgumentParser(
        description="Request a token that allow remote submission of jobs to an HTCondor pool."
    )
    cli.add_argument(
        "--submit-host",
        default=None,
        help="The name of the submit host to request a token from (e.g., submit3.chtc.wisc.edu). If omitted, you will be asked for this interactively.",
    )
    cli.add_argument(
        "--username",
        default=None,
        help="Your username on the submit host you are requesting a token from. If omitted, you will be asked for this interactively.",
    )
    cli.add_argument(
        "--pool",
        default=default_pool,
        help=f"The address of the pool's central manager. The default pool is {default_pool}",
    )
    cli.add_argument(
        "--token-dir",
        default=default_token_dir,
        type=Path,
        help=f"The directory to store the acquired token in. Defaults to {default_token_dir}",
    )
    cli.add_argument(
        "--authz",
        dest="authorizations",
        nargs="*",
        default=default_bounding_set,
        help=f"Which authorizations to request. Defaults to {' '.join(default_bounding_set)}, the minimum necessary for remote submission and management.",
    )
    cli.add_argument(
        "--debug",
        default=False,
        action="store_true",
        help="Enable HTCondor debug logging.",
    )

    return cli.parse_args(args)
def main(
    submit_host=None,
    username=None,
    pool=None,
    authorizations=None,
    token_dir=None,
    modify_config=True,
    debug=False,
):
    """Request an HTCondor IDTOKEN for remote submission and store it.

    Prompts interactively for any of submit_host / username / pool /
    authorizations that were not supplied, submits a token request to the
    submit host's schedd, prints approval instructions, then blocks until
    the request is approved and writes the token under *token_dir*.
    """
    if debug:
        htcondor.param["TOOL_DEBUG"] = "D_SECURITY D_FULLDEBUG"
        htcondor.enable_debug()

    # We need to override existing settings and authenticate via SSL (anonymously)
    htcondor.param["SEC_CLIENT_AUTHENTICATION_METHODS"] = "SSL"
    if not sys.platform.startswith("win32"):
        # on Ubuntu, set to the correct path (the default is for CentOS)
        if not Path("/etc/pki/tls/certs/ca-bundle.crt").exists():
            htcondor.param[
                "AUTH_SSL_CLIENT_CAFILE"
            ] = "/etc/ssl/certs/ca-certificates.crt"

    # The token directory must exist and be private to the user.
    token_dir.mkdir(parents=True, exist_ok=True)
    token_dir.chmod(0o700)
    htcondor.param["SEC_TOKEN_DIRECTORY"] = str(token_dir)

    if modify_config:
        # Persist SEC_TOKEN_DIRECTORY in the user config, appending only if
        # the line is not already present.
        config_path = Path.home() / ".condor" / "user_config"
        config_path.parent.mkdir(parents=True, exist_ok=True)
        config_lines = [
            f"SEC_TOKEN_DIRECTORY = {str(token_dir)}",
        ]
        if not config_path.exists() or any(
            config_line not in config_path.read_text() for config_line in config_lines
        ):
            with config_path.open(mode="a") as f:
                f.writelines(config_lines)
            print(f"Added config to {config_path}")

    if submit_host is None:
        submit_host = input(
            "What is the address of your submit host (e.g., submit3.chtc.wisc.edu)? "
        )
    if username is None:
        username = input("What is your username on your submit host? ")
    # "@fs" selects filesystem-based identity mapping on the submit host.
    identity = f"{username}@fs"
    if pool is None:
        pool = input("What is the address of your pool's central manager? ")
    if authorizations is None:
        authorizations = [
            authz.upper()
            for authz in input(
                "What are the authorizations you need (e.g., READ WRITE)? "
            ).split()
        ]

    collector = htcondor.Collector(pool)
    target = collector.locate(htcondor.DaemonTypes.Schedd, submit_host)
    # Pretend that the target is a collector
    target["MyType"] = "Collector"

    print("Requesting token...")
    request = make_token_request(identity, authorizations, target)

    lines = [
        f"Your token request id is: {request.request_id}",
        f"To approve the token request, log in to {username}@{submit_host} via SSH and run:",
        f"condor_token_request_approve -name {submit_host} -type SCHEDD -reqid {request.request_id}",
    ]
    print("\n".join(lines))

    # result(0) blocks indefinitely until the request is approved.
    token = request.result(0)
    token_name = f"remote-submit-for-{submit_host}"
    token.write(token_name)
    print(f"Success! Token saved to {token_dir / token_name}")
def make_token_request(identity, authorizations, target_ad):
    """Submit a token request for *identity* to *target_ad* and return it.

    Requests whose id starts with "0" are discarded and re-submitted.
    """
    # TODO: temporary fix for https://htcondor-wiki.cs.wisc.edu/index.cgi/tktview?tn=7641
    # The original implementation retried via recursion; a loop is
    # equivalent here and cannot hit the recursion limit.
    # Once we upgrade the server to whatever version of HTCondor this is
    # fixed in, we can drop the retry entirely.
    while True:
        req = htcondor.TokenRequest(identity, bounding_set=authorizations)
        req.submit(target_ad)
        if not req.request_id.startswith("0"):
            return req
if __name__ == "__main__":
args = parse_args()
main(**vars(args))
|
__copyright__ = "Copyright 2016, http://radical.rutgers.edu"
__license__ = "MIT"
import os
import subprocess
import radical.utils as ru
from .base import ResourceManager
# ------------------------------------------------------------------------------
#
class PBSPro(ResourceManager):
    """ResourceManager implementation for PBSPro batch systems.

    Derives the node list and per-node resource counts from $PBS_NODEFILE,
    related environment variables, and the `exec_vnode` entry of `qstat -f`.
    """

    # --------------------------------------------------------------------------
    #
    def __init__(self, cfg, session):

        ResourceManager.__init__(self, cfg, session)

    # --------------------------------------------------------------------------
    #
    def _configure(self):
        """Populate node_list, cores_per_node, gpus_per_node and lfs_per_node.

        Raises:
            RuntimeError: if $PBS_NODEFILE or $NUM_PPN/$SAGA_PPN is not set.
        """
        # TODO: $NCPUS?!?! = 1 on archer

        pbspro_nodefile = os.environ.get('PBS_NODEFILE')
        if pbspro_nodefile is None:
            msg = "$PBS_NODEFILE not set!"
            self._log.error(msg)
            raise RuntimeError(msg)

        self._log.info("Found PBSPro $PBS_NODEFILE %s." % pbspro_nodefile)

        # Dont need to parse the content of nodefile for PBSPRO, only the length
        # is interesting, as there are only duplicate entries in it.
        # FIX: use a context manager -- the original left the file handle open.
        with open(pbspro_nodefile) as fin:
            pbspro_nodes = [line.strip() for line in fin]
        pbspro_nodes_length = len(pbspro_nodes)

        # Number of Processors per Node
        val = os.environ.get('NUM_PPN')
        if not val:
            val = os.environ.get('SAGA_PPN')
        if not val:
            raise RuntimeError("$NUM_PPN / $SAGA_PPN not set!")
        pbspro_num_ppn = int(val)

        # Number of Nodes allocated
        val = os.environ.get('NODE_COUNT')
        if val:
            pbspro_node_count = int(val)
        else:
            pbspro_node_count = len(set(pbspro_nodes))
            self._log.warn("$NODE_COUNT not set - use %d" % pbspro_node_count)

        # Number of Parallel Environments
        val = os.environ.get('NUM_PES')
        if val:
            pbspro_num_pes = int(val)
        else:
            pbspro_num_pes = len(pbspro_nodes)
            self._log.warn("$NUM_PES not set - use %d" % pbspro_num_pes)

        try:
            pbspro_vnodes = self._parse_pbspro_vnodes()
        except:
            self._log.exception('node parsing failed')
            raise

        # Verify that $NUM_PES == $NODE_COUNT * $NUM_PPN == len($PBS_NODEFILE)
        if not (pbspro_node_count * pbspro_num_ppn == pbspro_num_pes == pbspro_nodes_length):
            self._log.warning("NUM_PES != NODE_COUNT * NUM_PPN != len($PBS_NODEFILE)")

        # node names are unique, so can serve as node uids
        self.node_list = [[node, node] for node in pbspro_vnodes]
        self.cores_per_node = pbspro_num_ppn
        self.gpus_per_node = self._cfg.get('gpus_per_node', 0)  # FIXME GPU
        self.lfs_per_node = {'path': ru.expand_env(
                                         self._cfg.get('lfs_path_per_node')),
                             'size': self._cfg.get('lfs_size_per_node', 0)
                            }

    # --------------------------------------------------------------------------
    #
    def _parse_pbspro_vnodes(self):
        """Return the unique vnode names assigned to this job.

        Parses the (possibly multi-line) 'exec_vnode' entry from the
        `qstat -f` output for $PBS_JOBID.

        Raises:
            RuntimeError: if $PBS_JOBID is not set.
        """
        # PBS Job ID
        val = os.environ.get('PBS_JOBID')
        if val:
            pbspro_jobid = val
        else:
            msg = "$PBS_JOBID not set!"
            self._log.error(msg)
            raise RuntimeError(msg)

        # Get the output of qstat -f for this job
        output = subprocess.check_output(["qstat", "-f", pbspro_jobid])

        # Get the (multiline) 'exec_vnode' entry
        vnodes_str = ''
        for line in output.splitlines():
            line = ru.as_string(line)
            # Detect start of entry
            if 'exec_vnode = ' in line:
                vnodes_str += line.strip()
            elif vnodes_str:
                # Find continuing lines (lines without a new 'key = value' pair)
                if " = " not in line:
                    vnodes_str += line.strip()
                else:
                    break

        # Get the RHS of the entry
        rhs = vnodes_str.split('=', 1)[1].strip()
        self._log.debug("input: %s", rhs)

        nodes_list = []
        # Break up the individual node partitions into vnode slices
        while True:
            idx = rhs.find(')+(')
            node_str = rhs[1:idx]
            nodes_list.append(node_str)
            rhs = rhs[idx + 2:]
            if idx < 0:
                break

        vnodes_list = []
        cpus_list = []
        # Split out the slices into vnode name and cpu count
        for node_str in nodes_list:
            slices = node_str.split('+')
            for _slice in slices:
                vnode, cpus = _slice.split(':')
                cpus = int(cpus.split('=')[1])
                self._log.debug("vnode: %s cpus: %s", vnode, cpus)
                vnodes_list.append(vnode)
                cpus_list.append(cpus)

        self._log.debug("vnodes: %s", vnodes_list)
        self._log.debug("cpus: %s", cpus_list)

        cpus_list = list(set(cpus_list))
        min_cpus = int(min(cpus_list))

        if len(cpus_list) > 1:
            self._log.debug("Detected vnodes of different sizes: %s, the minimal is: %d.", cpus_list, min_cpus)

        # Only unique node names.  (The original copied vnodes_list element by
        # element before deduplicating; the copy loop was redundant.)
        node_list = list(set(vnodes_list))
        self._log.debug("Node list: %s", node_list)

        # Return the list of node names
        return node_list
# ------------------------------------------------------------------------------
|
from otree.api import (
models, widgets, BaseConstants, BaseSubsession, BaseGroup, BasePlayer,
Currency as c, currency_range
)
import random
author = 'Your name here'
doc = """
Your app description
"""
class Constants(BaseConstants):
    # oTree app-level constants.
    name_in_url = 'investment'
    players_per_group = None
    num_rounds = 1
    # Endowment each principal (customer) can invest.
    endowment_principals = c(10)
    # Fixed Compensation
    fixed_payment = c(10)
    # Variable Compensation
    variable_payment = c(5)  # fixed base share for the agents
    # Percentage of the customer's payoff paid to the agent under the
    # variable scheme (see Player.payments_agents).
    share = 25
class Subsession(BaseSubsession):

    def creating_session(self):
        # Copy session-level config onto every player so that treatment,
        # compensation scheme and participation fee are recorded per player.
        for player in self.get_players():
            player.treatment = self.session.config["treatment"]
            player.compensation = self.session.config["compensation"]
            player.participation_fee = self.session.config['participation_fee']
class Group(BaseGroup):
    # No group-level state is needed for this app.
    pass
class Player(BasePlayer):

    # Roles: even id_in_group numbers are customers (principals),
    # odd numbers are agents.
    def role(self):
        if self.id_in_group % 2 == 0:
            return "Customer"
        else:
            return "Agent"

    # Part II Choosing Category:
    category = models.CharField(
        choices=["Sehr konservativ", "Sicherheitsorientiert", "Ausgeglichen", "Wachstumsorientiert", "Offensiv"],
        widget=widgets.RadioSelect(),
        verbose_name="Bitte wählen Sie nun einen der fünf Begriffe:",
        doc="Principals choose the category which is communicated to their agent"
    )
    category_received = models.CharField(
        doc="Category that agents see (from their customers)."
    )

    # Part II Investment:
    investment_single = models.CurrencyField(
        min=0,
        max=Constants.endowment_principals,
        widget=widgets.Slider(),  # newer slider widget (from Christian)
        verbose_name="Ihre Investitionsentscheidung für Ihren Kunden:",
        doc="Agents investment for the principal in the risky asset."
    )
    invested_amount = models.CurrencyField(
        doc="What was invested by the corresponding agent."
    )
    investment_outcome = models.CharField()
    investment_outcome_agents = models.CharField()
    payoff = models.CurrencyField()
    payoff_customer = models.CurrencyField(
        doc="Payoff of the agent's principal."
    )

    def risky_asset(self):
        # Draw from {1, 2, 3}; only a 1 counts as success (1/3 probability).
        # The outcome string is only set for customers (even ids).
        self.random_number = random.randint(1, 3)
        if self.id_in_group % 2 == 0:
            if self.random_number == 1:
                self.investment_outcome = "Die Investition war erfolgreich."
            else:
                self.investment_outcome = "Die Investition war nicht erfolgreich."

    def payments_agents(self):
        # Agents (odd ids) earn either a fixed wage or a base wage plus a
        # percentage share of their customer's payoff, depending on the
        # session's compensation scheme.
        if self.id_in_group % 2 != 0:  # for agents
            if self.session.config["compensation"] == "fixed":
                self.payoff = Constants.fixed_payment
            if self.session.config["compensation"] == "variable":
                self.payoff = Constants.variable_payment + Constants.share/100 * self.payoff_customer

    def payments_customers(self):
        # Customers (even ids): a successful investment pays 3.5x the
        # invested amount plus the uninvested remainder; a failed one
        # returns only the uninvested remainder.
        if self.id_in_group % 2 == 0:
            if self.investment_outcome == "Die Investition war erfolgreich.":
                self.payoff = self.invested_amount * 3.5 + (Constants.endowment_principals - self.invested_amount)
            elif self.investment_outcome == "Die Investition war nicht erfolgreich.":
                self.payoff = Constants.endowment_principals - self.invested_amount

    # Results: Messages
    message = models.CharField(
        choices=["Ich bin sehr zufrieden mit Ihrer Entscheidung", "Ich bin zufrieden mit Ihrer Entscheidung",
                 "Ich bin unzufrieden mit Ihrer Entscheidung", "Ich bin sehr unzufrieden mit Ihrer Entscheidung"],
        widget=widgets.RadioSelect(),
        verbose_name="Wählen Sie dazu eine der vorgefertigten Mitteilungen aus:",
        doc="Principals choose the message to send to the agents."
    )
    message_received = models.CharField(
        doc="Message that agents receive from their principals."
    )

    # Session-level settings copied onto each player in creating_session:
    treatment = models.CharField(
        doc="Treatment (either single or group)"
    )
    compensation = models.CharField(
        doc="Compensation scheme put in place for agents (see settings)."
    )
    participation_fee = models.CharField(
        doc="Participation Fee for all agents."
    )

    # Questionnaire:
    age = models.PositiveIntegerField(
        max=100,
        verbose_name="Wie alt sind Sie?",
        doc="We ask participants for their age between 0 and 100 years"
    )
    gender = models.CharField(
        choices=["männlich", "weiblich", "anderes"],
        widget=widgets.RadioSelect(),
        verbose_name="Was ist Ihr Geschlecht?",
        doc="gender indication"
    )
    studies = models.CharField(
        blank=True,
        verbose_name="Was studieren Sie im Hauptfach?",
        doc="field of studies indication."
    )
    studies2 = models.BooleanField(
        widget=widgets.CheckboxInput(),
        verbose_name="Kein Student",
        doc="Ticking the checkbox means that the participant is a non-student.")
    financial_advice = models.CharField(
        choices=["Ja", "Nein"],
        widget=widgets.RadioSelect(),
        verbose_name="Haben Sie bereits eine Bankberatung in Anspruch genommen?",
        doc="We ask participants if they ever made use of financial advice.")
    income = models.CurrencyField(
        verbose_name="Wie viel Geld im Monat steht Ihnen frei zur Verfügung?",
        doc="We ask participants how much money they have freely available each month.")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import time
# from bayesianchangepoint import binomial_motion
def binomial_motion(N_trials, N_blocks, tau, seed, Jeffreys=True, N_layer=3):
    """Simulate a hierarchical binomial motion process.

    Layer 2 holds switch events (probability 1/tau per trial), layer 1 the
    piecewise-constant Bernoulli probability, and layer 0 the Bernoulli
    draws at that probability.

    Returns:
        (trials, p): trial indices and an (N_trials, N_blocks, N_layer) array.
    """
    from scipy.stats import beta

    np.random.seed(seed)
    trials = np.arange(N_trials)
    # Initialise every layer with uniform noise; row -1 seeds the recursion.
    p = np.random.rand(N_trials, N_blocks, N_layer)

    for t in trials:
        # Did the hidden probability switch on this trial?
        p[t, :, 2] = np.random.rand(1, N_blocks) < 1/tau
        if Jeffreys:
            fresh = beta.rvs(a=.5, b=.5, size=N_blocks)
        else:
            fresh = np.random.rand(1, N_blocks)
        # Keep the previous probability unless a switch occurred.
        p[t, :, 1] = (1 - p[t, :, 2]) * p[t-1, :, 1] + p[t, :, 2] * fresh
        # Bernoulli draw at the current probability.
        p[t, :, 0] = p[t, :, 1] > np.random.rand(1, N_blocks)

    return (trials, p)
def run_video(NameVideo):
    """Render one demo video ('Bet' or 'eyeMvt') of the tracking protocol.

    NOTE(review): indentation in this file was mangled; the nesting below is
    reconstructed from the control-flow keywords -- confirm against the
    original repository before relying on it.
    """
    # ---------------------------------------------------
    # exploration parameters
    # ---------------------------------------------------
    seed = 51
    N_trials = 7#200
    # Pre-set rating-scale marker positions, one per trial (Bet mode only).
    lala = [0, -.2, -.3, -.3, -.4, -.4, -.45]
    N_blocks = 1#3
    tau = N_trials/5.
    (trials, p) = binomial_motion(N_trials, N_blocks, tau=tau, seed=seed, N_layer=3)
    stim_tau = .75 /2 # in seconds # 1.5 for 'eyeMvt'

    # ---------------------------------------------------
    # setup values
    # ---------------------------------------------------
    screen_width_px = 800/1.618 #1920 #1280 for recording machine
    screen_height_px = 500/1.618 #1080 #1024 for recording machine
    framerate = 40 #100. for recording machine
    screen_width_cm = 37 # (cm)
    viewingDistance = 57. # (cm)
    screen_width_deg = 2. * np.arctan((screen_width_cm/2) / viewingDistance) * 180/np.pi
    px_per_deg = screen_width_px / screen_width_deg

    # ---------------------------------------------------
    # stimulus parameters
    # ---------------------------------------------------
    dot_size = 10 # (0.02*screen_height_px)
    V_X_deg = 15 *2 # deg/s
    V_X = px_per_deg * V_X_deg # pixel/s
    # RashBass: latency (ms) used to step the target back at t=0 by
    # speed * latency, so it crosses the center at the expected time.
    RashBass = 100
    saccade_px = .618*screen_height_px
    offset = 0 #.2*screen_height_px

    # ---------------------------------------------------------------------------------------------------------------
    # ---------------------------------------------------------------------------------------------------------------
    from psychopy import visual, core, event, logging, prefs
    prefs.general['audioLib'] = [u'pygame']
    from psychopy import sound

    # ---------------------------------------------------
    win = visual.Window([screen_width_px, screen_height_px], color=(0, 0, 0), allowGUI=False, fullscr=False, screen=0, units='pix')
    win.setRecordFrameIntervals(True)
    win._refreshThreshold = 1/framerate + 0.004 # i've got 50Hz monitor and want to allow 4ms tolerance
    # ---------------------------------------------------
    print('FPS = ', win.getActualFrameRate() , 'framerate=', framerate)
    # ---------------------------------------------------
    target = visual.Circle(win, lineColor='white', size=dot_size, lineWidth=2)
    fixation = visual.GratingStim(win, mask='circle', sf=0, color='white', size=dot_size)
    if NameVideo=='Bet' :
        # Rating scale shown below the fixation point in 'Bet' mode.
        opt_ratingScale = dict(scale=None, low=-1, high=1, precision=100, size=.7, stretch=2.5,
                               labels=('Left', 'unsure', 'Right'), tickMarks=[-1, 0., 1], tickHeight=-1.0,
                               marker='triangle', markerColor='black', lineColor='White', showValue=False, singleClick=True,
                               showAccept=False, pos=(0, -screen_height_px/3))
        ratingScale = visual.RatingScale(win, markerStart=0, **opt_ratingScale) #size=.4

    # ---------------------------------------------------
    def escape_possible() :
        # Allow aborting the run at any time with Escape/A/Q.
        if event.getKeys(keyList=['escape', 'a', 'q']):
            win.close()
            core.quit()

    # ---------------------------------------------------
    clock = core.Clock()
    myMouse = event.Mouse(win=win)

    def presentStimulus_move(dir_bool):
        # Show the moving target for stim_tau seconds, capturing each frame.
        clock.reset()
        #myMouse.setVisible(0)
        dir_sign = dir_bool * 2 - 1
        while clock.getTime() < stim_tau:
            escape_possible()
            # At t=0 the target is stepped back by speed * latency (RashBass, ms).
            target.setPos(((dir_sign * V_X*clock.getTime())-(dir_sign * V_X*(RashBass/1000)), offset))
            target.draw()
            win.flip()
            win.getMovieFrame()
        win.flip()
        win.getMovieFrame()
        escape_possible()
        #win.flip()

    # ---------------------------------------------------
    # EXPERIMENT
    # ---------------------------------------------------
    for block in range(N_blocks):
        for trial in range(N_trials):

            # ---------------------------------------------------
            # FIXATION
            # ---------------------------------------------------
            event.clearEvents()
            if NameVideo=='Bet' :
                ratingScale = visual.RatingScale(win, markerStart=0, **opt_ratingScale)
                tps_fixation = 0
                tps_start_fix = time.time()
                # ---------------------------------------------------
                # First fixation interval: scale at neutral position.
                while (tps_fixation < 0.25) :
                    escape_possible()
                    tps_actuel = time.time()
                    tps_fixation = tps_actuel - tps_start_fix
                    escape_possible()
                    ratingScale.draw()
                    fixation.draw()
                    win.flip()
                    win.getMovieFrame()
                escape_possible()
                # Second interval: scale marker at this trial's preset value.
                ratingScale = visual.RatingScale(win, markerStart=lala[trial], **opt_ratingScale)
                tps_fixation = 0
                tps_start_fix = time.time()
                # ---------------------------------------------------
                while (tps_fixation < 0.25) :
                    escape_possible()
                    tps_actuel = time.time()
                    tps_fixation = tps_actuel - tps_start_fix
                    #while ratingScale.noResponse :
                    fixation.draw()
                    ratingScale.draw()
                    escape_possible()
                    win.flip()
                    win.getMovieFrame()
                #ans = ratingScale.getRating()

            elif NameVideo=='eyeMvt' :
                # Fixation-point duration; NOTE(review): the original comment
                # said 400-800 ms but the sampled range is 0.3-0.4 s -- confirm.
                duree_fixation = np.random.uniform(0.3, 0.4)
                tps_fixation = 0
                tps_start_fix = time.time()
                # ---------------------------------------------------
                while (tps_fixation < duree_fixation) :
                    escape_possible()
                    tps_actuel = time.time()
                    tps_fixation = tps_actuel - tps_start_fix
                    escape_possible()
                    fixation.draw()
                    win.flip()
                    win.getMovieFrame()
                escape_possible()

            # ---------------------------------------------------
            # GAP (blank screen between fixation and target motion)
            # ---------------------------------------------------
            win.flip()
            win.getMovieFrame()
            escape_possible()
            core.wait(0.3)

            # ---------------------------------------------------
            # Target motion
            # ---------------------------------------------------
            escape_possible()
            dir_bool = p[trial, block, 0]
            presentStimulus_move(dir_bool)
            escape_possible()
            win.flip()
            win.getMovieFrame()

    win.update()
    core.wait(0.5)
    win.saveFrameIntervals(fileName=None, clear=True)

    # Assemble the captured frames into an mp4 with moviepy.
    from moviepy.editor import ImageSequenceClip
    for n, frame in enumerate(win.movieFrames):
        win.movieFrames[n] = np.array(frame)
    clip = ImageSequenceClip(win.movieFrames, fps=framerate)
    clip.write_videofile('%s.mp4'%(NameVideo))

    win.close()
    core.quit()
if __name__ == '__main__':
    # Render one video per protocol variant.
    print('Starting protocol')
    for NameVideo in ['Bet', 'eyeMvt', ]:# .mp4
        print('NameVideo =', NameVideo)
        run_video(NameVideo)
|
import pysmurf

# Interactive handle to a SMuRF system against the 'test_epics2' EPICS root,
# using the fp29 experiment config; setup=False skips the hardware setup step
# and make_logfile=False logs to stdout instead of a file.
S = pysmurf.SmurfControl(make_logfile=False,setup=False,epics_root='test_epics2',cfg_file='/usr/local/controls/Applications/smurf/pysmurf/pysmurf/cfg_files/experiment_fp29_smurfsrv04.cfg')
|
#recursive check for a path between A and B of length <= 3
#Ernest Loveland
import copy
#Get the list of vertices you can go to from a given vertice
#returns blank if we for some arb reason ask for a vertice's list that
# doesnt exist
def findlist(G, V):
    """Return the adjacency list of vertex *V* in graph *G*.

    G is a list of [vertex, neighbours] pairs; returns [] when V is absent.

    FIX: the original recursive version indexed G[0] before checking that G
    was non-empty, so looking up a missing vertex (or searching an empty
    graph) raised IndexError instead of returning [].  The deepcopy was also
    unnecessary since callers only read the returned list.
    """
    for vertex, neighbours in G:
        if vertex == V:
            return neighbours
    return []
#begin the recursive search by getting the places you can go to from the A
# vertice, returns the final result
def search(G, A, B):
    """Begin the bounded search for a path of length <= 3 from A to B.

    The depth budget passed to gothrough is the maximum path length minus
    one (2 here, for paths of length <= 3).
    """
    return gothrough(G, findlist(G, A), B, 2)
#go through each vertex in the list of possible paths, if C == 0 we have
# reached our max depth, so we return a false (prevents infinite loops)
# L is the current vertice list, if the list is empty we return false as there
# was no path. We check if the first item in the list is the B we are looking
# for, if it is check will return True, if it isnt, we run gothrough on the
# tail of the list.
def gothrough(G, L, B, C):
    """Try each vertex in L as the next step on a path towards B.

    C is the remaining depth budget; at 0 the maximum path length has been
    reached and the branch is abandoned (this also prevents infinite loops
    on cyclic graphs).  Returns True as soon as any vertex in L reaches B
    via check(), False otherwise (including for an empty L).
    """
    if C == 0:
        return False
    for candidate in L:
        if check(G, candidate, C, B):
            return True
    return False
#takes in the current vertice we are checking, the length of our path so far
# and the B we are looking for. If the B is in L we return true as we are
# <= our max length (as gothrough will never call check after the maximum
# length of path). If our B isnt in L we run gothrough with C-1 as our list
# has gotten longer by 1, still looking for original B
def check(G, V, C, B):
    """Return True if B is adjacent to V, otherwise recurse one level deeper.

    The depth budget C is decremented on the recursive call because the
    candidate path has grown by one edge.
    """
    neighbours = findlist(G, V)
    if B in neighbours:
        return True
    return gothrough(G, neighbours, B, C - 1)
#sample input 1: same graph as given in COMS1 Lab test graph {P,Q,R,S} but named
# differently, has the same structure. Returns true as there is a path
# between A and C that is length <= 3.
#G = [["A",["B"]],["B",["A","C","D"]],["C",["B","D"]],["D",["C","B"]]]
#A = "A"
#B = "C"
#sample input 2: a graph {A,B,C,D,E} where A leads to B, B to C, etc only
# you cannot go from A to E, and each vertice only goes to the next vertice
# (will work directed and undirected, this is just directed). returns false
# as the length from A to E is 4 so it never gets there.
#G = [["A",["B"]],["B",["C"]],["C",["D"]],["D",["E"]],["E",[]]]
#A = "A"
#B = "E"
#sample input 3: as above but undirected:
G = [["A",["B"]],["B",["A","C"]],["C",["B","D"]],["D",["C","E"]],["E",["D"]]]
A = "A"
B = "E"
print search(G, A, B)
|
# Generated by Django 3.1.1 on 2020-11-12 08:14
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: switch TableInfo.in_time to auto_now so it
    # is refreshed on every save.

    dependencies = [
        ('CustomerAndDish', '0015_auto_20201112_0807'),
    ]

    operations = [
        migrations.AlterField(
            model_name='tableinfo',
            name='in_time',
            field=models.DateTimeField(auto_now=True),
        ),
    ]
|
#!/usr/bin/env python
'''
Some shared Python functions for Opal helper scripts.
'''
import re
import string
def fasta_reader(f):
    '''Generator that yields (name, sequence) pairs from a FASTA stream.

    Lines following a '+' separator are skipped until the next '>' header,
    so quality strings of FASTQ-style input are ignored.  A record with an
    empty sequence is not yielded at end of file.
    '''
    name = ''
    seq = ''
    in_quality = False
    seen_header = False
    for line in f:
        if line[0:1] == '>':
            # New record: emit the previous one (the very first header has
            # nothing to emit yet).
            if seen_header:
                yield (name, seq)
            seen_header = True
            name = line[1:].rstrip('\n')
            seq = ''
            in_quality = False
        elif line[0:1] == '+':
            # Ignore quality score strings
            in_quality = True
        elif not in_quality:
            seq = seq + line.rstrip('\n')
    # Emit the final record, if it accumulated any sequence.
    if seq != '':
        yield (name, seq)
# Base-complement translation table.
# NOTE(review): string.maketrans on str is Python 2 only -- under Python 3
# this must become str.maketrans.
trans = string.maketrans('ATGCatgc', 'TACGTACG')
def reverse_complement(dna):
    # Reverse the sequence, then complement each base via the table.
    return dna[::-1].translate(trans)
def get_all_substrings(input_string, k):
    # All k-length substrings (k-mers) of input_string, in order.
    # NOTE(review): xrange is Python 2; use range under Python 3.
    return [input_string[i:i+k] for i in xrange(len(input_string) - k + 1)]
# Pre-compiled pattern: the entire string must consist of A/C/G/T (any case).
pat = re.compile('^[ACGTacgt]*$')
def check_acgt(s):
    # Returns a (truthy) match object if s is pure ACGT, else None.
    return pat.match(s)
|
'''
Linear queue

A queue implemented on a 1-D array.  Because front/rear only ever advance,
freed slots are never reused, so a stale "full" state can waste memory.
'''
def initialize():
    """Reset the queue pointers: front and rear both start at -1."""
    front, rear = -1, -1
    return front, rear
def enQueue(data):
    """Store *data* at the next rear slot; report overflow as a string."""
    global rear
    if is_full():
        return f"Queue is Full!!"
    rear = rear + 1
    Queue[rear] = data
    return Queue
def deQueue():
    """Remove and return the front element; report underflow as a string."""
    global front
    if is_empty():
        return f"Queue is Empty"
    front = front + 1
    return Queue[front]
def is_empty():
    """Empty when the front pointer has caught up with the rear."""
    return rear < front + 1
def is_full():
    """Full when rear has reached the last valid index (N - 1)."""
    return rear >= N - 1
# Fixed capacity; slots are pre-filled with -1 as a sentinel.
N = 5
Queue = [-1] * N
front, rear = initialize()

# Five enqueues fit; the sixth reports "Queue is Full!!".
print(enQueue(1))
print(enQueue(2))
print(enQueue(3))
print(enQueue(1))
print(enQueue(2))
print(enQueue(3))
# Five dequeues drain the queue; the sixth reports "Queue is Empty".
print(deQueue())
print(deQueue())
print(deQueue())
print(deQueue())
print(deQueue())
print(deQueue())
|
#! /usr/bin/env python3
import re
import sys
import json
import subprocess
from pprint import pprint
# Test observables, one per supported indicator type (otype).
observables = [
    { 'observable': 'bobby.com', 'otype': 'fqdn' },
    { 'observable': 'little.bobby.com', 'otype': 'fqdn' },
    { 'observable': 'http://www.bobby.com', 'otype': 'url' },
    { 'observable': '1.1.1.253', 'otype': 'ipv4' },
    { 'observable': 'bobby@bobby.com', 'otype': 'email' },
    { 'observable': '2001:4860:4860::8844', 'otype': 'ipv6' },
]

# Metadata merged into every record before submission.
meta_data = {
    'tlp': 'amber',
    'confidence': '95',
    'provider': 'csirtgadgets.org',
    'group': 'everyone',
}
def get_api_key():
    """Extract the root API token from `cif-tokens` output.

    Looks for the 'root@localhost everyone yes yes <token>' row and returns
    the token; exits the process if no such row is found.
    """
    r = subprocess.check_output(["/opt/cif/bin/cif-tokens"]).decode("utf-8")
    regex = r"(root@localhost\s+everyone\s+yes\s+yes\s+(\S+))"
    try:
        match = re.search(regex, r)
        return match.group(2)
    except AttributeError:
        # re.search returned None, so .group raised AttributeError.
        print("ERROR: API key not found, exiting")
        sys.exit(1)
def create_dataset():
    """Return a list of observable records merged with the shared meta_data.

    FIX: the original aliased each record (``d = record``) and injected the
    metadata keys into the module-level ``observables`` dicts as a side
    effect; each record is now copied so the source data stays pristine.
    """
    dataset = []
    for record in observables:
        merged = dict(record)
        merged.update(meta_data)
        dataset.append(merged)
    return dataset
def submit_data(dataset, tag):
    """Tag each record with *tag* and submit it to the local CIF via the CLI.

    NOTE(review): the command is built by string interpolation and executed
    with shell=True -- record contents and the API key land unescaped on a
    shell command line.  Acceptable for this self-contained test data, but a
    command-injection risk with untrusted input; prefer a list argv with
    shell=False.
    """
    for record in dataset:
        record['tags'] = tag
        json_encoded = json.dumps(record)
        rcode = subprocess.check_call('echo \'%s\' | cif --no-verify-ssl --remote https://localhost -s --token %s' % (json_encoded, api_key), shell=True)
        if rcode == 0:
            print("observable: {0}, tag: {1} submitted successfully".format(record['observable'], record['tags']))
        else:
            print("ERROR submitting: observable: {0}, tag: {1}".format(record['observable'], record['tags']))
def check_feed():
    """Generate a feed per otype and check whitelisted observables were filtered.

    Finding one of our submitted observables in the generated feed is an
    error, since the whitelist submission should have suppressed it.
    """
    for record in observables:
        print("Generated feed for otype: {0}".format(record['otype']))
        result = subprocess.check_output(["cif",
                                          "--feed",
                                          "--otype",
                                          "%s" %record['otype'],
                                          "-c",
                                          "85",
                                          "-f",
                                          "json", ])
        result = json.loads(result.decode("utf-8"))
        for record1 in result:
            if record['observable'] == record1['observable']:
                print("ERROR: {0} found in feed: {1}".format(record['observable'], record1['observable']))
            else:
                print("{0} not found in {1} feed".format(record['observable'], record['otype']))
if __name__ == "__main__":
# get an api key with the 'write" attribute
api_key = get_api_key()
# create an list of dicts that contain indicator data used for whitelist
# testing
dataset = create_dataset()
# submit malware indicators
submit_data(dataset, 'malware')
# submit whitelist indicators
submit_data(dataset, 'whitelist')
# test for whitelisted indicators in feed
check_feed()
|
import time
from concurrent import futures
def fib(n):
    """Return the n-th Fibonacci number with fib(0) == fib(1) == 1.

    Deliberately naive exponential recursion so the thread-pool demo below
    has real work to do.

    FIX: collapsed the base case to ``n < 2`` -- the original's
    ``n == 0 or n == 1`` check recursed forever on negative input, and its
    ``else: pass`` branch was dead code.
    """
    if n < 2:
        return 1
    return fib(n - 1) + fib(n - 2)
# Run the two fib() calls asynchronously on a thread pool.
# base
with futures.ThreadPoolExecutor() as executor:
    f1 = executor.submit(fib, 4)  # submit returns a Future
    f2 = executor.submit(fib, 5)
    # result() blocks until the corresponding Future completes.
    print(f1.result())
    print(f2.result())
#define(def) a doubling function that passes arguments by value
"""
JS:
function double(x) {
return ()
}
const double = (x) => {}
"""
def mult2(x):
    """Return x doubled.

    The argument binding behaves like pass-by-value for immutable types:
    the caller's variable is never modified.
    """
    doubled = x * 2
    return doubled
# Doubling for a list: the list object is shared with the caller (reference
# semantics for mutable/complex types), so elements are updated in place.
def mult2_list(l):
    """Double every element of l in place and return l."""
    for idx, value in enumerate(l):
        l[idx] = value * 2
    return l
y = mult2(12)
print(y)  # 24

some_nums = [1, 2, 3, 4]
print(some_nums)
mult2_list(some_nums)  # mutates some_nums in place
print(some_nums)  # [2, 4, 6, 8]
import os

# Maximum number of files kept per output folder.
MAX_FILES_IN_FOLDER = 100
# Paths are resolved relative to this file's own directory.
CWD = os.path.dirname(os.path.realpath(__file__))
RESOURCES_FOLDER_PATH = os.path.join(CWD, "resources")
ASSETS_FOLDER_PATH = os.path.join(CWD, "assets")
DEFAULT_IMAGE_PLACEHOLDER = os.path.join(ASSETS_FOLDER_PATH, "default_placeholder.png")
# Run the renderer under a virtual framebuffer (headless X).
USE_XVFB = True
WEBKIT2PNG_PATH = "/workspace/python-webkit2png/webkit2png"
# Seconds to wait for lazily-loaded page content before capturing.
SMART_LOAD_LOAD_SECS = 2
|
import contextlib
import os
import tempfile
import numpy as np
import pytest
import tensorrt as trt
from polygraphy import mod, util
from polygraphy.backend.trt import Calibrator, CreateConfig, Profile, network_from_onnx_bytes, postprocess_config
from polygraphy.common.struct import MetadataTuple, BoundedShape
from polygraphy.comparator import DataLoader
from tests.helper import has_dla
from tests.models.meta import ONNX_MODELS
@pytest.fixture(scope="session")
def identity_builder_network():
builder, network, parser = network_from_onnx_bytes(ONNX_MODELS["identity"].loader)
with builder, network, parser:
yield builder, network
class TestCreateConfig:
    """Tests for the CreateConfig builder-config loader.

    Many checks are wrapped in ``contextlib.suppress(AttributeError)`` or
    version guards so the suite degrades gracefully across TensorRT releases
    that lack newer flags/enums.
    """

    def test_defaults(self, identity_builder_network):
        builder, network = identity_builder_network
        loader = CreateConfig()
        assert loader.timing_cache_path is None
        with loader(builder, network) as config:
            with contextlib.suppress(AttributeError):
                assert not config.get_flag(trt.BuilderFlag.TF32)
            with contextlib.suppress(AttributeError):
                assert not config.get_flag(trt.BuilderFlag.SPARSE_WEIGHTS)
            assert not config.get_flag(trt.BuilderFlag.FP16)
            assert not config.get_flag(trt.BuilderFlag.INT8)
            if mod.version(trt.__version__) >= mod.version("8.6"):
                assert not config.get_flag(trt.BuilderFlag.FP8)
                assert not config.get_flag(trt.BuilderFlag.VERSION_COMPATIBLE)
                assert not config.get_flag(trt.BuilderFlag.EXCLUDE_LEAN_RUNTIME)
            assert config.num_optimization_profiles == 1
            assert config.int8_calibrator is None
            with contextlib.suppress(AttributeError):
                # The default tactic-source bitmask grows as TRT adds sources.
                if mod.version(trt.__version__) >= mod.version("8.5"):
                    assert config.get_tactic_sources() == 31
                elif mod.version(trt.__version__) >= mod.version("8.4"):
                    assert config.get_tactic_sources() == 15
                elif mod.version(trt.__version__) >= mod.version("8.0"):
                    assert config.get_tactic_sources() == 7
                else:
                    assert config.get_tactic_sources() == 3
            with contextlib.suppress(AttributeError):
                assert not config.get_flag(trt.BuilderFlag.OBEY_PRECISION_CONSTRAINTS)
            with contextlib.suppress(AttributeError):
                assert config.engine_capability == trt.EngineCapability.STANDARD
            with contextlib.suppress(AttributeError):
                assert not config.get_flag(trt.BuilderFlag.DIRECT_IO)

    def test_workspace_size(self, identity_builder_network):
        builder, network = identity_builder_network
        loader = CreateConfig(max_workspace_size=0)
        with loader(builder, network) as config:
            assert config.max_workspace_size == 0

    # Defined conditionally at class level: the enum values referenced in the
    # parametrization only exist on TRT >= 8.0.
    if mod.version(trt.__version__) >= mod.version("8.0"):

        @pytest.mark.parametrize(
            "engine_capability",
            [trt.EngineCapability.STANDARD, trt.EngineCapability.SAFETY, trt.EngineCapability.DLA_STANDALONE],
        )
        def test_engine_capability(self, identity_builder_network, engine_capability):
            builder, network = identity_builder_network
            loader = CreateConfig(engine_capability=engine_capability)
            with loader(builder, network) as config:
                assert config.engine_capability == engine_capability

    @pytest.mark.skipif(mod.version(trt.__version__) < mod.version("8.2"), reason="Unsupported before TRT 8.2")
    @pytest.mark.parametrize("flag", ["obey", "prefer", None])
    def test_precision_constraints(self, identity_builder_network, flag):
        builder, network = identity_builder_network
        loader = CreateConfig(precision_constraints=flag)
        with loader(builder, network) as config:
            obey_set = config.get_flag(trt.BuilderFlag.OBEY_PRECISION_CONSTRAINTS)
            prefer_set = config.get_flag(trt.BuilderFlag.PREFER_PRECISION_CONSTRAINTS)
            if flag == "obey":
                assert obey_set and not prefer_set
            elif flag == "prefer":
                assert not obey_set and prefer_set
            else:
                assert not obey_set and not prefer_set

    @pytest.mark.skipif(mod.version(trt.__version__) < mod.version("8.6"), reason="Unsupported before TRT 8.6")
    @pytest.mark.parametrize(
        "kwargs, expected_flag",
        [
            ({"version_compatible": True}, "VERSION_COMPATIBLE"),
            ({"version_compatible": True, "exclude_lean_runtime": True}, "EXCLUDE_LEAN_RUNTIME"),
        ],
    )
    def test_version_compatibility_flags(self, identity_builder_network, kwargs, expected_flag):
        builder, network = identity_builder_network
        loader = CreateConfig(**kwargs)
        with loader(builder, network) as config:
            assert config.get_flag(getattr(trt.BuilderFlag, expected_flag))

    @pytest.mark.skipif(mod.version(trt.__version__) < mod.version("8.2"), reason="Unsupported before TRT 8.2")
    def test_direct_io(self, identity_builder_network):
        builder, network = identity_builder_network
        loader = CreateConfig(direct_io=True)
        with loader(builder, network) as config:
            assert config.get_flag(trt.BuilderFlag.DIRECT_IO)

    @pytest.mark.parametrize("flag", [True, False])
    def test_strict_types(self, identity_builder_network, flag):
        builder, network = identity_builder_network
        loader = CreateConfig(strict_types=flag)
        with loader(builder, network) as config:
            assert config.get_flag(trt.BuilderFlag.STRICT_TYPES) == flag

    @pytest.mark.skipif(mod.version(trt.__version__) < mod.version("8.0.0.0"), reason="API was added in TRT 8.0")
    @pytest.mark.parametrize("flag", [True, False])
    def test_restricted(self, identity_builder_network, flag):
        builder, network = identity_builder_network
        loader = CreateConfig(restricted=flag)
        with loader(builder, network) as config:
            assert config.get_flag(trt.BuilderFlag.SAFETY_SCOPE) == flag

    @pytest.mark.parametrize("flag", [True, False])
    def test_refittable(self, identity_builder_network, flag):
        builder, network = identity_builder_network
        loader = CreateConfig(refittable=flag)
        with loader(builder, network) as config:
            assert config.get_flag(trt.BuilderFlag.REFIT) == flag

    @pytest.mark.skipif(mod.version(trt.__version__) < mod.version("7.1.0.0"), reason="API was added in TRT 7.1")
    @pytest.mark.parametrize("flag", [True, False])
    def test_tf32(self, identity_builder_network, flag):
        builder, network = identity_builder_network
        loader = CreateConfig(tf32=flag)
        with loader(builder, network) as config:
            assert config.get_flag(trt.BuilderFlag.TF32) == flag

    @pytest.mark.parametrize("flag", [True, False])
    def test_fp16(self, identity_builder_network, flag):
        builder, network = identity_builder_network
        loader = CreateConfig(fp16=flag)
        with loader(builder, network) as config:
            assert config.get_flag(trt.BuilderFlag.FP16) == flag

    @pytest.mark.skipif(mod.version(trt.__version__) < mod.version("8.6"), reason="API was added in TRT 8.6")
    @pytest.mark.parametrize("flag", [True, False])
    def test_fp8(self, identity_builder_network, flag):
        builder, network = identity_builder_network
        loader = CreateConfig(fp8=flag)
        with loader(builder, network) as config:
            assert config.get_flag(trt.BuilderFlag.FP8) == flag

    @pytest.mark.parametrize("flag", [True, False])
    def test_int8(self, identity_builder_network, flag):
        builder, network = identity_builder_network
        loader = CreateConfig(int8=flag)
        with loader(builder, network) as config:
            assert config.get_flag(trt.BuilderFlag.INT8) == flag

    @pytest.mark.parametrize("flag", [True, False])
    def test_allow_gpu_fallback(self, identity_builder_network, flag):
        builder, network = identity_builder_network
        loader = CreateConfig(allow_gpu_fallback=flag)
        with loader(builder, network) as config:
            assert config.get_flag(trt.BuilderFlag.GPU_FALLBACK) == flag

    @pytest.mark.skipif(
        mod.version(trt.__version__) < mod.version("8.0"), reason="API was not available in 7.2 and older"
    )
    @pytest.mark.parametrize("flag", [True, False])
    def test_sparse_weights(self, identity_builder_network, flag):
        builder, network = identity_builder_network
        loader = CreateConfig(sparse_weights=flag)
        with loader(builder, network) as config:
            assert config.get_flag(trt.BuilderFlag.SPARSE_WEIGHTS) == flag

    def test_use_dla(self, identity_builder_network):
        builder, network = identity_builder_network
        loader = CreateConfig(use_dla=True)
        with loader(builder, network) as config:
            assert config.default_device_type == trt.DeviceType.DLA
            if has_dla():
                assert config.DLA_core == 0

    # Class-body construction of the parametrize table; suppress(AttributeError)
    # covers TacticSource members missing on older TRT versions.
    with contextlib.suppress(AttributeError):
        if mod.version(trt.__version__) < mod.version("8.0"):
            TACTIC_SOURCES_CASES = [
                (None, 3),  # By default, all sources are enabled.
                ([], 0),
                ([trt.TacticSource.CUBLAS], 1),
                ([trt.TacticSource.CUBLAS_LT], 2),
                ([trt.TacticSource.CUBLAS, trt.TacticSource.CUBLAS_LT], 3),
            ]

        if mod.version(trt.__version__) >= mod.version("8.0"):
            TACTIC_SOURCES_CASES = [
                (None, 7),  # By default, all sources are enabled.
                ([], 0),
                ([trt.TacticSource.CUBLAS], 1),
                ([trt.TacticSource.CUBLAS_LT], 2),
                ([trt.TacticSource.CUDNN], 4),
                ([trt.TacticSource.CUBLAS, trt.TacticSource.CUBLAS_LT], 3),
                ([trt.TacticSource.CUBLAS, trt.TacticSource.CUDNN], 5),
                ([trt.TacticSource.CUBLAS_LT, trt.TacticSource.CUDNN], 6),
                ([trt.TacticSource.CUDNN, trt.TacticSource.CUBLAS, trt.TacticSource.CUBLAS_LT], 7),
            ]

        if mod.version(trt.__version__) >= mod.version("8.4"):
            TACTIC_SOURCES_CASES[0] = (None, 15)
            TACTIC_SOURCES_CASES.extend(
                [
                    (
                        [
                            trt.TacticSource.CUDNN,
                            trt.TacticSource.CUBLAS,
                            trt.TacticSource.CUBLAS_LT,
                            trt.TacticSource.EDGE_MASK_CONVOLUTIONS,
                        ],
                        15,
                    )
                ]
            )

        if mod.version(trt.__version__) >= mod.version("8.5"):
            TACTIC_SOURCES_CASES[0] = (None, 31)
            TACTIC_SOURCES_CASES.extend(
                [
                    (
                        [
                            trt.TacticSource.CUDNN,
                            trt.TacticSource.CUBLAS,
                            trt.TacticSource.CUBLAS_LT,
                            trt.TacticSource.EDGE_MASK_CONVOLUTIONS,
                            trt.TacticSource.JIT_CONVOLUTIONS,
                        ],
                        31,
                    )
                ]
            )

    @pytest.mark.parametrize("sources, expected", TACTIC_SOURCES_CASES)
    def test_tactic_sources(self, identity_builder_network, sources, expected):
        builder, network = identity_builder_network
        loader = CreateConfig(tactic_sources=sources)
        with loader(builder, network) as config:
            assert config.get_tactic_sources() == expected

    def test_calibrator_metadata_set(self, identity_builder_network):
        # The loader must propagate input metadata to the calibrator's data loader.
        builder, network = identity_builder_network
        calibrator = Calibrator(DataLoader())
        loader = CreateConfig(int8=True, calibrator=calibrator)
        with loader(builder, network) as config:
            assert config.int8_calibrator
            assert "x" in calibrator.data_loader.input_metadata
            assert calibrator.data_loader.input_metadata["x"] == MetadataTuple(
                shape=BoundedShape((1, 1, 2, 2)), dtype=np.dtype(np.float32)
            )

    def test_multiple_profiles(self, identity_builder_network):
        builder, network = identity_builder_network
        profiles = [
            Profile().add("x", (1, 2, 1, 1), (1, 2, 2, 2), (1, 2, 4, 4)),
            Profile().add("x", (1, 2, 4, 4), (1, 2, 8, 8), (1, 2, 16, 16)),
        ]
        loader = CreateConfig(profiles=profiles)
        with loader(builder, network) as config:
            assert config.num_optimization_profiles == 2

    @pytest.mark.skipif(mod.version(trt.__version__) < mod.version("8.0"), reason="Unsupported for TRT 7.2 and older")
    @pytest.mark.parametrize("path_mode", [True, False], ids=["path", "file-like"])
    def test_timing_cache(self, identity_builder_network, path_mode):
        builder, network = identity_builder_network
        with util.NamedTemporaryFile() as cache:
            loader = CreateConfig(load_timing_cache=cache.name if path_mode else cache)
            with loader(builder, network) as config:
                assert config.get_timing_cache()

    @pytest.mark.skipif(mod.version(trt.__version__) < mod.version("8.0"), reason="Unsupported for TRT 7.2 and older")
    def test_fall_back_to_empty_timing_cache(self, identity_builder_network):
        """Tests that passing in a nonexistent timing cache path is non-fatal"""
        builder, network = identity_builder_network
        with tempfile.TemporaryDirectory() as tmpdir:
            cache_name = os.path.join(tmpdir, "casper")
            loader = CreateConfig(load_timing_cache=cache_name)
            with loader(builder, network) as config:
                assert config.get_timing_cache()

    @pytest.mark.skipif(mod.version(trt.__version__) < mod.version("8.0"), reason="Unsupported for TRT 7.2 and older")
    def test_empty_timing_cache_when_default(self, identity_builder_network):
        # A freshly created default cache should serialize to the same size
        # before and after reset(), i.e. it starts out empty.
        builder, network = identity_builder_network
        loader = CreateConfig()
        with loader(builder, network) as config:
            cache = config.get_timing_cache()
            with cache.serialize() as buffer:
                cache_size = len(bytes(buffer))

            cache.reset()
            with cache.serialize() as buffer:
                new_cache_size = len(bytes(buffer))

            assert cache_size == new_cache_size

    @pytest.mark.skipif(mod.version(trt.__version__) < mod.version("8.0"), reason="Unsupported for TRT 7.2 and older")
    def test_profiling_verbosity(self, identity_builder_network):
        builder, network = identity_builder_network
        expected = trt.ProfilingVerbosity.NONE
        loader = CreateConfig(profiling_verbosity=expected)
        with loader(builder, network) as config:
            assert config.profiling_verbosity == expected

    # MemoryPoolType members may not exist on older TRT versions.
    with contextlib.suppress(AttributeError):
        POOL_LIMITS = [
            {trt.MemoryPoolType.WORKSPACE: 25},
            {trt.MemoryPoolType.DLA_MANAGED_SRAM: 25},
            {trt.MemoryPoolType.DLA_LOCAL_DRAM: 25},
            {trt.MemoryPoolType.DLA_GLOBAL_DRAM: 25},
            # Multiple limits
            {
                trt.MemoryPoolType.DLA_LOCAL_DRAM: 20,
                trt.MemoryPoolType.DLA_GLOBAL_DRAM: 25,
                trt.MemoryPoolType.WORKSPACE: 39,
            },
        ]

    @pytest.mark.skipif(
        mod.version(trt.__version__) < mod.version("8.3"), reason="Unsupported for TRT versions prior to 8.3"
    )
    @pytest.mark.parametrize("pool_limits", POOL_LIMITS)
    def test_memory_pool_limits(self, pool_limits, identity_builder_network):
        if any("dla" in key.name.lower() for key in pool_limits) and not has_dla():
            pytest.skip("DLA is not available on this system")
        builder, network = identity_builder_network
        loader = CreateConfig(memory_pool_limits=pool_limits)
        with loader(builder, network) as config:
            for pool_type, pool_size in pool_limits.items():
                assert config.get_memory_pool_limit(pool_type) == pool_size

    if mod.version(trt.__version__) >= mod.version("8.5"):

        @pytest.mark.parametrize(
            "preview_features",
            [
                [],
                [trt.PreviewFeature.FASTER_DYNAMIC_SHAPES_0805],
                [
                    trt.PreviewFeature.FASTER_DYNAMIC_SHAPES_0805,
                    trt.PreviewFeature.DISABLE_EXTERNAL_TACTIC_SOURCES_FOR_CORE_0805,
                ],
            ],
        )
        def test_preview_features(self, identity_builder_network, preview_features):
            builder, network = identity_builder_network
            loader = CreateConfig(preview_features=preview_features)
            with loader(builder, network) as config:
                # Check that only the enabled preview features are on.
                for pf in trt.PreviewFeature.__members__.values():
                    assert config.get_preview_feature(pf) == (pf in preview_features)

    @pytest.mark.skipif(
        mod.version(trt.__version__) < mod.version("8.6"), reason="Unsupported for TRT versions prior to 8.6"
    )
    @pytest.mark.parametrize("level", range(6))
    def test_builder_optimization_level(self, identity_builder_network, level):
        builder, network = identity_builder_network
        loader = CreateConfig(builder_optimization_level=level)
        with loader(builder, network) as config:
            assert config.builder_optimization_level == level

    if mod.version(trt.__version__) >= mod.version("8.6"):

        @pytest.mark.parametrize(
            "level",
            [
                trt.HardwareCompatibilityLevel.NONE,
                trt.HardwareCompatibilityLevel.AMPERE_PLUS,
            ],
        )
        def test_hardware_compatibility_level(self, identity_builder_network, level):
            builder, network = identity_builder_network
            loader = CreateConfig(hardware_compatibility_level=level)
            with loader(builder, network) as config:
                assert config.hardware_compatibility_level == level

    @pytest.mark.skipif(
        mod.version(trt.__version__) < mod.version("8.6"), reason="Unsupported for TRT versions prior to 8.6"
    )
    @pytest.mark.parametrize("num_streams", range(3))
    def test_max_aux_streams(self, identity_builder_network, num_streams):
        builder, network = identity_builder_network
        loader = CreateConfig(max_aux_streams=num_streams)
        with loader(builder, network) as config:
            assert config.max_aux_streams == num_streams
class TestPostprocessConfig:
    """Tests for the postprocess_config helper."""

    def test_with_config(self, identity_builder_network):
        # postprocess_config applied to an already-built IBuilderConfig.
        builder, network = identity_builder_network
        config = CreateConfig()(builder, network)
        assert not config.get_flag(trt.BuilderFlag.INT8)
        config = postprocess_config(
            config,
            func=lambda builder, network, config: config.set_flag(trt.BuilderFlag.INT8),
            builder=builder,
            network=network,
        )
        assert config.get_flag(trt.BuilderFlag.INT8)

    def test_with_config_callable(self, identity_builder_network):
        # postprocess_config also accepts a config *loader* (callable) rather
        # than a built config.
        builder, network = identity_builder_network
        config = CreateConfig()
        config = postprocess_config(
            config,
            func=lambda builder, network, config: config.set_flag(trt.BuilderFlag.INT8),
            builder=builder,
            network=network,
        )
        assert config.get_flag(trt.BuilderFlag.INT8)
|
class Solution:
    def findKthPositive(self, arr: List[int], k: int) -> int:
        """Return the k-th positive integer missing from arr.

        Walks the positive integers in order, skipping values present in
        arr, until k missing values have been seen.  Runs in
        O(len(arr) + answer) time.

        BUG FIX: the original computed ``largest = float('-inf')`` and then
        did ``range(1, largest + 1)``, which raises TypeError for an empty
        arr.  Using a membership set needs no "largest" bound at all, and
        also replaces the unnecessary frequency dict.
        """
        present = set(arr)
        candidate = 0
        while k > 0:
            candidate += 1
            if candidate not in present:
                k -= 1
        return candidate
|
# encoding:utf-8
'''
Created on 2017-08-14
Problem: five people sit together.  Asked his age, the fifth says he is 2
years older than the fourth; the fourth is 2 years older than the third; the
third is 2 years older than the second; the second is 2 years older than the
first; the first says he is 10.  How old is the fifth person?
@author: wangtaoyuan
'''
# Work backwards from the first person's age, adding 2 for each of the
# remaining four people.  (Python 2 script: uses the print statement.)
n = 2
i = 5
year = 10
while i > 1:
    year += 2
    i -= 1
print year
import yaml
from netmiko import ConnectHandler
from ciscoconfparse import CiscoConfParse
# Credentials file - not uploaded to Github.
file = ".netmiko.yml"
with open(file) as f:
    # BUG FIX: yaml.load() without an explicit Loader is deprecated and can
    # execute arbitrary Python tags; safe_load only constructs plain data.
    my_data = yaml.safe_load(f)

cisco4 = my_data['cisco4']
netcon = ConnectHandler(**cisco4)
# Pull the running config once.  (The original issued a second, redundant
# "sh run " whose result was immediately discarded.)
sh_run = netcon.send_command("sh run")
my_config = CiscoConfParse(sh_run.splitlines())

# Interfaces that carry an "ip address" child line.
ip_int = my_config.find_objects_w_child(parentspec=r"^interface", childspec=r"^\s+ip addres")
for intf in ip_int:  # renamed: the original loop variable shadowed builtin `int`
    print()
    print(intf.text)
    ip_address = intf.re_search_children(r"ip address")[0].text
    print(ip_address)
print()
|
# Read grid dimensions and the row/column repetition factors, then print the
# grid with every character repeated xc times and every row repeated xr times.
r, c, xr, xc = map(int, input().split())
expanded = []
for _ in range(r):
    row = input()
    stretched = "".join(ch * xc for ch in row)
    expanded.extend([stretched] * xr)
for line in expanded:
    print(line)
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth import login, get_user_model, logout, authenticate
from .forms import AddUserForm, LoginForm, UpdateUserForm, ProfileForm
from django.contrib.auth.models import Group
from django.contrib.auth import get_user_model
from django.http import HttpResponse
from django.contrib import messages
from .models import Profile, User
from course.models import Course
from accounts.decorators import allowed_users
from django.contrib.auth.decorators import login_required
def login_view(request):
    """Render the combined login/registration page and process both forms.

    The template posts a ``submit`` field whose value distinguishes the
    registration form ('signin') from the login form ('login').
    """
    login_form = LoginForm(request.POST or None)  # login form reference
    register_form = AddUserForm(request.POST or None)  # register form reference
    if request.method == "POST":
        # if the post name(submit) equals 'signin' then submit the register form
        if request.POST.get('submit') == 'signin':
            if register_form.is_valid():
                # BUG FIX: the original called save() twice (once with
                # commit=False, immediately discarded) and added the group /
                # logged in without checking authenticate()'s result, which
                # raised AttributeError when authentication returned None.
                user = register_form.save()
                group = Group.objects.get(name='Student')
                user.groups.add(group)
                raw_password = register_form.cleaned_data.get('password1')
                user = authenticate(request, email=user.email, password=raw_password)
                if user is not None:
                    login(request, user)
                    return redirect('login')
        # if the post name(submit) equals 'login' then submit the login form
        elif request.POST.get('submit') == 'login':
            if login_form.is_valid():
                email = login_form.cleaned_data.get('email')
                password = login_form.cleaned_data.get('password')
                user = authenticate(email=email, password=password)
                if user is not None:
                    login(request, user)
                    return redirect('edit-profile')
                else:
                    return redirect('login')
    contex = {
        'register_form': register_form,
        'login_form': login_form,
    }
    return render(request, 'apps/login.html', contex)
# method for registration page
# def register_view(request):
# register_form = AddUserForm(request.POST or None)
# # if the data provided in the form is valid than save the form
# if register_form.is_valid():
# user = register_form.save(commit=False)
# user = register_form.save()
# raw_password = register_form.cleaned_data.get('password1')
# user = authenticate(request, email=user.email, password=raw_password)
# group = Group.objects.get(name='Student')
# user.groups.add(group)
# if user is not None:
# login(request, user)
# return redirect('login')
# return render(request, 'apps/login.html', {'register_form': register_form})
# method for login page
# def login_view(request):
# registered_user = get_user_model()
# login_form = LoginForm(request.POST or None)
# if login_form.is_valid():
# email = login_form.cleaned_data.get('email')
# password = login_form.cleaned_data.get('password')
# user = authenticate(email=email, password=password)
# if user is not None:
# login(request, user)
# return redirect('edit-profile')
# else:
# return redirect('login')
# return render(request, 'apps/login.html', {'login_form': login_form})
#method for teacher profile where we get the logged in user and render the courses the user has uploaded
@login_required(login_url='login')
def teacher_profile_view(request):
    """Show the logged-in teacher's uploaded courses, newest first."""
    courses = Course.objects.filter(user=request.user).order_by('-created')
    return render(request, "apps/teacherProfile.html", {'teacher_course': courses})
#method for user profile page
@login_required(login_url='login')
def create_profile_view(request):
    """Render and process the profile-edit form for the logged-in user."""
    if request.method == 'POST':
        # Bind both forms to the submitted data; ProfileForm also takes files
        # (e.g. an avatar upload).
        user_form = UpdateUserForm(request.POST, instance=request.user)
        create_form = ProfileForm(request.POST, request.FILES, instance=request.user.profile)
        if user_form.is_valid() and create_form.is_valid():
            create_form.save()
            user_form.save()
            return redirect('teacher-profile')
    else:
        # Unbound forms pre-filled from the current user/profile.
        user_form = UpdateUserForm(instance=request.user)
        create_form = ProfileForm(instance=request.user.profile)
    return render(
        request,
        'apps/editProfile.html',
        {'create_form': create_form, 'user_form': user_form},
    )
|
import streamlit as st
import time
st.title('Streamlit 入門')
st.write('プレグレスバーの表示')

# A bare expression is rendered to the page by Streamlit's "magic" feature.
'Start!!'

# Placeholder text element plus a progress bar, updated once per step.
latest_iteration = st.empty()
bar = st.progress(0)
for i in range(100):
    latest_iteration.text(f'Iteration {i+1}')
    bar.progress(i + 1)
    time.sleep(0.1)

# NOTE(review): beta_columns/beta_expander are pre-1.0 Streamlit APIs,
# renamed to st.columns/st.expander in later releases — confirm the installed
# Streamlit version before upgrading.
left_column, right_column = st.beta_columns(2)
button = left_column.button('右カラムに文字を表示')
if button:
    right_column.write('ここは右カラム')

expander1 = st.beta_expander('問い合わせ1')
expander1.write('問い合わせ1の回答')
expander2 = st.beta_expander('問い合わせ2')
expander2.write('問い合わせ2の回答')

# text = st.text_input('あなたの趣味を教えてください。')
# condition = st.slider('あなたの今の調子は?', 0, 100, 50)
# 'あなたの趣味:', text, 'です'
# 'コンディション:', condition
# if st.checkbox('Show Image'):
#     img = Image.open('001.jpg')
#     st.image(img, caption='sample', use_column_width=True)
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Приемы векторизации в Gensim представляют особый интерес, т.к. библиотека позволяет сохранять и загружать
корпусы с диска, отделяя их от конвейера обработки.
Текущий преобразователь будет обертывать объект Dictionary из Gensim, сгенерированный методом fit, чей метод doc2bow
используется методом transform().
Объект Dictionary (например, TfidfModel) можно сохранить на диск и загрузить с диска, поэтому текущий преобразователь
тоже будет пользоваться такой возможностью. Путь сохранения будет определяться при создании экземпляра (инит).
"""
import os
from gensim.corpora import Dictionary
from gensim.matutils import sparse2full
from sklearn.base import BaseEstimator, TransformerMixin
class GensimVectorizer(BaseEstimator, TransformerMixin):
    """Scikit-learn style vectorizer wrapping a gensim ``Dictionary``.

    The Dictionary built by :meth:`fit` is persisted to ``path`` (when given)
    so the transformer can later be loaded from disk, decoupling the corpus
    from the processing pipeline.
    """

    def __init__(self, path=None):
        # Optional on-disk location of the fitted Dictionary.  When given and
        # the file exists, the Dictionary is loaded immediately so transform()
        # can be used without refitting.
        self.path = path
        self.id2word = None
        self.load()

    def load(self):
        """Load a previously saved Dictionary from ``self.path``, if any."""
        # BUG FIX: guard against path=None — the original passed None straight
        # to os.path.exists(), which raises TypeError, so the documented
        # default (path=None) made the class unconstructible.
        if self.path is not None and os.path.exists(self.path):
            self.id2word = Dictionary.load(self.path)

    def save(self):
        """Persist the fitted Dictionary to ``self.path``, if one was given."""
        # BUG FIX: skip persistence when no path was configured instead of
        # crashing inside gensim with a None filename.
        if self.path is not None:
            self.id2word.save(self.path)

    def fit(self, documents, labels=None):
        """Build the Dictionary from tokenized documents and persist it.

        The instance is saved to disk right away so it can be reloaded later
        without retraining.
        """
        self.id2word = Dictionary(documents)
        self.save()
        return self

    def transform(self, documents):
        """Yield one dense vector per document.

        ``Dictionary.doc2bow`` returns a sparse list of (token_id, frequency)
        tuples; ``sparse2full`` converts it to a dense NumPy array so the
        output plays nicely with Scikit-Learn.
        """
        for document in documents:
            docvec = self.id2word.doc2bow(document)
            yield sparse2full(docvec, len(self.id2word))
|
import datetime
import dash
import dash_core_components as dcc
import dash_html_components as html
import plotly
from dash.dependencies import Input, Output
from opcua import Client
# Connect to a locally running OPC UA server and browse to the demo parameters.
opc_client = Client("opc.tcp://localhost:4840/server/")
opc_client.connect()
opc_root = opc_client.get_root_node()

# parameters
temp_param = opc_root.get_child(["0:Objects", "2:Parameters", "2:Temperature"])
pres_param = opc_root.get_child(["0:Objects", "2:Parameters", "2:Pressure"])
time_param = opc_root.get_child(["0:Objects", "2:Parameters", "2:Time"])

external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
app.layout = html.Div(
    html.Div([
        html.H4('OPC UA demo'),
        html.Div(id='live-update-text'),
        # Fires the callback below once per second.
        dcc.Interval(
            id='interval-component',
            interval=1*1000, # in milliseconds
            n_intervals=0
        )
    ])
)


@app.callback(Output('live-update-text', 'children'),
              [Input('interval-component', 'n_intervals')])
def update_metrics(n):
    # Poll the three OPC UA parameters on every interval tick and render them.
    temperature = temp_param.get_value()
    pressure = pres_param.get_value()
    time_value = time_param.get_value()
    style = {'padding': '5px', 'fontSize': '16px'}
    return [
        html.Div('Temperature: {0:.2f}'.format(temperature), style=style),
        html.Div('Pressure: {0:.2f}'.format(pressure), style=style),
        html.Div('Time: {0:0.2f}'.format(time_value), style=style)
    ]


if __name__ == '__main__':
    app.run_server(debug=True)
|
from django.shortcuts import redirect, HttpResponse
def indice(request):
    """Placeholder view: will later list all blogs."""
    # Renamed parameter Request -> request for PEP 8 / consistency with create().
    return HttpResponse("marcador de posición para luego mostrar una lista de todos los blogs")
def nuevo(request):
    """Placeholder view: will later show the new-blog form."""
    # Renamed parameter Request -> request for PEP 8 / consistency with create().
    return HttpResponse("marcador de posición para mostrar un nuevo formulario para crear un nuevo blog")
def create(request):
    # Placeholder: after creating a blog, just redirect to the index.
    return redirect("/")
def show(request, my_val):
    """Placeholder view: will later display blog number ``my_val``."""
    # Renamed parameter Request -> request for PEP 8 / consistency with create().
    return HttpResponse(f"marcador de posicion para mostrar el numero de blog: {my_val}")
def editar(request):
    """Placeholder view: will later edit a blog."""
    # Renamed parameter Request -> request for PEP 8 / consistency with create().
    return HttpResponse("marcador de posición para editar el blog")
def destruir(request):
    """Placeholder view: after deleting a blog, redirect to the index."""
    # Renamed parameter Request -> request for PEP 8 / consistency with create().
    return redirect("/")
# Create your views here.
|
import os
from fuzzyparsers import parse_date
import dateutil.parser as dparser
import datetime
import json
# Walk every .json file under col_sim/ and fuzzy-parse the first date-like
# entity of each linked article.
count = 0
for path, dirs, files in os.walk('col_sim'):
    for fname in files:
        if not fname.endswith('.json'):
            continue
        to_read = os.path.join(path, fname)
        # Renamed handle: the original reused `f` for both the filename and
        # the open file object.
        with open(to_read, 'r') as fh:
            doc = json.loads(fh.read())
        for k, news in doc.items():
            for vv in news.get('links'):
                entities = vv.get('entities')
                transformed_date1 = None
                for entity in entities:
                    try:
                        # BUG FIX: parse each entity string — the original
                        # passed the whole `entities` list, which always
                        # raised and was swallowed by a bare except, so no
                        # date was ever extracted.
                        transformed_date1 = dparser.parse(entity, fuzzy=True)
                        break
                    except (ValueError, OverflowError, TypeError):
                        # Not a parseable date — try the next entity.
                        pass
                #print(transformed_date1)
                #print(vv.get('title'))
                #print(vv.get('text'))
                #print('******')
                count += 1
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2018 yuqilin <yuqilin1228@gmail.com>
#
"""
"""
import csv
import getopt
import matplotlib.pyplot as plt
import os
import sys
# from datetime import datetime
# Usage: mem_graph.py <input.csv> — plots memory trendlines and writes the
# figure next to the input as <input>.png.
opts, args = getopt.getopt(sys.argv[1:], "")
input_file = args[0]
output_file = os.path.splitext(input_file)[0] + ".png"
# print input_file, output_file

csv_file = input_file #os.path.join(os.path.dirname(__file__), input_file)
figure_file = output_file #os.path.join(os.path.dirname(__file__), "mem_graph.png")

# mem_info = {
#     'java_heap' : [],
#     'native_heap' : [],
#     'code' : [],
#     'stack' : [],
#     'graphics' : [],
#     'private_other' : [],
#     'system' : [],
#     'total' : []
# }

# One list per CSV column; only a subset is actually plotted below.
time = []
java_heap = []
native_heap = []
code = []
stack = []
graphics = []
private_other = []
system = []
total = []
with open(csv_file,'r') as csvfile:
    plots = csv.reader(csvfile, delimiter=',')
    # Skip the header row.
    next(plots, None)
    for row in plots:
        # data = row.split()
        # time.append(datetime.strptime(row[0], '%H:%M:%S'))
        # Column layout assumed: time, java_heap, native_heap, code, stack,
        # graphics, private_other, system, total — inferred from the
        # variable names; TODO confirm against the CSV producer.
        time.append(row[0])
        java_heap.append(float(row[1]))
        native_heap.append(float(row[2]))
        # code.append(row[3])
        # stack.append(float(row[4]))
        graphics.append(float(row[5]))
        # private_other.append(float(row[6]))
        # system.append(float(row[7]))
        total.append(float(row[8]))

# print time
# print java_heap
# print native_heap
# print graphics
# print total

fig = plt.figure(figsize=(16,9), dpi=120)
plt.plot(time, java_heap, label='java_heap')
plt.plot(time, native_heap, label='native_heap')
# plt.plot(time, code, label='code')
# # plt.plot(time, stack, label='stack')
plt.plot(time, graphics, label='graphics')
# # plt.plot(time, private_other, label='private_other')
# # plt.plot(time, system, label='system')
plt.plot(time, total, label='total')
plt.xlabel('Time')
plt.ylabel('Memory (MB)')
plt.title('Trendline')
plt.xticks(rotation=90)
plt.legend(loc=1)
# plt.legend(bbox_to_anchor=(1, 1))
plt.tight_layout()
# plt.show()
plt.savefig(figure_file)
|
"""
Given a non-negative integer numRows, generate the first numRows of Pascal's triangle.
"""
class Solution:
    def generate(self, numRows: int) -> list:
        """Build the first numRows rows of Pascal's triangle.

        Iterative construction: each interior value is the sum of the two
        values above it in the previous row.
        """
        rows = []
        for size in range(1, numRows + 1):
            if size == 1:
                rows.append([1])
                continue
            prev = rows[-1]
            middle = [prev[j] + prev[j + 1] for j in range(size - 2)]
            rows.append([1] + middle + [1])
        return rows
|
# Python object-oriented programming (2)
class Ironmen:
    '''This is a class called Ironmen''' # Doc string
    def __init__(self, group, participants):
        self.group = group
        self.participants = participants

# Attributes are accessed with a dot after the object name.
modern_web = Ironmen("Modern Web", 54)
print(modern_web)
print(modern_web.group)
print(modern_web.participants)
print(modern_web.__doc__)
print(dir(modern_web))

# -------
# Defining a method
class Ironmen:
    '''This is a class called Ironmen''' # Doc string
    def __init__(self, group, participants):
        self.group = group
        self.participants = participants
    def print_info(self):
        print(self.group, "組有", self.participants, "位鐵人參賽!")

# Create an object modern_web from the Ironmen class
modern_web = Ironmen("Modern Web", 54)
# Create an object dev_ops from the Ironmen class
dev_ops = Ironmen("DevOps", 8)
# Call modern_web's print_info() method
modern_web.print_info()
# Call dev_ops's print_info() method
dev_ops.print_info()

# -----
# Inheritance
class Ironmen:
    '''This is a class called Ironmen''' # Doc string
    def __init__(self, group, participants):
        self.group = group
        self.participants = participants
    def print_info(self):
        print(self.group, "組有", self.participants, "位鐵人參賽!")

# --------
# The Articles class inherits from the Ironmen class
class Articles(Ironmen):
    '''
    This is a class called Articles.
    Articles inherits from Ironmen and adds a print_articles() method.
    '''
    def print_articles(self):
        print(self.group, "組預計會有", self.participants * 30, "篇文章!")

# Create an object modern_web from the Articles class
modern_web = Articles("Modern Web", 54)
# Call modern_web's print_articles() method
modern_web.print_articles()
# Check that modern_web still has the print_info() method
modern_web.print_info()

# ------
# Inheritance: super()
# Build new attributes or methods on top of the inherited ones.
class OnlyGroup:
    '''This is a class called OnlyGroup''' # Doc string
    def __init__(self, group):
        self.group = group

# The Ironmen class inherits from the OnlyGroup class
class Ironmen(OnlyGroup):
    '''This is a class called Ironmen''' # Doc string
    def __init__(self, group, participants):
        super().__init__(group)
        self.participants = participants

# Create an object modern_web from the Ironmen class
modern_web = Ironmen("Modern Web", 54)
# Print modern_web's two attributes
print(modern_web.group)
print(modern_web.participants)

# --------
# Overriding a method when inheriting
# Earlier we added print_articles() by inheritance; now override Ironmen's
# print_info() method inside the Articles class.
class Ironmen:
    '''This is a class called Ironmen''' # Doc string
    def __init__(self, group, participants):
        self.group = group
        self.participants = participants
    def print_info(self):
        print(self.group, "組有", self.participants, "位鐵人參賽!")

# The Articles class inherits from the Ironmen class
class Articles(Ironmen):
    '''
    This is a class called Articles.
    Articles inherits from Ironmen and adds a print_articles() method.
    '''
    def print_articles(self):
        print(self.group, "組預計會有", self.participants * 30, "篇文章!")
    # Override the print_info() method
    def print_info(self):
        print(self.group, "組有", self.participants, "位鐵人參賽!p.s.我被改寫了!")

# Create an object modern_web from the Articles class
modern_web = Articles("Modern Web", 54)
# Check that modern_web's print_info() method has been overridden
modern_web.print_info()
|
# Project Euler #14 style search: find the number below one million that
# produces the longest Collatz chain.  (Python 2 script: xrange, print
# statement, and integer division via /=.)
solution_chain = []
solution = 0
for x in xrange(1, 1000000):
    print x
    chain_length = []
    test_num = x
    while test_num != 1:
        # Collatz step: halve even numbers, otherwise 3n + 1.
        if test_num % 2 == 0:
            test_num /= 2
            chain_length.append(test_num)
        else:
            test_num = test_num * 3 + 1
            chain_length.append(test_num)
    # Keep the longest chain seen so far and its starting number.
    if len(chain_length) > len(solution_chain):
        solution_chain = chain_length
        solution = x
print """The solution is %d.
The chain is %s.
The chain length is %d.""" % (solution, solution_chain, len(solution_chain))
|
# -*- encoding: utf-8 -*-
# Flask application entry point: selects a configuration class from the
# APP_SETTINGS environment variable and wires up migrations.
import os
from flask_migrate import Migrate
from sys import exit
from decouple import config

from config import config_dict
from app import create_app, db

# APP_SETTINGS picks one of the config_dict entries.
config_mode = config('APP_SETTINGS')
try:
    app_config = config_dict[config_mode]
except KeyError:
    exit('Error: Invalid <config_mode>. Expected values [Testing, Development, Staging, Production] ')

app = create_app(app_config)
Migrate(app, db)

if __name__ == "__main__":
    # PORT environment variable overrides the default port 8000.
    app.run(port=(os.getenv('PORT') if os.getenv('PORT') else 8000), debug=True)
|
class BuyingTshirts(object):
    """Counts days on which a T-shirt can be bought from both budgets."""

    # Removed the boilerplate __init__ that only called object.__init__(self);
    # the implicit default constructor is identical.

    def meet(self, T, Q, P):
        """Count the days on which both running totals reach the price T.

        Q and P are per-day amounts saved toward two budgets.  Whenever a
        running total reaches T it is reduced modulo T (a shirt is bought,
        change is kept).  A day counts only when BOTH totals reach T.
        """
        result = 0
        total_q = total_p = 0
        for q, p in zip(Q, P):
            total_p += p
            total_q += q
            if total_p >= T and total_q >= T:
                result += 1
            # Spend down to the remainder whenever a budget reaches T.
            if total_p >= T:
                total_p %= T
            if total_q >= T:
                total_q %= T
        return result
def main():
    # Demo run for the sample case (Python 2 print statement).
    obj = BuyingTshirts()
    print obj.meet(2, (1,2,1,2,1,2,1,2), (1,1,1,1,2,2,2,2))

if __name__ == '__main__':
    main()
|
import six
import sys
# HACK: decision-tree-id3 imports sklearn.externals.six, which modern
# scikit-learn no longer bundles; alias the standalone six module in its place
# BEFORE importing id3.
sys.modules['sklearn.externals.six'] = six
from id3 import Id3Estimator
from id3 import export_graphviz
import pandas as pd

col_names = ['pfuncaffec','workfaffec','severity','custsaffec','priority']
# load dataset
bugdtls = pd.read_csv("data121a.csv", header=None, names=col_names)

#split dataset in features and target variable
feature_cols = ['pfuncaffec','workfaffec','severity','custsaffec']
X = bugdtls[feature_cols] # Features
y = bugdtls.priority # Target variable

#bunch = load_breast_cancer()
#print(bunch)

# Fit the ID3 decision tree and export it in Graphviz .dot format.
estimator = Id3Estimator()
estimator.fit(X, y)
export_graphviz(estimator.tree_, 'tree4.dot', feature_names = X.columns)
print("The dot file is successfully generated")
#Command to generate the tree in a .pdf: dot -Tpdf tree1.dot -o tree1.pdf
#https://pypi.org/project/decision-tree-id3/
import unittest
import argon2
class Argon2Test(unittest.TestCase):
    """Regression tests pinning argon2_hash output to known byte strings."""

    def test_simple_call(self):
        # Default parameters; only password and salt supplied.
        actual = argon2.argon2_hash("password", "some_salt", )
        self.assertEqual(
            actual,
            (b'\xa8&x\xc7\xd9\xc4\x1f\xdf[2\xd9hq\xab\xe5\xb4WV\x89\xca\xa4\xee\xb3\x98\xf1I\xd1'
             b'\xdaf\xe7=\xfaA\x04\xeb\xe1\xfd\x94)\xad\x84\r\x9ed<8xE\xc3\xd3\xfb\x13\xcbN\xcf\\}'
             b'\xfd-9\x8b\x07@\xd8\x10\x1a\x83\x05\xd5\xfd\xc4m\x9f\xd7\x81\xdcX\x87\xb2\x02\xa9R'
             b'\xc1\x9d\xaf6\xbb\x8c\xe1vH+\x07\xc7Y\x80\xb3\xb5\xf8\xba\xbd\x87\xd8\xf5\xea\x1a'
             b'\x04V&\xf7\xde\x9b\x93\x8dbQ\x91e\xf6\xd6\xa2\xd8G8\xe3\x9a\x03\xf3'))

    def test_specified_call(self):
        # All tunables supplied explicitly: iterations t, memory m,
        # parallelism p, output length buflen, and the Argon2i variant.
        actual = argon2.argon2_hash(password="some password", salt="some salt", t=16, m=8, p=1,
                                    buflen=128, argon_type=argon2.Argon2Type.Argon2_i)
        self.assertEqual(
            actual,
            (b"\x1f>\xe0\xb8\x88%\xef\xee\xb3n\\\xb85\x03\x14\xb8\xb8O\x02Zk\xbf<\xd5\xa0C\xf2,p"
             b"\x00\xda\xd7Bc\xa71\x84\x10\x00\x8cx'\xec?Q\x8499\x9b\xd4)\xf1\x98F\x13!\x8bB\x12!"
             b"\xc3U\x8d\x9a\xb5\x10\x8cIo\xd2p\xcd'\x8c\x96d\xa5?{\x1d*\xaf\xab\x99\x9e\xe9c\xa4"
             b"\xb7\xb2\x00\xfa\x82\x96/\xdei_1Nun\x92j\n\xf3D#\x05\tj\xa2\x92\xd5\xf4nym\xd1Kq"
             b"\xa1|\xd19\xa9Q8"))
|
#!/usr/bin/env python3
## NOTE : This script doesn't work with python2, it's gonna throw a syntax error
## because of the formated strings. Happy Cracking :)
import hashlib
import sys
import argparse
import time
BLUE = "\033[34m"
GREEN = "\033[0;32m"
RED = "\033[1;31m"
RESET = "\033[0;0m"
YELLOW = "\033[33m"
print(BLUE + """\n\n
+------------------------------------------------+
| Coded by : L |
| |
| https://github.com/shellbr3ak?tab=repositories |
+------------------------------------------------+
____ _ _ _ ____ _
/ ___|| |__ ___| | | __ ) _ __ ___ __ _| | __
\___ \| '_ \ / _ \ | | _ \| '__/ _ \/ _` | |/ /
___) | | | | __/ | | |_) | | | __/ (_| | <
|____/|_| |_|\___|_|_|____/|_| \___|\__,_|_|\_|
offensive python
----------------
""" + RESET)
parser = argparse.ArgumentParser(description="Example : python3 hasher.py -m algo -f hashfile -w wordlist")
parser.add_argument("-m", dest="algorithm", help="The Hashing algorithm")
parser.add_argument("-f", dest="hashfile", help="The File Contains the hash")
parser.add_argument("-w", dest="wordlist", help="The dictionary file")
parsed_args = parser.parse_args()
def get_hash(password, algorithm):
    """Return the hex digest of *password* under the named algorithm
    (md5 / sha1 / sha256 / sha512, or anything hashlib.new accepts)."""
    return hashlib.new(algorithm, password.encode()).hexdigest()
try:
    algo = parsed_args.algorithm
    hashfile = parsed_args.hashfile
    wordlist = parsed_args.wordlist
    # Target hash: whole file contents with surrounding whitespace stripped.
    hpass = open(hashfile).read().strip()
    # latin1 decodes any byte sequence, so odd wordlist bytes never raise.
    passwords = open(wordlist, encoding="latin1").read().splitlines()
    print(f"\nCracking Tha Hash " + YELLOW + f"{hpass}" + RESET)
    time.sleep(2)
    for password in passwords:
        #time.sleep(.2)
        #print(YELLOW + "[+]" + RESET + f" Trying password : {password}")
        if hpass == get_hash(password,algo):
            print(GREEN + f"Hash Cracked : {password}" + RESET)
            break
    else:
        # for/else: reached only when no candidate matched (loop not broken).
        print(RED + "Word Not Found!" + RESET)
except KeyboardInterrupt:
    print(RED + "You Stopped The Script" + RESET)
    pass
except TypeError:
    # Missing CLI options leave the attributes as None, so open(None) raises
    # TypeError; show the usage description instead of a traceback.
    print(RED + parser.description + RESET)
    pass
|
import RPi.GPIO as GPIO
# Use Broadcom (BCM) pin numbering and silence re-use warnings.
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
def zero(segments):
    """Drive the seven segment pins to show '0' (0 appears to mean lit,
    i.e. active-low wiring -- confirm against the display datasheet)."""
    levels = (0, 0, 0, 0, 0, 0, 1)
    for idx in range(7):
        GPIO.output(segments[idx], levels[idx])
def onee(segments):
    """Drive the seven segment pins to show '1' (0 = lit, active-low)."""
    levels = (1, 0, 0, 1, 1, 1, 1)
    for idx in range(7):
        GPIO.output(segments[idx], levels[idx])
def two(segments):
    """Drive the seven segment pins to show '2' (0 = lit, active-low)."""
    levels = (0, 0, 1, 0, 0, 1, 0)
    for idx in range(7):
        GPIO.output(segments[idx], levels[idx])
def three(segments):
    """Drive the seven segment pins to show '3' (0 = lit, active-low)."""
    levels = (0, 0, 0, 0, 1, 1, 0)
    for idx in range(7):
        GPIO.output(segments[idx], levels[idx])
def four(segments):
    """Drive the seven segment pins to show '4' (0 = lit, active-low)."""
    levels = (1, 0, 0, 1, 1, 0, 0)
    for idx in range(7):
        GPIO.output(segments[idx], levels[idx])
def five(segments):
    """Drive the seven segment pins to show '5' (0 = lit, active-low)."""
    levels = (0, 1, 0, 0, 1, 0, 0)
    for idx in range(7):
        GPIO.output(segments[idx], levels[idx])
def six(segments):
    """Drive the seven segment pins to show '6' (0 = lit, active-low)."""
    levels = (0, 1, 0, 0, 0, 0, 0)
    for idx in range(7):
        GPIO.output(segments[idx], levels[idx])
def seven(segments):
    """Drive the seven segment pins to show '7' (0 = lit, active-low)."""
    levels = (0, 0, 0, 1, 1, 1, 1)
    for idx in range(7):
        GPIO.output(segments[idx], levels[idx])
def eight(segments):
    """Drive the seven segment pins to show '8': every segment on
    (0 = lit, active-low)."""
    levels = (0, 0, 0, 0, 0, 0, 0)
    for idx in range(7):
        GPIO.output(segments[idx], levels[idx])
def nine(segments):
    """Drive the seven segment pins to show '9' (0 = lit, active-low)."""
    levels = (0, 0, 0, 0, 1, 0, 0)
    for idx in range(7):
        GPIO.output(segments[idx], levels[idx])
|
#
# QAPI command marshaller generator
#
# Copyright IBM, Corp. 2011
# Copyright (C) 2014-2016 Red Hat, Inc.
#
# Authors:
# Anthony Liguori <aliguori@us.ibm.com>
# Michael Roth <mdroth@linux.vnet.ibm.com>
# Markus Armbruster <armbru@redhat.com>
#
# This work is licensed under the terms of the GNU GPL, version 2.
# See the COPYING file in the top-level directory.
from qapi import *
def gen_command_decl(name, arg_type, boxed, ret_type):
    """Generate the C prototype of the qmp_<name>() handler the user implements.

    Void return when the command has no return type; parameters are built
    from the argument type plus a trailing Error **errp.
    """
    return mcgen('''
%(c_type)s qmp_%(c_name)s(%(params)s);
''',
                 c_type=(ret_type and ret_type.c_type()) or 'void',
                 c_name=c_name(name),
                 params=build_params(arg_type, boxed, 'Error **errp'))
def gen_call(name, arg_type, boxed, ret_type):
    """Generate the C statements that invoke qmp_<name>() and, when the
    command returns a value, marshal that value into the QObject reply."""
    ret = ''
    argstr = ''
    if boxed:
        # Boxed commands take the whole argument struct by pointer.
        assert arg_type and not arg_type.is_empty()
        argstr = '&arg, '
    elif arg_type:
        # Unboxed: pass each member (optionals get a has_<name> flag first).
        assert not arg_type.variants
        for memb in arg_type.members:
            if memb.optional:
                argstr += 'arg.has_%s, ' % c_name(memb.name)
            argstr += 'arg.%s, ' % c_name(memb.name)
    lhs = ''
    if ret_type:
        lhs = 'retval = '
    ret = mcgen('''
    %(lhs)sqmp_%(c_name)s(%(args)s&err);
''',
                c_name=c_name(name), args=argstr, lhs=lhs)
    if ret_type:
        ret += mcgen('''
    if (err) {
        goto out;
    }
    qmp_marshal_output_%(c_name)s(retval, ret, &err);
''',
                     c_name=ret_type.c_name())
    return ret
def gen_marshal_output(ret_type):
    """Generate the static helper that converts a command's C return value
    into a QObject (output visit), then frees it with the dealloc visitor.

    Emitted once per distinct return type.
    """
    return mcgen('''
static void qmp_marshal_output_%(c_name)s(%(c_type)s ret_in, QObject **ret_out, Error **errp)
{
    Error *err = NULL;
    Visitor *v;
    v = qobject_output_visitor_new(ret_out);
    visit_type_%(c_name)s(v, "unused", &ret_in, &err);
    if (!err) {
        visit_complete(v, ret_out);
    }
    error_propagate(errp, err);
    visit_free(v);
    v = qapi_dealloc_visitor_new();
    visit_type_%(c_name)s(v, "unused", &ret_in, NULL);
    visit_free(v);
}
''',
                 c_type=ret_type.c_type(), c_name=ret_type.c_name())
def build_marshal_proto(name):
    """Return the C prototype (without trailing ';') of the marshaller
    generated for command *name*."""
    signature = 'void qmp_marshal_%s(QDict *args, QObject **ret, Error **errp)'
    return signature % c_name(name)
def gen_marshal_decl(name):
    """Generate the header declaration of the qmp_marshal_<name>() function."""
    return mcgen('''
%(proto)s;
''',
                 proto=build_marshal_proto(name))
def gen_marshal(name, arg_type, boxed, ret_type):
    """Generate the qmp_marshal_<name>() definition.

    The generated C function: unpacks the QDict arguments with an input
    visitor, calls the qmp_<name>() handler (via gen_call), marshals the
    return value, and finally tears the argument struct down with the
    dealloc visitor.  When the command takes no arguments the visitor code
    is wrapped in `if (args) { ... }` and push_indent()/pop_indent() shift
    the shared template one level deeper.
    """
    have_args = arg_type and not arg_type.is_empty()
    ret = mcgen('''
%(proto)s
{
    Error *err = NULL;
''',
                proto=build_marshal_proto(name))
    if ret_type:
        ret += mcgen('''
    %(c_type)s retval;
''',
                     c_type=ret_type.c_type())
    if have_args:
        visit_members = ('visit_type_%s_members(v, &arg, &err);'
                         % arg_type.c_name())
        ret += mcgen('''
    Visitor *v;
    %(c_name)s arg = {0};
''',
                     c_name=arg_type.c_name())
    else:
        # No argument struct: only create a visitor if a dict was passed.
        visit_members = ''
        ret += mcgen('''
    Visitor *v = NULL;
    if (args) {
''')
        push_indent()
    ret += mcgen('''
    v = qobject_input_visitor_new(QOBJECT(args));
    visit_start_struct(v, NULL, NULL, 0, &err);
    if (err) {
        goto out;
    }
    %(visit_members)s
    if (!err) {
        visit_check_struct(v, &err);
    }
    visit_end_struct(v, NULL);
    if (err) {
        goto out;
    }
''',
                visit_members=visit_members)
    if not have_args:
        pop_indent()
        ret += mcgen('''
    }
''')
    ret += gen_call(name, arg_type, boxed, ret_type)
    ret += mcgen('''
out:
    error_propagate(errp, err);
    visit_free(v);
''')
    # Second pass: free the unpacked argument struct with the dealloc visitor.
    if have_args:
        visit_members = ('visit_type_%s_members(v, &arg, NULL);'
                         % arg_type.c_name())
    else:
        visit_members = ''
        ret += mcgen('''
    if (args) {
''')
        push_indent()
    ret += mcgen('''
    v = qapi_dealloc_visitor_new();
    visit_start_struct(v, NULL, NULL, 0, NULL);
    %(visit_members)s
    visit_end_struct(v, NULL);
    visit_free(v);
''',
                visit_members=visit_members)
    if not have_args:
        pop_indent()
        ret += mcgen('''
    }
''')
    ret += mcgen('''
}
''')
    return ret
def gen_register_command(name, success_response):
    """Generate one qmp_register_command() call for the registry function.

    Commands declared with 'success-response': false are registered with
    QCO_NO_SUCCESS_RESP; everything else gets QCO_NO_OPTIONS.
    """
    opts = 'QCO_NO_OPTIONS' if success_response else 'QCO_NO_SUCCESS_RESP'
    return mcgen('''
    qmp_register_command(cmds, "%(name)s",
                         qmp_marshal_%(c_name)s, %(opts)s);
''',
                 name=name, c_name=c_name(name),
                 opts=opts)
def gen_registry(registry):
    """Wrap the accumulated qmp_register_command() calls (*registry*) in the
    qmp_init_marshal() function definition.

    NOTE: reads the module-level global ``prefix`` set by parse_command_line().
    """
    ret = mcgen('''
void %(c_prefix)sqmp_init_marshal(QmpCommandList *cmds)
{
    QTAILQ_INIT(cmds);
''',
                c_prefix=c_name(prefix, protect=False))
    ret += registry
    ret += mcgen('''
}
''')
    return ret
class QAPISchemaGenCommandVisitor(QAPISchemaVisitor):
    """Schema visitor that accumulates the generated C text:
    .decl for qmp-commands.h, .defn for qmp-marshal.c, and the registry
    of qmp_register_command() calls appended to .defn at visit_end."""

    def __init__(self):
        self.decl = None                  # header declarations (str once visiting starts)
        self.defn = None                  # marshaller definitions
        self._regy = None                 # accumulated registry entries
        self._visited_ret_types = None    # return types whose output marshaller exists

    def visit_begin(self, schema):
        self.decl = ''
        self.defn = ''
        self._regy = ''
        self._visited_ret_types = set()

    def visit_end(self):
        # Close out: emit qmp_init_marshal() from the collected registry.
        self.defn += gen_registry(self._regy)
        self._regy = None
        self._visited_ret_types = None

    def visit_command(self, name, info, arg_type, ret_type,
                      gen, success_response, boxed):
        if not gen:
            return
        self.decl += gen_command_decl(name, arg_type, boxed, ret_type)
        # Emit the output marshaller only once per distinct return type.
        if ret_type and ret_type not in self._visited_ret_types:
            self._visited_ret_types.add(ret_type)
            self.defn += gen_marshal_output(ret_type)
        self.decl += gen_marshal_decl(name)
        self.defn += gen_marshal(name, arg_type, boxed, ret_type)
        self._regy += gen_register_command(name, success_response)
(input_file, output_dir, do_c, do_h, prefix, opts) = parse_command_line()
c_comment = '''
/*
* schema-defined QMP->QAPI command dispatch
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <aliguori@us.ibm.com>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
'''
h_comment = '''
/*
* schema-defined QAPI function prototypes
*
* Copyright IBM, Corp. 2011
*
* Authors:
* Anthony Liguori <aliguori@us.ibm.com>
*
* This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
* See the COPYING.LIB file in the top-level directory.
*
*/
'''
(fdef, fdecl) = open_output(output_dir, do_c, do_h, prefix,
'qmp-marshal.c', 'qmp-commands.h',
c_comment, h_comment)
fdef.write(mcgen('''
#include "qemu/osdep.h"
#include "qemu-common.h"
#include "qemu/module.h"
#include "qapi/qmp/types.h"
#include "qapi/visitor.h"
#include "qapi/qobject-output-visitor.h"
#include "qapi/qobject-input-visitor.h"
#include "qapi/dealloc-visitor.h"
#include "%(prefix)sqapi-types.h"
#include "%(prefix)sqapi-visit.h"
#include "%(prefix)sqmp-commands.h"
''',
prefix=prefix))
fdecl.write(mcgen('''
#include "%(prefix)sqapi-types.h"
#include "qapi/qmp/qdict.h"
#include "qapi/qmp/dispatch.h"
#include "qapi/error.h"
void %(c_prefix)sqmp_init_marshal(QmpCommandList *cmds);
''',
prefix=prefix, c_prefix=c_name(prefix, protect=False)))
schema = QAPISchema(input_file)
gen = QAPISchemaGenCommandVisitor()
schema.visit(gen)
fdef.write(gen.defn)
fdecl.write(gen.decl)
close_output(fdef, fdecl)
|
print("***PygLatin Translator***")
# Seed value: purely alphabetic so the while-loop below runs its first pass.
word = 'empty'
def trans(w):
    """Pig-latin translate one word: move its first letter to the end and
    append 'ay' (e.g. 'hello' -> 'ellohay')."""
    return w[1:] + w[0] + 'ay'
# Keep translating while the previous word was purely alphabetic; a
# non-alphabetic entry ends the loop, and an empty entry exits explicitly.
while word.isalpha():
    word = input("Type word: ")
    if len(word) == 0:
        print("Exit")
        break
    print("Translate: {}".format(trans(word)))
|
# -*- coding: UTF-8 -*-
import sys
import csv
import os.path
from searcherClass import Searcher
def showMenu():
    """Print command-line usage help for phpPackageFinder."""
    print('Usage phpPackageFinder arg1 arg2 arg3 arg4')
    print('     arg1 = path to directory to search')
    print('     arg2 = search string')
    print('     arg3 = file extension')
    print('     arg4 = hit count only, meaning only show number of hits of the search string in the file (True or False)')
    print(' ')
    print('Example: phpPackageFinder /home/somename/Documents/ testQuery txt True')
    print(' ')
    print('For the query string, use a single quote for a multiple word phrase.')
#path = 'c:\\temp\\'
# Defaults used when the script is run without CLI arguments.
path = '/someName/Users/'
search = 'security'
fileExt = 'txt'
hitOnly = True
noVars = False
searchForVersion = False
my_list = []                          # accumulated CSV rows (header + hits)
fieldNames = ['file','fine','fineno'] # CSV header; NOTE(review): likely meant 'line'/'lineno'
def csv_dict_writer(csv_file, csv_columns, dict_data):
    """Write the rows in *dict_data* (lists of strings) to *csv_file*.

    Fixes over the original:
    - The file was opened twice; the second open('w') truncated away the
      DictWriter header written by the first, so the header never appeared.
      The function now opens once and writes only the rows, which matches
      the original's net output (callers already prepend a header row).
    - Both error handlers used "{1}".format(x), which raises IndexError
      with a single argument; now "{0}".
    - The bare `except:` is narrowed to Exception and reports the actual
      error instead of the Exception class.

    @param csv_file: destination path
    @param csv_columns: unused; kept for interface compatibility (callers
                        put the header row into dict_data themselves)
    @param dict_data: iterable of row lists
    """
    try:
        with open(csv_file, 'w', newline='') as data_file:
            writer = csv.writer(data_file)
            writer.writerows(dict_data)
    except IOError as strerror:
        print("I/O error: {0}".format(strerror))
    except Exception as exc:
        print("Error: {0}".format(exc))
def performSearch(searchObj):
    """Run the prepared Searcher, then re-scan each hit file line by line to
    collect [file, line, lineno] rows and write them to dict_output.csv.

    NOTE(review): uses the module-level ``Search`` object rather than the
    ``searchObj`` parameter it receives -- presumably the same object; confirm.
    """
    print('Search started...')
    Search.find()
    version = '@version'            # marker for the optional version lookup
    results = Search.getResults()   # mapping: file path -> hit count
    print('Found ', len(results), ' files:')
    print('Search ended.')
    my_list.append(fieldNames)      # header row for the CSV output
    for file, count in results.items():
        #after search @Package, then conduct another search using the filename
        #to get the version number of the Package
        counter = 0
        if os.path.exists(path):
            try:
                with open(file, 'r') as searchfile:
                    for line in searchfile:
                        counter = counter + 1
                        if search in line:
                            #print('File:', file, 'Line:', line, 'Line No:', counter)
                            #inner_dict=[file.translate(None, '\t\n;'), line.translate(None,'\t\n;'), counter]
                            inner_dict=[file, line, str(counter)]
                            my_list.append(inner_dict)
                            #my_list.append(line)
                        if searchForVersion == True and version in line:
                            print('File:', file, 'Line:', line, 'Line No:', counter)
            except FileNotFoundError:
                print("Unable to find file",file)
            finally:
                print("Moving on to next file.")
    print(my_list)
    csv_dict_writer('dict_output.csv',fieldNames,my_list)
    #print 'File: ', file, ' Found entries:' , count
if __name__ == '__main__':
    print('Number of args: ', len(sys.argv))
    # NOTE(review): the guard checks < 3, but sys.argv[3] below needs at
    # least 4 entries -- likely should be `< 4`; confirm intended usage.
    if len(sys.argv) < 3:
        showMenu()
    #elif len(sys.argv) == 6:
    else:
        print('Inside sys.argv...')
        path = sys.argv[1]
        search = sys.argv[2]
        fileExt = sys.argv[3]
        # Optional 4th arg toggles hit-count-only mode (arrives as a string).
        if len(sys.argv) > 4:
            hitOnly = sys.argv[4]
        else:
            hitOnly = False
        #hitOnly = sys.argv[4]
        searchForVersion = True #sys.argv[5]
        Search = Searcher(path, search, fileExt, hitOnly)
        performSearch(Search)
    #else:
    #    print('Inside else...')
    #    Search = Searcher(path, search, fileExt, hitOnly)
    #    performSearch(Search)
|
def merge(left, right):
    """Merge two sorted lists into one sorted list (stable: ties favor left).

    Fixed: the original popped from the front of each list (list.pop(0) is
    O(n)), making the merge quadratic and destroying its inputs.  This
    version walks two indices instead: O(len(left) + len(right)) and the
    input lists are left untouched.

    @param left: sorted list
    @param right: sorted list
    @return: new sorted list containing all elements of both inputs
    """
    result = []
    i = j = 0
    while i < len(left) and j < len(right):
        if left[i] <= right[j]:
            result.append(left[i])
            i += 1
        else:
            result.append(right[j])
            j += 1
    # One of the two is exhausted; the other's tail is already sorted.
    result.extend(left[i:])
    result.extend(right[j:])
    return result
def mergeSort(arry):
    """Sort *arry* with recursive merge sort; returns a (new) sorted list.
    Lists of fewer than two elements are returned as-is."""
    if len(arry) < 2:
        return arry
    half = len(arry) // 2
    return merge(mergeSort(arry[:half]), mergeSort(arry[half:]))
# Demo: print the list before and after sorting.
nums = [2, 3, 5, 7, 1, 4, 6, 8]
print("排序之前:", nums)
print("排序之后:", mergeSort(nums))
|
from dev.loaders.dataloader import AudioMNISTDataset, PreprocessRaw
from dev.loaders.librispeech import LibriSpeech4SpeakerRecognition
from dev.loaders.libriresolver import LibriSpeechSpeakers
|
__author__ = "Ankur Prakash Singh"
# Date format "%m-%d-%Y"
__date__ = '09-17-2020'
# Module docstring below states the exercise this file solves.
"""
Problem Statement #
Implement a function right_rotate(lst, n) which will rotate the given list by k. This means that the right-most elements will appear at the left-most position in the list and so on. You only have to rotate the list by one element at a time.
Input #
A list and a number by which to rotate that list
Output: #
The given list rotated by k elements
Sample Input #
lst = [10,20,30,40,50]
n = 3
Sample Output #
lst = [30,40,50,10,20]
"""
def rotate_right_list(lst, n):
    """Return *lst* rotated right by *n* positions using slicing. O(len(lst)).

    *n* is reduced modulo the list length, so any integer n works.
    Fixed: the original raised ZeroDivisionError on an empty list
    (n % len(lst) with len 0); an empty list is now returned unchanged.

    @param lst: list to rotate (not modified)
    @param n: number of positions to rotate right
    @return: new rotated list (or the original empty list)
    """
    if not lst:
        return lst
    n = n % len(lst)
    return lst[-n:] + lst[:-n]
def right_rotate(lst, n):
    """Rotate *lst* right by *n* positions without slicing.

    The last n elements (n taken modulo len(lst)) are copied to the front
    of a new list, followed by the remaining leading elements.  The whole
    list is traversed once, so the time complexity is O(len(lst)).
    (As in the original, an empty list raises ZeroDivisionError.)

    :param lst: list to rotate (not modified)
    :param n: number of positions to rotate right
    :return: new rotated list
    """
    n = n % len(lst)
    size = len(lst)
    tail = [lst[pos] for pos in range(size - n, size)]
    head = [lst[pos] for pos in range(size - n)]
    return tail + head
if __name__ == "__main__":
    # Both implementations must agree on the sample input from the exercise.
    print(rotate_right_list([10, 20, 30, 40, 50], 3))
    print(right_rotate([10, 20, 30, 40, 50], 3))
|
from datetime import datetime
import pytz
def timezone_time(time):
    """Localize a (presumably naive -- confirm callers) datetime to
    Asia/Tehran and format it as 'YYYY-MM-DD HH:MM:SS'."""
    tehran_zone = pytz.timezone("Asia/Tehran")
    localized = tehran_zone.localize(time)
    return localized.strftime('%Y-%m-%d %H:%M:%S')
|
from entity import Entity
from enum import Enum, auto
import stage
import time
class EffectType(Enum):
    """Kinds of transient visual effects an Effect entity can display."""
    SMOKE = auto()
    SMALL_GROUND_DUST = auto()
    BIG_GROUND_DUST = auto()
    SLEEPING = auto()
class Effect(Entity):
    """A short-lived effect entity that removes itself after *duration* seconds
    (duration 0 means it lives until removed externally)."""

    def __init__(self, entity_stage: 'stage.Stage', effect_type: EffectType, duration: float):
        super().__init__(entity_stage)
        self._effect_type = effect_type
        # 0 encodes "no deadline"; otherwise the monotonic-clock time at which
        # this effect should die.
        self._live_until = 0 if duration == 0 else time.monotonic() + duration

    def update(self):
        # Mark the entity dead once the deadline has passed (never when 0).
        if self._live_until != 0 and time.monotonic() >= self._live_until:
            self.set_dead()

    def get_effect_type(self) -> EffectType:
        return self._effect_type

    def _setup_box_pos(self, x: float, y: float):
        # 0.1-wide, 0.1-tall hitbox centered horizontally on x, based at y.
        self._hitbox.set_positions(x - 0.05, y, x + 0.05, y + 0.1)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# @Time : 2020/1/31
# @Author : xujun
class Solution:
    """LeetCode 1 (Two Sum) via a single-pass complement dictionary."""

    def twoSum(self, nums, target: int):
        """Return indices [i, j] (i < j) with nums[i] + nums[j] == target.

        For each element we first check whether it is the complement some
        earlier element is waiting for; otherwise we record which value
        would complete the pair.  Returns None when no pair exists.
        """
        wanted = {}  # value needed -> index of the element waiting for it
        for idx, value in enumerate(nums):
            if value in wanted:
                return [wanted[value], idx]
            wanted[target - value] = idx
|
class Tyontekija:
    """Worker ("tyontekija") record: first name, last name and a company
    email address derived from the two names."""

    def __init__(self, etunimi, sukunimi):
        """Store the names and derive the @firma.fi email address.

        :param etunimi: first name
        :param sukunimi: last name
        """
        print("Luodaan objekti", self)
        self.etunimi = etunimi
        self.sukunimi = sukunimi
        self.email = f'{etunimi}.{sukunimi}@firma.fi'

    def __del__(self):
        """Finalizer; intentionally a no-op (save/print hooks were removed)."""

    def tulosta_tiedot(self):
        """Print this worker's details (name and email) to stdout."""
        print()
        print("Tiedot")
        print('=' * 10)
        print("Nimi:", self.etunimi, self.sukunimi)
        print("Sähköposti:", self.email)
#
# t1 = Tyontekija("Pete", "Meikäläinen")
# t2 = Tyontekija("Matti", "Heijölöinen")
# print(t1)
# print(t2)
#
# t1.etunimi = "Matti"
# t1.sukunimi = "Meikäläinen"
# print(t1.etunimi)
def fun1():
    """Demo helper: print the fixed marker string 'Fun1'."""
    print("Fun1")
if __name__ == '__main__':
    #
    # Create two workers; the constructor prints a creation message.
    tekija1 = Tyontekija("Matti", "Meikäläinen")
    tekija2 = Tyontekija("Pekka", "Virtanen")
    # print(tekija1)
    # print(tekija2)
    # print(tekija1.etunimi, tekija1.sukunimi)
    # print(tekija1.email)
    # #tekija1.etunimi="Kimmo"
    # print(tekija1.etunimi, tekija1.sukunimi)
    # print(tekija1.email)
    #
    # tekija1.tulosta_tiedot()
    # Tyontekija.tulosta_tiedot(tekija2)
    # Show the class and __init__ docstrings via introspection.
    print(tekija1.__doc__)
    print(tekija1.__init__.__doc__)
    #
# |
import topbnv_tools as tbt
import numpy as np
import matplotlib.pylab as plt
import ROOT
import sys
import lichen.lichen as lch
import pickle
################################################################################
def main(infiles=None):
    """Group skim ROOT files into data/MC datasets, read kinematic branches
    from each group's Tskim chain, and histogram the leading-muon pT.

    @param infiles: list of input ROOT file paths; the dataset name is
                    encoded between 'DATASET_crab_bellis_' and '_NFILES'.
    """
    # Per-dataset MC bookkeeping (weights) from the CSV.
    MCinfo = tbt.csvtodict("MCinfo.csv")
    # NOTE(review): the original looped over MCinfo.keys() here with an
    # entirely commented-out body; removed as dead debug code.

    # Bucket input files: filegroups[type]["dataset"] -> [file, ...]
    filegroups = {"data": {}, "MC": {}}
    for infile in infiles:
        print(infile)
        isMC = infile.find('MC_DATASET') >= 0
        dataset = infile.split("DATASET_crab_bellis_")[1].split("_NFILES")[0]
        # NOTE(review): KeyErrors here for datasets missing from MCinfo.csv
        # (e.g. real data) -- confirm every dataset has an entry.
        print(MCinfo[dataset])
        typekey = "data"
        if isMC:
            typekey = "MC"
        else:
            dataset = "Run II"  # collapse all real data into one dataset
        if dataset in filegroups[typekey].keys():
            filegroups[typekey][dataset].append(infile)
        else:
            filegroups[typekey][dataset] = []
            filegroups[typekey][dataset].append(infile)
    print(filegroups)
    print()
    print(filegroups['data'].keys())
    print()
    print(filegroups['MC'].keys())
    # NOTE(review): debug early-exit retained from the original -- everything
    # below (chain reading and plotting) is unreachable until it is removed.
    exit()
    plt.figure(figsize=(12, 8))
    vals = []
    #for dormc in ['data','MC']:
    for dormc in ['MC']:
        print("{0} --------------".format(dormc))
        for fg in filegroups[dormc].keys():
            print("\t{0}".format(fg))
            infiles = filegroups[dormc][fg]
            chain = ROOT.TChain("Tskim")
            for infile in infiles:
                print(infile)
                chain.Add(infile)
            #chain.Print()
            #chain.Show(10)
            nentries = chain.GetEntries()
            # Per-event (and per-object) branch values collected for plotting.
            leadmupt = []
            topmass = []
            Wmass = []
            jetcsv = []
            njet = []
            nbjet = []
            ntop = []
            nmuon = []
            for i in range(nentries):
                chain.GetEntry(i)
                if i % 100000 == 0:
                    print("{0} out of {1} entries".format(i, nentries))
                if i > 100:
                    break  # debug cap: read only the first ~100 entries
                nmuon.append(chain.nmuon)
                leadmupt.append(chain.leadmupt)
                ntop.append(chain.ntop)
                for n in range(chain.ntop):
                    topmass.append(chain.topmass[n])
                for n in range(chain.nW):
                    Wmass.append(chain.Wmass[n])
                nbjet.append(chain.nbjet)
                njet.append(chain.njet)
                for n in range(chain.njet):
                    jetcsv.append(chain.jetcsv[n])
            leadmupt = np.array(leadmupt)
            topmass = np.array(topmass)
            Wmass = np.array(Wmass)
            jetcsv = np.array(jetcsv)
            njet = np.array(njet)
            nbjet = np.array(nbjet)
            ntop = np.array(ntop)
            nmuon = np.array(nmuon)
            # One stacked-histogram entry per dataset group.
            vals.append(leadmupt[leadmupt < 200])
            #lch.hist_err(leadmupt[leadmupt<200],bins=400,alpha=0.2)
    print(len(vals))
    plt.hist(vals, bins=20, stacked=True)
    '''
    plt.subplot(2,3,2)
    lch.hist_err(topmass[topmass<1200],bins=400,alpha=0.2)
    plt.subplot(2,3,3)
    lch.hist_err(Wmass[Wmass<1200],bins=400,range=(0,400),alpha=0.2)
    plt.subplot(2,3,4)
    lch.hist_err(Wmass[(Wmass>40)*(Wmass<150)],bins=100,alpha=0.2)
    plt.subplot(2,3,5)
    lch.hist_err(njet,bins=20,range=(0,20),alpha=0.2)
    plt.subplot(2,3,6)
    lch.hist_err(nbjet,bins=8,range=(0,8),alpha=0.2)
    #lch.hist_err(jetcsv,bins=400)
    plt.figure(figsize=(12,8))
    plt.subplot(2,3,1)
    lch.hist_err(ntop,bins=20,range=(0,20),alpha=0.2)
    plt.subplot(2,3,2)
    lch.hist_err(nmuon,bins=20,range=(0,20),alpha=0.2)
    '''
    plt.show()
    return 1
################################################################################
if __name__=="__main__":
    # All command-line arguments are treated as input ROOT file paths.
    infiles = sys.argv[1:]
    main(infiles)
|
from datetime import datetime
from pprint import pprint
from flask import Flask, jsonify, request, abort, make_response, render_template
from flask_socketio import SocketIO
from arduino import Arduino
app = Flask(__name__)
# NOTE(review): hard-coded secret; move to configuration/env for production.
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app)
# Serial connection to the LED controller (Windows-style port name).
arduino = Arduino("COM4")
@socketio.on('connect')
def handle_connect():
    """On client connect: log it and push the current LED states to clients."""
    print('Client connected!')
    socketio.emit('led_state', {'leds': arduino.leds})
@socketio.on('disconnect')
def handle_disconnect():
    """On client disconnect: log it.

    Fixed: the original reused the name ``handle_connect`` for this handler,
    silently rebinding the module-level name of the connect handler (both
    stayed registered via the decorator, but the duplicate definition was a
    latent bug and confuses introspection/debugging).
    """
    print('Client disconnected!')
@socketio.on('set_led_state')
def handle_set_led_state(message):
    """Apply a LED change requested over the socket, then broadcast the new
    state and a human-readable log line to all connected clients.

    @param message: dict with 'id' (LED id) and 'state' (truthy = ON)
    """
    arduino.set_led_state(message['id'], message['state'])
    socketio.emit('led_state', {'leds': arduino.leds})
    now = datetime.now()
    msg = "[%s] %s changed led %s to %s" % (
        now, request.remote_addr, message['id'], 'ON' if message['state'] else 'OFF')
    socketio.emit('log', {'msg': msg})
@app.route('/')
def index():
    """Serve the control page, rendered with the current LED states."""
    return render_template('index.html', leds=arduino.leds)
@app.route('/api/leds')
def get_leds():
    """GET /api/leds: all LED states as JSON."""
    return jsonify({'leds': arduino.leds})
@app.route('/api/leds/<int:led_id>')
def get_led(led_id):
    """GET /api/leds/<id>: one LED's state as JSON, or 404 if the id is unknown."""
    matches = [item for item in arduino.leds if item['id'] == led_id]
    if not matches:
        abort(404)
    return jsonify({'led': matches[0]})
@app.route('/api/leds/<int:led_id>', methods=['PUT'])
def set_led_state(led_id):
    """PUT /api/leds/<id> with JSON {"state": <bool>}: set one LED, broadcast
    the new state over the socket, and return the LED entry.

    404 for an unknown id or a non-JSON body; 400 when 'state' is missing or
    not a strict boolean.
    """
    led = [led for led in arduino.leds if led['id'] == led_id]
    if not led or not request.json:
        abort(404)
    if 'state' not in request.json or type(request.json['state']) is not bool:
        abort(400)
    arduino.set_led_state(led_id, request.json['state'])
    socketio.emit('led_state', {'leds': arduino.leds})
    return jsonify(led[0])
@app.errorhandler(404)
def not_found(error):
    """Return 404 errors as JSON rather than Flask's default HTML page."""
    return make_response(jsonify({'error': 'Not found'}), 404)
if __name__ == '__main__':
    # Debug server, localhost only; use the commented variant to expose it.
    socketio.run(app, debug=True)
    # socketio.run(app, debug=False, host='0.0.0.0') # Allow access from Internet
|
"""Module for the extract handler class"""
from argparse import Namespace
from os import mkdir, system
from os.path import isdir, isfile, join, split
from handler.handler import Handler
from strings.extract_handler import *
class ExtractHandler(Handler):
    """The handler for extracting all the compressed files in the data directory"""

    @staticmethod
    def handle(args: Namespace):
        """
        Extracts all the compressed files in the data directory so they can be queried
        @param args: The arguments for the extract handler
        """
        Handler._directory_walk(dir_path=args.data_path, action=ExtractHandler._extract_file)

    @staticmethod
    def _extract_file(root: str, file_path: str):
        """
        Extracts a file in a directory, dispatching on the file extension.
        Order matters: .tar.gz/.tgz must be tested before the plain .gz case.
        @param root: The directory that the file is in
        @param file_path: The path to the file to extract
        """
        if file_path.endswith(TAR_GZ_EXTENSION) or file_path.endswith(TGZ_EXTENSION):
            ExtractHandler._extract_tar_gz(file_path=file_path, dest_dir=root)
        elif file_path.endswith(GZ_EXTENSION):
            ExtractHandler._extract_gz(file_path=file_path)
        elif file_path.endswith(TAR_EXTENSION):
            ExtractHandler._extract_tar(file_path=file_path, dest_dir=root)
        elif file_path.endswith(ZIP_EXTENSION):
            ExtractHandler._extract_zip(file_path=file_path, dest_dir=root)

    @staticmethod
    def _extract_gz(file_path: str):
        """
        Extracts a file with a .gz extension
        @param file_path: Path to the .gz file to be extracted
        """
        assert isfile(file_path)
        # NOTE(review): the path is interpolated into a shell command; paths
        # containing spaces or shell metacharacters will break or inject.
        # Consider shlex.quote (or the gzip/tarfile/zipfile modules).
        command: str = GZ_EXTRACT_COMMAND.format(file_path)
        system(command)

    @staticmethod
    def _extract_tar(file_path: str, dest_dir: str):
        """
        Extracts a directory with a .tar extension and removes it
        @param file_path: The path to the .tar file to extract and remove
        @param dest_dir: The destination of the resulting extracted directory
        """
        ExtractHandler._extract_dir(file_path=file_path, dest_dir=dest_dir, extract_command=TAR_EXTRACT_COMMAND)

    @staticmethod
    def _extract_zip(file_path: str, dest_dir):
        """
        Extracts a file with a .zip extension and removes it
        @param file_path: The path to the .zip file to extract and remove
        @param dest_dir: The destination of the resulting extracted directory
        """
        ExtractHandler._extract_dir(file_path=file_path, dest_dir=dest_dir, extract_command=ZIP_EXTRACT_COMMAND)

    @staticmethod
    def _extract_tar_gz(file_path, dest_dir: str):
        """
        Extracts a file with a .tar.gz extension and removes it. Also works with a .tgz extension.
        @param file_path: The path to the .tar.gz file to extract and remove
        @param dest_dir: The destination of the resulting extracted directory
        """
        ExtractHandler._extract_dir(file_path=file_path, dest_dir=dest_dir, extract_command=TAR_GZ_EXTRACT_COMMAND)

    @staticmethod
    def _extract_dir(file_path: str, dest_dir: str, extract_command: str):
        """
        Extracts a compressed-directory and removes its corresponding compressed-directory file
        @param file_path: The path to the compressed-directory file to extract and remove
        @param dest_dir: The destination of the resulting extracted directory
        @param extract_command: The terminal-command specifying the extension of the resulting compressed-directory file
        """
        # NOTE: assert-based validation is stripped under `python -O`.
        assert isfile(file_path)
        assert isdir(dest_dir)
        # Get the name of the compressed-directory file to act as the destination of the extracted contents
        file_name: str = ExtractHandler._get_compressed_file_name(file_path=file_path)
        dest_dir: str = join(dest_dir, file_name)
        assert not isdir(dest_dir)
        mkdir(dest_dir)
        # NOTE(review): shell interpolation of both paths -- see _extract_gz.
        command: str = extract_command.format(file_path, dest_dir)
        system(command)
        ExtractHandler._remove_file(file_path)

    @staticmethod
    def _get_compressed_file_name(file_path: str) -> str:
        """
        Gets the name of a compressed file (its basename with the archive
        extension stripped)
        @param file_path: The path to the compressed file
        @return: The name of the compressed file
        @raise ValueError: if the extension is not one of the supported kinds
        """
        assert isfile(file_path)
        _, file_name = split(file_path)
        if file_path.endswith(TAR_EXTENSION):
            extension_len: int = len(TAR_EXTENSION)
        elif file_path.endswith(ZIP_EXTENSION):
            extension_len: int = len(ZIP_EXTENSION)
        elif file_path.endswith(TAR_GZ_EXTENSION):
            extension_len: int = len(TAR_GZ_EXTENSION)
        elif file_path.endswith(TGZ_EXTENSION):
            extension_len: int = len(TGZ_EXTENSION)
        else:
            error_msg: str = FILE_EXTENSION_UNSUPPORTED_MSG.format(file_path)
            raise ValueError(error_msg)
        assert extension_len > 0
        file_name: str = file_name[:len(file_name) - extension_len]
        return file_name

    @staticmethod
    def _remove_file(file_path: str):
        """
        Removes a file from the file system, ensuring the file exists
        @param file_path: The path to the file to remove
        """
        assert isfile(file_path)
        # NOTE(review): shell interpolation of the path -- see _extract_gz;
        # os.remove(file_path) would avoid the shell entirely.
        system(REMOVE_FILE_COMMAND.format(file_path))
|
#-*- coding:utf-8 -*-
import six, re, time, datetime
import logging
import xml.etree.ElementTree as ET
#############alipay##################
from pay.conf import wapalipay
#############wxpay#######################
from weixin.backends.dj import Helper, sns_userinfo
from weixin import WeixinHelper, JsApi_pub, WxPayConf_pub, UnifiedOrder_pub, Notify_pub, catch
from django.shortcuts import render_to_response, redirect
from django.http import HttpResponseRedirect, HttpResponse
from django.views.decorators.csrf import csrf_exempt
from django.conf import settings
from rr_user.models import User,UserCompany
from others.models import CompanyUser
from business.models import Order, Express, Express_Order
from others.models import News
from django.core.urlresolvers import reverse
from urlparse import unquote
from register.run import aliyun_sendmsg
# File loggers for payment callbacks: logger1 = Alipay, logger2 = WeChat Pay.
logger1 =logging.getLogger(settings.LOGGING_ALIPAY)
logger1.setLevel(logging.INFO)
logger1.addHandler (logging.FileHandler(settings.LOGGING_ALIPAY))
logger2 =logging.getLogger(settings.LOGGING_WXIPAY)
logger2.setLevel(logging.INFO)
logger2.addHandler (logging.FileHandler(settings.LOGGING_WXIPAY))
# Create your views here. The first SMS goes to four contacts:
# Zhang Jun 18101846677, Ci Yilin 13122111314, Zhang Ming 18516114095, Wei Kai 13980996408
name = '胡霞'
tel1 = '18101846677'# Zhang Jun
tel2 = '13122111314'# Ci Yilin
tel3 = '18516114095'# Zhang Ming
tel4 = '13980996408'# Wei Kai
def arrayToXml(**arr):
    """Serialize keyword arguments into a flat <xml> document.

    Purely numeric string values are emitted as plain elements; everything
    else is wrapped in CDATA (assumes values are strings -- non-strings
    would raise on .isdigit(), same as the original).
    """
    pieces = ["<xml>"]
    for key, value in arr.items():
        if value.isdigit():
            pieces.append("<%s>%s</%s>" % (key, value, key))
        else:
            pieces.append("<%s><![CDATA[%s]]></%s>" % (key, value, key))
    pieces.append("</xml>")
    return "".join(pieces)
@csrf_exempt
def notify(request, payway='alipay'):
    """Asynchronous payment-notification endpoint.

    Handles callbacks from Alipay direct pay (``payway='alipay'``), Alipay
    WAP pay (``'wap'``) and WeChat pay (``'weixin'``): parses the gateway
    payload, marks the matching Order / Express records as paid, fires SMS
    notifications, then acknowledges in the format the gateway expects
    (XML for WeChat, plain "success"/"fail" otherwise).
    """
    success = {'return_code': 'SUCCESS'}
    fail = {'return_code': 'FAIL'}
    if request.method == "POST":
        result_code = "FAIL"
        # Defaults so the logging below cannot hit unbound names when a
        # branch fails to parse its payload (e.g. WeChat return_code FAIL).
        logger = logger1
        out_trade_no = attach = paytime = apamount = None
        if payway == 'alipay':
            logger = logger1
            logger.info('>>进入支付宝异步通知接口...')
            params = request.POST.copy()
            tree = {}
            for p in params:
                try:
                    tree[p.encode('utf-8')] = params[p].encode('utf-8')
                except:
                    tree[p.encode('utf-8')] = params[p][0].encode('utf-8')
            # out_trade_no arrives as "<orderNo>_<originalOrderNo>".
            out_trade_no = tree['out_trade_no'].split('_')[0]
            attach = tree['out_trade_no'].split('_')[1]
            paytime = tree.get('gmt_payment')
            apamount = tree['total_fee']
            logger1.info(tree['trade_status'])
            if tree['trade_status'] == "TRADE_SUCCESS":
                result_code = "SUCCESS"
        elif payway == 'wap':
            logger = logger1
            logger.info('>>进入支付宝网页支付异步通知接口...')
            params = request.POST.copy()
            if wapalipay.verify_notify(**params):
                if 'notify_data' in params:
                    notifydata = unquote(params['notify_data'])
                    if isinstance(notifydata, str):
                        notifydata = six.u(notifydata).encode('utf-8')
                    elif isinstance(notifydata, six.string_types):
                        notifydata = notifydata.encode('utf-8')
                    tree = ET.ElementTree(ET.fromstring(notifydata))
                    out_trade_no = tree.find("out_trade_no").text.split('_')[0]
                    attach = tree.find("out_trade_no").text.split('_')[1]
                    # BUG FIX: ElementTree objects support neither .get()
                    # nor item access; read these fields via find() like
                    # the surrounding fields.
                    gmt_payment = tree.find("gmt_payment")
                    paytime = gmt_payment.text if gmt_payment is not None else None
                    apamount = tree.find("total_fee").text
                    trade_status = tree.find("trade_status").text
                    logger1.info(trade_status)
                    if trade_status == 'TRADE_SUCCESS':
                        result_code = "SUCCESS"
        elif payway == 'weixin':
            logger = logger2
            logger.info('>>进入微信支付异步通知接口...')
            params = request.body
            tree = ET.ElementTree(ET.fromstring(params))
            return_code = tree.find("return_code").text
            if return_code == "SUCCESS":
                result_code = tree.find("result_code").text
                out_trade_no = tree.find("out_trade_no").text
                attach = tree.find("attach").text
                paytime = datetime.datetime.strptime(tree.find("time_end").text, '%Y%m%d%H%M%S').strftime('%Y-%m-%d %H:%M:%S')
                apamount = float(tree.find("total_fee").text) / 100
        if result_code == "SUCCESS":
            logger.info('>>订单 %s 支付成功,订单长度%s' % (out_trade_no, len(out_trade_no)))
            if len(out_trade_no) == 18:
                # 18-character numbers are service orders.
                neworder = Order.objects.get(orderNo=out_trade_no)
                logger.info('更改订单状态...')
                neworder.order_status = '3'
                neworder.paytime = paytime
                neworder.apamount = apamount
                neworder.save()
                logger.info('>>订单 %s 的状态改为 服务中' % out_trade_no)
                usertype = out_trade_no[-5]
                context = {'tel': '13248268832'}
                c1 = {}
                user = servicesprovider = None
                if neworder.userid:
                    user = User.objects.get(id=neworder.userid)
                if neworder.servicesproviderid:
                    servicesprovider = User.objects.get(id=neworder.servicesproviderid)
                if user:
                    context['tel'] = user.username[:-2].encode('utf-8')
                    if user.name:
                        context['name'] = user.name.encode('utf-8')
                if servicesprovider:
                    c1['tel'] = servicesprovider.username[:-2].encode('utf-8')
                    if servicesprovider.name:
                        c1['name'] = servicesprovider.name.encode('utf-8')
                # Only SMS when the customer number looks like a mainland
                # mobile number.
                p = re.compile('^(0|86|17951)?(13[0-9]|15[0-9]|17[678]|18[0-9]|14[57])[0-9]{8}$')
                mobilematch = p.match(context['tel'])
                if mobilematch:
                    if usertype in ['1', '6', '7', '8', '9']:
                        if servicesprovider:
                            aliyun_sendmsg(context['tel'], 1270, **c1)
                            aliyun_sendmsg(c1['tel'], 1268, **context)
                        else:
                            aliyun_sendmsg(context['tel'], 1451)
                            # aliyun_sendmsg(tel3, 1268,**context)
                    elif usertype in ['3', '4', '5']:
                        aliyun_sendmsg(context['tel'], 1451)
                    if float(apamount) == 1:
                        aliyun_sendmsg('13248268832', 1268, **context)
                    # BUG FIX: the original looked up context.get(name),
                    # i.e. used the module-level global `name` as the key;
                    # the literal key 'name' was intended.
                    orderdetail = {'customer': context.get('name'), 'orderNumber': out_trade_no, 'customerTel': context['tel']}
                    companyid = neworder.companyid
                    try:
                        orderdetail['businessAdd'] = UserCompany.objects.get(id=companyid).address
                    except:
                        pass
                    try:
                        orderdetail['registereAdd'] = CompanyUser.objects.get(id=companyid).revenueArea
                    except:
                        pass
                    # Notify the four staff contacts about the new order.
                    aliyun_sendmsg(tel1, 3821, **orderdetail)
                    aliyun_sendmsg(tel2, 3821, **orderdetail)
                    aliyun_sendmsg(tel3, 3821, **orderdetail)
                    aliyun_sendmsg(tel4, 3821, **orderdetail)
                else:
                    aliyun_sendmsg(tel1, 1267, **context)
                if attach != out_trade_no:
                    # A differing attach marks a renewal of an older order.
                    oldorder = Order.objects.get(orderNo=attach)
                    oldorder.isrenewal = '1'
                    oldorder.save()
                    logger.info('>>订单 %s 的状态改为 已续费' % attach)
            elif len(out_trade_no) == 14:
                # 14-character numbers are express (invoice) orders.
                express = Express.objects.get(expressorderNo=out_trade_no)
                order = Express_Order.objects.filter(expressid=express.id).values_list('id', flat=True)
                logger.info('更改快递订单的订单状态...')
                Order.objects.filter(id__in=order).update(ismkinvoice="已开票")
                logger.info('>>快递订单 %s 中的订单改为 已开票' % out_trade_no)
            else:
                logger.info('这是什么订单?')
            if payway == 'weixin':
                logger.info('>>返回 %s ' % arrayToXml(**success))
                return HttpResponse(arrayToXml(**success))
            logger.info('>>返回 success ')
            return HttpResponse("success")
        else:
            logger.info('>>订单 %s 支付失败' % out_trade_no)
            if payway == 'weixin':
                logger.info('>>返回 %s ' % arrayToXml(**fail))
                return HttpResponse(arrayToXml(**fail))
            logger.info('>>返回 fail ')
            return HttpResponse("fail")
    # Non-POST requests previously fell through and returned None (a 500
    # in Django); acknowledge with a plain failure instead.
    return HttpResponse("fail")
@csrf_exempt
def create_direct_pay(request):
    """Build an Alipay direct-pay URL for the posted order and return it.

    Non-POST requests are bounced to the pay_error page.
    """
    if request.method != "POST":
        return HttpResponseRedirect(reverse('pay_error'))
    return_url = 'http://139.196.33.243/pay.html'
    notify_url = 'http://121.40.219.131:8100/pay/notify/'
    pay_params = {
        'out_trade_no': request.POST.get('orderNo'),
        'subject': u'测试',
        'total_fee': request.POST.get('amount'),
        'seller_account_name': wapalipay.seller_email,
        'call_back_url': return_url,
        'notify_url': notify_url,
    }
    pay_url = wapalipay.create_direct_pay_by_user_url(**pay_params)
    return HttpResponse(pay_url)
# @csrf_exempt
# @sns_userinfo
# @catch
def jspay(request):
    """Create a WeChat JSAPI unified order and return the JS pay parameters.

    Reads openid / amount / subject / order_sn / order_id from the query
    string, registers a unified order with WeChat, and responds with the
    parameters the front-end JSAPI needs to invoke payment.
    """
    openid = request.GET.get('openid')
    print(openid)
    amount = request.GET.get("amount") or "0.01"
    money = int(float(amount) * 100)  # WeChat expects the fee in cents
    jsApi = JsApi_pub()
    unifiedOrder = UnifiedOrder_pub()
    unifiedOrder.setParameter("openid", openid)
    unifiedOrder.setParameter("body", request.GET.get('subject', '一元财税包'))  # goods description
    timeStamp = time.time()
    out_trade_no = "{0}{1}".format(WxPayConf_pub.APPID, int(timeStamp * 100))
    unifiedOrder.setParameter("out_trade_no", request.GET.get('order_sn', out_trade_no))  # merchant order number
    unifiedOrder.setParameter("total_fee", str(money))  # total amount
    unifiedOrder.setParameter("notify_url", WxPayConf_pub.NOTIFY_URL)  # async notify address
    unifiedOrder.setParameter("trade_type", "JSAPI")  # trade type
    unifiedOrder.setParameter("attach", request.GET.get('order_id', 'test'))  # attach data, distinguishes merchants (string(127))
    try:
        prepay_id = unifiedOrder.getPrepayId()
        if isinstance(prepay_id, dict):
            # A dict result carries the gateway's error description.
            return HttpResponse(prepay_id.get("err_code_des"))
        jsApi.setPrepayId(prepay_id)
        jsApiParameters = jsApi.getParameters()
    except Exception as e:
        # BUG FIX: the original fell through to `return HttpResponse(
        # jsApiParameters)` with the name unbound, raising NameError.
        print(e)
        return HttpResponse(str(e))
    print(jsApiParameters)
    return HttpResponse(jsApiParameters)
# @csrf_exempt
@sns_userinfo
def paytest(request):
    """Render the pay test page and persist the visitor's openid cookie."""
    page = render_to_response("pay/pay.html", {'openid': request.openid})
    page.set_cookie("openid", request.openid)
    return page
@sns_userinfo
def paytestb(request):
    """Bounce the user into the WeChat SPA, propagating the openid."""
    target = "http://dev.i-caiwu.com/wxpay/wx/www/index.html#/app/index?openid={0}".format(request.openid)
    return redirect(target)
|
import logging
import json
import requests
from tenacity import (
retry,
wait_exponential,
stop_after_attempt,
retry_if_exception_type,
)
from airflow.hooks.http_hook import HttpHook
from airflow.hooks.S3_hook import S3Hook
from airflow.hooks.base_hook import BaseHook
from airflow.hooks.postgres_hook import PostgresHook
from airflow.exceptions import AirflowException
from airflow.models import Variable
from psycopg2 import ProgrammingError
from mongoengine import connect
from sqlalchemy.exc import OperationalError
# Module-level logger for these Airflow helper functions.
Logger = logging.getLogger(__name__)
# Default header for JSON API calls made through the hooks below.
DEFAULT_HEADER = {
    "Content-Type": "application/json"
}
# Pre-built GET hook against the "kernel_conn" Airflow connection.
KERNEL_HOOK_BASE = HttpHook(http_conn_id="kernel_conn", method="GET")
try:
    # Retry budget for http_hook_run; overridable via an Airflow Variable.
    # OperationalError covers the metadata DB being unreachable at import
    # time; ValueError covers a non-integer Variable value.
    HTTP_HOOK_RUN_RETRIES = int(Variable.get("HTTP_HOOK_RUN_RETRIES", 5))
except (OperationalError, ValueError):
    HTTP_HOOK_RUN_RETRIES = 5
@retry(
    wait=wait_exponential(),
    stop=stop_after_attempt(HTTP_HOOK_RUN_RETRIES),
    retry=retry_if_exception_type((requests.ConnectionError, requests.Timeout)),
)
def http_hook_run(api_hook, method, endpoint, data=None, headers=DEFAULT_HEADER, timeout=1):
    """Execute one HTTP call through an Airflow HttpHook.

    Retries with exponential backoff on connection errors and timeouts;
    logs the request and resulting status code, then returns the response.
    """
    extra = {"timeout": timeout, "check_response": False}
    response = api_hook.run(
        endpoint=endpoint, data=data, headers=headers, extra_options=extra
    )
    Logger.info(
        "%s %s - Payload: %s - status_code: %s",
        method,
        endpoint,
        json.dumps((data or ""), indent=2),
        response.status_code,
    )
    return response
def kernel_connect(endpoint, method, data=None, headers=DEFAULT_HEADER, timeout=13):
    """Call the kernel API and raise for non-2xx responses.

    JSON-encodes *data* when present and delegates the actual request
    (including retries) to http_hook_run.
    """
    hook = HttpHook(http_conn_id="kernel_conn", method=method)
    payload = json.dumps(data) if data is not None else None
    response = http_hook_run(
        api_hook=hook,
        method=method,
        endpoint=endpoint,
        data=payload,
        headers=headers,
        timeout=timeout,
    )
    response.raise_for_status()
    return response
@retry(
    wait=wait_exponential(),
    stop=stop_after_attempt(10),
)
def object_store_connect(bytes_data, filepath, bucket_name):
    """Upload *bytes_data* to S3 (replacing any existing object) and
    return the "<host>/<bucket>/<key>" URL of the stored object."""
    hook = S3Hook(aws_conn_id="aws_default")
    hook.load_bytes(bytes_data, key=filepath, bucket_name=bucket_name, replace=True)
    host = hook.get_connection("aws_default").extra_dejson.get("host")
    return "{}/{}/{}".format(host, bucket_name, filepath)
@retry(wait=wait_exponential(), stop=stop_after_attempt(4))
def update_metadata_in_object_store(filepath, metadata, bucket_name):
    """Merge *metadata* into an existing S3 object's metadata.

    S3 metadata is immutable in place, so the object is copied onto itself
    with MetadataDirective='REPLACE'.
    """
    hook = S3Hook(aws_conn_id="aws_default")
    s3_object = hook.get_key(key=filepath, bucket_name=bucket_name)
    s3_object.metadata.update(metadata)
    s3_object.copy_from(
        CopySource={'Bucket': bucket_name, 'Key': filepath},
        Metadata=s3_object.metadata,
        MetadataDirective='REPLACE',
    )
@retry(wait=wait_exponential(), stop=stop_after_attempt(10))
def mongo_connect():
    """Open a mongoengine connection from the Airflow "opac_conn" entry."""
    # TODO: a connection must be registered beforehand;
    # see https://github.com/puckel/docker-airflow/issues/75
    conn = BaseHook.get_connection("opac_conn")
    creds = "{}:{}@".format(conn.login, conn.password) if conn.login else ""
    port_part = "" if conn.port is None else ":{}".format(conn.port)
    uri = "mongodb://{creds}{host}{port}/{database}".format(
        creds=creds,
        host=conn.host,
        port=port_part,
        database=conn.schema,
    )
    connect(host=uri, **conn.extra_dejson)
@retry(wait=wait_exponential(), stop=stop_after_attempt(10))
def add_execution_in_database(
    table, data=None, connection_id="postgres_report_connection"
):
    """Dynamically insert one row of execution metadata into PostgreSQL.

    Args:
        table: target table name.
        data: mapping of column name -> value; a "payload" value is
            JSON-encoded before insertion.
        connection_id: Airflow connection id to use.

    Returns silently (with a log message) when data is empty or the
    connection is not configured; insert errors are logged, not raised.
    """
    # BUG FIX: the default was a shared mutable `{}`, and the emptiness
    # check ran AFTER `dict(data)`, which would already have raised
    # TypeError for data=None. Check first, then copy.
    if not data:
        logging.info(
            "Cannot insert `empty data` into the database. Please verify your data attributes."
        )
        return
    data = dict(data)
    hook = PostgresHook(postgres_conn_id=connection_id)
    try:
        hook.get_conn()
    except AirflowException:
        logging.info("Cannot insert data. Connection '%s' is not configured.", connection_id)
        return
    if data.get("payload"):
        data["payload"] = json.dumps(data["payload"])
    columns = list(data.keys())
    values = list(data.values())
    try:
        hook.insert_rows(table, [values], target_fields=columns)
    except (AirflowException, ProgrammingError) as exc:
        logging.error(exc)
    else:
        logging.info("Registering `%s` into '%s' table.", data, table)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-23 20:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: redefines the poll name field (base
    column plus the 'ru' and 'uk' modeltranslation columns) as a
    150-character CharField with a Russian verbose name."""
    dependencies = [
        ('polls', '0008_userpollfilter_name'),
    ]
    operations = [
        migrations.AlterField(
            model_name='poll',
            name='name',
            field=models.CharField(max_length=150, verbose_name='Название опроса'),
        ),
        migrations.AlterField(
            model_name='poll',
            name='name_ru',
            field=models.CharField(max_length=150, null=True, verbose_name='Название опроса'),
        ),
        migrations.AlterField(
            model_name='poll',
            name='name_uk',
            field=models.CharField(max_length=150, null=True, verbose_name='Название опроса'),
        ),
    ]
|
# -*- coding: utf-8 -*-
# Problem Number 1041
# Classify a point (x, y): origin, on an axis, or by quadrant.
values = input("").split(" ")
x = float(values[0])
y = float(values[1])
if x == 0 and y == 0:
    print("Origem")
elif y == 0:
    print("Eixo X")
elif x == 0:
    print("Eixo Y")
elif x > 0:
    # y is nonzero here, so the sign of y decides Q1 vs Q4.
    print("Q1" if y > 0 else "Q4")
else:
    # x < 0: the sign of y decides Q2 vs Q3.
    print("Q2" if y > 0 else "Q3")
# -*- coding: utf-8 -*-
# ------------------------------------
# Create On 2018/6/2 15:51
# File Name: time_eval.py
# Edit Author: lnest
# ------------------------------------
import time
from functools import wraps
def time_count(func):
    """Decorator that prints the wall-clock duration of each call to *func*.

    BUG FIX: the original captured ``time.time()`` once at decoration time,
    so every report measured "time since the function was decorated" rather
    than the call's own runtime. The timer now starts inside the wrapper.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        start = time.time()
        data = func(*args, **kwargs)
        print('run <{}> cost: {}'.format(func.__name__, time.time() - start))
        return data
    return wrapper
class TimeCountBlock:
    """Context manager that reports how long its with-block took."""
    def __init__(self, name=None):
        # Label used in the printed report line.
        self._name = name
        self._timepast = 0
    def __enter__(self):
        # Record the entry timestamp.
        self._start = time.time()
        return self
    def __exit__(self, exc_type, exc_value, exc_tb):
        self._end = time.time()
        self._timepast = self._end - self._start
        elapsed = round(self._timepast, 3)
        print(u'Runing block <{}> cost time: {}'.format(self._name, elapsed))
|
# Read n, k and the array A; print the ceiling of (len(A) - 1) / (k - 1).
n, k = map(int, input().split())
A = list(map(int, input().split()))
# -(-a // b) is integer ceiling division without importing math.
# (The original also computed A.index(min(A)) but never used it; removed.)
print(-(-(len(A) - 1) // (k - 1)))
|
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""An estimator class for training with TensorFlow on Amazon SageMaker."""
from __future__ import absolute_import
import logging
from typing import Optional, Union, Dict
from packaging import version
from sagemaker import image_uris, s3, utils
from sagemaker.deprecations import renamed_kwargs
from sagemaker.estimator import Framework, EstimatorBase
import sagemaker.fw_utils as fw
from sagemaker.tensorflow import defaults
from sagemaker.tensorflow.model import TensorFlowModel
from sagemaker.transformer import Transformer
from sagemaker.vpc_utils import VPC_CONFIG_DEFAULT
from sagemaker.workflow import is_pipeline_variable
from sagemaker.tensorflow.training_compiler.config import TrainingCompilerConfig
from sagemaker.workflow.entities import PipelineVariable
# SDK-wide logger shared under the "sagemaker" namespace.
logger = logging.getLogger("sagemaker")
class TensorFlow(Framework):
    """Handle end-to-end training and deployment of user-provided TensorFlow code."""
    _framework_name = "tensorflow"
    # Versions at or below this only support the legacy (non-script) mode.
    _HIGHEST_LEGACY_MODE_ONLY_VERSION = version.Version("1.10.0")
    # Last TF version that shipped Python 2 containers.
    _HIGHEST_PYTHON_2_VERSION = version.Version("2.1.1")
    def __init__(
        self,
        py_version: Optional[str] = None,
        framework_version: Optional[str] = None,
        model_dir: Optional[Union[str, PipelineVariable]] = None,
        image_uri: Optional[Union[str, PipelineVariable]] = None,
        distribution: Optional[Dict[str, str]] = None,
        compiler_config: Optional[TrainingCompilerConfig] = None,
        **kwargs,
    ):
        """Initialize a ``TensorFlow`` estimator.
        Args:
            py_version (str): Python version you want to use for executing your model training
                code. Defaults to ``None``. Required unless ``image_uri`` is provided.
            framework_version (str): TensorFlow version you want to use for executing your model
                training code. Defaults to ``None``. Required unless ``image_uri`` is provided.
                List of supported versions:
                https://github.com/aws/sagemaker-python-sdk#tensorflow-sagemaker-estimators.
            model_dir (str or PipelineVariable): S3 location where the checkpoint data and models
                can be exported to during training (default: None). It will be passed in the
                training script as one of the command line arguments. If not specified,
                one is provided based on your training configuration:
                * *distributed training with SMDistributed or MPI with Horovod* - ``/opt/ml/model``
                * *single-machine training or distributed training without MPI* - \
                    ``s3://{output_path}/model``
                * *Local Mode with local sources (file:// instead of s3://)* - \
                    ``/opt/ml/shared/model``
                To disable having ``model_dir`` passed to your training script,
                set ``model_dir=False``.
            image_uri (str or PipelineVariable): If specified, the estimator will use this image
                for training and hosting, instead of selecting the appropriate SageMaker official
                image based on framework_version and py_version.
                It can be an ECR url or dockerhub image and tag.
                Examples:
                    123.dkr.ecr.us-west-2.amazonaws.com/my-custom-image:1.0
                    custom-image:latest.
                If ``framework_version`` or ``py_version`` are ``None``, then
                ``image_uri`` is required. If also ``None``, then a ``ValueError``
                will be raised.
            distribution (dict): A dictionary with information on how to run distributed training
                (default: None). Currently, the following are supported:
                distributed training with parameter servers, SageMaker Distributed (SMD) Data
                and Model Parallelism, and MPI. SMD Model Parallelism can only be used with MPI.
                **To enable the SageMaker distributed data parallelism:**
                .. code:: python
                    { "smdistributed": { "dataparallel": { "enabled": True } } }
                .. seealso::
                    To learn more, see :ref:`sdp_api_docs_toc`.
                **To enable the SageMaker distributed model parallelism:**
                .. code:: python
                    {
                        "smdistributed": {
                            "modelparallel": {
                                "enabled":True,
                                "parameters": {
                                    "partitions": 2,
                                    "microbatches": 4,
                                    "placement_strategy": "spread",
                                    "pipeline": "interleaved",
                                    "optimize": "speed",
                                    "ddp": True,
                                }
                            },
                            "mpi": {
                                "enabled" : True,
                                "processes_per_host" : 8,
                            }
                    }
                .. note::
                    The SageMaker distributed model parallel library internally uses MPI.
                    In order to use model parallelism, MPI also must be enabled.
                .. seealso::
                    To learn more, see :ref:`smp_api_docs_toc`.
                .. seealso::
                    To find a complete list of parameters for SageMaker model parallelism,
                    see :ref:`sm-sdk-modelparallel-general`.
                **To enable Multi Worker Mirrored Strategy:**
                .. code:: python
                    {
                        "multi_worker_mirrored_strategy": {
                            "enabled": True
                        }
                    }
                This distribution strategy option is available for TensorFlow 2.9 and later in
                the SageMaker Python SDK v2.xx.yy and later.
                To learn more about the mirrored strategy for TensorFlow,
                see `TensorFlow Distributed Training
                <https://www.tensorflow.org/guide/distributed_training>`_
                in the *TensorFlow documentation*.
                **To enable MPI:**
                .. code:: python
                    {
                        "mpi": {
                            "enabled": True
                        }
                    }
                To learn more, see `Training with Horovod
                <https://sagemaker.readthedocs.io/en/stable/frameworks/tensorflow/using_tf.html#training-with-horovod>`_.
                **To enable parameter server:**
                .. code:: python
                    {
                        "parameter_server": {
                            "enabled": True
                        }
                    }
                To learn more, see `Training with parameter servers
                <https://sagemaker.readthedocs.io/en/stable/frameworks/tensorflow/using_tf.html#training-with-parameter-servers>`_.
            compiler_config (:class:`~sagemaker.tensorflow.TrainingCompilerConfig`):
                Configures SageMaker Training Compiler to accelerate training.
            **kwargs: Additional kwargs passed to the Framework constructor.
            .. tip::
                You can find additional parameters for initializing this class at
                :class:`~sagemaker.estimator.Framework` and
                :class:`~sagemaker.estimator.EstimatorBase`.
        """
        # Map deprecated kwarg names onto their current equivalents.
        distribution = renamed_kwargs("distributions", "distribution", distribution, kwargs)
        instance_type = renamed_kwargs(
            "train_instance_type", "instance_type", kwargs.get("instance_type"), kwargs
        )
        # Either (framework_version, py_version) or image_uri must be given.
        fw.validate_version_or_image_args(framework_version, py_version, image_uri)
        if py_version == "py2":
            logger.warning(
                fw.python_deprecation_warning(self._framework_name, defaults.LATEST_PY2_VERSION)
            )
        self.framework_version = framework_version
        self.py_version = py_version
        self.instance_type = instance_type
        if "enable_sagemaker_metrics" not in kwargs:
            # enable sagemaker metrics for TF v1.15 or greater:
            if framework_version and version.Version(framework_version) >= version.Version("1.15"):
                kwargs["enable_sagemaker_metrics"] = True
        super(TensorFlow, self).__init__(image_uri=image_uri, **kwargs)
        if distribution is not None:
            # Normalizes/validates the distribution dict against the chosen
            # instances, framework version and image.
            distribution = fw.validate_distribution(
                distribution,
                self.instance_groups,
                self._framework_name,
                framework_version,
                py_version,
                image_uri,
                kwargs,
            )
        self.model_dir = model_dir
        self.distribution = distribution or {}
        self._validate_args(py_version=py_version)
        if compiler_config is not None:
            if not isinstance(compiler_config, TrainingCompilerConfig):
                error_string = (
                    f"Expected instance of type {TrainingCompilerConfig}"
                    f"for argument compiler_config. "
                    f"Instead got {type(compiler_config)}"
                )
                raise ValueError(error_string)
            if compiler_config:
                compiler_config.validate(self)
        self.compiler_config = compiler_config
        if "profiler_config" in kwargs:
            fw.profiler_config_deprecation_warning(
                kwargs["profiler_config"], image_uri, self._framework_name, framework_version
            )
    def _validate_args(self, py_version):
        """Reject unsupported py_version / framework_version combinations.

        Raises AttributeError for Python 2 with a Python-3-only TF version,
        and ValueError for legacy-mode-only TF versions without an explicit
        image_uri.
        """
        if py_version == "py2" and self._only_python_3_supported():
            msg = (
                "Python 2 containers are only available with {} and lower versions. "
                "Please use a Python 3 container.".format(defaults.LATEST_PY2_VERSION)
            )
            raise AttributeError(msg)
        if self.image_uri is None and self._only_legacy_mode_supported():
            legacy_image_uri = image_uris.retrieve(
                "tensorflow",
                self.sagemaker_session.boto_region_name,
                instance_type=self.instance_type,
                version=self.framework_version,
                py_version=self.py_version,
                image_scope="training",
            )
            msg = (
                "TF {} supports only legacy mode. Please supply the image URI directly with "
                "'image_uri={}' and set 'model_dir=False'. If you are using any legacy parameters "
                "(training_steps, evaluation_steps, checkpoint_path, requirements_file), "
                "make sure to pass them directly as hyperparameters instead. For more, see "
                "https://sagemaker.readthedocs.io/en/v2.0.0.rc0/frameworks/tensorflow/upgrade_from_legacy.html."
            ).format(self.framework_version, legacy_image_uri)
            raise ValueError(msg)
    def _only_legacy_mode_supported(self):
        """Whether framework_version is old enough to support only legacy mode."""
        return version.Version(self.framework_version) <= self._HIGHEST_LEGACY_MODE_ONLY_VERSION
    def _only_python_3_supported(self):
        """Whether framework_version is newer than the last Python-2 release."""
        if not self.framework_version:
            return False
        return version.Version(self.framework_version) > self._HIGHEST_PYTHON_2_VERSION
    @classmethod
    def _prepare_init_params_from_job_description(cls, job_details, model_channel_name=None):
        """Convert the job description to init params that can be handled by the class constructor
        Args:
            job_details: the returned job details from a describe_training_job API call.
        Returns:
            dictionary: The transformed init_params
        """
        init_params = super(TensorFlow, cls)._prepare_init_params_from_job_description(
            job_details, model_channel_name
        )
        image_uri = init_params.pop("image_uri")
        framework, py_version, tag, script_mode = fw.framework_name_from_image(image_uri)
        if not framework:
            # If we were unable to parse the framework name from the image, it is not one of our
            # officially supported images, so just add the image to the init params.
            init_params["image_uri"] = image_uri
            return init_params
        model_dir = init_params["hyperparameters"].pop("model_dir", None)
        if model_dir:
            init_params["model_dir"] = model_dir
        elif script_mode is None:
            init_params["model_dir"] = False
        init_params["py_version"] = py_version
        # We switched image tagging scheme from regular image version (e.g. '1.0') to more
        # expressive containing framework version, device type and python version
        # (e.g. '1.5-gpu-py2'). For backward compatibility map deprecated image tag '1.0' to a
        # '1.4' framework version otherwise extract framework version from the tag itself.
        init_params["framework_version"] = (
            "1.4" if tag == "1.0" else fw.framework_version_from_tag(tag)
        )
        # Legacy images are required to be passed in explicitly.
        if not script_mode:
            init_params["image_uri"] = image_uri
        if framework != cls._framework_name:
            raise ValueError(
                "Training job: {} didn't use image for requested framework".format(
                    job_details["TrainingJobName"]
                )
            )
        return init_params
    def create_model(
        self,
        role=None,
        vpc_config_override=VPC_CONFIG_DEFAULT,
        entry_point=None,
        source_dir=None,
        dependencies=None,
        **kwargs,
    ):
        """Creates ``TensorFlowModel`` object to be used for creating SageMaker model entities.
        This can be done by deploying it to a SageMaker endpoint,
        or starting SageMaker Batch Transform jobs.
        Args:
            role (str): The ``TensorFlowModel``, which is also used during transform jobs.
                If not specified, the role from the Estimator is used.
            vpc_config_override (dict[str, list[str]]): Optional override for VpcConfig set on the
                model. Default: use subnets and security groups from this Estimator.
                * 'Subnets' (list[str]): List of subnet ids.
                * 'SecurityGroupIds' (list[str]): List of security group ids.
            entry_point (str): Path (absolute or relative) to the local Python source file which
                should be executed as the entry point to training. If ``source_dir`` is specified,
                then ``entry_point`` must point to a file located at the root of ``source_dir``.
                If not specified and ``endpoint_type`` is 'tensorflow-serving',
                no entry point is used. If ``endpoint_type`` is also ``None``,
                then the training entry point is used.
            source_dir (str): Path (absolute or relative or an S3 URI) to a directory with any other
                serving source code dependencies aside from the entry point file (default: None).
            dependencies (list[str]): A list of paths to directories (absolute or relative) with
                any additional libraries that will be exported to the container (default: None).
            **kwargs: Additional kwargs passed to
                :class:`~sagemaker.tensorflow.model.TensorFlowModel`.
        Returns:
            sagemaker.tensorflow.model.TensorFlowModel: A ``TensorFlowModel`` object.
                See :class:`~sagemaker.tensorflow.model.TensorFlowModel` for full details.
        """
        kwargs["name"] = self._get_or_create_name(kwargs.get("name"))
        # Inherit the training image and isolation setting unless overridden.
        if "image_uri" not in kwargs:
            kwargs["image_uri"] = self.image_uri
        if "enable_network_isolation" not in kwargs:
            kwargs["enable_network_isolation"] = self.enable_network_isolation()
        return TensorFlowModel(
            model_data=self.model_data,
            role=role or self.role,
            container_log_level=self.container_log_level,
            framework_version=self.framework_version,
            sagemaker_session=self.sagemaker_session,
            vpc_config=self.get_vpc_config(vpc_config_override),
            entry_point=entry_point,
            source_dir=source_dir,
            dependencies=dependencies,
            **kwargs,
        )
    def hyperparameters(self):
        """Return hyperparameters used by your custom TensorFlow code during model training."""
        hyperparameters = super(TensorFlow, self).hyperparameters()
        additional_hyperparameters = self._distribution_configuration(self.distribution)
        # NOTE: resolves a default model_dir lazily and stores it back on
        # self (side effect), unless model_dir was explicitly disabled.
        if self.model_dir is not False:
            self.model_dir = self.model_dir or self._default_s3_path(
                "model", mpi=additional_hyperparameters.get(self.LAUNCH_MPI_ENV_NAME, False)
            )
            additional_hyperparameters["model_dir"] = self.model_dir
        hyperparameters.update(
            EstimatorBase._json_encode_hyperparameters(additional_hyperparameters)
        )
        if self.compiler_config:
            training_compiler_hyperparameters = self.compiler_config._to_hyperparameter_dict()
            hyperparameters.update(
                EstimatorBase._json_encode_hyperparameters(training_compiler_hyperparameters)
            )
        return hyperparameters
    def _default_s3_path(self, directory, mpi=False):
        """Return the default location for *directory*, depending on mode.

        Local mode with local code uses a shared local path; MPI uses the
        container model path; otherwise an S3 path under the output path
        (or default bucket for pipeline variables) is built.
        """
        local_code = utils.get_config_value("local.local_code", self.sagemaker_session.config)
        if self.sagemaker_session.local_mode and local_code:
            return "/opt/ml/shared/{}".format(directory)
        if mpi:
            return "/opt/ml/model"
        if self._current_job_name:
            if is_pipeline_variable(self.output_path):
                return s3.s3_path_join(
                    "s3://",
                    self.sagemaker_session.default_bucket(),
                    self.sagemaker_session.default_bucket_prefix,
                    self._current_job_name,
                    directory,
                )
            return s3.s3_path_join(self.output_path, self._current_job_name, directory)
        return None
    def _validate_and_set_debugger_configs(self):
        """Disable Debugger Hook Config for ParameterServer (PS) as it is not supported in smdebug.
        Else, set default HookConfig
        """
        super(TensorFlow, self)._validate_and_set_debugger_configs()
        ps_enabled = "parameter_server" in self.distribution and self.distribution[
            "parameter_server"
        ].get("enabled", False)
        if ps_enabled:
            if self.debugger_hook_config is not None or self.debugger_rule_configs is not None:
                logger.info(
                    "Amazon SageMaker Debugger does not currently support "
                    "Parameter Server distribution"
                )
            self.debugger_hook_config = None
            self.debugger_rule_configs = None
    def transformer(
        self,
        instance_count,
        instance_type,
        strategy=None,
        assemble_with=None,
        output_path=None,
        output_kms_key=None,
        accept=None,
        env=None,
        max_concurrent_transforms=None,
        max_payload=None,
        tags=None,
        role=None,
        volume_kms_key=None,
        entry_point=None,
        vpc_config_override=VPC_CONFIG_DEFAULT,
        enable_network_isolation=None,
        model_name=None,
    ):
        """Return a ``Transformer`` that uses a SageMaker Model based on the training job.
        It reuses the SageMaker Session and base job name used by the Estimator.
        Args:
            instance_count (int): Number of EC2 instances to use.
            instance_type (str): Type of EC2 instance to use, for example, 'ml.c4.xlarge'.
            strategy (str): The strategy used to decide how to batch records in a single request
                (default: None). Valid values: 'MultiRecord' and 'SingleRecord'.
            assemble_with (str): How the output is assembled (default: None). Valid values: 'Line'
                or 'None'.
            output_path (str): S3 location for saving the transform result. If not specified,
                results are stored to a default bucket.
            output_kms_key (str): Optional. KMS key ID for encrypting the transform output
                (default: None).
            accept (str): The accept header passed by the client to
                the inference endpoint. If it is supported by the endpoint,
                it will be the format of the batch transform output.
            env (dict): Environment variables to be set for use during the transform job
                (default: None).
            max_concurrent_transforms (int): The maximum number of HTTP requests to be made to
                each individual transform container at one time.
            max_payload (int): Maximum size of the payload in a single HTTP request to the
                container in MB.
            tags (list[dict]): List of tags for labeling a transform job. If none specified, then
                the tags used for the training job are used for the transform job.
            role (str): The IAM Role ARN for the ``TensorFlowModel``, which is also used
                during transform jobs. If not specified, the role from the Estimator is used.
            volume_kms_key (str): Optional. KMS key ID for encrypting the volume attached to the ML
                compute instance (default: None).
            entry_point (str): Path (absolute or relative) to the local Python source file which
                should be executed as the entry point to training. If ``source_dir`` is specified,
                then ``entry_point`` must point to a file located at the root of ``source_dir``.
                If not specified and ``endpoint_type`` is 'tensorflow-serving',
                no entry point is used. If ``endpoint_type`` is also ``None``,
                then the training entry point is used.
            vpc_config_override (dict[str, list[str]]): Optional override for
                the VpcConfig set on the model.
                Default: use subnets and security groups from this Estimator.
                * 'Subnets' (list[str]): List of subnet ids.
                * 'SecurityGroupIds' (list[str]): List of security group ids.
            enable_network_isolation (bool): Specifies whether container will
                run in network isolation mode. Network isolation mode restricts
                the container access to outside networks (such as the internet).
                The container does not make any inbound or outbound network
                calls. If True, a channel named "code" will be created for any
                user entry script for inference. Also known as Internet-free mode.
                If not specified, this setting is taken from the estimator's
                current configuration.
            model_name (str): Name to use for creating an Amazon SageMaker
                model. If not specified, the estimator generates a default job name
                based on the training image name and current timestamp.
        """
        role = role or self.role
        model_name = self._get_or_create_name(model_name)
        if self.latest_training_job is None:
            # No trained model yet: return a Transformer wired to the model
            # name only, which is sufficient for building workflow configs.
            logger.warning(
                "No finished training job found associated with this estimator. Please make sure "
                "this estimator is only used for building workflow config"
            )
            return Transformer(
                model_name,
                instance_count,
                instance_type,
                strategy=strategy,
                assemble_with=assemble_with,
                output_path=output_path,
                output_kms_key=output_kms_key,
                accept=accept,
                max_concurrent_transforms=max_concurrent_transforms,
                max_payload=max_payload,
                env=env or {},
                tags=tags,
                base_transform_job_name=self.base_job_name,
                volume_kms_key=volume_kms_key,
                sagemaker_session=self.sagemaker_session,
            )
        if enable_network_isolation is None:
            enable_network_isolation = self.enable_network_isolation()
        model = self.create_model(
            role=role,
            vpc_config_override=vpc_config_override,
            entry_point=entry_point,
            enable_network_isolation=enable_network_isolation,
            name=model_name,
        )
        return model.transformer(
            instance_count,
            instance_type,
            strategy=strategy,
            assemble_with=assemble_with,
            output_path=output_path,
            output_kms_key=output_kms_key,
            accept=accept,
            env=env,
            max_concurrent_transforms=max_concurrent_transforms,
            max_payload=max_payload,
            tags=tags,
            volume_kms_key=volume_kms_key,
        )
|
class Probe(object):
    """One GPS probe sample: identity, timestamp, source and motion fields."""

    def __init__(self, sampleID, dateTime, sourceCode, latitude, longitude, altitude, speed, heading):
        self.sampleID = sampleID
        self.dateTime = dateTime
        self.sourceCode = sourceCode
        self.latitude = latitude
        self.longitude = longitude
        self.altitude = altitude
        self.speed = speed
        self.heading = heading

    def __str__(self):
        # Render as a CSV row in the same field order as the constructor.
        fields = (self.sampleID, self.dateTime, self.sourceCode,
                  self.latitude, self.longitude, self.altitude,
                  self.speed, self.heading)
        return ','.join(str(field) for field in fields)
""" download images from OMERO server with list of image IDs
"""
import os
import argparse
import getpass
import numpy as np
import pandas as pd
#from scipy.misc import imresize
from omero.gateway import BlitzGateway
# Command-line options for the downloader.
parser = argparse.ArgumentParser(description='Download from OMERO server')
parser.add_argument('-s', type=str, default='images',
                    help='save directory')
parser.add_argument('-i', type=str, default='all_meta.csv',
                    help='csv datafile with imageID column')
parser.add_argument('-x', type=int, default=0,
                    help='save image width resolution')
parser.add_argument('-y', type=int, default=0,
                    help='save image height resolution')
parser.add_argument('-c', type=int, default=0,
                    help='save resolution (255 for 8 bit)')
parser.add_argument('-o', type=str, default='omero.ohsu.edu',
                    help='OMERO host')
args = parser.parse_args()
# Credentials are prompted interactively so they never land in shell history.
un = raw_input('enter your OMERO username for %s: ' % args.o)
pw = getpass.getpass('enter your OMERO password for %s: ' % args.o)
if not os.path.exists(args.s):
    os.makedirs(args.s)
# Only the ImageID column is needed to drive the downloads.
df = pd.read_csv(args.i, usecols=['ImageID'])
conn = BlitzGateway(un, pw, host=args.o, port=4064)
conn.connect()
try:
    for imageID in df.ImageID:
        print('downloading %s ' % imageID)
        img = conn.getObject("Image", imageID)
        pixels = img.getPrimaryPixels()
        channels = []
        for i in range(img.getSizeC()):
            # First z-section, channel i, first timepoint, as float32.
            ch = np.array(pixels.getPlane(0, i, 0), dtype='f')
            # Resize support disabled along with the scipy.misc import above.
            # if (args.x != 0) & (args.y != 0):
            #     ch = imresize(ch, (args.x, args.y))
            if args.c != 0:
                # Rescale channel intensities into the 0..args.c range.
                ch = (ch / np.amax(ch)) * args.c
            channels.append(ch)
        # Stack channels depth-wise and save as <imageID>.npy in the save dir.
        plane = np.dstack(channels)
        np.save(os.path.join(args.s, str(imageID)), plane)
finally:
    # Always release the server session, even if a download fails mid-way.
    conn.close()
print('done!')
|
def somar(numero1, numero2):
    """Print and return the sum of the two numbers.

    Returning the value (instead of only printing it) lets callers use the
    result programmatically; existing callers that ignore it are unaffected.
    """
    soma = numero1 + numero2
    print("O resultado é: " + str(soma))
    return soma
# Read the two operands from the user; int() raises ValueError on bad input.
num1 = int(input("Digite o primeiro numero: "))
num2 = int(input("Digite o segundo numero: "))
somar(num1, num2)
|
from typing import Text
text = input("Enter a text : ")
# A text is considered spam if it contains any of these known spam phrases.
SPAM_KEYWORDS = ("money", "buy now", "click this", "subscribe this")
spam = any(keyword in text for keyword in SPAM_KEYWORDS)
#checking
if spam:
    print("This is SPAM")
else:
    print("This is not SPAM")
from collections import deque
# Spelled-out digit names mapped to their numeric characters.
change = {'zero': '0', 'one': '1', 'two': '2', 'three': '3', 'four': '4', 'five': '5', 'six': '6', 'seven': '7',
          'eight': '8', 'nine': '9'}
def solution(s):
    """Convert a string mixing digits and spelled-out digit words to an int."""
    result = s
    for word, digit in change.items():
        result = result.replace(word, digit)
    return int(result)
def solution2(s):
    """Deque-based variant of solution(): scan characters left to right,
    keeping digits as-is and buffering letters until they spell one of the
    digit words in `change`."""
    digits = []
    queue = deque(list(s))
    while queue:
        ch = queue.popleft()
        if ch.isdigit():
            digits.append(ch)
        else:
            buffer = ch
            while True:
                if not queue:
                    break
                nxt = queue.popleft()
                if nxt.isdigit():
                    # Put the digit back; the outer loop will consume it.
                    queue.appendleft(nxt)
                    break
                buffer += nxt
                if buffer in change:
                    digits.append(change[buffer])
                    break
    return int(''.join(digits))
from flask import Flask, redirect, url_for, request, render_template
from src.DataBase import *
from src.config import *
from src.Visualization import *
from src.mkdirPath import *
import matplotlib.pyplot as plt
import os
from datetime import timedelta
# Flask application; SEND_FILE_MAX_AGE_DEFAULT=0 disables static-file caching
# so freshly generated plot images are always re-fetched by the browser.
app = Flask(__name__)
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
# Main page
@app.route("/", methods=['POST', 'GET'])
def home():
    """Render the landing page with the main parameter form."""
    page = render_template('param.html')
    return page
# Parameter page to plot one single data series
@app.route("/paramSingle", methods=['POST', 'GET'])
def single():
    """Render the form for plotting a single data series."""
    page = render_template('paramSingle.html')
    return page
# Parameter page to plot one single data series for a specific date
@app.route("/paramSingleDate", methods=['POST', 'GET'])
def singleDate():
    """Render the form for plotting a single series for a given month/year."""
    page = render_template('paramSingleDate.html')
    return page
# Parameter page to compare two data series
@app.route("/paramCompare", methods=['POST', 'GET'])
def Compare():
    """Render the form for comparing two weather data series."""
    page = render_template('paramCompare.html')
    return page
# Parameter page to compare selected data against air quality
@app.route("/paramCompareAirQuality", methods=['POST', 'GET'])
def AirCompare():
    """Render the form for comparing a weather series with air quality."""
    page = render_template('paramCompareAirQuality.html')
    return page
#Single Plot page
@app.route('/plot', methods=['POST', 'GET'])
def image():
    """Plot a single data series for the city/label chosen on the form.

    Reads the form fields, refreshes the weather database when the
    15-minute cache window has elapsed, extracts the requested column
    and renders the resulting figure.
    """
    #Get information from the param single page
    if request.method == 'POST':
        city = request.form["city"]
        label = request.form["data"]
        s_number = int(request.form["sample_number"])
        #Check the value of sample number
        if s_number < 0:
            return render_template('paramSingleAlerte.html')
        #The Toulouse_La_Salade dataset names its temperature column differently
        if(city == "Toulouse_La_Salade" and label == "temperature_partie_entiere"):
            label ="temperature_en_degre_c"
        #Get the number index of the selected city
        nameIndex = NAME_INDEX.index(city)
        #Set information of the DB
        output_dir = TOULOUSE_PATH_DB
        url = TOULOUSE_URL_LIST[nameIndex]
        name = TOULOUSE_NAME_LIST[nameIndex]
        # Create folder for data base
        mkdir_p(output_dir)
        # Check time to download the new DB or not, update every 15min (Weather DB)
        # NOTE(review): yearTime is a constant 365-day delta, so this comparison
        # does not track wall-clock time — confirm the intended cache logic.
        yearTime = timedelta(days=365)
        global TIME
        if (yearTime.total_seconds() < TIME[nameIndex]+(15*60)):
            UPDATE_DB = 0
        else:
            UPDATE_DB = 1
            TIME[nameIndex] = yearTime.total_seconds()
        # Get the corresponding data base and read the requested column
        toulouseDB = DataBase(url, name, UPDATE_DB)
        toulouseDB._getData(name)
        data = toulouseDB._getSpecificData(label)
        # Plot data
        visu = Visualization()
        figName = visu._plotData(label, data, s_number)
        #Go to the plot html page
        return render_template('plot.html', image_path=figName)
#Single Plot page for a selected date
@app.route('/plotDate', methods=['POST', 'GET'])
def imageDate():
    """Plot a single data series restricted to a chosen month/year."""
    #Get information from the html param page (paramSingleDate)
    if request.method == 'POST':
        city = request.form["city"]
        label = request.form["data"]
        month = request.form["month"]
        year = request.form["year"]
        s_number = int(request.form["sample_number"])
        # Check the value of sample number
        if s_number < 0:
            return render_template('paramSingleDateAlerteSample.html')
        #The Toulouse_La_Salade dataset names its temperature column differently
        if(city == "Toulouse_La_Salade" and label == "temperature_partie_entiere"):
            label ="temperature_en_degre_c"
        #Get the number index of the selected city
        nameIndex = NAME_INDEX.index(city)
        output_dir = TOULOUSE_PATH_DB
        url = TOULOUSE_URL_LIST[nameIndex]
        name = TOULOUSE_NAME_LIST[nameIndex]
        # Create folder for data base
        mkdir_p(output_dir)
        # Check time to download the new DB or not, update every 15min (Weather DB)
        # NOTE(review): yearTime is a constant delta — see image(); confirm
        # the intended cache logic.
        yearTime = timedelta(days=365)
        global TIME
        if (yearTime.total_seconds() < TIME[nameIndex]+(15*60)):
            UPDATE_DB = 0
        else:
            UPDATE_DB = 1
            TIME[nameIndex] = yearTime.total_seconds()
        # Get the corresponding data base and read the month/year slice
        toulouseDB = DataBase(url, name, UPDATE_DB)
        toulouseDB._getData(name)
        data = toulouseDB._getDateData(label, month, year)
        if len(data) == 0:
            # No samples for that month/year: show the alert page instead.
            return render_template('paramSingleDateAlerte.html')
        toulouseDB._getDate()
        # Plot data
        visu = Visualization()
        figName = visu._plotData(label, data, s_number)
        return render_template('plotDate.html', image_path=figName)
#Compare Plot page
@app.route('/plotCompare', methods=['POST', 'GET'])
def imageCompare():
    """Plot two weather series (possibly from different cities) side by side."""
    #Get information from the html param page (paramCompare)
    if request.method == 'POST':
        city1 = request.form["city"]
        label1 = request.form["data"]
        s_number = int(request.form["sample_number"])
        city2 = request.form["city2"]
        label2 = request.form["data2"]
        # Check the value of sample number
        if s_number < 0:
            return render_template('paramCompareAlerte.html')
        #The Toulouse_La_Salade dataset names its temperature column differently
        if(city1 == "Toulouse_La_Salade" and label1 == "temperature_partie_entiere"):
            label1 ="temperature_en_degre_c"
        if(city2 == "Toulouse_La_Salade" and label2 == "temperature_partie_entiere"):
            label2 ="temperature_en_degre_c"
        #Get the number index of the selected cities and set information
        nameIndex = NAME_INDEX.index(city1)
        output_dir1 = TOULOUSE_PATH_DB
        url1 = TOULOUSE_URL_LIST[nameIndex]
        name1 = TOULOUSE_NAME_LIST[nameIndex]
        nameIndex2 = NAME_INDEX.index(city2)
        output_dir2 = TOULOUSE_PATH_DB
        url2 = TOULOUSE_URL_LIST[nameIndex2]
        name2 = TOULOUSE_NAME_LIST[nameIndex2]
        # Create folder for data base
        mkdir_p(output_dir1)
        mkdir_p(output_dir2)
        # Check time to download the new DB or not, update every 15min (Weather DB)
        # NOTE(review): yearTime is a constant delta — see image(). Also only
        # city1's TIME entry is consulted/updated even though two databases
        # are downloaded — confirm this is intended.
        yearTime = timedelta(days=365)
        global TIME
        if (yearTime.total_seconds() < TIME[nameIndex]+(15*60)):
            UPDATE_DB = 0
        else:
            UPDATE_DB = 1
            TIME[nameIndex] = yearTime.total_seconds()
        # Get and read the two data bases
        dataBase1 = DataBase(url1, name1, UPDATE_DB)
        dataBase2 = DataBase(url2, name2, UPDATE_DB)
        dataBase1._getData(name1)
        dataBase2._getData(name2)
        data = dataBase1._getSpecificData(label1)
        data2 = dataBase2._getSpecificData(label2)
        # Plot data
        visu = Visualization()
        figName = visu._plotCompare(label1, data, label2, data2, s_number, city1, city2)
        return render_template('plotCompare.html', image_path=figName)
#The weather data and air quality compare page
@app.route('/plotCompareAirQuality', methods=['POST', 'GET'])
def imageAirCompare():
    """Plot a weather series against the air-quality measurements."""
    #Get information from the html param page (paramCompareAirQuality)
    if request.method == 'POST':
        city1 = request.form["city"]
        label1 = request.form["data"]
        s_number = int(request.form["sample_number"])
        city2 = request.form["city2"]
        # Air-quality values always live in the "valeur" column.
        label2 = "valeur"
        # Check the value of sample number
        if s_number < 0:
            return render_template('paramCompareAirQualityAlerte.html')
        #The Toulouse_La_Salade dataset names its temperature column differently
        if(city1 == "Toulouse_La_Salade" and label1 == "temperature_partie_entiere"):
            label1 ="temperature_en_degre_c"
        #Get the number index of the selected city and set information
        nameIndex = NAME_INDEX.index(city1)
        output_dir1 = TOULOUSE_PATH_DB
        url1 = TOULOUSE_URL_LIST[nameIndex]
        name1 = TOULOUSE_NAME_LIST[nameIndex]
        #Set the information for the Air quality data base
        output_dir2 = AIR_PATH_DB
        url2 = AIR_URL
        name2 = AIR_NAME
        # Create folder for data base
        mkdir_p(output_dir1)
        mkdir_p(output_dir2)
        # Check time to download the new DB or not, update every 9min (Air Quality DB)
        # NOTE(review): yearTime is a constant delta — see image(); confirm
        # the intended cache logic.
        yearTime = timedelta(days=365)
        global TIME_AIR
        global TIME
        if ((yearTime.total_seconds() < TIME_AIR+(9*60)) or (yearTime.total_seconds() < TIME[nameIndex]+(15*60))):
            UPDATE_DB = 0
        else:
            UPDATE_DB = 1
            TIME_AIR = yearTime.total_seconds()
            TIME[nameIndex] = yearTime.total_seconds()
        # Get and read the two data bases (weather + air quality)
        dataBase1 = DataBase(url1, name1, UPDATE_DB)
        dataBase2 = DataBase(url2, name2, UPDATE_DB)
        dataBase1._getData(name1)
        dataBase2._getDataAIR(name2)
        data = dataBase1._getSpecificData(label1)
        data2 = dataBase2._getSpecificData(label2)
        # Plot data
        visu = Visualization()
        figName = visu._plotCompare(label1, data, label2, data2, s_number, city1, city2)
        return render_template('plotCompare.html', image_path=figName)
# Run the development server only when executed directly (not when imported).
if __name__ == "__main__":
    app.run()
|
import abc
from utilities import integration_adaptors_logger as log
import pathlib
from typing import Tuple, NamedTuple, Union
from tornado.httputil import HTTPServerRequest
from fake_spine import fake_spine_configuration
logger = log.IntegrationAdaptorsLogger(__name__)
class InboundRequest(NamedTuple):
    """A request that fake Spine should later send inbound to the adaptor."""
    body: str  # payload to send inbound
    headers: dict  # HTTP headers to send with the payload
class OutboundResponse(NamedTuple):
    """The response fake Spine returns directly to the caller."""
    status: int  # HTTP status code
    body: str  # response body text
class SpineResponse(abc.ABC):
    """Interface for objects that produce fake Spine responses."""
    @abc.abstractmethod
    def get_outbound_response(self, request: HTTPServerRequest) -> OutboundResponse:
        """Return the outbound response for the given incoming request."""
        pass
    def get_inbound_request(self, request: HTTPServerRequest) -> Union[InboundRequest, None]:
        """Return an inbound request to forward to the adaptor, or None (default)."""
        return None
class SpineResponseBuilder(SpineResponse):
    """Builder for a single canned Spine response loaded from a file on disk."""
    def __init__(self):
        self.response_file_location = None  # file under configured_responses/ holding the response body
        self.inbound_request_file_location = None  # optional file holding an inbound request payload
        self.response_code = 202  # HTTP status to return
        self.config = fake_spine_configuration.FakeSpineConfiguration()
    def override_response(self, response_file_location: str) -> "SpineResponseBuilder":
        """Set the file the response body is read from; returns self for chaining."""
        self.response_file_location = response_file_location
        return self
    def override_inbound_request(self, inbound_request_file_location: str) -> "SpineResponseBuilder":
        """Set the file an inbound request payload is read from; returns self for chaining."""
        self.inbound_request_file_location = inbound_request_file_location
        return self
    def override_response_code(self, response_code: int) -> "SpineResponseBuilder":
        """Set the HTTP status code to return; returns self for chaining."""
        self.response_code = response_code
        return self
    def get_outbound_response(self, request: HTTPServerRequest) -> OutboundResponse:
        """Read the configured response file and wrap it with the configured status."""
        response_from_file = pathlib.Path(self.config.ROOT_DIR) / "configured_responses" / self.response_file_location
        return OutboundResponse(self.response_code, response_from_file.read_text())
class SpineMultiResponse(SpineResponse):
    """A class to control the response returned to the MHS depending on how many calls have been made previously"""
    def __init__(self):
        self.responses = []  # ordered SpineResponse instances to cycle through
        self.current_response_count = 0  # index of the next response to serve
    def with_ordered_response(self, response: SpineResponseBuilder) -> "SpineMultiResponse":
        """
        Appends a given `SpineResponse` to the list, the order of the response list reflects the order in which
        this method was called
        :param response: A pre-configured SpineResponse instance
        :return: self
        """
        self.responses.append(response)
        return self
    def get_outbound_response(self, request: HTTPServerRequest) -> OutboundResponse:
        """
        Gets the response of the next `SpineResponse` object in the list, if the final response in the list has been
        reached, the count will reset to the first
        :return: The next `SpineResponse`
        """
        # Advance the counter modulo the list length so repeated calls wrap.
        response = self.responses[self.current_response_count]
        self.current_response_count = (self.current_response_count + 1) % len(self.responses)
        return response.get_outbound_response(request)
|
import os
from setuptools import setup, find_packages
# The package version is maintained in a single place:
# frozenordereddict/VERSION.txt.
with open(os.path.join(os.path.dirname(__file__), "frozenordereddict", 'VERSION.txt')) as f:
    __version__ = f.read().strip()
# Packaging metadata for PyPI distribution.
setup(
    name = "frozenordereddict",
    version = __version__,
    packages = find_packages(),
    install_requires = [],
    author = "Warren A. Smith",
    author_email = "warren@wandrsmith.net",
    description = "Frozen OrderedDict.",
    long_description = "An immutable wrapper around an OrderedDict",
    long_description_content_type = "text/plain",
    license = "MIT",
    keywords = "ordereddict frozendict frozenordereddict orderedfrozendict ordered frozen dict",
    url = "https://github.com/wsmith323/frozenordereddict",
    test_suite = "tests",
    classifiers = [
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
    ],
)
|
from scapy.all import *
import netifaces
# Interface to sniff on and its MAC address (used to identify packets
# addressed to this host). Python 2 script (print statements).
iface = "veth2"
imac = netifaces.ifaddresses(iface)[netifaces.AF_LINK][0]['addr']
def handler(pkt):
    # Reply to each sniffed UDP-over-IPv6 packet addressed to this interface
    # with an empty UDP packet whose addresses and ports are swapped.
    try:
        # pkt.show2()
        mac_from = pkt['Ethernet'].src
        mac_to = pkt['Ethernet'].dst
        ip_from = pkt['IPv6'].src
        ip_to = pkt['IPv6'].dst
        port_from = pkt['UDP'].sport
        port_to = pkt['UDP'].dport
        # ipv6 does not have checksum
        # Deleting the UDP checksum and rebuilding the packet makes scapy
        # recompute it, so the original and recomputed values can be compared.
        org_chksum_udp = pkt['UDP'].chksum
        del pkt['UDP'].chksum
        pkt = pkt.__class__(bytes(pkt))
        new_chksum_udp = pkt['UDP'].chksum
        if mac_to == imac:
            # pkt.show2()
            print "server receive:", ip_from, ip_to, port_from, port_to, hex(org_chksum_udp), hex(new_chksum_udp)
            sendp(Ether(src=mac_to, dst=mac_from)/IPv6(src=ip_to, dst=ip_from)/UDP(dport=port_from, sport=port_to), iface=iface, verbose=False)
            print "server send:", ip_to, ip_from
    except Exception as e:
        # Best effort: non-UDP/malformed packets are logged and skipped.
        print str(e)
# Sniff indefinitely, running handler on every captured packet.
sniff(iface=iface, prn=handler)
from gym.envs.registration import register
# Package version, exposed for setup metadata and introspection.
__version__ = '0.0.5'
# Register the single-intervention SIR environment under the id 'sir-v0'.
register(
    id='sir-v0',
    entry_point='gym_epidemic.envs.sir_single:EnvSIRMorris',
)
# Register the multi-intervention SIR environment under the id 'sir_multi-v0'.
register(
    id='sir_multi-v0',
    entry_point='gym_epidemic.envs.sir_multi:EnvSIRMultiMorris',
)
|
import json
from random import randint
from datetime import datetime, timedelta
from difflib import SequenceMatcher
from flask import Flask, render_template, request
from extensions import db, AdminCode, Schedule, GuessItSession, GameRound, Definition, Player, text
class Worker():
    """In-memory state machine for one running 'guess the word' session.

    Holds the rolling chat buffer, the connected players, the scheduled
    round words and the per-round definition/points accumulators.  State is
    process-local; the database is only touched to persist points and
    definitions when a round or session ends.
    """
    def __init__(self, db):
        self.db = db
        # Fixed-size rolling chat buffer of the 25 most recent messages.
        self.messages = []
        for j in range(25):
            self.messages.append("")
        self.currentMessage = 0
        self.players = []
        # GameRound rows of the active session (filled in getCurrentState).
        self.words = []
        self.currentWord = 0
        self.lastWord = 0
        # Leader hints accumulated into the round's definition (slot 0 unused).
        self.currentDefinition = [""]
        # One entry per correct guess this round; summed when the round ends.
        self.currentPoints = [0]
        self.active = 1
    def setWords(self, words):
        """Replace the full list of round words."""
        self.words = words
    def addWord(self, word):
        """Append a single round word."""
        self.words.append(word)
    def addPlayer(self, player):
        """Register a player's nickname for leader rotation."""
        self.players.append(player)
    def addPoints(self):
        """Record one point scored in the current round."""
        self.currentPoints.append(1)
    def getLeader(self):
        """Return the current round leader's nickname (round-robin rotation).

        NOTE(review): raises ZeroDivisionError when no players have joined;
        getCurrentState guards via len(self.players) — confirm other callers.
        """
        return self.players[self.currentWord % len(self.players)]
    def addMessage(self, msg):
        """Store MSG in the rolling 25-slot chat buffer."""
        self.messages[self.currentMessage % 25] = msg
        self.currentMessage += 1
    def setDefinition(self, definition):
        """Append one of the leader's hints to the round definition."""
        self.currentDefinition.append(definition)
    def getCurrentWord(self):
        """Current round word, lowercased with spaces removed, for matching."""
        return self.words[self.currentWord].word.replace(" ", "").lower()
    def getCurrentState(self, nickname):
        """Return the game state for NICKNAME as a JSON string, or a plain
        status message when no session is running.

        Also advances the round pointer based on wall-clock time, persisting
        the finished round's points and definition as a side effect.
        """
        try:
            time = datetime.now().strftime("%Y-%m-%d %H:%M")
            # Most recent schedule that has not ended yet.
            schedule = Schedule.query.filter(Schedule.dateHourEnd >= time).order_by(text("dateHour desc")).first()
            session = GuessItSession.query.filter(GuessItSession.Schedule == schedule).one()
            if schedule != None and session != None:
                self.active = 1
                if schedule.dateHour > time or len(self.players) == 0:
                    self.currentWord = 0
                    return "Next session starts at " + schedule.dateHour
                self.words = GameRound.query.filter(GameRound.GuessItSession == session)
                found = False
                # Find the first round whose start time is still in the
                # future; its predecessor is the round currently in play.
                for i in range(self.words.count()):
                    if self.words[i].time > time:
                        if i > 0 and self.words[i - 1].time <= time:
                            self.currentWord = i
                        if self.currentWord != self.lastWord:
                            # Round changed: persist the finished round's
                            # points/definition and reset per-round state.
                            self.words[self.lastWord].addPoints(sum(self.currentPoints))
                            definition = Definition.query.filter(Definition.GameRound == self.words[self.lastWord]).one()
                            definition.definition = ", ".join(self.currentDefinition[1:])
                            if(definition.definition != ""):
                                db.session.commit()
                            self.currentDefinition = [""]
                            self.currentPoints = [0]
                            self.lastWord = self.currentWord
                            self.messages = []
                            for j in range(25):
                                self.messages.append("")
                            self.currentMessage = 0
                        found = True
                        break
                if not found:
                    # All round times have passed: finalize and end session.
                    if self.active:
                        self.words[self.currentWord].addPoints(sum(self.currentPoints))
                        definition = Definition.query.filter(Definition.GameRound == self.words[self.currentWord]).one()
                        definition.definition = ", ".join(self.currentDefinition[1:])
                        if(definition.definition != ""):
                            db.session.commit()
                        self.currentDefinition = [""]
                        self.currentPoints = [0]
                        self.messages = []
                        for j in range(25):
                            self.messages.append("")
                        self.currentMessage = 0
                        self.active = 0
                        self.words = []
                        self.currentWord = 0
                        self.lastWord = 0
                    return "Session has ended"
                # The leader sees the real word; everyone else the shadow word.
                if nickname == self.getLeader():
                    return json.dumps({"leader": True, "leaderName": self.getLeader(), "definition" : self.currentDefinition[len(self.currentDefinition) - 1], "word" : self.words[self.currentWord].word, "messages": self.messages})
                else:
                    return json.dumps({"leader": False, "leaderName": self.getLeader(), "definition" : self.currentDefinition[len(self.currentDefinition) - 1], "word" : self.words[self.currentWord].shadow_word, "messages": self.messages})
            else:
                # No matching schedule/session: reset all state.
                self.messages = []
                for j in range(25):
                    self.messages.append("")
                self.currentMessage = 0
                self.words = []
                self.currentWord = 0
                self.lastWord = 0
                self.active = 0
                return "No sessions coming"
        except Exception as e:
            # NOTE(review): .one() raises when no session exists, which lands
            # here by design — but unrelated errors are swallowed too.
            self.messages = []
            for j in range(25):
                self.messages.append("")
            self.currentMessage = 0
            self.words = []
            self.currentWord = 0
            self.lastWord = 0
            self.active = 0
            return "No sessions coming"
def create_app(config_file="settings.py"):
    """Application factory: build the Flask app, register all game routes
    and seed the database with demo players plus a starting session.

    :param config_file: pyfile with Flask/SQLAlchemy settings.
    :return: the configured Flask application.
    """
    app = Flask(__name__)
    app.config.from_pyfile(config_file)
    def getShadowWord(word):
        """Mask WORD with underscores, revealing len(word)//3 random letters."""
        nrands = len(word) // 3
        temp_word = ['_' for i in word]
        for i in range(0, nrands):
            # Retry until an unrevealed position is found.
            while(True):
                nrand = randint(0, len(word) - 1)
                if temp_word[nrand] == '_':
                    temp_word[nrand] = word[nrand]
                    break
        return "".join(temp_word)
    @app.route('/')
    def home_page():
        # Liveness page showing the app name and the current server time.
        return 'Server \'{}\' active at {}'.format(app.config.get('FLASK_APP', ''), str(datetime.now()))
    @app.route('/ping')
    def pong():
        # Trivial health check.
        return 'pong'
    db.init_app(app)
    # Single process-wide game state shared by all request handlers below.
    worker = Worker(db)
    @app.route('/admin-code/<code>')
    def admin_code(code):
        # Validate an admin access code against the AdminCode table.
        adminCode = AdminCode.query.filter_by(code=code).first()
        if adminCode is not None:
            return "success"
        else:
            return "fail"
    @app.route('/new-game-session/<session>')
    def new_game_session(session):
        # Create a schedule, a session and its timed rounds from JSON.
        try:
            # {"dateHour": "2020-11-07 22:22", "duration": 10, "words": ["a", "b"]}
            _session = json.loads(session)
            dateHour = _session.get('dateHour')
            now = datetime.now().strftime("%Y-%m-%d %H:%M")
            # if dateHour <= now:
            #     raise Exception()
            duration = _session.get('duration')
            _words = _session.get('words')
            word_time = int(duration/len(_words))
            dateHourEnd = str(datetime.strptime(
                dateHour, "%Y-%m-%d %H:%M") + timedelta(seconds=(word_time + duration)*60 + 10 * len(_words)))
            dateHourEnd = dateHourEnd[0:len(dateHourEnd) - 3]
            # Reject sessions overlapping any existing schedule.
            unavailable = Schedule.query.all()
            for u in unavailable:
                if not(u.dateHour > dateHourEnd or u.dateHourEnd < dateHour):
                    raise Exception()
            if not isinstance(_words, list):
                raise Exception()
            schedule = Schedule(dateHour=dateHour,
                                duration=duration, dateHourEnd=dateHourEnd)
            guessItSession = GuessItSession(Schedule=schedule)
            db.session.add(schedule)
            db.session.add(guessItSession)
            i = 1
            # One GameRound (with an empty Definition) per word, evenly spaced.
            for word in _words:
                time = str(datetime.strptime(
                    dateHour, "%Y-%m-%d %H:%M") + timedelta(seconds=i*word_time*60 + 10))
                time = time[0:len(time) - 3]
                gameRound = GameRound(
                    time=time, word=word, shadow_word=getShadowWord(word), GuessItSession=guessItSession)
                definition = Definition(definition=None, GameRound=gameRound)
                db.session.add(gameRound)
                db.session.add(definition)
                i += 1
            db.session.commit()
            return "success"
        except Exception as e:
            print(e)
            return "fail"
    @app.route('/new-player/<nickname>')
    def new_player(nickname):
        # Register a new player and add them to the in-memory worker state.
        try:
            player = Player(nickname=nickname)
            worker.addPlayer(player.nickname)
            db.session.add(player)
            db.session.commit()
            return 'success'
        except:
            return 'fail'
    @app.route('/new-message/<message>')
    def new_message(message):
        # Handle a chat message: guesses are fuzzily matched against the
        # current word; the leader's messages become the round definition.
        try:
            if not worker.active:
                raise Exception("Worker not active")
            # {"nickname": "nicknameeeee", "message": "messageeee"}
            _message = json.loads(message)
            nickname = _message.get('nickname')
            msg = _message.get('message')
            player = Player.query.filter_by(nickname=nickname).first()
            if player is None:
                raise Exception("Player not exists")
            if nickname != worker.getLeader():
                if msg == worker.words[worker.currentWord].word or worker.getCurrentWord() in msg.replace(" ", "").lower():
                    msg = "YOU GOT IT!!"
                    player.addPoints()
                    worker.addPoints()
                else:
                    # Fuzzy matching tolerates small typos in the guess.
                    ratio = SequenceMatcher(a=worker.getCurrentWord(),b=msg.replace(" ", "").lower()).ratio()
                    if ratio > 0.95:
                        msg = "YOU GOT IT!!"
                        player.addPoints()
                        worker.addPoints()
                    elif ratio > 0.7:
                        msg = "YOU ARE CLOSE!!"
                worker.addMessage({"nickname" : nickname + ': ', "msg" : msg})
                db.session.commit()
            else:
                # The leader may not reveal (or nearly reveal) the word.
                ratio = SequenceMatcher(a=worker.getCurrentWord(),b=msg.replace(" ", "").lower()).ratio()
                if msg == worker.getCurrentWord() or worker.getCurrentWord() in msg.replace(" ", "").lower() or ratio > 0.7:
                    raise Exception("Leader cannot say the word")
                worker.setDefinition(msg)
            return msg
        except Exception as e:
            print(e)
            return "fail"
    @app.route('/get-messages/<nickname>')
    def get_messages(nickname):
        # Poll endpoint: full game state for this player.
        return worker.getCurrentState(nickname)
    @app.route('/get-leaderboard')
    def get_leaderboard():
        return json.dumps(Player.leaderboard())
    @app.route('/get-definitions')
    def get_definitions():
        return json.dumps(Definition.definitions())
    # Adding some data to the database when the server inits
    with app.app_context():
        try:
            db.drop_all()
            db.create_all()
            code1 = AdminCode(code=12345)
            p1 = Player(nickname="eduardo")
            p2 = Player(nickname="pedro")
            p3 = Player(nickname="paulo")
            p4 = Player(nickname="ponte")
            p5 = Player(nickname="daniel")
            worker.addPlayer(p1.nickname)
            worker.addPlayer(p2.nickname)
            worker.addPlayer(p3.nickname)
            worker.addPlayer(p4.nickname)
            worker.addPlayer(p5.nickname)
            # Demo session starting 10 seconds from now with three words.
            time = str(datetime.strptime(datetime.now().strftime("%Y-%m-%d %H:%M"), "%Y-%m-%d %H:%M") + timedelta(seconds=10))
            time = time[0:len(time) - 3]
            new_game_session('{"dateHour": "' + time + '", "duration": 3, "words": ["software", "flutter", "agile"]}')
            db.session.add(code1)
            db.session.add(p1)
            db.session.add(p2)
            db.session.add(p3)
            db.session.add(p4)
            db.session.add(p5)
            db.session.commit()
        except Exception as e:
            print(e, "on init")
    return app
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright © 2014 Sébastien Gross <seb•ɑƬ•chezwam•ɖɵʈ•org>
# Created: 2014-03-11
# Last changed: 2014-12-03 00:22:50
#
# This program is free software. It comes without any warranty, to
# the extent permitted by applicable law. You can redistribute it
# and/or modify it under the terms of the Do What The Fuck You Want
# To Public License, Version 2, as published by Sam Hocevar. See
# http://sam.zoy.org/wtfpl/COPYING for more details.
#
# This file is not part of Ansible
DOCUMENTATION = '''
---
module: system_install
author: Sebastien Gross
version_added: "1.0"
short_description: Install system using C(debootstrap)
description:
- Partition hard drive.
- Install minimal system using c(debootstrap).
- Install minimal packages if specified.
- Install kernel.
- Install grub.
notes:
- Works ONLY for Debian and Ubuntu systems.
- Use setup-storage which is a part of FAI
- WILL ERASE ALL DATA ON THE HARD DRIVE. You have been warned!
options:
path:
required: false
default: '/srv/target'
description:
- Path of the target directory. This is where the system would
be installed.
arch:
required: false
default: 'amd64'
description:
- target architecture to install.
distro:
required: false
default: 'trusty'
description:
- distribution to install.
repository:
required: false
default: 'http://archive.ubuntu.com/ubuntu'
description:
- source repository to use with C(debootstrap).
partition:
required: false
default: None
description:
- C(setup-storage) file to use for partitionning. Use "auto" for
auto partitionning.
kernel:
required: false
default: None
description:
- use "yes" if you want to install kernel in chroot.
root_passwd:
required: false
default: 'Ansible'
description:
- password for root user to be set after installation is complete.
reboot:
required: false
default: False
description:
- reboot server after installation.
'''
import traceback
# added to stave off future warnings about apt api
import warnings
import os
import sys
import subprocess
import atexit
import json
import re
import shutil
import datetime
import fileinput
# import module snippets
from ansible.module_utils.basic import *
class SystemInstall(AnsibleModule):
    # Pseudo filesystems that exist inside the install chroot.
    PSEUDO_FS = ['proc', 'dev', 'dev/pts' ]
    # Files produced by FAI's setup-storage describing the partition layout.
    FAI_FSTAB = '/tmp/fai/fstab'
    FAI_DISK_VAR = '/tmp/fai/disk_var.sh'
    # Per-vendor settings: repository components, kernel packages, and the
    # distribution code names used to detect the vendor from `distro`.
    DISTRO = {
        'debian': {
            'components': 'main,contrib,non-free',
            'kernel': [ 'ipxe', 'firmware-linux-free',
                'firmware-linux-nonfree', 'firmware-bnx2' ],
            'kernel-arch' : [ 'linux-image-%s' ],
            'dists': [ 'squeeze', 'wheezy', 'jessie', 'sid' ],
        },
        'ubuntu': {
            'components': 'main,restricted,universe,multiverse',
            'kernel': [ 'linux-image-generic', 'grub-ipxe' ],
            'dists': [ 'precise', 'saucy', 'trusty', 'utopic', 'vivid' ],
        },
    }
    # Partition templates keyed by the minimum disk size they require;
    # partition_hard_drive() picks the largest template that fits.
    PARTITIONS = [
        { 'min': 0, 'file': 'partitions-0Gb' },
        { 'min': 4 * 1024**3, 'file': 'partitions-4Gb' },
        { 'min': 18 * 1024**3, 'file': 'partitions-18Gb' },
    ]
    def __init__(self):
        """Declare the module's argument spec and prepare the install root.

        Opens the log file, resolves the vendor from the requested
        distribution, creates the target directory if missing and
        normalises `extra_packages` into a list.
        """
        AnsibleModule.__init__(
            self,
            argument_spec=dict(
                path=dict(required=False, default='/srv/target'),
                arch=dict(required=False, default='amd64'),
                distro=dict(required=False, default='trusty'),
                repository=dict(required=False, default='http://archive.ubuntu.com/ubuntu'),
                extra_packages=dict(required=False, default=None),
                kernel=dict(required=False, default=None),
                partition=dict(required=False, default=None),
                root_passwd=dict(required=False, default='Ansible'),
                reboot=dict(required=False, type='bool', default=False),
            ),
            supports_check_mode=True)
        self.changed = False
        self.msg = []
        self.stdout = []
        self.stderr = []
        self.fstab = None
        self.logfile = open('/tmp/system-install.log', 'w')
        self.log('Starting install')
        # Resolve the vendor (debian/ubuntu) from the distribution name.
        # NOTE(review): self.vendor stays unset when the distro is unknown.
        for v in self.DISTRO.keys():
            if self.params['distro'] in self.DISTRO[v]['dists']:
                self.vendor = v
                break
        if not os.path.isdir(self.params['path']):
            os.makedirs(self.params['path'])
        self.log('Set install root to %s' % self.params['path'])
        # Accept extra_packages either as "[a, b]" list syntax or one name.
        if not self.params['extra_packages'] is None:
            if self.params['extra_packages'].startswith('['):
                self.params['extra_packages'] = \
                    self.params['extra_packages'][1:-1].split(', ')
            else:
                self.params['extra_packages'] = [ self.params['extra_packages'] ]
        self.tmp_path = os.path.dirname(sys.argv[0])
def log(self, msg):
sep = ' '
if isinstance(msg, str):
msg = [ msg ]
sep = ' '
for m in msg:
self.logfile.write('%s%s%s\n' % (
datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S'),
sep, m))
self.logfile.flush()
    def install_pkgs(self, pkgs, chroot=True, update_cache=False):
        """Install PKGS on current host. If CHROOT is TRUE, the installation is
        performed on the PATH directory using C(chroot). The packages would be
        installed on the live system otherwise.
        If PKGS is a list, a loop is performed for each item.
        """
        if chroot:
            chroot_cmd = [ 'chroot', self.params['path'] ]
            target = self.params['path']
        else:
            chroot_cmd =[]
            target = '/'
        # dpkg-query prints a version only for installed packages; an empty
        # result below means the package still needs installing.
        check_cmd = chroot_cmd + [ 'dpkg-query', '-W', '--showformat',
            '${Version}' ]
        install_cmd = chroot_cmd + ['apt-get', 'install', '--force-yes',
            '-y', '--no-install-recommends']
        # Keep apt fully non-interactive inside the chroot.
        os.putenv('DEBIAN_FRONTEND', 'noninteractive')
        os.putenv('DEBIAN_PRIORITY', 'critical')
        if update_cache:
            self.log('Updating APT cache')
            cmd = chroot_cmd + [ 'apt-get', 'update' ]
            rc, out, err = self.run_command(cmd)
            if rc != 0:
                # NOTE(review): `pipes` is not in this file's visible imports;
                # presumably provided by the module_utils.basic star import.
                self.fail_json(msg="%s failed: %s" %
                               (' '.join(pipes.quote(arg) for arg in cmd), err),
                               stdout=out, stderr=err)
            self.log(out.splitlines())
        if isinstance(pkgs, str):
            pkgs = [ pkgs ]
        for p in pkgs:
            # Skip packages that are already installed.
            cmd = check_cmd + [ p ]
            rc, _out, _err = self.run_command(cmd)
            if _out != '':
                self.msg.append("Package %s is installed in %s" % (p, target))
            else:
                self.log('Installing packages: %s' % ','.join(pkgs))
                cmd = install_cmd + [ p ]
                rc, _out, _err = self.run_command(cmd)
                if rc != 0:
                    self.fail_json(msg="%s failed: %s" %
                                   (' '.join(pipes.quote(arg) for arg in cmd), _err),
                                   stdout=_out, stderr=_err)
                self.log(_out.splitlines())
                self.changed = True
                self.msg.append("%s installed in %s" % (p, target))
                self.stdout.append(_out)
                self.stderr.append(_err)
def get_file_content(self, path, default=None):
data = default
if os.path.exists(path) and os.access(path, os.R_OK):
data = open(path).read().strip()
if len(data) == 0:
data = default
try:
return int(data)
except:
return data
    def find_hard_drive(self):
        """Returns a list of all detected hard drives on the target system."""
        cmd = [ '%s/setup-storage/fai-disk-info' % self.tmp_path ]
        (rc, out, err) = self.run_command(cmd)
        if rc != 0:
            # NOTE(review): this log line only runs on failure; it probably
            # belongs before run_command — confirm intent.
            self.log('Searching for harddrive')
            self.fail_json(msg="%s failed: %s" %
                           (' '.join(pipes.quote(arg) for arg in cmd), err),
                           stdout=out, stderr=err)
        self.log(out.splitlines())
        drives = out.splitlines()
        self.drives = []
        for drive in drives:
            # Collect geometry from sysfs; the defaults (512-byte sectors,
            # zero size) cover devices that do not expose these attributes.
            _new = {
                'name': drive,
                'sector_size': self.get_file_content(
                    '/sys/block/%s/queue/hw_sector_size' % drive, 512),
                'size': self.get_file_content(
                    '/sys/block/%s/size' % drive, 0)
            }
            # Capacity in bytes = sector size * number of sectors.
            _new['physical_size'] = _new['sector_size'] * _new['size']
            self.drives.append(_new)
        return self.drives
def partition_hard_drive(self):
    """Partition the first detected disk with FAI's setup-storage.

    When the `partition` parameter is 'auto', the largest profile from
    self.PARTITIONS that fits the first drive is chosen; otherwise the
    supplied configuration file name is used as-is.
    """
    self.install_pkgs(
        ['liblinux-lvm-perl', 'libparse-recdescent-perl', 'lvm2',
         'xfsprogs', 'mdadm', 'parted'],
        chroot=False, update_cache=True)
    partition_conf = self.params['partition']
    if partition_conf == 'auto':
        self.find_hard_drive()
        p_conf = self.PARTITIONS
        # Most demanding profile first: pick the biggest layout that fits.
        p_conf.sort(key=lambda k: k['min'], reverse=True)
        for p in p_conf:
            if self.drives[0]['physical_size'] >= p['min']:
                partition_conf = p['file']
                break
    if partition_conf == 'auto':
        # BUG FIX: the old message read "Please confider to provide one."
        self.fail_json(msg="Could not find automatic partition."
                       " Please consider providing one.")
    ## LVM partitions have to be manually removed since setup-storage
    ## cannot remove existing LVM partitions:
    # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=717684
    # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=681358
    self.log('Removing old LVM partitions')
    for c in [
            '''lvs --noheadings | awk '{print "/dev/" $2 "/" $1}' '''
            '''| xargs --no-run-if-empty lvremove -f''',
            '''pvs --noheadings | awk '{print $1}' | '''
            '''xargs --no-run-if-empty pvremove -f -f -y''']:
        rc, out, err = self.run_command(c, use_unsafe_shell=True)
        if rc != 0:
            self.fail_json(msg="%s failed: %s\n\nPlease check that partitions are not in use (umount them is necessary)" % (c, err),
                           stdout=out, stderr=err)
        self.log(out.splitlines())
    cwd = os.getcwd()
    os.chdir('%s/setup-storage' % self.tmp_path)
    # The FAI helper scripts must be executable before setup-storage
    # shells out to them.
    os.chmod('fai-disk-info', stat.S_IEXEC)
    os.chmod('fai-vol_id', stat.S_IEXEC)
    # BUG FIX: a first './setup-storage' command line was built and then
    # immediately overwritten; only the perl invocation is kept.
    cmd = ['perl', './setup-storage', '-d', '-X', '-f',
           'conf.d/%s' % partition_conf]
    # setup-storage parses {pv|vg|lv}display output, so force C locale.
    os.putenv('LC_ALL', 'C')
    self.log('Running setup-storage -X -f conf.d/%s' % partition_conf)
    rc, out, err = self.run_command(cmd)
    if rc != 0:
        self.fail_json(msg="%s failed: %s" %
                       (' '.join(pipes.quote(arg) for arg in cmd), err),
                       stdout=out, stderr=err)
    self.log(out.splitlines())
    os.chdir(cwd)
def parse_fstab(self):
    '''Parse fai-generated fstab and store into object's fstab entry.

    Each non-comment line becomes a dict with the six standard fstab
    columns.  FIXES: the file handle was never closed (now a context
    manager) and the regex fragments were not raw strings.
    '''
    with open(self.FAI_FSTAB, 'r') as fs:
        lines = fs.read().splitlines()
    # One named group per fstab column.
    line_re = re.compile(
        r'^\s*(?P<device>\S*)'
        r'\s+(?P<directory>\S+)'
        r'\s+(?P<fstype>\S+)'
        r'\s+(?P<options>\S+)'
        r'\s+(?P<dump>\d+)'
        r'\s+(?P<fsck>\d+)'
        r'\s*$')
    self.fstab = []
    for line in lines:
        sline = line.strip()
        # Skip blanks and comments.
        if sline == '' or sline.startswith('#'):
            continue
        match = line_re.match(line)
        if match:
            self.fstab.append(dict(
                (attr, match.group(attr))
                for attr in ['device', 'directory', 'fstype',
                             'options', 'dump', 'fsck']))
def write_fstab(self):
    '''Write SELF.FSTAB to target directory.

    BUG FIX: `self.parse_fstab` was referenced without being called,
    so a None fstab crashed the loop below.  The output file is now
    managed by a context manager as well.
    '''
    if self.fstab is None:
        self.parse_fstab()
    with open('%s/etc/fstab' % self.params['path'], 'w') as dest:
        dest.write('# fstab file generated by ansible system-install\n\n')
        for fs in self.fstab:
            # Do not keep the root filesystem if it is a tmpfs.
            if fs['device'] == 'tmpfs' and fs['directory'] == '/' \
                    and fs['fstype'] == 'tmpfs':
                continue
            dest.write('%s\t%s\t%s\t%s\t%s\t%s\n'
                       % (fs['device'], fs['directory'], fs['fstype'],
                          fs['options'], fs['dump'], fs['fsck']))
def mount_fs(self):
    """Mount every fstab entry below the target path.

    Entries are sorted by mount point so that parent directories are
    mounted before their children.
    """
    entries = self.fstab
    entries.sort(key=lambda e: e['directory'])
    for entry in entries:
        if entry['directory'] == 'none':
            continue
        mountpoint = '%s/%s' % (self.params['path'], entry['directory'])
        if not os.path.isdir(mountpoint):
            os.makedirs(mountpoint)
        if os.path.ismount(mountpoint):
            # Already mounted from a previous run.
            continue
        if entry['fstype'] == 'tmpfs':
            cmd = ['mount', '-t', 'tmpfs', 'tmpfs', mountpoint]
        else:
            cmd = ['mount', entry['device'], mountpoint]
        self.log(' Mounting %s to %s' % (entry['device'], mountpoint))
        rc, out, err = self.run_command(cmd)
        if rc:
            self.fail_json(
                msg="%s failed: %s" %
                (' '.join(pipes.quote(arg) for arg in cmd), err),
                stdout=out, stderr=err)
        self.log(out.splitlines())
def mount_pseudo(self):
    """Bind-mount the pseudo filesystems (proc, sys, ...) into the target."""
    for name in self.PSEUDO_FS:
        target = '%s/%s' % (self.params['path'], name)
        if not os.path.isdir(target):
            os.makedirs(target)
        if os.path.ismount(target):
            # Already bound from a previous run; nothing to do.
            continue
        cmd = ['mount', '-o', 'bind', '/%s' % name, target]
        self.log('Mounting pseudo FS %s to %s' % (name, target))
        rc, out, err = self.run_command(cmd)
        if rc:
            self.fail_json(
                msg="%s failed: %s" %
                (' '.join(pipes.quote(arg) for arg in cmd), err),
                stdout=out, stderr=err)
        self.log(out.splitlines())
def umount_pseudo(self):
    """Lazily unmount the pseudo filesystems, in reverse mount order."""
    for p in self.PSEUDO_FS[::-1]:
        _target = '%s/%s' % (self.params['path'], p)
        if not os.path.ismount(_target):
            continue
        # -l: lazy unmount -- detach now even if still busy.
        cmd = ['umount', '-l', _target]
        self.log('Umounting pseudo FS %s' % _target)
        rc, out, err = self.run_command(cmd)
        if rc:
            # BUG FIX: was `self.module.fail_json`, but this class *is*
            # the module -- every sibling method calls self.fail_json.
            self.fail_json(
                msg="%s failed: %s" %
                (' '.join(pipes.quote(arg) for arg in cmd), err),
                stdout=out, stderr=err)
        self.log(out.splitlines())
def umount_fs(self):
    """Lazily unmount the fstab filesystems, deepest mount point first."""
    fstab = self.fstab
    # Reverse directory order unmounts children before their parents.
    fstab.sort(key=lambda k: k['directory'], reverse=True)
    for p in fstab:
        _target = '%s/%s' % (self.params['path'], p['directory'])
        if not os.path.ismount(_target):
            continue
        cmd = ['umount', '-l', _target]
        self.log('Umounting FS %s' % _target)
        rc, out, err = self.run_command(cmd)
        if rc:
            # BUG FIX: was `self.module.fail_json`; the class itself is
            # the module object (see the other fail_json call sites).
            self.fail_json(
                msg="%s failed: %s" %
                (' '.join(pipes.quote(arg) for arg in cmd), err),
                stdout=out, stderr=err)
        self.log(out.splitlines())
def mount(self):
    """Mount the regular filesystems first, then the pseudo filesystems."""
    for step in (self.mount_fs, self.mount_pseudo):
        step()
def umount(self):
    """Tear everything down: pseudo filesystems first, then the rest."""
    # Lazily parse the fstab if nobody loaded it yet.
    if self.fstab is None:
        self.parse_fstab()
    for step in (self.umount_pseudo, self.umount_fs):
        step()
def partition(self):
    """Partition the disk, read back the generated fstab, mount it."""
    for step in (self.partition_hard_drive, self.parse_fstab,
                 self.mount_fs):
        step()
def debootstrap(self):
    """Bootstrap the base system into the target path, skipping the
    step entirely when a previous run is detected."""
    self.install_pkgs(['debootstrap'], chroot=False)
    debootstrap = self.get_bin_path('debootstrap', required=True)
    # start-stop-daemon only exists once debootstrap has completed.
    if os.path.exists('%s/sbin/start-stop-daemon' % self.params['path']):
        self.msg.append("debootstrap seems to have been run in %s"
                        % (self.params['path']))
        return
    self.log('Running debootstrap')
    cmd = [debootstrap,
           '--arch', self.params['arch'],
           '--components', self.DISTRO[self.vendor]['components'],
           self.params['distro'],
           self.params['path'],
           self.params['repository']]
    rc, out, err = self.run_command(cmd)
    if rc:
        self.fail_json(msg="%s failed: %s" %
                       (' '.join(pipes.quote(arg) for arg in cmd), err),
                       stdout=out, stderr=err)
    self.log(out.splitlines())
    self.changed = True
    self.msg.append("debootstrap succeded in %s" % (self.params['path']))
def setup_policy(self):
    """Install a policy-rc.d in the target that denies service starts.

    Exit status 101 tells Debian maintainer scripts not to start any
    daemon while we operate inside the chroot.  The file write now uses
    a context manager instead of a manual close().
    """
    self.log('Setup policy-rc.d')
    policy_path = '%s/usr/sbin/policy-rc.d' % self.params['path']
    with open(policy_path, 'w') as policy_rc_d:
        policy_rc_d.write('#!/bin/sh\n\nexit 101')
    os.chmod(policy_path, 0o755)
def remove_policy(self):
    """Drop the temporary policy-rc.d guard from the target tree."""
    self.log('Remove policy-rc.d')
    os.unlink('%s/usr/sbin/policy-rc.d' % self.params['path'])
def install_extra_packages(self):
    """Install user-requested packages in the chroot, shielded by
    policy-rc.d so no daemon gets started during installation."""
    self.setup_policy()
    self.install_pkgs(self.params['extra_packages'], chroot=True)
    self.remove_policy()
def install_kernel(self):
    """Install the distribution kernel and, when we partitioned a real
    disk, generate the GRUB configuration and install GRUB on the boot
    device recorded by setup-storage."""
    self.setup_policy()
    self.mount_pseudo()
    self.install_pkgs(self.DISTRO[self.vendor]['kernel'], chroot=True)
    # FIX: dict.has_key() is deprecated (and removed in Python 3).
    if 'kernel-arch' in self.DISTRO[self.vendor]:
        for p in self.DISTRO[self.vendor]['kernel-arch']:
            self.install_pkgs(p % self.params['arch'], chroot=True)
    # if partition is not defined, the installation has been made in a
    # chroot. Then we do not want to install grub in boot device.
    if self.params['partition']:
        # Extract shell-style defaults (${VAR:-default}) written by
        # setup-storage into its disk-variables file.
        line_re = re.compile(r'.*\${(.+):-"?(.+?)"?}.*')
        self.disk_vars = {}
        with open(self.FAI_DISK_VAR, 'r') as fh:
            for line in fh.readlines():
                match = line_re.match(line)
                if match:
                    self.disk_vars[match.group(1)] = match.group(2)
        # FIX: dropped the redundant fh.close() -- the with block owns fh.
        # remove 'set -e' in /usr/sbin/grub-mkconfig to prevent error:
        #   failed: mount: special device tmpfs does not exist
        # when / is a tmpfs.  Seen at least until grub 2.02~beta2-9.
        set_re = re.compile('^(set -e)$')
        for line in fileinput.input(
                '%s/%s' % (self.params['path'], '/usr/sbin/grub-mkconfig'),
                inplace=True):
            sys.stdout.write(set_re.sub(
                r"# \1 # grub-mkconfig does not work well if / is tmpfs",
                line))
        # First generate GRUB configuration.
        cmd = ['chroot', self.params['path'], 'update-grub']
        self.log('running update-grub')
        rc, out, err = self.run_command(cmd)
        if rc:
            self.fail_json(msg="%s failed: %s" %
                           (' '.join(pipes.quote(arg) for arg in cmd), err),
                           stdout=out, stderr=err)
        self.log(out.splitlines())
        self.log(err.splitlines())
        # Secondly install grub into the boot device.
        cmd = ['chroot', self.params['path'], 'grub-install',
               self.disk_vars['BOOT_DEVICE']]
        self.log('running grub-install %s' % self.disk_vars['BOOT_DEVICE'])
        rc, out, err = self.run_command(cmd)
        if rc:
            self.fail_json(msg="%s failed: %s" %
                           (' '.join(pipes.quote(arg) for arg in cmd), err),
                           stdout=out, stderr=err)
        self.log(out.splitlines())
        self.log(err.splitlines())
        self.msg.append("grub-install succeded in %s" %
                        (self.disk_vars['BOOT_DEVICE']))
        self.stdout.append(out)
        self.stderr.append(err)
    self.umount_pseudo()
    self.remove_policy()
def setup_root_passwd(self):
    """Set the root password inside the chroot via chpasswd.

    FIXES: the password and target path are now shell-quoted (the old
    interpolation broke -- or allowed injection -- on any password with
    quotes or metacharacters), and the failure message no longer joins
    the command *string* character by character with pipes.quote.
    """
    cmd = 'echo %s | chroot %s chpasswd' % (
        pipes.quote('root:%s' % self.params['root_passwd']),
        pipes.quote(self.params['path']))
    rc, out, err = self.run_command(cmd, use_unsafe_shell=True)
    self.log('Update root password')
    if rc:
        self.fail_json(msg="%s failed: %s" % (cmd, err),
                       stdout=out, stderr=err)
    self.log(out.splitlines())
    self.msg.append('root password succeded')
def fix_install(self):
    """Write the fstab and a minimal DHCP network configuration into
    the freshly installed system.

    BUG FIX: the old trailing `iface.close` was a no-op attribute
    access (missing parentheses); the context manager already closes
    the file, so it was simply dropped.
    """
    self.write_fstab()
    with open('%s/etc/network/interfaces' % self.params['path'], 'w') as iface:
        iface.write('\n'.join(
            ['# Auto generated file by Ansible system-install',
             '',
             'auto lo',
             'iface lo inet loopback',
             '',
             'auto eth0',
             'iface eth0 inet dhcp',
             '# EOF']))
def reboot(self):
    """Schedule a forced reboot in 10 seconds and exit successfully.

    The module result is printed before triggering the reboot, because
    the connection drops once the machine goes down.

    BUG FIX: the failure branch referenced an undefined name `cmd`
    (NameError) and would have joined the command string character by
    character; the command is now bound to `cmd` and used directly.
    """
    self.log('Reboot server in 10s')
    print(self.jsonify({'changed': self.changed, 'failed': False,
                        'msg': 'Rebooting in 10s'}))
    cmd = '( sleep 10 && reboot --force ) &'
    rc, out, err = self.run_command(cmd, use_unsafe_shell=True)
    if rc:
        self.fail_json(msg="%s failed: %s" % (cmd, err),
                       stdout=out, stderr=err)
    self.log(out.splitlines())
    self.msg.append('reboot in progress succeded')
    sys.exit(0)
def main():
    """Entry point: partition, bootstrap, configure and maybe reboot."""
    si = SystemInstall()
    if si.params['partition'] is not None:
        si.partition()
    si.debootstrap()
    si.install_extra_packages()
    if si.params['kernel'] is not None:
        si.install_kernel()
    si.setup_root_passwd()
    si.fix_install()
    # A reboot never returns: umount everything, then go down.
    if si.params['reboot'] is not False:
        si.umount()
        si.reboot()
    si.exit_json(changed=si.changed,
                 msg='\r\n'.join(si.msg),
                 stdout='\r\n'.join(si.stdout),
                 stderr='\r\n'.join(si.stderr))


main()
|
#!/usr/bin/env python
# coding: utf-8

# In[173]:

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy.stats import pearsonr as pr

train_df = pd.read_csv('../input/train.csv')
pr(train_df.Fare, train_df.Pclass)

plt.style.use('bmh')
plt.xlabel('Age')
plt.ylabel('Survived')
plt.title('Age vs Survival')
# Histogram of known ages for everyone vs. for survivors only.
known_age = np.isnan(train_df.Age) == False
plt.hist(train_df.Age[known_age], bins=15, alpha=0.4, color='r', label='Before')
plt.hist(train_df.Age[known_age & (train_df.Survived == 1)], bins=15, alpha=0.4, color='b', label='After')
plt.legend(loc='upper right')
plt.show()
# In[181]:

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy.stats import pearsonr as pr

train_df = pd.read_csv('../input/train.csv')
pr(train_df.Fare, train_df.Pclass)

plt.style.use('bmh')
plt.xlabel('No. of Sibling/Spouse')
plt.ylabel('Survived')
plt.title('SibSp vs Survival')
# All passengers vs. survivors only.
plt.hist(train_df.SibSp, label='before', alpha=0.4, color='b')
plt.hist(train_df.SibSp[train_df['Survived'] == 1], label='after', alpha=0.4, color='r')
plt.legend(loc='upper right')
plt.show()
# In[ ]:

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy.stats import pearsonr as pr

train_df = pd.read_csv('../input/train.csv')
pr(train_df.Fare, train_df.Pclass)

plt.style.use('bmh')
plt.xlabel('Sex')
plt.ylabel('Survived')
plt.title('Sex vs Survived')
# All passengers vs. survivors only, split by sex.
plt.hist(train_df.Sex, color='b', alpha=0.4, label='before')
plt.hist(train_df[train_df['Survived'] == 1].Sex, color='r', alpha=0.4, label='after')
plt.legend(loc='upper center')
plt.show()
# In[64]:

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy.stats import pearsonr as pr

train_df = pd.read_csv('../input/train.csv')
pr(train_df.Fare, train_df.Pclass)

plt.style.use('bmh')
plt.xlabel('Class')
plt.ylabel('Fare')
plt.title('Class vs Fare')
# One box of fares per passenger class.
fares_by_class = [train_df[train_df['Pclass'] == c].Fare.values
                  for c in (1, 2, 3)]
plt.boxplot(fares_by_class)
plt.xticks([1, 2, 3], ['First', 'Second', 'Third'])
plt.show()
# In[189]:

# Exploration
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import os

train_df = pd.read_csv('../input/train.csv')
plt.style.use('bmh')
plt.xlabel("PClass")
plt.xticks([1, 2, 3], ['First', 'Second', 'Third'])
plt.title("Survival vs PClass")
# All passengers vs. survivors only, by ticket class.
plt.hist(train_df.Pclass, color='b', alpha=0.4, label='before')
plt.hist(train_df.Pclass[train_df.Survived == 1], color='r', alpha=0.4, label='after')
plt.legend(loc='upper left')
plt.show()
# In[165]:

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import os

train_df = pd.read_csv('../input/train.csv')
plt.style.use('bmh')
# Summary statistics of the numeric columns.
train_df.describe()
#train_df.SibSp.value_counts()
# In[188]:

import numpy as np
import pandas as pd
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import GridSearchCV
import os

# --- Training data: impute missing ages with the rounded mean. ---
train_df = pd.read_csv('../input/train.csv')
train_df.Age.fillna(value=np.round(np.mean(train_df.Age)), inplace=True)
features = pd.get_dummies(
    train_df.drop(['Cabin', 'Survived', 'Name', 'Ticket', 'PassengerId'],
                  axis=1))
labels = train_df['Survived']

# --- Test data: impute missing ages and fares with the mean. ---
test_df = pd.read_csv('../input/test.csv')
test_df['Age'].fillna(value=np.mean(test_df['Age']), inplace=True)
test_df['Fare'].fillna(value=np.mean(test_df['Fare']), inplace=True)
test_features = pd.get_dummies(
    test_df.drop(['Cabin', 'Name', 'Ticket', 'PassengerId'], axis=1))

# --- Decision tree tuned over max_features with 5-fold CV. ---
param_grid = {'max_features': [0.3, 0.5, 0.7, 1.0]}
model = GridSearchCV(DecisionTreeClassifier(), param_grid, cv=5)
model.fit(features, labels)

survived = model.predict(test_features)
result_df = pd.DataFrame({'Survived': survived},
                         index=test_df['PassengerId'])
result_df.to_csv('output_dt.csv')
print("Decision Tree Accuracy (5-fold CV): ", model.best_score_)

# In[ ]:
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
@author Nathan Cruz <nathan.cruz@unifesp.br>
@created 14/04/14
"""
from __future__ import unicode_literals
from __future__ import print_function
from datetime import date
from Employee import Employee
class SalariedEmployee(Employee):
    """Employee paid a fixed weekly salary, with a birthday-month bonus."""

    # Default weekly salary before the constructor runs.
    __weeklySalary = 0

    def __init__(self, first, last, ssn, birthDate, departmentCode, salary):
        super(SalariedEmployee, self).__init__(first, last, ssn,
                                               birthDate, departmentCode)
        self.setWeeklySalary(salary)

    def getWeeklySalary(self):
        return self.__weeklySalary

    def setWeeklySalary(self, weeklySalary):
        self.__weeklySalary = weeklySalary

    def earnings(self):
        """Weekly pay, plus a flat 150 bonus during the birth month."""
        today = date.today()
        parent = super(SalariedEmployee, self)
        bonus = 150 if today.month == parent.getPrivateBirthDate().month else 0
        return self.getWeeklySalary() + bonus

    def __str__(self):
        parent = super(SalariedEmployee, self)
        return '''salaried employee: {0}
weekly salary: {1}'''.format(parent.__str__(), self.getWeeklySalary())

    def __repr__(self):
        return self.__str__()
|
from django.template.loader import render_to_string
import requests
import datetime
from collections import defaultdict
from bs4 import BeautifulSoup
from pom.bldg_info import *
# Primary and fallback sources for the campus printer-status page.
url = 'http://clusters-lamp.princeton.edu/cgi-bin/clusterinfo.pl'
url_alt = 'http://www.princeton.edu/clusters/printer_list/'
# Maps the location label used on the printer page to the campus
# building code used by pom.bldg_info.  Locations kept as bare comments
# have no known building code and are skipped by scrape().
PRINTER_BLDGS = {
    '1901': 'LAUGH',
    '1937': '1937H',
    '1981': 'HARGH',
    'Blair': 'BLAIR',
    'Bloomberg_315': 'BLOOM',
    'Brown': 'BROWN',
    'Brush_Gallery': 'JADWH',
    'Butler_D033': '1976H',
    # Butler Apts
    'Campus_Club': 'CCAMP',
    'CJL': 'CENJL',
    'Dod': 'DODHA',
    'Edwards': 'EDWAR',
    'Fields_Cntr': 'CFCTR',
    'Firestone': 'FIRES',
    'Fisher_213': 'FISHH',
    'Forbes': 'FORBC',
    'Forbes_Lib': 'FORBC',
    'Foulke': 'FOULK',
    'Friend_016': 'FRIEN',
    'Friend_017': 'FRIEN',
    'Frist_200': 'FRIST',
    'Frist_300': 'FRIST',
    'Grad_College': 'GRADC',
    # Hibben
    'Holder_B11': 'HOLDE',
    'Holder_B31': 'HOLDE',
    'Lauritzen_409': 'HARGH',
    # Lawrence_1
    # Lawrence_14
    'Little_North': 'LITTL',
    'Little_South': 'LITTL',
    'Madison': 'MADIH',
    'McCosh_B59': 'MCCOS',
    'New_GC': 'GRADC',
    'Pyne': 'PYNEH',
    'Scully_269': 'SCULL',
    'Scully_309': 'SCULL',
    'Spelman': 'SPELM',
    'Whitman_Lib': 'HARGH',
    'Wilcox': 'WILCH',
    'Witherspoon': 'WITHR',
    'Wright': 'PATTN',
    'Wu': 'WILCH'
}
class Printer:
    """A campus printer: its location and the scraped status lines."""

    def __init__(self, bldg, room, loc):
        self.bldg = str(bldg)
        self.room = str(room)
        self.loc = str(loc)
        # List of (color, status-text) tuples, in scrape order.
        self.statuses = []

    def add_status(self, color, status):
        """Record one scraped (color, status) pair."""
        self.statuses.append((str(color), str(status)))

    def __str__(self):
        # BUG FIX: this formatted `self.status`, an attribute that never
        # exists -- every __str__ call raised AttributeError.  The data
        # lives in `self.statuses`.
        return "%s: %s" % (self.loc, self.statuses)

    __repr__ = __str__
####
# The following functions are common to all modules in pom.scrape
# We may want to put them in a class for OO-programming purposes
####
def get_bldgs():
    """Return every building code that hosts at least one printer."""
    return tuple(PRINTER_BLDGS.values())
def scrape():
    '''returns dict of list of printers, bldg_code:[printers]

    Fetches the cluster-info page and groups Printer objects by
    building code, together with the scrape timestamp.
    '''
    timestamp = datetime.datetime.now()
    resp = requests.get(url)
    # NOTE(review): no parser is passed to BeautifulSoup, so the "best
    # available" one is used; pin e.g. 'html.parser' if parsing must be
    # reproducible across machines.
    bs = BeautifulSoup(resp.content)
    table = bs.find('table')
    # First row is the header.
    rows = table.find_all('tr')[1:]
    clusters = defaultdict(list)
    for row in rows:
        ps = row.find_all('p')
        # Cell text carries a trailing separator char; locations may
        # additionally end in '*'.
        loc = ps[0].contents[0][:-1].rstrip('*')
        bldg = ps[1].contents[0][:-1]
        room = ps[2].contents[0][:-1]
        statusTag = ps[3]
        if loc in PRINTER_BLDGS:
            code = PRINTER_BLDGS[loc]
        else:
            # Unknown location: no building code to file it under.
            continue
        p = Printer(bldg, room, loc)
        for font_tag in statusTag.find_all('font'):
            try:
                status = font_tag.contents[0]
                color = font_tag.attrs['color']
            # FIX: narrowed from a bare `except` (which also swallowed
            # KeyboardInterrupt/SystemExit) -- a tag may lack contents
            # or a color attribute.
            except (IndexError, KeyError, AttributeError):
                continue
            p.add_status(color, status)
        clusters[code].append(p)
    return (timestamp, clusters)
def render(scraped=None):
    """Render scraped printer data to HTML plus a display timestamp."""
    if not scraped:
        scraped = scrape()
    timestamp, printer_mapping = scraped
    # Attach the human-readable building name, then order by it.
    printer_list = sorted(
        ((code, BLDG_INFO[code][0], printers)
         for code, printers in printer_mapping.items()),
        key=lambda entry: entry[1])
    html = render_to_string('pom/data_printers.html',
                            {'printers': printer_list})
    return {'timestamp': timestamp.strftime("%B %e, %l:%M %p"),
            'html': html}
|
# -*- coding: utf-8 -*-
"""
Created on Sat Apr 6 19:55:37 2019
snp_scanner.py - reads a user-defined raw genome file from [myheritage, 23andme]
and compares the user's snps with a set of lists kept with the script and originally obtained from SNPedia / the polyphasic community
@author: ratheka
"""
import csv
import os
import re
class SNP_Scanner:
    """Compare a user's raw genome export (MyHeritage/23andMe style)
    against the `<topic>_related_snps.txt` lists shipped next to the
    script, and write one report file per topic."""

    def __init__(self):
        self.dict_of_known_snps = {}
        self.user_gene_data_list = []
        self.user_gene_data = {}
        self.snp_file_regex = re.compile(r'((\w+)_related_snps.txt)')
        self.file_name = ""
        self.user_results = {}
        for file in os.listdir('./'):
            file_check = self.snp_file_regex.search(file)
            if file_check:
                with open(file_check.group(0), 'r') as opened_file:
                    self.dict_of_known_snps[file_check.group(2)] = opened_file.readlines()
        # BUG FIX: the original removed entries from each list *while*
        # iterating over it, which silently skips elements.  Rebuild
        # the lists instead, dropping None/blank entries up front
        # (the input files are not guaranteed to be clean).
        for key in self.dict_of_known_snps:
            self.dict_of_known_snps[key] = [
                entry for entry in self.dict_of_known_snps[key]
                if entry is not None and entry.strip() != '']

    def read_user(self):
        """Prompt for the raw-data filename until it exists, then load it."""
        while True:
            filename = input('What is the filename of the genetic data to read?')
            if not os.path.isfile(filename):
                print('I\'m sorry, but I can\'t seem to locate {}.'.format(filename))
                continue
            else:
                self.file_name = filename
                break
        with open(filename, 'r') as opened_user_data_file:
            reader = csv.reader(opened_user_data_file)
            self.user_gene_data_list = list(reader)
        return

    def prepare_data(self):
        """Normalise the raw rows into {rsid: genotype} and lower-case
        the known-SNP lists for case-insensitive matching."""
        rsid_regex = re.compile(r'(?i)((r|g)(s)|i)\d+')
        base_pair_regex = re.compile(r'(?i)[^\\][A|T|C|G|D|I|-]+')
        for line in self.user_gene_data_list:
            line_str = str(line).lower()
            line_str = line_str.replace(r"\t", " ")
            if '#' in line_str:
                continue
            # BUG FIX: the line is already lower-cased, so the old check
            # for the literal 'RSID' header could never match.
            if 'rsid' in line_str:
                continue
            rsid_result = rsid_regex.search(line_str)
            if rsid_result:
                rsid_base_pair = base_pair_regex.search(line_str)
                self.user_gene_data[rsid_result.group()] = rsid_base_pair.group()
        for key in self.dict_of_known_snps:
            self.dict_of_known_snps[key] = [
                entry.lower().rstrip()
                for entry in self.dict_of_known_snps[key]]
        return

    def scan_genes(self):
        """Intersect the user's rsids with each topic's known-SNP set."""
        user_rsids_set = set(self.user_gene_data.keys())
        for key in self.dict_of_known_snps.keys():
            snp_set = set(self.dict_of_known_snps[key])
            self.user_results[key] = list(snp_set.intersection(user_rsids_set))
        return

    def issue_reports(self):
        """Write `<input>_<topic>_results.txt` per topic, listing that
        topic's matching rsids with the user's genotype."""
        for key in self.user_results.keys():
            with open(self.file_name + '_' + str(key) + '_results.txt', 'w') as f:
                # BUG FIX: the original iterated over *all* topics'
                # result lists here, duplicating every match into every
                # report file.  Only this topic's matches are written.
                for snp in self.user_results[key]:
                    f.write(snp + '\t' + self.user_gene_data[snp] + '\n')
def main():
    """Run a complete scan: load, normalise, intersect, report."""
    pipeline = SNP_Scanner()
    for stage in (pipeline.read_user, pipeline.prepare_data,
                  pipeline.scan_genes, pipeline.issue_reports):
        stage()


main()
|
import numpy as np
import os
import scipy
from scipy.io import wavfile
from datetime import datetime
import General_Configs
# Directory layout anchored at the project base dir from the shared
# configuration module.
base_dir = General_Configs.base_dir
tmp_folder = base_dir + "TMPGEN/"
dataset_folder = base_dir + "Datasets/"
resources_dir = base_dir + "resources/"
# VST instrument used for rendering.  NOTE(review): the commented Dexed
# alternative also *reassigns* resources_dir ("=" instead of "+") --
# looks like a typo; confirm before reusing that line.
dx_vst = resources_dir + "mda_DX10.vst"
# dx_vst = resources_dir = "Dexed.vst"
midi_fl = resources_dir + "midi_export.mid"
generator = resources_dir + "mrswatson"
def base_command(generator, vst, midi, param, flname):
    """Assemble the mrswatson command line that renders MIDI through a
    VST plugin into a mono wav file."""
    return ('{} --channels 1 --quiet --plugin "{}" --midi-file {} {} '
            '--output "{}"').format(generator, vst, midi, param, flname)
def single_generate(ind_array, file_name):
    """Render a single wav file with the configured VST and MIDI clip.

    Args:
        ind_array: a 1-D array with one value per plugin parameter;
            mrswatson parameters are 1-indexed, so entry i-1 becomes
            `--parameter i,value`.
        file_name: path of the wav file to write.

    Returns:
        None.  (FIX: the old docstring claimed an int16 numpy array was
        returned; the audio is only written to `file_name`.)
    """
    param_set = ""
    for x in range(1, len(ind_array) + 1):
        param_set += "--parameter {},{} ".format(str(x), ind_array[x - 1])
    cmd = base_command(generator, dx_vst, midi_fl, param_set, file_name)
    # FIX: `os.system.__call__(cmd)` was a needlessly indirect way of
    # spelling os.system(cmd).
    os.system(cmd)
|
import matplotlib.pyplot as plt
import numpy as np
import peakutils
class Waveform:
    """Represents a guitar waveform sampled from an oscilloscope.

    Arguments:
        times   - sample timestamps (scaled by h_scale)
        array   - raw oscilloscope values
        fs      - sampling frequency
        v_scale / h_scale - vertical / horizontal scope scales
        name    - human-readable label
    """

    def __init__(self, times, array, fs, v_scale, h_scale, name):
        samples = np.array(array)
        self._fs = fs
        self._samples_n = samples.size
        self._times = np.array(times) * h_scale
        # NOTE(review): amplitudes are divided by 5 after scaling --
        # presumably 5 vertical divisions per screen; confirm.
        self._amplitudes = samples * v_scale / 5
        self._name = name

    @property
    def name(self):
        return self._name

    @name.setter
    def name(self, name):
        self._name = name

    # Class-level empty arrays act as "not computed yet" sentinels for
    # the cached spectra below; instances overwrite them on first use.
    _dft = np.array([])

    def dft(self, force=False):
        """Positive-frequency half of the amplitude-normalised DFT (cached)."""
        if force or self._dft.size == 0:
            half = int(self._samples_n / 2)
            spectrum = np.fft.fft(self._amplitudes) / self._samples_n
            self._dft = spectrum[range(half)]
        return self._dft

    _abs_dft_freq = np.array([])
    _abs_dft = np.array([])

    def abs_dft(self, force=False):
        """Return (frequencies, magnitudes) for the positive half-spectrum."""
        if force or not self._abs_dft.size or not self._abs_dft_freq.size:
            freqs = np.arange(self._samples_n) * self._fs / self._samples_n
            self._abs_dft_freq = freqs[range(int(self._samples_n / 2))]
            self._abs_dft = abs(self.dft())
        return self._abs_dft_freq, self._abs_dft

    _normalized_abs_dft_freq = np.array([])
    _normalized_abs_dft = np.array([])

    def normalized_abs_dft(self, force=False, cutoff_threshold=1500):
        """Magnitude spectrum rescaled to a unit peak, truncated above
        `cutoff_threshold` (same frequency unit as abs_dft)."""
        if force or not self._normalized_abs_dft.any() or not \
                self._normalized_abs_dft.size:
            freqs, mags = self.abs_dft()
            scaled = mags / np.amax(mags)
            if cutoff_threshold:
                # Count bins up to (and including) the cutoff frequency.
                keep = 0
                for freq in freqs:
                    if freq > cutoff_threshold:
                        break
                    keep += 1
                self._normalized_abs_dft = scaled[0:keep]
                self._normalized_abs_dft_freq = freqs[0:keep]
            else:
                self._normalized_abs_dft = scaled
                self._normalized_abs_dft_freq = freqs
        return self._normalized_abs_dft_freq, self._normalized_abs_dft

    _peaks = np.array([])

    def ft_peaks(self, force=False, min_dist=10, thres=0.7):
        """Indices of spectral peaks found by peakutils (cached)."""
        if force or not self._peaks.size:
            magnitudes = self.abs_dft(force=force)[1]
            self._peaks = peakutils.indexes(magnitudes,
                                            min_dist=min_dist, thres=thres)
        return self._peaks

    def plot(self, x_unc=0, y_unc=0):
        """Time-domain plot; draws error bars when uncertainties are given."""
        if x_unc == 0 and y_unc == 0:
            plt.plot(self._times, self._amplitudes)
        else:
            plt.errorbar(self._times, self._amplitudes,
                         xerr=x_unc, yerr=y_unc, ecolor='r',
                         capsize=1.5, linewidth=1)

    def plot_ft(self, length=100, ax=None, zs=0):
        """Plot the first `length` bins of the magnitude spectrum."""
        freqs, mags = self.abs_dft()
        if not ax:
            plt.plot(freqs[0:length], mags[0:length])
        else:
            ax.plot(freqs[0:length], mags[0:length], zs=zs, zdir='y')

    def plot_normalized_ft(self, length=100, ax=None, zs=0):
        """Plot the first `length` bins of the normalised spectrum."""
        freqs, mags = self.normalized_abs_dft()
        if not ax:
            plt.plot(freqs[0:length], mags[0:length])
        else:
            ax.plot(freqs[0:length], mags[0:length], zs=zs, zdir='y')
|
#IVAN GASTELUM
#COURSE 2302 DATA STRUCTURES
#INSTRUCTOR DR. OLAC FUENTES , TA: ANINDITA
#DATE LAST MODIFICATION 3/11/2019
import numpy as np
import matplotlib.pyplot as plt
import math
#NUMBER 1: GRAPHING BST with all nodes after inserting
class BST(object):
    """Binary search tree node: one item plus left/right child links."""

    def __init__(self, item, left=None, right=None):
        # Children default to None, i.e. an empty subtree.
        self.item = item
        self.left = left
        self.right = right
# Inserting is the only mutation used for this exercise.
def Insert(T, newItem):
    """Insert newItem into the BST rooted at T; return the (new) root.

    Items >= the current node go right, so duplicates end up in the
    right subtree.  FIX: the None test now uses identity (`is None`)
    instead of equality (`== None`), the idiomatic and safe form.
    """
    if T is None:
        # Empty subtree: the new item becomes a leaf.
        T = BST(newItem)
    elif T.item > newItem:
        T.left = Insert(T.left, newItem)
    else:
        T.right = Insert(T.right, newItem)
    return T
# Recursive routine that plots the tree's edges and node labels.
def Draw_Tree(ax,p,n,w,t,T):
    """Recursively draw the subtree rooted at T on axes ax.

    p is a 3x2 endpoint array whose middle row p[1] holds the current
    node's coordinates; rows 0 and 2 receive the child endpoints.
    w is the horizontal offset to a child (halved each level) and t the
    vertical drop per level.
    NOTE(review): n is decremented on recursion but never tested, and
    one of the q/r arrays is unused in each branch -- they look like
    leftovers; confirm before cleaning them up.
    """
    if T is not None:
        if T.left is not None:
            p[0,0] = p[1,0] - w #first endpoint shifts left by w
            p[0,1] = p[1,1] - t #and down by t (the child position)
            p[2,0] = p[1,0] #second endpoint stays at the parent x
            p[2,1] = p[1,1] #and at the parent y
            ax.plot(p[:,0],p[:,1],color='k')
            # Label the left child inside a grey circle.
            plt.text(p[1,0] - w,p[1,1] - t, T.left.item, bbox={"boxstyle":"circle","color":"grey"})
            q = np.array([[0,0],[p[0,0],p[0,1]],[0,0]])
            r = np.array([[0,0],[p[2,0],p[2,1]],[0,0]])
            Draw_Tree(ax,q,n-1,w/2,t,T.left)
        if T.right is not None:
            p[0,0] = p[1,0] #first endpoint stays at the parent x
            p[0,1] = p[1,1] #and at the parent y
            p[2,0] = p[1,0] + w #second endpoint shifts right by w
            p[2,1] = p[1,1] - t #and down by t (the child position)
            ax.plot(p[:,0],p[:,1],color='k')
            # Label the right child inside a grey circle.
            plt.text(p[1,0] + w,p[1,1] - t, T.right.item, bbox={"boxstyle":"circle","color":"grey"})
            q = np.array([[0,0],[p[0,0],p[0,1]],[0,0]])
            r = np.array([[0,0],[p[2,0],p[2,1]],[0,0]])
            Draw_Tree(ax,r,n-1,w/2,t,T.right)
# Build the tree using the same insertion order as the lab 3 sheet.
T = None
A = [10, 4, 15, 2, 8, 12, 18, 1, 3, 5, 9, 7]
for a in A:
    T = Insert(T, a)

plt.close("all")
# origin holds the three points needed to draw one two-branch segment.
origin = np.array([[0, 0], [0, 0], [0, 0]])
n = 3        # number of recursive calls
w = 100      # width of the graph along x
t = 200 / n  # vertical step along y
fig, ax = plt.subplots()
# Root label, then recurse over the rest of the tree.
plt.text(origin[0, 0], origin[1, 1], T.item,
         bbox={"boxstyle": "circle", "color": "grey"})
Draw_Tree(ax, origin, n, w, t, T)
ax.set_aspect(1.0)
ax.axis('off')
plt.show()
fig.savefig('BST Tree.png')
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Generate .h file for TCG code generation.
"""
__author__ = "Lluís Vilanova <vilanova@ac.upc.edu>"
__copyright__ = "Copyright 2012-2017, Lluís Vilanova <vilanova@ac.upc.edu>"
__license__ = "GPL version 2 or (at your option) any later version"
__maintainer__ = "Stefan Hajnoczi"
__email__ = "stefanha@linux.vnet.ibm.com"
from tracetool import out, Arguments
import tracetool.vcpu
def vcpu_transform_args(args):
    """Pair the single vcpu argument with its TCGv_env counterpart."""
    assert len(args) == 1
    vcpu_name = args.names()[0]
    return Arguments([
        args,
        # NOTE: this name must be kept in sync with the one in "tcg_h"
        # NOTE: Current helper code uses TCGv_env (CPUArchState*)
        ("TCGv_env", "__tcg_" + vcpu_name),
    ])
def generate(events, backend, group):
    """Emit the generated-TCG tracer header for this event group.

    For every event carrying the "tcg-exec" property, a static inline
    wrapper is written that calls the translation-time tracepoint and,
    when the event is enabled, the corresponding TCG helper.
    """
    if group == "root":
        header = "trace-root.h"
    else:
        header = "trace.h"
    # File prologue with an include guard derived from the group name.
    out('/* This file is autogenerated by tracetool, do not edit. */',
        '/* You must include this file after the inclusion of helper.h */',
        '',
        '#ifndef TRACE_%s_GENERATED_TCG_TRACERS_H' % group.upper(),
        '#define TRACE_%s_GENERATED_TCG_TRACERS_H' % group.upper(),
        '',
        '#include "exec/helper-proto.h"',
        '#include "%s"' % header,
        '',
        )
    for e in events:
        # just keep one of them
        if "tcg-exec" not in e.properties:
            continue
        # Wrapper signature uses the "tcg_h"-transformed argument list.
        out('static inline void %(name_tcg)s(%(args)s)',
            '{',
            name_tcg=e.original.api(e.QEMU_TRACE_TCG),
            args=tracetool.vcpu.transform_args("tcg_h", e.original))
        if "disable" not in e.properties:
            args_trans = e.original.event_trans.args
            args_exec = tracetool.vcpu.transform_args(
                "tcg_helper_c", e.original.event_exec, "wrapper")
            if "vcpu" in e.properties:
                # Guard the helper call on the per-vCPU dynamic state.
                trace_cpu = e.args.names()[0]
                cond = "trace_event_get_vcpu_state(%(cpu)s,"\
                       " TRACE_%(id)s)"\
                       % dict(
                           cpu=trace_cpu,
                           id=e.original.event_exec.name.upper())
            else:
                cond = "true"
            # Body: translation-time trace, then the conditional helper.
            out(' %(name_trans)s(%(argnames_trans)s);',
                ' if (%(cond)s) {',
                ' gen_helper_%(name_exec)s(%(argnames_exec)s);',
                ' }',
                name_trans=e.original.event_trans.api(e.QEMU_TRACE),
                name_exec=e.original.event_exec.api(e.QEMU_TRACE),
                argnames_trans=", ".join(args_trans.names()),
                argnames_exec=", ".join(args_exec.names()),
                cond=cond)
        out('}')
    # Close the include guard.
    out('',
        '#endif /* TRACE_%s_GENERATED_TCG_TRACERS_H */' % group.upper())
|
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Amir Mohammadi <amir.mohammadi@idiap.ch>
# Wed 20 July 16:20:12 CEST 2016
#
"""
Very simple tests for Implementations
"""
from pathlib import Path
import bob.bio.base
from bob.bio.base.config.dummy.database import database as dummy_database
from bob.bio.base.database import CSVDatabase
from bob.pipelines import DelayedSample, SampleSet
DATA_DIR = Path(__file__).parent / "data"
def test_all_samples():
    """dummy_database exposes 400 DelayedSamples over two equal groups."""
    everything = dummy_database.all_samples(groups=None)
    assert len(everything) == 400
    assert all(isinstance(sample, DelayedSample) for sample in everything)
    # Each group holds half; an empty group list means "no filter".
    assert len(dummy_database.all_samples(groups=["train"])) == 200
    assert len(dummy_database.all_samples(groups=["dev"])) == 200
    assert len(dummy_database.all_samples(groups=[])) == 400
def test_atnt():
    """The atnt resource exposes train, reference and probe samples."""
    database = bob.bio.base.load_resource(
        "atnt", "database", preferred_package="bob.bio.base"
    )
    # Background-model (training) samples.
    train_set = database.background_model_samples()
    assert len(train_set) > 0
    assert isinstance(train_set[0], DelayedSample)
    # References come as SampleSets carrying identity metadata.
    references = database.references()
    assert len(references) > 0
    first_ref = references[0]
    assert isinstance(first_ref, SampleSet)
    assert hasattr(first_ref, "key")
    assert hasattr(first_ref, "subject_id")
    assert hasattr(first_ref, "template_id")
    ref_sample = first_ref.samples[0]
    assert isinstance(ref_sample, DelayedSample)
    assert hasattr(ref_sample, "key")
    # Probes are SampleSets of DelayedSamples as well.
    probes = database.probes()
    assert len(probes) > 0
    assert isinstance(probes[0], SampleSet)
    assert isinstance(probes[0].samples[0], DelayedSample)
    all_samples = database.all_samples()
    assert len(all_samples) > 0
    assert isinstance(all_samples[0], DelayedSample)
def test_metadata():
    """Templates and samples expose the configured metadata fields."""
    definition_path = DATA_DIR / "example_csv_filelist"
    database = CSVDatabase(
        name="dummy_metadata",
        protocol="protocol_only_dev_metadata",
        dataset_protocols_path=definition_path,
        templates_metadata=["subject_metadata"],
    )
    # Reference side: set-level and sample-level metadata.
    first_ref = database.references()[0]
    assert hasattr(first_ref, "subject_metadata")
    assert hasattr(first_ref.samples[0], "sample_metadata")
    # Probe side mirrors the reference side.
    first_probe = database.probes()[0]
    assert hasattr(first_probe, "subject_metadata")
    assert hasattr(first_probe.samples[0], "sample_metadata")
|
"""
This type stub file was generated by pyright.
"""
import vtkmodules.vtkRenderingCore as __vtkmodules_vtkRenderingCore
class vtkOpenGLRenderer(__vtkmodules_vtkRenderingCore.vtkRenderer):
    """
    vtkOpenGLRenderer - OpenGL renderer
    Superclass: vtkRenderer
    vtkOpenGLRenderer is a concrete implementation of the abstract class
    vtkRenderer. vtkOpenGLRenderer interfaces to the OpenGL graphics
    library.
    """
    # NOTE: this is a type stub (generated by pyright) — every body is
    # elided with "..."; the docstrings mirror the C++ API signatures.
    def Clear(self):
        """
        V.Clear()
        C++: void Clear(void) override;
        Clear the image to the background color.
        """
        ...
    def DeviceRender(self):
        """
        V.DeviceRender()
        C++: void DeviceRender(void) override;
        Concrete open gl render method.
        """
        ...
    def DeviceRenderOpaqueGeometry(self, vtkFrameBufferObjectBase):
        """
        V.DeviceRenderOpaqueGeometry(vtkFrameBufferObjectBase)
        C++: void DeviceRenderOpaqueGeometry(
            vtkFrameBufferObjectBase *fbo=nullptr) override;
        Overridden to support hidden line removal.
        """
        ...
    def DeviceRenderTranslucentPolygonalGeometry(self, vtkFrameBufferObjectBase):
        """
        V.DeviceRenderTranslucentPolygonalGeometry(
            vtkFrameBufferObjectBase)
        C++: void DeviceRenderTranslucentPolygonalGeometry(
            vtkFrameBufferObjectBase *fbo=nullptr) override;
        Render translucent polygonal geometry. Default implementation
        just call UpdateTranslucentPolygonalGeometry(). Subclasses of
        vtkRenderer that can deal with depth peeling must override this
        method.
        """
        ...
    def GetDepthPeelingHigherLayer(self):
        """
        V.GetDepthPeelingHigherLayer() -> int
        C++: int GetDepthPeelingHigherLayer()
        Is rendering at translucent geometry stage using depth peeling
        and rendering a layer other than the first one? (Boolean value)
        If so, the uniform variables UseTexture and Texture can be set.
        (Used by vtkOpenGLProperty or vtkOpenGLTexture)
        """
        ...
    def GetEnvMapIrradiance(self):
        """
        V.GetEnvMapIrradiance() -> vtkPBRIrradianceTexture
        C++: vtkPBRIrradianceTexture *GetEnvMapIrradiance()
        Get environment textures used for image based lighting.
        """
        ...
    def GetEnvMapLookupTable(self):
        """
        V.GetEnvMapLookupTable() -> vtkPBRLUTTexture
        C++: vtkPBRLUTTexture *GetEnvMapLookupTable()
        Get environment textures used for image based lighting.
        """
        ...
    def GetEnvMapPrefiltered(self):
        """
        V.GetEnvMapPrefiltered() -> vtkPBRPrefilterTexture
        C++: vtkPBRPrefilterTexture *GetEnvMapPrefiltered()
        Get environment textures used for image based lighting.
        """
        ...
    def GetLightingComplexity(self):
        """
        V.GetLightingComplexity() -> int
        C++: virtual int GetLightingComplexity()
        """
        ...
    def GetLightingCount(self):
        """
        V.GetLightingCount() -> int
        C++: virtual int GetLightingCount()
        """
        ...
    def GetLightingUniforms(self):
        """
        V.GetLightingUniforms() -> string
        C++: const char *GetLightingUniforms()
        """
        ...
    def GetNumberOfGenerationsFromBase(self, string):
        """
        V.GetNumberOfGenerationsFromBase(string) -> int
        C++: vtkIdType GetNumberOfGenerationsFromBase(const char *type)
            override;
        Given a the name of a base class of this class type, return the
        distance of inheritance between this class type and the named
        class (how many generations of inheritance are there between this
        class and the named class). If the named class is not in this
        class's inheritance tree, return a negative value. Valid
        responses will always be nonnegative. This method works in
        combination with vtkTypeMacro found in vtkSetGet.h.
        """
        ...
    def GetNumberOfGenerationsFromBaseType(self, string):
        """
        V.GetNumberOfGenerationsFromBaseType(string) -> int
        C++: static vtkIdType GetNumberOfGenerationsFromBaseType(
            const char *type)
        Given a the name of a base class of this class type, return the
        distance of inheritance between this class type and the named
        class (how many generations of inheritance are there between this
        class and the named class). If the named class is not in this
        class's inheritance tree, return a negative value. Valid
        responses will always be nonnegative. This method works in
        combination with vtkTypeMacro found in vtkSetGet.h.
        """
        ...
    def GetState(self):
        """
        V.GetState() -> vtkOpenGLState
        C++: vtkOpenGLState *GetState()
        """
        ...
    def GetUserLightTransform(self):
        """
        V.GetUserLightTransform() -> vtkTransform
        C++: vtkTransform *GetUserLightTransform()
        Set the user light transform applied after the camera transform.
        Can be null to disable it.
        """
        ...
    def HaveApplePrimitiveIdBug(self):
        """
        V.HaveApplePrimitiveIdBug() -> bool
        C++: bool HaveApplePrimitiveIdBug()
        Indicate if this system is subject to the Apple/AMD bug of not
        having a working glPrimitiveId <rdar://20747550>. The bug is
        fixed on macOS 10.11 and later, and this method will return false
        when the OS is new enough.
        """
        ...
    def HaveAppleQueryAllocationBug(self):
        """
        V.HaveAppleQueryAllocationBug() -> bool
        C++: static bool HaveAppleQueryAllocationBug()
        Indicate if this system is subject to the apple/NVIDIA bug that
        causes crashes in the driver when too many query objects are
        allocated.
        """
        ...
    def IsA(self, string):
        """
        V.IsA(string) -> int
        C++: vtkTypeBool IsA(const char *type) override;
        Return 1 if this class is the same type of (or a subclass of) the
        named class. Returns 0 otherwise. This method works in
        combination with vtkTypeMacro found in vtkSetGet.h.
        """
        ...
    def IsDualDepthPeelingSupported(self):
        """
        V.IsDualDepthPeelingSupported() -> bool
        C++: bool IsDualDepthPeelingSupported()
        Dual depth peeling may be disabled for certain runtime
        configurations. This method returns true if
        vtkDualDepthPeelingPass will be used in place of
        vtkDepthPeelingPass.
        """
        ...
    def IsTypeOf(self, string):
        """
        V.IsTypeOf(string) -> int
        C++: static vtkTypeBool IsTypeOf(const char *type)
        Return 1 if this class type is the same type of (or a subclass
        of) the named class. Returns 0 otherwise. This method works in
        combination with vtkTypeMacro found in vtkSetGet.h.
        """
        ...
    def NewInstance(self):
        """
        V.NewInstance() -> vtkOpenGLRenderer
        C++: vtkOpenGLRenderer *NewInstance()
        """
        ...
    def ReleaseGraphicsResources(self, vtkWindow):
        """
        V.ReleaseGraphicsResources(vtkWindow)
        C++: void ReleaseGraphicsResources(vtkWindow *w) override;
        """
        ...
    def SafeDownCast(self, vtkObjectBase):
        """
        V.SafeDownCast(vtkObjectBase) -> vtkOpenGLRenderer
        C++: static vtkOpenGLRenderer *SafeDownCast(vtkObjectBase *o)
        """
        ...
    def SetEnvironmentTexture(self, vtkTexture, bool):
        """
        V.SetEnvironmentTexture(vtkTexture, bool)
        C++: void SetEnvironmentTexture(vtkTexture *texture,
            bool isSRGB=false) override;
        Overriden in order to connect the texture to the environment map
        textures.
        """
        ...
    def SetUserLightTransform(self, vtkTransform):
        """
        V.SetUserLightTransform(vtkTransform)
        C++: void SetUserLightTransform(vtkTransform *transform)
        Set the user light transform applied after the camera transform.
        Can be null to disable it.
        """
        ...
    def UpdateLightingUniforms(self, vtkShaderProgram):
        """
        V.UpdateLightingUniforms(vtkShaderProgram)
        C++: void UpdateLightingUniforms(vtkShaderProgram *prog)
        """
        ...
    def UpdateLights(self):
        """
        V.UpdateLights() -> int
        C++: int UpdateLights(void) override;
        Ask lights to load themselves into graphics pipeline.
        """
        ...
    def __delattr__(self, *args, **kwargs):
        """ Implement delattr(self, name). """
        ...
    def __getattribute__(self, *args, **kwargs):
        """ Return getattr(self, name). """
        ...
    def __init__(self, *args, **kwargs) -> None:
        ...
    @staticmethod
    def __new__(*args, **kwargs):
        """ Create and return a new object. See help(type) for accurate signature. """
        ...
    def __repr__(self, *args, **kwargs):
        """ Return repr(self). """
        ...
    def __setattr__(self, *args, **kwargs):
        """ Implement setattr(self, name, value). """
        ...
    def __str__(self, *args, **kwargs) -> str:
        """ Return str(self). """
        ...
    # Class-level constants; values are elided in the stub.  The four
    # names below LightingComplexityEnum look like its enumerators.
    __this__ = ...
    Directional = ...
    Headlight = ...
    LightingComplexityEnum = ...
    NoLighting = ...
    Positional = ...
    __dict__ = ...
    __vtkname__ = ...
|
# Strings are immutable, so swap the third character by rebuilding the
# text from a mutable list of its characters.
old_string = "abdd"
chars = list(old_string)
chars[2] = "c"
new_string = "".join(chars)
print(new_string)
# from fractions import gcd
from math import gcd
def _gcd_sum(limit):
    """Return sum of gcd(i, j) over 1 <= i < j <= limit plus
    sum of gcd(i, j, t) over 1 <= i < j < t <= limit.
    """
    total = 0
    for i in range(1, limit + 1):
        for j in range(i + 1, limit + 1):
            # gcd(i, j) is invariant in the innermost loop: hoist it so
            # each triple costs one gcd call instead of two.
            g = gcd(i, j)
            total += g
            for t in range(j + 1, limit + 1):
                total += gcd(g, t)
    return total

k = int(input())
ans = 6 * _gcd_sum(k)
# Diagonal triples (i, i, i) contribute gcd(i, i, i) == i each,
# i.e. 1 + 2 + ... + k in closed form.
ans += k * (k + 1) // 2
print(ans)
|
import config
import numpy as np
import json
import re
import helper_functions
import mysql.connector
import db_config
print('connected to db')
# For each configured subforum: gather every thread href across its
# pages and upload (href, forum_name) rows to the thread_hrefs table.
for subforum_url in config.subforums:
    forum_name = subforum_url.split('/')[-1].replace('.html', '')
    page_urls = helper_functions.get_subforum_pages(subforum_url)
    href_list = helper_functions.get_all_thread_hrefs(subforum_url, page_urls)
    print('number of threads: ' + str(len(href_list)))
    rows = [(href, forum_name) for href in href_list]
    helper_functions.upload_csv_to_mysql('thread_hrefs', forum_name, rows)
    ##TODO figure out how to save to the MySQL server
print('done')
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Feb 23 21:09:02 2018
@author: shubham
"""
# For each test case: read a string of (at least) 'A'/'B' characters and
# print two counts.  countA/countB start at the plain number of 'A'/'B'
# occurrences; then, for every pair of same letters separated only by
# characters that are neither 'A' nor 'B', the number of characters
# strictly between them is added to that letter's count.
for _ in range(int(input())):
    s = input()
    countA = 0
    countB = 0
    countA+=s.count('A')
    countB+=s.count('B')
    for i in range(len(s)):
        if s[i] == 'A':
            # Scan right from this 'A'; collect the gap until the next
            # 'A' (credit the gap length) or a 'B' (discard the gap).
            A = []
            for j in range (i+1,len(s)):
                if s[j] == 'A':
                    countA += len(A)
                    break
                elif s[j] == 'B':
                    A.clear()
                    break
                else:
                    A.append(s[j])
        if s[i] == 'B':
            # Symmetric scan for 'B'...'B' gaps, broken by an 'A'.
            B = []
            for j in range (i+1,len(s)):
                if s[j] == 'B':
                    countB += len(B)
                    break
                elif s[j] == 'A':
                    B.clear()
                    break
                else:
                    B.append(s[j])
    print (countA,countB)
from unittest import TestCase
from DeckOfCards.suit import Suit
class SuitTests(TestCase):
    """Unit tests for the Suit wrapper around an integer suit value."""

    def test_init(self):
        # The constructor must store the raw value it was given.
        self.assertEqual(Suit(3).get_suit_value(), 3)

    def test_get_value(self):
        # Zero is a legal suit value and round-trips unchanged.
        self.assertEqual(Suit(0).get_suit_value(), 0)

    def test_get_suit_from_value(self):
        # Value 1 maps onto the Diamond suit constant.
        self.assertEqual(Suit(1).get_suit_from_value(1), Suit.Diamond)
|
# coding: utf-8
import tensorflow as tf
from module import IS_from_logits
def dPPOc(act, policy_logits, behavior_logits, advantage, clip):
    """Clipped (dual) PPO policy loss.

    Computes the importance ratio between the current policy and the
    behavior policy for the taken actions, then applies the PPO
    surrogate objective: the minimum of the unclipped and clipped
    advantage-weighted ratios.  Returns the negated surrogate so it can
    be minimized.
    """
    ratio = IS_from_logits(
        policy_logits=policy_logits,
        act=act,
        behavior_logits=behavior_logits)
    surrogate = advantage * ratio
    if clip is not None:
        # Pessimistic bound: never profit from moving the ratio outside
        # the [1 - clip, 1 + clip] trust region.
        clipped_ratio = tf.clip_by_value(ratio, 1.0 - clip, 1.0 + clip)
        surrogate = tf.minimum(surrogate, advantage * clipped_ratio)
    return -surrogate
|
from typing import List
class Solution:
    def twoSum(self, numbers: List[int], target: int) -> List[int]:
        """Two-pointer search over an ascending list.

        Returns the 1-based indices of the pair summing to ``target``
        (the input is assumed sorted with exactly one solution).
        """
        lo, hi = 0, len(numbers) - 1
        while lo < hi:
            pair_sum = numbers[lo] + numbers[hi]
            if pair_sum > target:
                hi -= 1          # too large: drop the biggest value
            elif pair_sum < target:
                lo += 1          # too small: drop the smallest value
            else:
                break            # exact match
        return [lo + 1, hi + 1]
|
"""empty message
Revision ID: e91a8ebd12d4
Revises: 43cfdf0f229c
Create Date: 2018-08-05 17:24:09.706201
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e91a8ebd12d4'
down_revision = '43cfdf0f229c'
branch_labels = None
depends_on = None
def upgrade():
    """Apply this revision: make several foreign-key columns NOT NULL
    and add new cuisine (desc/featured/image) and restaurant (locality)
    columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('amenity', 'hotel_id',
               existing_type=sa.INTEGER(),
               nullable=False)
    op.add_column('cuisine', sa.Column('desc', sa.Text(), nullable=True))
    op.add_column('cuisine', sa.Column('featured', sa.Boolean(), nullable=True))
    op.add_column('cuisine', sa.Column('image', sa.String(), nullable=True))
    op.alter_column('deal', 'room_id',
               existing_type=sa.INTEGER(),
               nullable=False)
    op.alter_column('deal', 'website_id',
               existing_type=sa.INTEGER(),
               nullable=False)
    op.alter_column('dish', 'restaurant_id',
               existing_type=sa.INTEGER(),
               nullable=False)
    op.alter_column('facility', 'room_id',
               existing_type=sa.INTEGER(),
               nullable=False)
    op.alter_column('image', 'hotel_id',
               existing_type=sa.INTEGER(),
               nullable=False)
    op.alter_column('member', 'room_id',
               existing_type=sa.INTEGER(),
               nullable=False)
    op.alter_column('menu', 'restaurant_id',
               existing_type=sa.INTEGER(),
               nullable=False)
    op.add_column('restaurant', sa.Column('locality', sa.String(), nullable=True))
    op.alter_column('restaurant_amenity', 'restaurant_id',
               existing_type=sa.INTEGER(),
               nullable=False)
    op.alter_column('restaurant_image', 'restaurant_id',
               existing_type=sa.INTEGER(),
               nullable=False)
    op.alter_column('room', 'hotel_id',
               existing_type=sa.INTEGER(),
               nullable=False)
    # ### end Alembic commands ###
def downgrade():
    """Revert this revision: drop the added columns and relax the
    foreign-key columns back to nullable (mirror image of upgrade)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('room', 'hotel_id',
               existing_type=sa.INTEGER(),
               nullable=True)
    op.alter_column('restaurant_image', 'restaurant_id',
               existing_type=sa.INTEGER(),
               nullable=True)
    op.alter_column('restaurant_amenity', 'restaurant_id',
               existing_type=sa.INTEGER(),
               nullable=True)
    op.drop_column('restaurant', 'locality')
    op.alter_column('menu', 'restaurant_id',
               existing_type=sa.INTEGER(),
               nullable=True)
    op.alter_column('member', 'room_id',
               existing_type=sa.INTEGER(),
               nullable=True)
    op.alter_column('image', 'hotel_id',
               existing_type=sa.INTEGER(),
               nullable=True)
    op.alter_column('facility', 'room_id',
               existing_type=sa.INTEGER(),
               nullable=True)
    op.alter_column('dish', 'restaurant_id',
               existing_type=sa.INTEGER(),
               nullable=True)
    op.alter_column('deal', 'website_id',
               existing_type=sa.INTEGER(),
               nullable=True)
    op.alter_column('deal', 'room_id',
               existing_type=sa.INTEGER(),
               nullable=True)
    op.drop_column('cuisine', 'image')
    op.drop_column('cuisine', 'featured')
    op.drop_column('cuisine', 'desc')
    op.alter_column('amenity', 'hotel_id',
               existing_type=sa.INTEGER(),
               nullable=True)
    # ### end Alembic commands ###
|
#!/usr/bin/python
import base64
import sys
import urllib2
import json
import time
import os
import argparse
import shutil
from subprocess import call, check_output
# Python 2 script: restart a Jenkins build-slave Docker container once the
# slave is idle on the Jenkins master.  Waits for idleness, removes any
# existing container, empties the slave workspace, ensures the Docker
# network exists, then starts a fresh container with the requested mounts.
parser = argparse.ArgumentParser()
parser.add_argument("image", type=str, help="Docker image name")
parser.add_argument("slave", type=str, help="Slave name")
parser.add_argument("port", type=str, help="SSH Port to expose from container")
parser.add_argument("jenkins", type=str, help="Jenkins to connect to",
                    nargs='?', default="factory.couchbase.com")
parser.add_argument("--ccache-dir", type=str, help="Host directory to mount as ~/.ccache")
parser.add_argument("--no-workspace", action="store_true", help="Skip mounting /home/couchbase/jenkins")
parser.add_argument("--no-std-mounts", action="store_true", help="Skip mounting /buildteam /latestbuilds /releases")
parser.add_argument("--mount-docker", action="store_true", help="Mount docker.sock")
parser.add_argument("--mount-dir", type=str, help="Mount local directories",
                    nargs="+")
args = parser.parse_args()
image = args.image
slave = args.slave
port = args.port
jenkins = args.jenkins
mount_dirs = args.mount_dir
if mount_dirs is None:
    mount_dirs = []
# Sink for subprocess output we don't care about.
devnull = open(os.devnull, "w")
# Auth token is now required for all jenkins.
# Environment variables jenkins_user & jenkins_token need to be defined
# jenkins_user: user's github account that is used to log into jenkins
# jenkins_token: personal access token generated via https://github.com/settings/tokens
auth_required = True
# If auth is required, check we have the expected env vars (or die)
if auth_required:
    if not os.environ.get('jenkins_user') or not os.environ.get('jenkins_token'):
        print("Authentication required for '{0}'".format(jenkins))
        print("Ensure jenkins_user and jenkins_token environment variables are populated")
        exit(1)
# See if Jenkins thinks the slave is connected
print "Seeing if {1} is connected to Jenkins master '{0}'...".format(jenkins, slave)
slaveurl = 'http://{0}/computer/{1}/api/json?tree=offline,executors[idle],oneOffExecutors[idle]'
while True:
    if auth_required:
        # HTTP Basic auth header built from the env-var credentials.
        request = urllib2.Request(slaveurl.format(jenkins, slave))
        base64string = base64.b64encode('%s:%s' % (os.environ.get('jenkins_user'), os.environ.get('jenkins_token')))
        request.add_header("Authorization", "Basic %s" % base64string)
        response = urllib2.urlopen(request)
    else:
        # no auth needed
        response = urllib2.urlopen(slaveurl.format(jenkins, slave))
    slavedata = json.load(response)
    # If slave is "offline", fine. Otherwise, if ALL executors are "idle", fine.
    if (slavedata["offline"]):
        break
    executors = slavedata["executors"] + slavedata["oneOffExecutors"]
    if (not (False in [x["idle"] for x in executors])):
        break
    print "Slave {0} is currently busy, waiting 30 seconds...".format(slave)
    time.sleep(30)
# See if slave is running locally
print "Checking if {0} is running locally...".format(slave)
result = call(["docker", "inspect", slave], stdout=devnull, stderr=devnull)
if result == 0:
    # "docker inspect" succeeded, so a container with this name exists.
    print "Killing {0}".format(slave)
    output = check_output(["docker", "rm", "-f", slave])
    if output.strip() != slave:
        print "Stopped slave had wrong name, but continuing to start new..."
# Create/empty slave Jenkins directory.
slave_dir = "/home/couchbase/slaves/{0}".format(slave)
print "Emptying local slave directory {0}...".format(slave_dir)
if os.path.isdir(slave_dir):
    # Bottom-up walk so directories are empty before rmdir; symlinks are
    # removed rather than descended into.
    for root, dirs, files in os.walk(slave_dir, topdown=False):
        os.chmod(root, 0o777)
        for name in files:
            os.remove(os.path.join(root, name))
        for name in dirs:
            path = os.path.join(root, name)
            if (os.path.islink(path)):
                os.remove(path)
            else:
                os.rmdir(path)
# Check out the Docker network situation
output = check_output(["docker", "network", "ls", "--format", "{{ .Name }}"])
networks = output.split("\n")
if not "jenkins-slaves" in networks:
    print "Creating 'jenkins-slaves' Docker network..."
    output = check_output(["docker", "network", "create", "jenkins-slaves"])
# Start constructing the big "docker run" command
run_args = [
    "docker", "run", "--name={0}".format(slave), "--detach=true",
    "--sysctl=net.ipv6.conf.lo.disable_ipv6=0",
    "--privileged",
    "--restart=unless-stopped",
    "--net=jenkins-slaves",
    "--publish={0}:22".format(port),
    "--volume=/home/couchbase/reporef:/home/couchbase/reporef",
    "--volume=/etc/localtime:/etc/localtime",
    "--volume=/etc/timezone:/etc/timezone",
    "--volume=/home/couchbase/jenkinsdocker-ssh:/ssh"
]
if not args.no_std_mounts:
    run_args.extend([
        "--volume=/buildteam:/buildteam",
        "--volume=/san/latestbuilds:/latestbuilds",
        "--volume=/san/releases:/releases"
    ])
if not args.no_workspace:
    run_args.append(
        "--volume=/home/couchbase/slaves/{0}:/home/couchbase/jenkins".format(slave)
    )
if args.mount_docker:
    run_args.append(
        "--volume=/var/run/docker.sock:/var/run/docker.sock"
    )
if args.ccache_dir is not None:
    if not os.path.isdir(args.ccache_dir):
        os.makedirs(args.ccache_dir)
    run_args.append(
        "--volume={0}:/home/couchbase/.ccache".format(args.ccache_dir)
    )
# Extra user-requested mounts: each is "host_dir:container_path".
for mount in mount_dirs:
    (dir, path) = mount.split(':')
    if not os.path.isdir(dir):
        os.makedirs(dir)
    run_args.append(
        "--volume={0}:{1}".format(dir, path)
    )
run_args.extend([
    "--ulimit=core=-1",
    image,
    "default"
])
# Finally, create new slave container.
print "Creating new {0} container...".format(slave)
output = check_output(run_args)
print "Result: {0}".format(output)
|
try :
from numpy import *
isnumeric=False
except :
from Numeric import *
isnumeric=True
import re
import sys
#from string import split
class madtable:
    """
    import a mad table, examples:
    from pymadtable import *
    t=madtable('twiss.lhcb1.x1y5.data')
    t.select(t.pattern("IP"),t.name,t.s)
    t.select(t.range("IP"),t.name,t.s)
    t.select(t.rmul(t.range('IP5','MQY.*R5'), t.radd(t.pattern('^MQ'),[i-1 for i in t.pattern('^MQ')]) ),t.name,t.s-t.s[t.elem('IP5')])
    """
    # Class-level registry mapping id(column array) -> column label;
    # shared across instances and used by show() to recover labels.
    dict={}
    def __init__(self, filename, regexp=None):
        # data: label -> column array; descs: '@' header descriptors;
        # idx: element NAME -> row number; lines: last row index.
        self.data={}
        self.descs={}
        self.idx={}
        self.lines=-1
        if (regexp): c=re.compile(regexp, re.IGNORECASE)
        for line in open(filename):
            f=line.split()
            if (f[0] == '@'): # descriptor lines
                self.descs[f[1]]=self.conv(f[2],f[3])
            elif ( f[0] == '*'): # self.labels lines
                f.pop(0) ; self.labels=f
                for l in self.labels: self.data[l]=[]
            elif (f[0] == '$'): # type lines
                f.pop(0) ; types=f
            else : # data lines
                # With a regexp, keep only matching rows.
                if (regexp):
                    if (c.search(line)): save=True
                    else : save=False
                else : save=True
                if (save):
                    self.lines=self.lines+1
                    f=map(self.conv,types,f)
                    # print self.labels
                    for l in self.labels:
                        d=f.pop(0)
                        self.data[l].append(d)
                        if (l == 'NAME'):
                            self.idx[d]=self.lines
        # End of file
        # for l in self.descs.keys():
        # setattr(self,l.lower(),self.descs[l])
        # setattr(self,l,self.descs[l])
        # Expose each column as an attribute (both original and
        # lower-case name) and register it in the class-level dict.
        for l in self.labels:
            if (not (isnumeric and (l == 'NAME') )) :
                self.data[l]=array(self.data[l])
            setattr(self,l.lower(),self.data[l])
            setattr(self,l,self.data[l])
            self.__class__.dict[id(self.data[l])]=l
    def conv(self,t,i) :
        # Convert a raw token according to the '$' type code:
        # 'e' -> float, 's' -> strip surrounding quotes, 'd' -> int.
        if ('e' in t): i=float(i)
        if ( ('s' in t) and (i[0]=='"') ): i=i[1:-1]
        if ('d' in t): i=int(i)
        return i
    def pattern(self,regexp,*a) :
        """
        t.name[t.pattern("MQ")]
        t.name[t.pattern("^IP1$")]
        t.name[t.pattern(1.387,t.s)]
        """
        # Row indices whose value in the column (default: name) matches
        # the case-insensitive regexp.
        if len(a)==1 :
            col=a[0]
        else :
            col=self.name
        regexp=str(regexp)
        c=re.compile(regexp, re.IGNORECASE)
        idx=[]
        for i in xrange(0,len(col)) :
            c.search(str(col[i])) and idx.append(i)
        return idx
    def name2idx(self,name) :
        """
        t.name[t.name2idx(["MQXC.3L5.B2","MQXB.B2L5.B2"])]
        """
        idx=[ self.idx[str(s)] for s in name]
        return idx
    def elem(self,ra,*a) :
        """
        t.s[t.elem("MQXC.3L5.B2")]
        t.name[t.elem(483.937625784,t.s)]
        """
        # Row indices whose column value equals ra (case-insensitive
        # string comparison).
        if len(a)==1 :
            col=a[0]
        else :
            col=self.name
        ra=str(ra).lower()
        idx=[]
        for i in xrange(0,len(col)) :
            if (str(col[i]).lower() == ra) :
                idx.append(i)
        return idx
    def range(self,ra,rb,*a) :
        """
        t.name[t.range("MQT.13L5.B2","MS.13L5.B2")]
        """
        # Inclusive index range from first match of ra to first match of
        # rb; empty list if either pattern has no match.
        if len(a)==1 :
            col=a[0]
        else :
            col=self.name
        ra=str(ra).lower()
        rb=str(rb).lower()
        c=re.compile(ra, re.IGNORECASE)
        d=re.compile(rb, re.IGNORECASE)
        try:
            ida=self.pattern(ra)[0]
            idb=self.pattern(rb)[0]
            return range(ida,idb+1)
        except IndexError:
            return []
    def nrange(self,sa,sb,*a) :
        """
        t.name[t.nrange(1,7)]
        t.name[t.nrange(100,200,t.betx)]
        """
        # Row indices whose numeric column value (default: s) lies in
        # the closed interval [sa, sb].
        if len(a)==1 :
            col=a[0]
        else :
            col=self.s
        idx=[]
        for i in xrange(0,len(col)) :
            if (col[i] >= sa) and (col[i] <= sb) :
                idx.append(i)
        return idx
    def cpattern(self,regexp) :
        """
        t.cpattern("BET")
        array(t.cpattern("BET"))[:,t.nrange(0,7)]
        """
        c=re.compile(regexp, re.IGNORECASE)
        col = [getattr(self,l) for l in self.labels if c.search(l) ]
        return col
    def lpattern(self,regexp) :
        """
        t.lpattern("BET")
        """
        c=re.compile(regexp, re.IGNORECASE)
        lab = [l for l in self.labels if c.search(l) ]
        return lab
    def lab2col(self,lab) :
        """
        t.lab2col(t.lpattern("BET"))
        """
        col=[getattr(self,l) for l in lab]
        return col
    def select(self,*a) :
        """
        t.select("MQ","BET")
        t.select(["MQT.13L5.B2", "MCBH.13L5.B2"],["betx", "bety"])
        t.select([1,3],[t.betx, t.bety])
        """
        # Resolve (row-selector, column-selector) arguments -- each may
        # be a pattern string, a list of names, a list of indices, or a
        # list of column arrays -- into (row indices, column arrays).
        if len(a)==0 :
            a1=""
            a2=self.name
        elif len(a)==1 :
            a1=a[0]
            a2=self.name
        elif len(a)>1 :
            a1=a[0]
            a2=a[1]
        idx=[]
        try :
            if (type(a1) is type("")) :
                for a in a1.split() :
                    idx.extend(self.pattern(a))
            elif type(a1) is type([]):
                if a1 :
                    if type(a1[0]) is type("") :
                        idx=self.name2idx(a1)
                    elif type(a1[0]) is type(0) :
                        idx=a1
        except :
            print "Unexpected error:", sys.exc_info()[0]
            idx=[]
        col=[]
        try :
            if (type(a2) is type("")) :
                for a in a2.split() :
                    col.extend(self.cpattern(a))
            elif type(a2) is type([]):
                if a2 :
                    if ( type(a2[0]) is type("") ) :
                        col=self.lab2col(a2)
                    elif ( type(a2[0]) is type(array([])) ) :
                        col=a2
        except :
            print "Unexpected error:", sys.exc_info()[0]
        return idx,col
    def extract(self,*a) :
        """
        t.extract("MQ","BET")
        t.extract("MQ",["NAME","S"])
        t.extract("MQ",[t.name,t.s])
        """
        ind,col=self.select(*a)
        a=array(col).transpose()
        a=a[ind,:]
        return a
    def show(self,*a) :
        """
        t.show("MQ","BET")
        """
        # Pretty-print the selection; column labels are recovered from
        # the class-level id->label registry ("USER" if unknown).
        ind,col=self.select(*a)
        labels=[]
        for i in col:
            try :
                labels.append(self.__class__.dict[id(i)])
            except KeyError:
                labels.append("USER")
        s="%-"+str(15)+"s "
        s=s*len(col)
        a=array(col)
        print s % tuple(labels)
        for i in ind:
            print s % tuple(a[:,i])
        return
    def write(self,*a) :
        """
        """
        # NOTE(review): ``a`` is a tuple here, and tuples have no
        # .pop(); this method looks broken/unfinished -- confirm before
        # relying on it.
        file=a.pop()
        ind,col=self.select(*a)
        a=array( col )
        a=a[ind,:]
        return a
    def rsort(self,a):
        # Deduplicate and sort (Python 2: dict.keys() returns a list).
        u={}
        for i in a: u[i]=True
        a=u.keys()
        a.sort()
        return a
    def radd(self,a,b) :
        # Union of two index lists, sorted and deduplicated.
        u=a+b
        u=self.rsort(u)
        return u
    def rmul(self,a,b) :
        # Intersection of two index lists, preserving a's order.
        u=[]
        for i in a :
            try :
                b.index(i)
                u.append(i)
            except ValueError:
                pass
        return u
class envelope:
    """
    find aperture given a survey and a twiss table, example:
    s1=madtable('survey.lhcb1.data')
    s2=madtable('survey.lhcb2.data')
    t1=madtable('twiss.lhcb1.data')
    t2=madtable('twiss.lhcb2.data')
    ap1=aperture(s1,t1)
    ap2=aperture(s1,t1)
    hold(False)
    figure(figsize=(6,6))
    hold(True)
    plot(ap1.co[:,2],ap1.co[:,0])
    plot(ap1.xp[:,2],ap1.xp[:,0],'g',linewidth=.1)
    plot(ap1.yp2D[:,2],ap1.yp2D[:,0],'r',linewidth=.1)
    plot(ap1.xm[:,2],ap1.xm[:,0],'g',linewidth=.1)
    plot(ap1.ym2D[:,2],ap1.ym2D[:,0],'r',linewidth=.1)
    axis([-10000,10000,-15000,5000])
    t1.select(t1.pattern("IP1"),t1.name,t1.s,ap1.xsize,ap1.ysize)
    savefig('ring.eps',dpi=600)
    """
    # kbeta=1.1, # beta beating
    # nsigma=9.5, # 9.5
    # emit=3.75E-6, #3.75E-6
    # delta=1.129E-4, # RMS energy spread
    # tol=4.6E-3, # CO=3mm + dtol=1.6mm
    # deltamax=8E-4, # for chromaticity measurment
    # betamaxarc=180, # maximum beta in the arcs
    # dxmaxarc=2, # maximum beta in the arc
    def __init__(self,s,t, kbeta=1.1, nsigma=9.5, nemit=3.75E-6, delta=1.129E-4, tol=4.6E-3, deltamax=8E-4, betamaxarc=180, dxmaxarc=2, gamma=7000):
        # Per-element outputs: co = closed-orbit point in survey frame;
        # xp/xm, yp/ym = envelope offset along the local x/y axes;
        # yp2D/ym2D = vertical size drawn along x for 2-D plots;
        # xsize/ysize = half-apertures.
        self.co=zeros([len(s.x),3],float)
        self.xp=zeros([len(s.x),3],float)
        self.xm=zeros([len(s.x),3],float)
        self.yp=zeros([len(s.x),3],float)
        self.ym=zeros([len(s.x),3],float)
        self.yp2D=zeros([len(s.x),3],float)
        self.ym2D=zeros([len(s.x),3],float)
        self.xsize=zeros(len(s.x),float)
        self.ysize=zeros(len(s.x),float)
        for i in xrange(len(s.x)):
            # Survey position and orientation angles of this element.
            vro = array([s.x[i],s.y[i],s.z[i]])
            theta,phi,psi = s.theta[i],s.phi[i],s.psi[i]
            betx,bety,dx,dy,x,y= t.betx[i],t.bety[i],t.dx[i],t.dy[i],t.x[i],t.y[i]
            # Rotation matrices for the three survey angles; wm maps
            # local coordinates into the global survey frame.
            thetam=array([[cos(theta) , 0,sin(theta)],
                 [ 0, 1, 0],
                 [-sin(theta), 0,cos(theta)]])
            phim= array([[ 1, 0, 0],
                [ 0,cos(phi) , sin(phi)],
                [ 0,-sin(phi) , cos(phi)]])
            psim= array([[ cos(psi), -sin(psi), 0],
                [ sin(psi), cos(psi), 0],
                [ 0, 0, 1]])
            wm=matrixmultiply(thetam,matrixmultiply(phim,psim))
            ex=matrixmultiply(wm,array([1,0,0]))
            ey=matrixmultiply(wm,array([0,1,0]))
            self.co[i]=vro+x * ex + y * ey
            # Geometric emittance from the normalized one.
            emit=nemit/gamma
            dx+= dxmaxarc*sqrt(betx/betamaxarc)
            dy+= dxmaxarc*sqrt(bety/betamaxarc)
            # NOTE(review): ysize uses deltamax*dx (not dy) -- possibly
            # intentional, possibly a copy-paste slip; confirm.
            xsize=kbeta* (nsigma*sqrt(betx*emit + (dx*delta)**2) + deltamax*dx)+ tol
            ysize=kbeta* (nsigma*sqrt(bety*emit + (dy*delta)**2) + deltamax*dx)+ tol
            self.xp[i]=self.co[i] + xsize * ex
            self.xm[i]=self.co[i] - xsize * ex
            self.yp[i]=self.co[i] + ysize * ey
            self.ym[i]=self.co[i] - ysize * ey
            self.yp2D[i]=self.co[i] + ysize * ex
            self.ym2D[i]=self.co[i] - ysize * ex
            self.xsize[i]=xsize
            self.ysize[i]=ysize
if __name__ == '__main__':
    # NOTE(review): demo/plot driver.  ``aperture`` is not defined in
    # this module (the class above is named ``envelope``), and the
    # plotting helpers (hold/figure/plot/axis/savefig) are not imported
    # here -- presumably this ran under pylab; confirm before using.
    s1=madtable('survey.lhcb1.data')
    s2=madtable('survey.lhcb2.data')
    t1=madtable('twiss.lhcb1.data')
    t2=madtable('twiss.lhcb2.data')
    ap1=aperture(s1,t1)
    ap2=aperture(s1,t1)
    hold(False)
    figure(figsize=(6,6))
    hold(True)
    plot(ap1.co[:,2],ap1.co[:,0])
    plot(ap1.xp[:,2],ap1.xp[:,0],'g',linewidth=.1)
    plot(ap1.yp2D[:,2],ap1.yp2D[:,0],'r',linewidth=.1)
    plot(ap1.xm[:,2],ap1.xm[:,0],'g',linewidth=.1)
    plot(ap1.ym2D[:,2],ap1.ym2D[:,0],'r',linewidth=.1)
    axis([-10000,10000,-15000,5000])
    savefig('ring.eps',dpi=600)
    plot(s2.z,s2.x)
if __name__ == '__main__':
    # NOTE(review): second, duplicate __main__ block -- only one of the
    # two can meaningfully act as the entry point.  ``t`` on the
    # x.select([t.name,t.s],...) line is undefined here (likely meant
    # ``x``); confirm before using.
    import sys
    usage = 'Usage: %s madtable' % sys.argv[0]
    # try:
    # infilename = sys.argv.pop()
    # except:
    # print usage; sys.exit(1)
    x=madtable('twiss.lhcb1.data')
    y=madtable('twiss.lhcb1.data',r'IP.*')
    x.select(x.name,'IP')
    x.select([t.name,t.s],'mq')
    # print x.descs
    # print x.data
class elem:
    # Bare attribute container: madinput attaches name/parent/sequence
    # and the parsed element attributes to instances dynamically.
    pass
class madinput:
    """Parser for a MAD-X sequence/input file.

    After construction:
      v -- dict mapping variable name -> right-hand-side expression text
           (from ``name := expr;`` / ``name = expr;`` statements)
      e -- dict mapping element name -> elem instance whose attributes
           are set dynamically from ``name: parent, attr=val, ...;``
    Elements defined inside a ``sequence ... endsequence`` block are
    recorded in that sequence's ``elements`` list.
    """
    def __init__(self, filename):
        # Fixes vs. the original: the ``def`` line had a stray second
        # colon (syntax error); ``filename`` was unconditionally
        # overwritten with a hard-coded debug path ("dipfirst.thin.seq"),
        # so the parameter was ignored; and ``currseq`` was unbound
        # until the first sequence statement, raising NameError if an
        # element appeared before any sequence.
        self.v = {}
        self.e = {}
        f = open(filename, "rU")
        s = f.read()
        f.close()
        s = s.lower()
        # Strip "!" comments and all whitespace so that every statement
        # becomes one contiguous "...;" chunk.
        rcomment = re.compile("!.*")
        s = rcomment.sub("", s)
        s = s.replace('\n', '')
        s = s.replace(' ', '')
        rstatement = re.compile("[^;]*;")
        rvar = re.compile("(^[\w\.]+):?=([^;:,]+)")
        relem = re.compile("(^[\w\.]+):([\w\.]+),(.+)?;")
        currseq = None  # name of the sequence currently being filled
        for stmt in rstatement.finditer(s):
            st = stmt.group()
            # variable parsing
            if (rvar.match(st)):
                tok = rvar.match(st).groups()
                self.v[tok[0]] = tok[1]
            # elements parsing
            elif (relem.match(st)):
                tok = relem.match(st).groups()
                el = elem()
                el.name = tok[0]
                el.parent = tok[1]
                el.sequence = []
                # Attribute parsing: commas inside {...} groups are not
                # separators, so blank them out before splitting.
                attr = list(tok[2])
                ch = False
                for k in range(len(attr)):
                    if attr[k] == '{':
                        ch = True
                    if attr[k] == '}':
                        ch = False
                    if (ch) and (attr[k] == ','):
                        attr[k] = ' '
                attr = (''.join(attr)).split(',')
                for a in attr:
                    stok = rvar.match(a).groups()
                    setattr(el, stok[0], stok[1])
                # store elem in the dictionary
                self.e[tok[0]] = el
                # sequence management
                if (el.parent == 'sequence'):
                    currseq = el.name
                    el.elements = []
                elif (currseq):
                    el.sequence.append(currseq)
                    self.e[currseq].elements.append(el.name)
            # sequence parsing
            elif (st == 'endsequence;'):
                currseq = None
|
class Node:
    """Binary-tree node: a value plus optional left/right children."""
    def __init__(self, x):
        self.val = x
        self.left = self.right = None
def width(root):
    """Return the horizontal width of the tree rooted at *root*.

    Each left edge shifts a node one column left and each right edge
    one column right; the width is the number of distinct columns the
    tree occupies.  An empty tree has width 0, a single node width 1.
    """
    if root is None:
        return 0
    extents = [0, 0]  # [leftmost column, rightmost column] seen so far
    width_help(root, 0, extents)
    return extents[1] - extents[0] + 1
def width_help(root, pos, lr):
    """Walk the subtree, folding each node's column into lr = [min, max]."""
    if root is None:
        return
    lr[0] = min(lr[0], pos)
    lr[1] = max(lr[1], pos)
    if root.left is not None:
        width_help(root.left, pos - 1, lr)
    if root.right is not None:
        width_help(root.right, pos + 1, lr)
if __name__ == "__main__":
    # Grow a small tree step by step and print the width after each
    # change: a single node, then a left child, then a right-leaning
    # chain hanging under that child.
    root = Node(1)
    print(width(root))
    left_child = Node(2)
    chain_node = Node(3)
    root.left = left_child
    print(width(root))
    left_child.right = chain_node
    chain_node.right = Node(4)
    print(width(root))
|
# -*- encoding: utf-8 -*-
#
# Copyright © 2012 eNovance <licensing@enovance.com>
#
# Author: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import multiprocessing
from lxml import etree
from nova import flags
from ceilometer import counter
from ceilometer.compute import plugin
from ceilometer.compute import instance as compute_instance
from ceilometer.openstack.common import importutils
from ceilometer.openstack.common import log
from ceilometer.openstack.common import timeutils
FLAGS = flags.FLAGS
def get_libvirt_connection():
    """Return an open connection for talking to libvirt."""
    # The direct-import implementation only works with Folsom because
    # the configuration setting changed.
    try:
        return importutils.import_object_ns('nova.virt',
                                            FLAGS.compute_driver)
    except ImportError:
        # Fall back to the way it was done in Essex: a read-only
        # connection obtained through nova.virt.connection.
        import nova.virt.connection
        return nova.virt.connection.get_connection(read_only=True)
def make_counter_from_instance(instance, name, type, volume):
    """Build a Counter for *instance*, stamped with the current time and
    the instance's ownership, resource id and DB-object metadata."""
    metadata = compute_instance.get_metadata_from_dbobject(instance)
    return counter.Counter(
        name=name,
        type=type,
        volume=volume,
        user_id=instance.user_id,
        project_id=instance.project_id,
        resource_id=instance.uuid,
        timestamp=timeutils.isotime(),
        resource_metadata=metadata,
    )
class LibVirtPollster(plugin.ComputePollster):
    """Base class for pollsters that only apply to libvirt hosts."""

    def is_enabled(self):
        # Use a fairly liberal substring check against the configured
        # compute driver name.
        driver = FLAGS.compute_driver.lower()
        return 'libvirt' in driver
class InstancePollster(LibVirtPollster):
    """Yields one existence gauge per instance, plus the same gauge
    qualified by the instance's flavor name."""

    def get_counters(self, manager, instance):
        # Plain existence gauge.
        yield make_counter_from_instance(instance,
                                         name='instance',
                                         type=counter.TYPE_GAUGE,
                                         volume=1,
                                         )
        # Flavor-qualified variant of the same gauge.
        flavor_name = 'instance:%s' % instance.instance_type.name
        yield make_counter_from_instance(instance,
                                         name=flavor_name,
                                         type=counter.TYPE_GAUGE,
                                         volume=1,
                                         )
class DiskIOPollster(LibVirtPollster):
    # Emits cumulative per-instance disk read/write request and byte
    # counters, summed over all of the instance's disks.
    LOG = log.getLogger(__name__ + '.diskio')
    DISKIO_USAGE_MESSAGE = ' '.join(["DISKIO USAGE:",
                                     "%s %s:",
                                     "read-requests=%d",
                                     "read-bytes=%d",
                                     "write-requests=%d",
                                     "write-bytes=%d",
                                     "errors=%d",
                                     ])
    def _get_disks(self, conn, instance):
        """Get disks of an instance, only used to bypass bug#998089."""
        # Parse the domain XML for <devices>/<disk>/<target dev="...">
        # entries and return the non-empty device names.
        domain = conn._conn.lookupByName(instance)
        tree = etree.fromstring(domain.XMLDesc(0))
        return filter(bool,
                      [target.get('dev')
                       for target in tree.findall('devices/disk/target')
                       ])
    def get_counters(self, manager, instance):
        conn = get_libvirt_connection()
        # TODO(jd) This does not work see bug#998089
        # for disk in conn.get_disks(instance.name):
        try:
            disks = self._get_disks(conn, instance.name)
        except Exception as err:
            # Best-effort: an instance whose disks can't be listed is
            # logged and skipped, not fatal to the polling cycle.
            self.LOG.warning('Ignoring instance %s: %s',
                             instance.name, err)
            self.LOG.exception(err)
        else:
            r_bytes = 0
            r_requests = 0
            w_bytes = 0
            w_requests = 0
            for disk in disks:
                # stats indexing follows libvirt's block-stats tuple
                # (rd_req, rd_bytes, wr_req, wr_bytes, errs) -- hence
                # stats[2] -> write requests, stats[3] -> write bytes.
                stats = conn.block_stats(instance.name, disk)
                self.LOG.info(self.DISKIO_USAGE_MESSAGE,
                              instance, disk, stats[0], stats[1],
                              stats[2], stats[3], stats[4])
                r_bytes += stats[0]
                r_requests += stats[1]
                w_bytes += stats[3]
                w_requests += stats[2]
            yield make_counter_from_instance(instance,
                                             name='disk.read.requests',
                                             type=counter.TYPE_CUMULATIVE,
                                             volume=r_requests,
                                             )
            yield make_counter_from_instance(instance,
                                             name='disk.read.bytes',
                                             type=counter.TYPE_CUMULATIVE,
                                             volume=r_bytes,
                                             )
            yield make_counter_from_instance(instance,
                                             name='disk.write.requests',
                                             type=counter.TYPE_CUMULATIVE,
                                             volume=w_requests,
                                             )
            yield make_counter_from_instance(instance,
                                             name='disk.write.bytes',
                                             type=counter.TYPE_CUMULATIVE,
                                             volume=w_bytes,
                                             )
class CPUPollster(LibVirtPollster):
    # Emits cumulative CPU time and a derived utilization gauge.
    LOG = log.getLogger(__name__ + '.cpu')
    # Class-level cache: instance uuid -> (last cpu_time, wall-clock of
    # that reading).  Shared by all CPUPollster instances in-process.
    utilization_map = {}
    def get_cpu_util(self, instance, cpu_info):
        """Return % CPU utilization since the previous poll of this
        instance (0.0 on the first poll)."""
        prev_times = self.utilization_map.get(instance.uuid)
        self.utilization_map[instance.uuid] = (cpu_info['cpu_time'],
                                               datetime.datetime.now())
        cpu_util = 0.0
        if prev_times:
            prev_cpu = prev_times[0]
            prev_timestamp = prev_times[1]
            # Wall-clock elapsed, scaled to the same units as cpu_time.
            delta = self.utilization_map[instance.uuid][1] - prev_timestamp
            elapsed = (delta.seconds * (10 ** 6) + delta.microseconds) * 1000
            # Normalize by the share of host cores this instance owns.
            cores_fraction = instance.vcpus * 1.0 / multiprocessing.cpu_count()
            # account for cpu_time being reset when the instance is restarted
            time_used = (cpu_info['cpu_time'] - prev_cpu
                         if prev_cpu <= cpu_info['cpu_time'] else
                         cpu_info['cpu_time'])
            cpu_util = 100 * cores_fraction * time_used / elapsed
        return cpu_util
    def get_counters(self, manager, instance):
        conn = get_libvirt_connection()
        self.LOG.info('checking instance %s', instance.uuid)
        try:
            cpu_info = conn.get_info(instance)
            self.LOG.info("CPUTIME USAGE: %s %d",
                          dict(instance), cpu_info['cpu_time'])
            cpu_util = self.get_cpu_util(instance, cpu_info)
            self.LOG.info("CPU UTILIZATION %%: %s %0.2f",
                          dict(instance), cpu_util)
            # FIXME(eglynn): once we have a way of configuring which measures
            # are published to each sink, we should by default
            # disable publishing this derived measure to the
            # metering store, only publishing to those sinks
            # that specifically need it
            yield make_counter_from_instance(instance,
                                             name='cpu_util',
                                             type=counter.TYPE_GAUGE,
                                             volume=cpu_util,
                                             )
            yield make_counter_from_instance(instance,
                                             name='cpu',
                                             type=counter.TYPE_CUMULATIVE,
                                             volume=cpu_info['cpu_time'],
                                             )
        except Exception as err:
            # Best-effort: log and continue with the next instance.
            self.LOG.error('could not get CPU time for %s: %s',
                           instance.uuid, err)
            self.LOG.exception(err)
class NetPollster(LibVirtPollster):
    """Pollster emitting cumulative per-vNIC network traffic counters."""

    LOG = log.getLogger(__name__ + '.net')

    NET_USAGE_MESSAGE = ' '.join(["NETWORK USAGE:", "%s %s:", "read-bytes=%d",
                                  "write-bytes=%d"])

    def _get_vnics(self, conn, instance):
        """Get the vNICs of an instance from its domain XML (also used to
        bypass bug#998089)."""
        domain = conn._conn.lookupByName(instance.name)
        tree = etree.fromstring(domain.XMLDesc(0))
        vnics = []
        for interface in tree.findall('devices/interface'):
            vnic = {}
            vnic['name'] = interface.find('target').get('dev')
            vnic['mac'] = interface.find('mac').get('address')
            vnic['fref'] = interface.find('filterref').get('filter')
            # Copy every filterref parameter (lower-cased key) into the dict.
            for param in interface.findall('filterref/parameter'):
                vnic[param.get('name').lower()] = param.get('value')
            vnics.append(vnic)
        return vnics

    @staticmethod
    def make_vnic_counter(instance, name, type, volume, vnic_data):
        """Build a Counter resource-keyed on the vNIC filter reference,
        carrying the vNIC attributes (plus instance_id) as metadata."""
        resource_metadata = copy.copy(vnic_data)
        resource_metadata['instance_id'] = instance.uuid
        return counter.Counter(
            name=name,
            type=type,
            volume=volume,
            user_id=instance.user_id,
            project_id=instance.project_id,
            resource_id=vnic_data['fref'],
            timestamp=timeutils.isotime(),
            resource_metadata=resource_metadata
        )

    def get_counters(self, manager, instance):
        """Yield cumulative incoming/outgoing byte and packet counters for
        each vNIC of *instance*."""
        conn = get_libvirt_connection()
        self.LOG.info('checking instance %s', instance.uuid)
        try:
            vnics = self._get_vnics(conn, instance)
        except Exception as err:
            # Skip instances whose XML cannot be read.
            self.LOG.warning('Ignoring instance %s: %s',
                             instance.name, err)
            self.LOG.exception(err)
        else:
            domain = conn._conn.lookupByName(instance.name)
            for vnic in vnics:
                # interfaceStats yields 8 values; the ignored slots are the
                # rx/tx error and drop counts.
                rx_bytes, rx_packets, _, _, \
                    tx_bytes, tx_packets, _, _ = \
                    domain.interfaceStats(vnic['name'])
                self.LOG.info(self.NET_USAGE_MESSAGE, instance.name,
                              vnic['name'], rx_bytes, tx_bytes)
                yield self.make_vnic_counter(instance,
                                             name='network.incoming.bytes',
                                             type=counter.TYPE_CUMULATIVE,
                                             volume=rx_bytes,
                                             vnic_data=vnic,
                                             )
                yield self.make_vnic_counter(instance,
                                             name='network.outgoing.bytes',
                                             type=counter.TYPE_CUMULATIVE,
                                             volume=tx_bytes,
                                             vnic_data=vnic,
                                             )
                yield self.make_vnic_counter(instance,
                                             name='network.incoming.packets',
                                             type=counter.TYPE_CUMULATIVE,
                                             volume=rx_packets,
                                             vnic_data=vnic,
                                             )
                yield self.make_vnic_counter(instance,
                                             name='network.outgoing.packets',
                                             type=counter.TYPE_CUMULATIVE,
                                             volume=tx_packets,
                                             vnic_data=vnic,
                                             )
|
import inspect
from datetime import datetime
from ..models import HistoryModel
class HistoryUpdater(object):
    """Copies result values from a tracked lab model instance into HistoryModel.

    Attributes are resolved lazily: each get_x() calls set_x() (with no
    argument) when the cached value is empty, and every setter raises
    TypeError on missing input -- so a get on an unset required attribute
    fails loudly rather than returning None.
    """

    def __init__(self, model_inst, group_name, subject_type, tracker=None, tracked_test_codes=None):
        """Updates the HistoryModel model class with values from a model instance.

        Args:
            model_inst: a model instance that meets the requirements for a lab_tracker (see site_lab_tracker register()).
            group_name: the LabTracker group name, just used as a reference value for the HistoryModel.
            subject_type: the subject type recorded on the HistoryModel.
            tracker: a valid tracker (namedtuple) instance (Default:None). Required for calls to :func:`update`
            tracker_test_codes: a list of test codes that the caller (labTracker) is monitoring (Default:None). Required for calls to :func:`update`
        """
        self._model_inst = None
        self._tracker = None
        self._value = None
        self._value_datetime = None
        self._subject_identifier = None
        self._subject_type = None
        self._test_code = None
        self._group_name = None
        self.set_model_inst(model_inst)
        self.set_group_name(group_name)
        self.set_subject_type(subject_type)
        self._tracked_test_codes = tracked_test_codes
        # The tracker is optional at construction; update() requires it.
        if tracker:
            self.set_tracker(tracker)

    def set_model_inst(self, value=None):
        """Sets the model instance that is used to set most instance attributes and to update the HistoryModel."""
        self._model_inst = value
        if not self._model_inst:
            raise TypeError('self._model_inst may not be None.')

    def get_model_inst(self):
        # Calling set_model_inst() with no argument deliberately raises.
        if not self._model_inst:
            self.set_model_inst()
        return self._model_inst

    def set_tracker(self, value=None):
        """Sets the tracker provided by the caller which has information on how to inspect the model instance."""
        self._tracker = value
        if not self._tracker:
            raise TypeError('self._tracker may not be None.')

    def get_tracker(self):
        if not self._tracker:
            self.set_tracker()
        return self._tracker

    def set_group_name(self, value=None):
        """Sets the group name provided by the caller which is only needed as a reference field value for the HistoryModel."""
        self._group_name = value
        if not self._group_name:
            raise TypeError('self._group_name may not be None.')

    def get_group_name(self):
        if not self._group_name:
            self.set_group_name()
        return self._group_name

    def set_value(self):
        """Sets the result value from the model instance.

        Prefers the instance's get_result_value() hook; otherwise reads the
        attribute named by the tracker's value_attr.
        """
        self._value = None
        if 'get_result_value' in dir(self.get_model_inst()):
            self._value = self._get_method(self.get_model_inst().get_result_value, 'attr', self.get_tracker().value_attr)
        else:
            try:
                self._value = getattr(self.get_model_inst(), self.get_tracker().value_attr)
            # NOTE(review): bare except hides unrelated errors -- narrowing
            # to AttributeError would be safer.  Also, the message passes two
            # format args but only uses placeholder {0} twice.
            except:
                raise TypeError('Cannot get result value from instance. Expected model attribute \'{0}\' or method \'get_result_value()\' on instance {0}.'.format(self.get_tracker().value_attr, self.get_model_inst()._meta.object_name))

    def get_value(self):
        if not self._value:
            self.set_value()
        return self._value

    def set_value_datetime(self):
        """Sets the result datetime by accessing a method or field attribute on the model instance."""
        self._value_datetime = None
        if 'get_result_datetime' in dir(self.get_model_inst()):
            # NOTE(review): passes value_attr (not datetime_attr) to the
            # hook -- presumably the datetime is conditional on which value
            # attribute is tracked; confirm against the model hooks.
            self._value_datetime = self._get_method(self.get_model_inst().get_result_datetime, 'attr', self.get_tracker().value_attr)
        else:
            self._value_datetime = getattr(self.get_model_inst(), self.get_tracker().datetime_attr)
        if not self._value_datetime:
            raise TypeError('self._value_datetime may not be None.')

    def get_value_datetime(self):
        if not self._value_datetime:
            self.set_value_datetime()
        return self._value_datetime

    def set_subject_identifier(self):
        """Sets the subject identifier by accessing the method get_subject_identifier on the model instance."""
        self._subject_identifier = None
        if 'get_subject_identifier' in dir(self.get_model_inst()):
            self._subject_identifier = self.get_model_inst().get_subject_identifier()
        if not self._subject_identifier:
            raise TypeError('self._subject_identifier may not be None.')

    def get_subject_identifier(self):
        if not self._subject_identifier:
            self.set_subject_identifier()
        return self._subject_identifier

    def set_subject_type(self, value=None):
        """Sets the subject type by accessing the method get_subject_type or the subject_type attr on the model instance."""
        self._subject_type = value
        # if not self.
        # if 'get_subject_type' in dir(self.get_model_inst()):
        # self._subject_type = self.get_model_inst().get_subject_type()
        # else:
        # self._subject_type = self.get_model_inst().subject_type
        if not self._subject_type:
            raise TypeError('self._subject_type may not be None.')

    def get_subject_type(self):
        if not self._subject_type:
            self.set_subject_type()
        return self._subject_type

    def set_test_code(self):
        """Sets the test_code for this value by inspecting the model instance.

        model instance method :func:`get_test_code` may return a test code conditional to the tracker.value_attr.
        model instance method :func:`get_test_code` may specify parameter \'attr\'. All other parameters will be ignored.
        """
        self._test_code = None
        if 'get_test_code' in dir(self.get_model_inst()):
            self._test_code = self._get_method(self.get_model_inst().get_test_code, 'attr', self.get_tracker().value_attr)
        if not self._test_code:
            raise TypeError('Cannot determine the test code for model {0}. Perhaps add a get_test_code(self) or get_test_code(self, attr) method to the model. '.format(self.get_model_inst()))

    def get_test_code(self):
        if not self._test_code:
            self.set_test_code()
        return self._test_code

    def get_tracked_test_codes(self):
        """Gets the list of test codes to inspect the model instance.

        * If a model instance test code is not listed then the save is aborted.
        * Comes from the calling lab_tracker."""
        return self._tracked_test_codes

    def update(self):
        """Updates the history model given a registered tracker model instance.

        Returns the HistoryModel instance, or None when the instance's test
        code is not tracked.  When no value/datetime pair is available the
        corresponding history row is deleted instead.

        .. note:: Default values are not saved to the history model.
        .. note:: An instance from ResultItem may be sent from the signal. Do not automatically
                  accept it, first send it to check if the testcode is being tracked.
        """
        history_model = None
        # if model instance test code is not listed with this lab tracker, abort
        if not self.get_test_code() in self.get_tracked_test_codes():
            return None
        # the instance must be listed as a model with the passed tracker
        if not self.get_tracker().model_cls == self.get_model_inst().__class__:
            raise TypeError('Model {0} in tracker tuple does not match instance class. Got {1}.'.format(self.get_tracker().model_cls, self.get_model_inst()._meta.object_name.lower()))
        if self.get_value() and self.get_value_datetime():
            # update the history model, get or create
            history_model, created = HistoryModel.objects.get_or_create(
                source_app_label=self.get_model_inst()._meta.app_label,
                source_identifier=self.get_model_inst().pk,
                test_code=self.get_test_code(),
                group_name=self.get_group_name(),
                subject_identifier=self.get_subject_identifier(),
                subject_type=self.get_subject_type(),
                value_datetime=self.get_value_datetime(),
                defaults={'value': self.get_value(),
                          'history_datetime': datetime.today(),
                          'report_datetime': self.get_model_inst().get_report_datetime(),
                          'source_model_name': self.get_model_inst()._meta.object_name.lower()})
            if not created:
                # Row already existed for this key: refresh the mutable fields.
                history_model.value = self.get_value()
                history_model.history_datetime = datetime.today()
                history_model.report_datetime = self.get_model_inst().get_report_datetime()
                history_model.source_model_name = self.get_model_inst()._meta.object_name.lower()
                history_model.save()
        else:
            # No usable value: the history entry (if any) is removed.
            self.delete()
        return history_model

    def delete(self):
        """Deletes a single instance from the HistoryModel."""
        HistoryModel.objects.filter(
            source_app_label=self.get_model_inst()._meta.app_label,
            source_model_name=self.get_model_inst()._meta.object_name.lower(),
            source_identifier=self.get_model_inst().pk,
            group_name=self.get_group_name(),
        ).delete()

    def _get_method(self, func, parameter_name, parameter_value):
        """Calls the given func with or without a value depending on inspection."""
        argspec = inspect.getargspec(func)
        if parameter_name in argspec.args:
            return func(parameter_value)
        else:
            return func()
|
# Uses python3
import sys
def get_change(m):
    """Return the minimum number of coins of denominations 1, 5 and 10
    summing to *m*.

    Greedy is optimal here because each denomination divides the next:
    take as many 10s as possible, then at most one 5, then 1s.  This
    replaces the original's redundant branching ('m < 10 and m < 5') with
    the closed form; results are identical for all m >= 0.
    """
    return m // 10 + (m % 10) // 5 + m % 5
if __name__ == '__main__':
    # Read the amount from stdin and report the minimal coin count.
    amount = int(input())
    print(get_change(amount))
|
import pickle
def save(obj, filename):
    """Serialize *obj* to *filename* using pickle.

    Bug fix: pickle produces binary data, so the file must be opened in
    binary mode ('wb'); text mode breaks on Python 3 and can corrupt the
    stream via newline translation elsewhere.
    """
    with open(filename, 'wb') as f:
        pickle.dump(obj, f)
def load(filename):
    """Deserialize and return the object pickled in *filename*.

    Bug fix: opens in binary mode ('rb') -- pickle streams are bytes and
    text mode fails on Python 3.
    """
    with open(filename, 'rb') as f:
        return pickle.load(f)
|
import re, os.path as path, wiki_parser as w_p, props, logging, adapter, cgi
from datetime import datetime
from os import sep
# Shared logger for this wiki module.
LOG = logging.getLogger('wiki-interpreter')
class wiki_interpreter:
    """Loads a wiki page from disk (or from raw text) and renders it via
    the wiki_parser module."""

    def __init__(self, page, rawText=None):
        # Pages are stored as <title>.txt inside the configured wiki dir.
        LOG.debug(adapter.wiki_dir)
        self.title = path.basename(page)
        self.wikipath = path.join(adapter.wiki_dir, self.title + ".txt")
        self.content = rawText
        LOG.debug("Wiki directory path: " + self.wikipath)
        LOG.debug("Wiki Page: " + page)
        if path.exists(self.wikipath):
            self.exists = True
            self._last_modified = path.getmtime(self.wikipath)
        else:
            self.exists = False
            # NOTE(review): existing pages store a float mtime but missing
            # pages store a datetime; last_modified() only handles the
            # float case -- confirm missing pages never reach it.
            self._last_modified = datetime.now()

    def load(self):
        """Populate self.content from disk unless raw text was supplied;
        returns self for chaining."""
        if not self.content:
            if self.exists:
                f = open(self.wikipath)
                self.content = f.read()
                f.close()
            else:
                self.content = ""
        return self

    def raw_content(self):
        # Unprocessed wiki markup (may be None until load() is called).
        return self.content

    def last_modified(self):
        """Return the page's modification time as 'dd-mm-YYYY HH:MM'."""
        lastmod = datetime.fromtimestamp(self._last_modified)
        return lastmod.strftime("%d-%m-%Y %H:%M")

    def parsed_content(self):
        """Return the rendered page, parsing (and caching) it on first use."""
        try:
            return self.processed_content
        except AttributeError:
            return self.process()

    def process(self):
        """Parse self.content and cache the result in processed_content."""
        parser = w_p.parser(self.content)
        self.processed_content = parser.parsed_text()
        return self.processed_content
class path_parser:
    """Splits a request path into page name, requested action and query
    attributes (e.g. 'PageName?action=edit')."""

    def __init__(self, pth):
        reqpath, name = path.split(pth)
        # Separate the page name from an optional query string.
        name_act = name.split('?')
        self.name = name_act[0]
        self.action = ''
        self.attributes = {}
        if len(name_act) > 1:
            query = name_act[1]
            # NOTE(review): cgi.parse_qs is long deprecated in favour of
            # urlparse/urllib parse_qs; raises KeyError if 'action' is absent.
            pairs = cgi.parse_qs(query)
            self.action = pairs['action'][0]
            LOG.debug("Action %s requested." % self.action)
            self.attributes = pairs
def process_wiki_link(match):
    """Regex-substitution callback: turn a matched WikiWord into HTML.

    Existing (or special) pages become a plain link; a missing page is
    rendered as the word followed by a '?' link to its edit action.
    """
    link = match.group(1)
    interp = wiki_interpreter(link)
    print "LINK:", link
    if interp.exists or link in props.special:
        return wiki_link(link)
    else:
        return link + wiki_link(link, '?', 'edit')
# A WikiWord is two or more capitalised letter runs, e.g. "CamelCase".
wiki_word_pattern = re.compile(r"((?:[A-Z][a-z]+){2,})")
def wiki_link(title, link_text="", command=""):
    """Render an HTML anchor for *title*.

    *command*, when given, is appended to the href as a '?command' query;
    *link_text* defaults to the title itself.
    """
    suffix = "" if command == "" else "?" + command
    text = title if link_text == "" else link_text
    return r"<a class='wikilink' href='%s%s'>%s</a>" % (title, suffix, text)
def isWikiWord(name):
    """
    Tests whether a string is a valid wiki word or not.

    >>> isWikiWord("CamelCase")
    True
    >>> isWikiWord("bob")
    False
    >>> isWikiWord("TuTu")
    True
    >>> isWikiWord("javaWord")
    False
    >>> isWikiWord("TestHTMLCode")
    False
    """
    # A non-None match at the start of the string means it is a WikiWord.
    return wiki_word_pattern.match(name) is not None
class trail:
    """A bounded breadcrumb trail of page titles.

    Bug fixes relative to the original:
    * the *maximum* constructor argument was ignored (size hardcoded to 10);
    * the trimming branch assigned to a non-existent ``self.trail``
      attribute instead of the private ``self.__trail``, raising
      AttributeError whenever the trail grew past its limit.
    Debug ``print`` statements were removed.
    """

    def __init__(self, maximum=10):
        self.__trail = []
        # Honour the caller's requested bound (was hardcoded to 10).
        self.__max_size = maximum

    def update(self, title):
        """Append *title*; revisiting a title truncates the trail back to
        (before) its previous position, and the trail is trimmed to at
        most max_size entries before appending."""
        try:
            index = self.__trail.index(title)
            self.__trail = self.__trail[:index]
        except ValueError:
            # Title not in the trail yet -- nothing to truncate.
            pass
        length = len(self.__trail)
        maxlen = self.__max_size
        if length > maxlen:
            # Keep only the most recent maxlen entries (was self.trail -> crash).
            self.__trail = self.__trail[length - maxlen:]
        self.__trail.append(title)

    def trail_html(self):
        """Render every entry except the current page as ' > '-joined links."""
        traillinks = map(wiki_link, self.__trail[:len(self.__trail) - 1])
        return " > ".join(traillinks)
if __name__ == "__main__":
print "Running Tests..."
import doctest, sys
doctest.testmod(sys.modules[__name__])
|
def selection_sort_v1(array):
    """Sort *array* in place (ascending) by selection sort and return it.

    Each pass selects the minimum of the unsorted suffix and swaps it
    into position i.
    """
    length = len(array)
    for i in range(length):
        # Index of the first minimal element in array[i:].
        smallest = min(range(i, length), key=array.__getitem__)
        if smallest != i:
            array[i], array[smallest] = array[smallest], array[i]
    return array
def selection_sort_v2(array):
    """Sort *array* in place (ascending) by selection sort, working from
    the back: each pass swaps the largest remaining element to the end.
    Returns the same list object.
    """
    for end in range(len(array) - 1, 0, -1):
        # Index of the first maximal element in array[:end + 1].
        largest = max(range(end + 1), key=array.__getitem__)
        array[end], array[largest] = array[largest], array[end]
    return array
if __name__ == '__main__':
    unordered_list = [1, 4, 0, 1, 2, 6, 9]
    # Pass copies: both sorts mutate their argument in place, so without
    # copies V2 would receive an already-sorted list.
    print('Ordered list with V1:', selection_sort_v1(list(unordered_list)))
    # Bug fix: this label previously said "V1" for the V2 run.
    print('Ordered list with V2:', selection_sort_v2(list(unordered_list)))
import os
# Settings template: replace every ENTER_* placeholder with a real value
# before running; never commit the filled-in secrets to version control.
SECRET_KEY = 'ENTER_SECRET_KEY'
# NOTE(review): DEBUG should be False in production deployments.
DEBUG = True
# API credentials exposed to the application via environment variables.
os.environ["YELP_CONSUMER_KEY"] = "ENTER_YELP_CONSUMER_KEY"
os.environ["YELP_CONSUMER_SECRET"] = "ENTER_YELP_CONSUMER_SECRET"
os.environ["YELP_TOKEN"] = "ENTER_YELP_TOKEN"
os.environ["YELP_TOKEN_SECRET"] = "ENTER_YELP_TOKEN_SECRET"
os.environ["SECRET_KEY"] = "ENTER_SECRET_KEY"
os.environ["GOOGLE_MAPS_KEY"] = "ENTER_GOOGLE_MAPS_KEY"
from pyspark import SparkContext
sc = SparkContext("local", "first app")
# data_dir = r"C:\Users\caumont\Dropbox\Data"
data_dir = r"Z:\Dropbox\Data"
print(sc.version)
readme = sc.textFile(data_dir + "\README.md")
print(readme.count()) |
import sys
from purity_analysis import Tracer
# Activate the purity tracer before the fixture functions run:
# settrace observes Python-level calls, setprofile observes C calls.
tracer = Tracer([])
sys.settrace(tracer.trace_calls)
sys.setprofile(tracer.trace_c_calls)
# Module globals read (never written) by the closures below.
global_x = global_y = 0
# closure reads global vars
def foo1():
    """Fixture: the inner closure only READS module globals."""
    def bar1():
        z = global_x + global_y
    bar1()
# closure reads parent's local vars
def foo2():
    """Fixture: the inner closure READS the parent's local (cell) variables."""
    def bar2():
        # Resolved from foo2's scope at call time, after the assignment below.
        z = nonlocal_x + nonlocal_y
    nonlocal_x = nonlocal_y = 0
    bar2()
# closure writes parent's local vars
def foo3():
    """Fixture: the inner closure WRITES the parent's locals via `nonlocal`."""
    def bar3():
        nonlocal nonlocal_x, nonlocal_y
        nonlocal_x = nonlocal_y = 1
    nonlocal_x = nonlocal_y = 0
    bar3()
def main():
    """Run each closure fixture once so the tracer observes them."""
    foo1()
    foo2()
    foo3()
main()
# Detach the tracer before reporting so the reporting itself is untraced.
sys.settrace(None)
sys.setprofile(None)
tracer.log_annotations(__file__)
|
import multi_page_scraper as mps
import streamlit as st
from database import Review
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
import pandas as pd
import os
import review_scaper as rs
import matplotlib.pyplot as plt
import seaborn as sns
import plotly.express as px
# App-wide display configuration.
st.set_page_config(layout='wide')
pd.set_option('display.expand_frame_repr', False)
def opendb():
    """Open and return a new SQLAlchemy session bound to db.sqlite3."""
    engine = create_engine('sqlite:///db.sqlite3')  # connect
    session_factory = sessionmaker(bind=engine)
    return session_factory()
def load_from_database():
    """Fetch all Review rows; the session is closed before returning."""
    session = opendb()
    rows = session.query(Review).all()
    session.close()
    return rows
st.title("sentiment analysis on products using web scraping")
st.header("Scrape products")
with st.beta_expander("show contents"):
url = "https://www.flipkart.com/search?"
search_term = st.text_input("enter a product category to search")
output_area = st.empty()
if st.button("start scraper") and search_term:
page = 1
filename = f'scraped_data/product_{search_term}.csv'
scraped_products = []
with st.spinner("loading"):
while True:
starturl = f"{url}q={search_term}&page={page}"
output_area.info(f'getting data from {starturl} ...')
soup = mps.get(starturl)
if not soup:
output_area.info('scraper closed')
break
else:
output = mps.extract(soup)
if len(output) == 0:
output_area.info('scraper closed')
break
scraped_products.extend(output)
output_area.info(f'total size of collected data {len(scraped_products)}')
page += 1
# save the stuff
file = mps.save(scraped_products,filename)
st.balloons()
if file:
st.success(f"products data saved in {file}")
st.header("Scrape products reviews")
with st.beta_expander("show scraping options"):
files = os.listdir("scraped_data")
files = [os.path.join("scraped_data",file) for file in files if file.endswith(".csv")]
file = st.selectbox("select a product", files)
df = pd.read_csv(files)
limit = st.slider("select number of products to scraped review from",1,len(df),10)
if st.button("scraped reviews"):
with st.spinner("scraping"):
dataset = rs.get_reviews(df,limit)
db = opendb()
for row in dataset:
db.add(Review(**row))
db.commit()
st.balloons()
db.close()
st.header("Display Sentiment")
with st.beta_expander("show results"):
db = opendb()
results = db.query(Review).all()
df = pd.read_sql(db.query(Review).statement, db.bind)
st.dataframe(df)
st.header("general sentiment visualization")
# sentiment analysis
if st.checkbox("raw sentiment data"):
st.dataframe(df.groupby('product')['sentiment'].sum())
product_sentiment_df =df.groupby('product')['sentiment'].sum().reset_index()
fig =px.bar(product_sentiment_df,x='sentiment',y=product_sentiment_df.index,color='sentiment',hover_data=['product'],orientation='h',title='Product sentiment sum')
st.plotly_chart(fig,use_container_width=True)
if st.checkbox("raw product sentiment mean data"):
st.dataframe(df.groupby('product')['sentiment'].mean().reset_index())
product_sentiment_avg_df =df.groupby('product')['sentiment'].mean().reset_index()
fig =px.scatter(product_sentiment_avg_df,x='sentiment',y=product_sentiment_avg_df.index,color='sentiment',hover_data=['product'],orientation='v',title='Product sentiment average',size='sentiment')
st.plotly_chart(fig,use_container_width=True)
if st.checkbox("raw product count data"):
st.dataframe(df.groupby('product')['product'].count())
product_count_df = df.groupby('product')['product'].count()
fig =px.bar(product_count_df,x='product',y='product',color='product',hover_data=[product_count_df.index],orientation='h',title='Product review count')
st.plotly_chart(fig,use_container_width=True)
# if st.checkbox("show sentiment distribution"):
fig,ax =plt.subplots()
sns.distplot(df['sentiment'], bins=20, kde=True, rug=True,ax=ax)
ax.set_title("show sentiment distribution")
st.pyplot(fig,use_container_width=True)
fig,ax =plt.subplots(figsize=(15,10))
sns.violinplot(y='sentiment',x='product',data=df,ax=ax,)
plt.xticks(rotation=90)
st.pyplot(fig,use_container_width=True)
st.header("product wise sentiment visualization")
product = st.selectbox("select a product",df['product'].unique().tolist())
product_df = df[df['product']==product]
if st.checkbox("show product raw data"):
st.dataframe(product_df)
fig,ax =plt.subplots()
sns.distplot(product_df['sentiment'], bins=20, kde=True, rug=True,ax=ax)
st.pyplot(fig,use_container_width=True)
fig,ax =plt.subplots()
sns.violinplot(product_df['sentiment'],ax=ax,)
st.pyplot(fig,use_container_width=True)
fig,ax =plt.subplots()
product_df.groupby('sentiment_name')['sentiment'].count().plot(kind='pie',ax=ax,autopct='%1.1f%%',legend=True,title=f"{product} sentiment distribution",labels=None,wedgeprops={'edgecolor':'black','width':.5 })
st.pyplot(fig,use_container_width=True)
db.close() |
def merge_the_tools(string, k):
    """Split *string* into consecutive segments of length *k* and print
    each segment with duplicate characters removed (keeping the first
    occurrence, in order).

    Generalised over the original implementation: a trailing segment
    shorter than k is processed too instead of being silently dropped;
    behaviour is identical when len(string) is a multiple of k.
    """
    for start in range(0, len(string), k):
        seen = []
        for ch in string[start:start + k]:
            # Preserve first-occurrence order while de-duplicating.
            if ch not in seen:
                seen.append(ch)
        print("".join(seen))
if __name__ == '__main__':
    # First stdin line: the string; second line: the segment length.
    text, segment_len = input(), int(input())
    merge_the_tools(text, segment_len)
# -*- coding: utf-8 -*-
# Copyright European Organization for Nuclear Research (CERN) since 2012
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import os
import shutil
import tempfile
import os.path
from uuid import uuid4 as uuid
import pytest
from rucio.common import exception
from rucio.common.utils import adler32, md5
from rucio.rse import rsemanager as mgr
from rucio.tests.common import skip_rse_tests_with_accounts, load_test_conf_file
@skip_rse_tests_with_accounts
class MgrTestCases:
# Fixture names: files created locally and uploaded by the put tests.
files_local = [
    "1_rse_local_put.raw",
    "2_rse_local_put.raw",
    "3_rse_local_put.raw",
    "4_rse_local_put.raw",
]
# Files present both locally and on the remote storage.
files_local_and_remote = [
    "1_rse_local_and_remote_put.raw",
    "2_rse_local_and_remote_put.raw",
]
# Files assumed to pre-exist on the remote storage; each name encodes the
# operation group that consumes it (get/delete/exists/rename/change_scope).
files_remote = [
    "1_rse_remote_get.raw",
    "2_rse_remote_get.raw",
    "3_rse_remote_get.raw",
    "4_rse_remote_get.raw",
    "1_rse_remote_delete.raw",
    "2_rse_remote_delete.raw",
    "3_rse_remote_delete.raw",
    "4_rse_remote_delete.raw",
    "1_rse_remote_exists.raw",
    "2_rse_remote_exists.raw",
    "1_rse_remote_rename.raw",
    "2_rse_remote_rename.raw",
    "3_rse_remote_rename.raw",
    "4_rse_remote_rename.raw",
    "5_rse_remote_rename.raw",
    "6_rse_remote_rename.raw",
    "7_rse_remote_rename.raw",
    "8_rse_remote_rename.raw",
    "9_rse_remote_rename.raw",
    "10_rse_remote_rename.raw",
    "11_rse_remote_rename.raw",
    "12_rse_remote_rename.raw",
    "1_rse_remote_change_scope.raw",
    "2_rse_remote_change_scope.raw",
]
def init(self, tmpdir, rse_settings, user, vo, impl=None):
self.tmpdir = tmpdir
self.rse_settings = rse_settings
self.user = user
self.vo = vo
self.impl = None
if impl:
if len(impl.split(".")) == 1:
self.impl = "rucio.rse.protocols." + impl + ".Default"
else:
self.impl = "rucio.rse.protocols." + impl
@classmethod
def setup_common_test_env(cls, rse_name, vo, tmp_path_factory):
    """Resolve the RSE settings, create a temp dir and a fresh user id,
    and generate the local test files (copies of a sparse 1 MB data.raw).

    Returns (rse_settings, tmpdir_as_str, user)."""
    rse_settings = mgr.get_rse_info(rse=rse_name, vo=vo)
    tmpdir = tmp_path_factory.mktemp(cls.__name__)
    user = uuid()
    try:
        data = load_test_conf_file("rse-accounts.cfg.template")
        rse_settings["credentials"] = data[rse_name]
    except KeyError:
        # Credentials are optional; some protocols work without them.
        print("No credentials found for this RSE.")
        pass
    # Generate local files
    with open("%s/data.raw" % tmpdir, "wb") as out:
        out.seek((1024 * 1024) - 1)  # 1 MB
        out.write(b'\0')
    for f in itertools.chain(cls.files_local, cls.files_local_and_remote):
        shutil.copy('%s/data.raw' % tmpdir, '%s/%s' % (tmpdir, f))
    return rse_settings, str(tmpdir), user
def setup_scheme(self, scheme):
"""(RSE/PROTOCOLS): Make mgr to select this scheme first."""
for protocol in self.rse_settings['protocols']:
if scheme and protocol['scheme'] != scheme:
self.rse_settings['protocols'].remove(protocol)
# Mgr-Tests: PUT
def test_put_mgr_ok_multi(self):
    """(RSE/PROTOCOLS): Put multiple files to storage (Success)"""
    # NOTE(review): the ssh1 deployment supplies md5 checksums and passes
    # impl=self.impl; every other protocol uses adler32 and omits impl --
    # confirm the omission is intentional.
    if self.rse_settings['protocols'][0]['hostname'] == 'ssh1':
        result = mgr.upload(self.rse_settings,
                            [{'name': '1_rse_local_put.raw', 'scope': 'user.%s' % self.user,
                              'md5': md5(str(self.tmpdir) + '/1_rse_local_put.raw'),
                              'filesize': os.stat('%s/1_rse_local_put.raw' % self.tmpdir)[os.path.stat.ST_SIZE]},
                             {'name': '2_rse_local_put.raw', 'scope': 'user.%s' % self.user,
                              'md5': md5(str(self.tmpdir) + '/2_rse_local_put.raw'),
                              'filesize': os.stat('%s/2_rse_local_put.raw' % self.tmpdir)[os.path.stat.ST_SIZE]}],
                            source_dir=self.tmpdir, vo=self.vo,
                            impl=self.impl)
    else:
        result = mgr.upload(self.rse_settings,
                            [{'name': '1_rse_local_put.raw', 'scope': 'user.%s' % self.user,
                              'adler32': adler32('%s/1_rse_local_put.raw' % self.tmpdir),
                              'filesize': os.stat('%s/1_rse_local_put.raw' % self.tmpdir)[os.path.stat.ST_SIZE]},
                             {'name': '2_rse_local_put.raw', 'scope': 'user.%s' % self.user,
                              'adler32': adler32('%s/2_rse_local_put.raw' % self.tmpdir),
                              'filesize': os.stat('%s/2_rse_local_put.raw' % self.tmpdir)[os.path.stat.ST_SIZE]}],
                            source_dir=self.tmpdir, vo=self.vo)
    # mgr.upload returns (overall_status, per-file details keyed "scope:name").
    status = result[0]
    details = result[1]
    assert status
    assert details['user.%s:1_rse_local_put.raw' % self.user] is True
    assert details['user.%s:2_rse_local_put.raw' % self.user] is True
def test_put_mgr_ok_single(self):
    """(RSE/PROTOCOLS): Put a single file to storage (Success)"""
    # Single-file upload: success is simply the absence of an exception.
    if self.rse_settings['protocols'][0]['hostname'] == 'ssh1':
        mgr.upload(self.rse_settings, {'name': '3_rse_local_put.raw', 'scope': 'user.%s' % self.user,
                                       'md5': md5('%s/3_rse_local_put.raw' % self.tmpdir), 'filesize': os.stat('%s/3_rse_local_put.raw' % self.tmpdir)[os.path.stat.ST_SIZE]}, source_dir=self.tmpdir, vo=self.vo, impl=self.impl)
    else:
        mgr.upload(self.rse_settings, {'name': '3_rse_local_put.raw', 'scope': 'user.%s' % self.user,
                                       'adler32': adler32('%s/3_rse_local_put.raw' % self.tmpdir), 'filesize': os.stat('%s/3_rse_local_put.raw' % self.tmpdir)[os.path.stat.ST_SIZE]}, source_dir=self.tmpdir, vo=self.vo)
def test_put_mgr_source_not_found_multi(self):
    """(RSE/PROTOCOLS): Put multiple files to storage (SourceNotFound)"""
    # One missing and one valid source: the valid file must still upload,
    # the missing one must surface SourceNotFound in the details map.
    result = mgr.upload(self.rse_settings, [{'name': 'not_existing_data.raw', 'scope': 'user.%s' % self.user,
                                             'adler32': 'some_random_stuff', 'filesize': 4711},
                                            {'name': '4_rse_local_put.raw', 'scope': 'user.%s' % self.user,
                                             'adler32': adler32('%s/4_rse_local_put.raw' % self.tmpdir), 'filesize': os.stat('%s/4_rse_local_put.raw' % self.tmpdir)[os.path.stat.ST_SIZE]}], source_dir=self.tmpdir, vo=self.vo, impl=self.impl)
    details = result[1]
    assert details['user.%s:4_rse_local_put.raw' % self.user]
    assert isinstance(details['user.%s:not_existing_data.raw' % self.user], exception.SourceNotFound)
def test_put_mgr_source_not_found_single(self):
    """(RSE/PROTOCOLS): Put a single file to storage (SourceNotFound)"""
    # A single missing source must raise rather than report per-file status.
    with pytest.raises(exception.SourceNotFound):
        mgr.upload(self.rse_settings, {'name': 'not_existing_data2.raw', 'scope': 'user.%s' % self.user, 'adler32': 'random_stuff', 'filesize': 0}, source_dir=self.tmpdir, vo=self.vo, impl=self.impl)
def test_put_mgr_file_replica_already_exists_multi(self):
    """(RSE/PROTOCOLS): Put multiple files to storage (FileReplicaAlreadyExists)"""
    # Both targets pre-exist remotely, so each detail entry must carry the
    # FileReplicaAlreadyExists exception instance.
    result = mgr.upload(self.rse_settings, [{'name': '1_rse_remote_get.raw', 'scope': 'user.%s' % self.user, 'adler32': "bla-bla", 'filesize': 4711},
                                            {'name': '2_rse_remote_get.raw', 'scope': 'user.%s' % self.user, 'adler32': "bla-bla", 'filesize': 4711}], source_dir=self.tmpdir, vo=self.vo, impl=self.impl)
    details = result[1]
    assert isinstance(details['user.%s:1_rse_remote_get.raw' % self.user], exception.FileReplicaAlreadyExists)
    assert isinstance(details['user.%s:2_rse_remote_get.raw' % self.user], exception.FileReplicaAlreadyExists)
def test_put_mgr_file_replica_already_exists_single(self):
    """(RSE/PROTOCOLS): Put a single file to storage (FileReplicaAlreadyExists)"""
    # Single-file flavour: the duplicate upload must raise directly.
    with pytest.raises(exception.FileReplicaAlreadyExists):
        mgr.upload(self.rse_settings, {'name': '1_rse_remote_get.raw', 'scope': 'user.%s' % self.user, 'adler32': 'bla-bla', 'filesize': 4711}, source_dir=self.tmpdir, vo=self.vo, impl=self.impl)
# MGR-Tests: DELETE
def test_delete_mgr_ok_multi(self):
    """(RSE/PROTOCOLS): Delete multiple files from storage (Success)"""
    result = mgr.delete(self.rse_settings, [{'name': '1_rse_remote_delete.raw', 'scope': 'user.%s' % self.user}, {'name': '2_rse_remote_delete.raw', 'scope': 'user.%s' % self.user}], impl=self.impl)
    status = result[0]
    details = result[1]
    assert status
    # Protocols that cannot delete report NotImplementedError per file,
    # which is accepted as a pass here.
    assert details['user.%s:1_rse_remote_delete.raw' % self.user] is True or isinstance(details['user.%s:1_rse_remote_delete.raw' % self.user], NotImplementedError)
    assert details['user.%s:2_rse_remote_delete.raw' % self.user] is True or isinstance(details['user.%s:2_rse_remote_delete.raw' % self.user], NotImplementedError)
def test_delete_mgr_ok_single(self):
    """(RSE/PROTOCOLS): Delete a single file from storage (Success)"""
    # Success is the absence of an exception.
    mgr.delete(self.rse_settings, {'name': '3_rse_remote_delete.raw', 'scope': 'user.%s' % self.user}, impl=self.impl)
def test_delete_mgr_ok_dir(self):
    """(RSE/PROTOCOLS): Delete a directory from storage (Success)"""
    # Passing an explicit 'path' targets the user's directory itself.
    mgr.delete(self.rse_settings, {'path': 'user/%s' % self.user, 'name': 'user.%s' % self.user, 'scope': 'user.%s' % self.user}, impl=self.impl)
def test_delete_mgr_source_not_found_multi(self):
    """(RSE/PROTOCOLS): Delete multiple files from storage (SourceNotFound)"""
    # The existing file must delete; the missing one must report SourceNotFound.
    status, details = mgr.delete(self.rse_settings, [{'name': 'not_existing_data.raw', 'scope': 'user.%s' % self.user}, {'name': '4_rse_remote_delete.raw', 'scope': 'user.%s' % self.user}], impl=self.impl)
    assert details['user.%s:4_rse_remote_delete.raw' % self.user] is True
    assert isinstance(details['user.%s:not_existing_data.raw' % self.user], exception.SourceNotFound)
def test_delete_mgr_source_not_found_single(self):
    """(RSE/PROTOCOLS): Delete a single file from storage (SourceNotFound)"""
    # Single-file flavour: the missing target must raise directly.
    with pytest.raises(exception.SourceNotFound):
        mgr.delete(self.rse_settings, {'name': 'not_existing_data.raw', 'scope': 'user.%s' % self.user}, impl=self.impl)
# MGR-Tests: EXISTS
def test_exists_mgr_ok_multi(self):
    """(RSE/PROTOCOLS): Check multiple files on storage (Success)"""
    # Mix of LFN dicts and resolved PFNs: both addressing modes must work.
    pfn_a = list(mgr.lfns2pfns(self.rse_settings, {'name': '3_rse_remote_get.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
    pfn_b = list(mgr.lfns2pfns(self.rse_settings, {'name': '4_rse_remote_get.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
    status, details = mgr.exists(self.rse_settings, [{'name': '1_rse_remote_get.raw', 'scope': 'user.%s' % self.user},
                                                     {'name': '2_rse_remote_get.raw', 'scope': 'user.%s' % self.user},
                                                     {'name': pfn_a},
                                                     {'name': pfn_b}], impl=self.impl, vo=self.vo)
    assert status
    assert details['user.%s:1_rse_remote_get.raw' % self.user] is True
    assert details['user.%s:2_rse_remote_get.raw' % self.user] is True
    assert details[pfn_a] is True
    assert details[pfn_b] is True
def test_exists_mgr_ok_single_lfn(self):
"""(RSE/PROTOCOLS): Check a single file on storage using LFN (Success)"""
mgr.exists(self.rse_settings, {'name': '1_rse_remote_get.raw', 'scope': 'user.%s' % self.user}, impl=self.impl, vo=self.vo)
def test_exists_mgr_ok_single_pfn(self):
"""(RSE/PROTOCOLS): Check a single file on storage using PFN (Success)"""
pfn = list(mgr.lfns2pfns(self.rse_settings, {'name': '1_rse_remote_get.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
mgr.exists(self.rse_settings, {'name': pfn}, impl=self.impl, vo=self.vo)
def test_exists_mgr_false_multi(self):
"""(RSE/PROTOCOLS): Check multiple files on storage (Fail)"""
pfn_a = list(mgr.lfns2pfns(self.rse_settings, {'name': '2_rse_remote_get.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
pfn_b = list(mgr.lfns2pfns(self.rse_settings, {'name': '1_rse_not_existing.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
status, details = mgr.exists(self.rse_settings, [{'name': '1_rse_remote_get.raw', 'scope': 'user.%s' % self.user},
{'name': 'not_existing_data.raw', 'scope': 'user.%s' % self.user},
{'name': pfn_a},
{'name': pfn_b}], impl=self.impl, vo=self.vo)
assert not status
assert details['user.%s:1_rse_remote_get.raw' % self.user] is True
assert details['user.%s:not_existing_data.raw' % self.user] is False
assert details[pfn_a] is True
assert details[pfn_b] is False
def test_exists_mgr_false_single_lfn(self):
"""(RSE/PROTOCOLS): Check a single file on storage using LFN (Fail)"""
assert not mgr.exists(self.rse_settings, {'name': 'not_existing_data.raw', 'scope': 'user.%s' % self.user}, impl=self.impl, vo=self.vo)
def test_exists_mgr_false_single_pfn(self):
"""(RSE/PROTOCOLS): Check a single file on storage using PFN (Fail)"""
pfn = list(mgr.lfns2pfns(self.rse_settings, {'name': '1_rse_not_existing.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
assert not mgr.exists(self.rse_settings, {'name': pfn}, impl=self.impl, vo=self.vo)
# MGR-Tests: RENAME
def test_rename_mgr_ok_multi(self):
"""(RSE/PROTOCOLS): Rename multiple files on storage (Success)"""
protocol = mgr.create_protocol(self.rse_settings, 'write', impl=self.impl)
pfn_a = list(protocol.lfns2pfns({'name': '7_rse_remote_rename.raw', 'scope': 'user.%s' % self.user}).values())[0]
pfn_a_new = list(protocol.lfns2pfns({'name': '7_rse_new_rename.raw', 'scope': 'user.%s' % self.user}).values())[0]
pfn_b = list(protocol.lfns2pfns({'name': '8_rse_remote_rename.raw', 'scope': 'user.%s' % self.user}).values())[0]
pfn_b_new = list(protocol.lfns2pfns({'name': '8_rse_new_rename.raw', 'scope': 'user.%s' % self.user}).values())[0]
status, details = mgr.rename(self.rse_settings, [{'name': '1_rse_remote_rename.raw', 'scope': 'user.%s' % self.user, 'new_name': '1_rse_remote_renamed.raw'},
{'name': '2_rse_remote_rename.raw', 'scope': 'user.%s' % self.user, 'new_name': '2_rse_remote_renamed.raw'},
{'name': pfn_a, 'new_name': pfn_a_new},
{'name': pfn_b, 'new_name': pfn_b_new}], impl=self.impl)
assert status
assert details['user.%s:1_rse_remote_rename.raw' % self.user]
assert details['user.%s:2_rse_remote_rename.raw' % self.user]
assert details[pfn_a]
assert details[pfn_b]
def test_rename_mgr_ok_single_lfn(self):
"""(RSE/PROTOCOLS): Rename a single file on storage using LFN (Success)"""
mgr.rename(self.rse_settings, {'name': '3_rse_remote_rename.raw', 'scope': 'user.%s' % self.user, 'new_name': '3_rse_remote_renamed.raw', 'new_scope': 'user.%s' % self.user}, impl=self.impl)
def test_rename_mgr_ok_single_pfn(self):
"""(RSE/PROTOCOLS): Rename a single file on storage using PFN (Success)"""
pfn = list(mgr.lfns2pfns(self.rse_settings, {'name': '9_rse_remote_rename.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
pfn_new = list(mgr.lfns2pfns(self.rse_settings, {'name': '9_rse_new.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
mgr.rename(self.rse_settings, {'name': pfn, 'new_name': pfn_new}, impl=self.impl)
def test_rename_mgr_file_replica_already_exists_multi(self):
"""(RSE/PROTOCOLS): Rename multiple files on storage (FileReplicaAlreadyExists)"""
pfn_a = list(mgr.lfns2pfns(self.rse_settings, {'name': '10_rse_remote_rename.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
pfn_a_new = list(mgr.lfns2pfns(self.rse_settings, {'name': '1_rse_remote_get.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
pfn_b = list(mgr.lfns2pfns(self.rse_settings, {'name': '11_rse_remote_rename.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
pfn_b_new = list(mgr.lfns2pfns(self.rse_settings, {'name': '11_rse_new_rename.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
status, details = mgr.rename(self.rse_settings, [{'name': '4_rse_remote_rename.raw', 'scope': 'user.%s' % self.user, 'new_name': '1_rse_remote_get.raw', 'new_scope': 'user.%s' % self.user},
{'name': '5_rse_remote_rename.raw', 'scope': 'user.%s' % self.user, 'new_name': '5_rse_new.raw'},
{'name': pfn_a, 'new_name': pfn_a_new},
{'name': pfn_b, 'new_name': pfn_b_new}], impl=self.impl)
assert not status
assert isinstance(details[pfn_a], exception.FileReplicaAlreadyExists)
assert isinstance(details['user.%s:4_rse_remote_rename.raw' % self.user], exception.FileReplicaAlreadyExists)
assert details['user.%s:5_rse_remote_rename.raw' % self.user]
assert details[pfn_b]
def test_rename_mgr_file_replica_already_exists_single_lfn(self):
"""(RSE/PROTOCOLS): Rename a single file on storage using LFN (FileReplicaAlreadyExists)"""
with pytest.raises(exception.FileReplicaAlreadyExists):
mgr.rename(self.rse_settings, {'name': '6_rse_remote_rename.raw', 'scope': 'user.%s' % self.user, 'new_name': '1_rse_remote_get.raw', 'new_scope': 'user.%s' % self.user}, impl=self.impl)
def test_rename_mgr_file_replica_already_exists_single_pfn(self):
"""(RSE/PROTOCOLS): Rename a single file on storage using PFN (FileReplicaAlreadyExists)"""
pfn = list(mgr.lfns2pfns(self.rse_settings, {'name': '12_rse_remote_rename.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
pfn_new = list(mgr.lfns2pfns(self.rse_settings, {'name': '1_rse_remote_get.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
with pytest.raises(exception.FileReplicaAlreadyExists):
mgr.rename(self.rse_settings, {'name': pfn, 'new_name': pfn_new}, impl=self.impl)
def test_rename_mgr_source_not_found_multi(self):
"""(RSE/PROTOCOLS): Rename multiple files on storage (SourceNotFound)"""
pfn_a = list(mgr.lfns2pfns(self.rse_settings, {'name': '12_rse_not_existing.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
pfn_b = list(mgr.lfns2pfns(self.rse_settings, {'name': '1_rse_not_created.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
status, details = mgr.rename(self.rse_settings, [{'name': '1_rse_not_existing.raw', 'scope': 'user.%s' % self.user, 'new_name': '1_rse_new_not_created.raw'},
{'name': pfn_a, 'new_name': pfn_b}], impl=self.impl)
assert not status
assert isinstance(details['user.%s:1_rse_not_existing.raw' % self.user], exception.SourceNotFound)
assert isinstance(details[pfn_a], exception.SourceNotFound)
def test_rename_mgr_source_not_found_single_lfn(self):
"""(RSE/PROTOCOLS): Rename a single file on storage using LFN (SourceNotFound)"""
with pytest.raises(exception.SourceNotFound):
mgr.rename(self.rse_settings, {'name': '1_rse_not_existing.raw', 'scope': 'user.%s' % self.user, 'new_name': '1_rse_new_not_created.raw'}, impl=self.impl)
def test_rename_mgr_source_not_found_single_pfn(self):
"""(RSE/PROTOCOLS): Rename a single file on storage using PFN (SourceNotFound)"""
pfn = list(mgr.lfns2pfns(self.rse_settings, {'name': '1_rse_not_existing.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
pfn_new = list(mgr.lfns2pfns(self.rse_settings, {'name': '1_rse_new_not_created.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
with pytest.raises(exception.SourceNotFound):
mgr.rename(self.rse_settings, {'name': pfn, 'new_name': pfn_new}, impl=self.impl)
def test_change_scope_mgr_ok_single_lfn(self):
"""(RSE/PROTOCOLS): Change the scope of a single file on storage using LFN (Success)"""
mgr.rename(self.rse_settings, {'name': '1_rse_remote_change_scope.raw', 'scope': 'user.%s' % self.user, 'new_scope': 'group.%s' % self.user}, impl=self.impl)
def test_change_scope_mgr_ok_single_pfn(self):
"""(RSE/PROTOCOLS): Change the scope of a single file on storage using PFN (Success)"""
pfn = list(mgr.lfns2pfns(self.rse_settings, {'name': '2_rse_remote_change_scope.raw', 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
pfn_new = list(mgr.lfns2pfns(self.rse_settings, {'name': '2_rse_remote_change_scope.raw', 'scope': 'group.%s' % self.user}, impl=self.impl).values())[0]
mgr.rename(self.rse_settings, {'name': pfn, 'new_name': pfn_new}, impl=self.impl)
def test_download_protocol_ok_single_pfn(self):
"""(RSE/PROTOCOLS): Check a single file download using PFN (Success)"""
filename = '1_rse_remote_get.raw'
pfn = list(mgr.lfns2pfns(self.rse_settings, {'name': filename, 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
protocol = mgr.create_protocol(self.rse_settings, 'write', impl=self.impl)
protocol.connect()
with tempfile.TemporaryDirectory() as tmpdirname:
protocol.get(pfn, dest='%s/%s' % (tmpdirname, filename), transfer_timeout=None)
assert filename in os.listdir(tmpdirname)
assert os.path.isfile('%s/%s' % (tmpdirname, filename))
size = os.stat('%s/%s' % (tmpdirname, filename)).st_size
assert size == 1048576
def test_download_protocol_ok_single_pfn_timeout(self):
"""(RSE/PROTOCOLS): Check a single file download using PFN and timeout parameter (Success)"""
filename = '1_rse_remote_get.raw'
pfn = list(mgr.lfns2pfns(self.rse_settings, {'name': filename, 'scope': 'user.%s' % self.user}, impl=self.impl).values())[0]
protocol = mgr.create_protocol(self.rse_settings, 'write', impl=self.impl)
protocol.connect()
with tempfile.TemporaryDirectory() as tmpdirname:
protocol.get(pfn, dest='%s/%s' % (tmpdirname, filename), transfer_timeout='10')
assert filename in os.listdir(tmpdirname)
assert os.path.isfile('%s/%s' % (tmpdirname, filename))
size = os.stat('%s/%s' % (tmpdirname, filename)).st_size
assert size == 1048576
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.