index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
3,700 | 816f4cfe98f5e5b23f2c8f9f42c5f3ed8458042f | #!/usr/bin/python
import platform
from numpy import ctypeslib,empty,array,exp,ascontiguousarray,zeros,asfortranarray
from ctypes import c_float,c_double,c_int
from time import time
def resize(img,scale):
    """
    downsample img to scale

    Expects a 3-d float64 (c_double) image array and a scale in (0, 1];
    returns the downsampled image.  The filtering is delegated to the
    native library ``libresize.so`` (loaded from the current directory),
    whose 1-d transposing resize is applied twice to get a 2-d resize.
    """
    sdims=img.shape
    datatype=c_double
    # The native routine only understands double-precision pixels.
    if img.dtype!=datatype:
        print "Error the image must be of doubles!"
        raise RuntimeError
    if scale>1.0:
        print "Invalid scaling factor!"
        raise RuntimeError
    img = asfortranarray(img,c_double) # make array continguous
    try:
        mresize = ctypeslib.load_library("libresize.so",".")
    except:
        print "Unable to load resize library"
        raise RuntimeError
    #use two times the 1d resize to get a 2d resize
    fresize = mresize.resize1dtran
    fresize.restype = None
    fresize.argtypes = [ ctypeslib.ndpointer(dtype=datatype, ndim=3), c_int,ctypeslib.ndpointer(dtype=datatype, ndim=3), c_int, c_int , c_int ]
    # Destination dims: rounded scaled rows/cols, channel count unchanged.
    ddims = [int(round(sdims[0]*scale)),int(round(sdims[1]*scale)),sdims[2]];
    mxdst = zeros((ddims), dtype=datatype)
    # Intermediate buffer: rows already scaled, columns not yet.
    tmp = zeros((ddims[0],sdims[1],sdims[2]), dtype=datatype)
    img1=img
    t1=time()
    # First pass scales dimension 0 and transposes; second pass scales the
    # (original) dimension 1 and transposes back.
    fresize(img1, sdims[0], tmp, ddims[0], sdims[1], sdims[2]);
    fresize(tmp, sdims[1], mxdst, ddims[1], ddims[0], sdims[2]);
    t2=time()
    # NOTE(review): t1/t2 are measured but never reported or returned.
    return mxdst.reshape(ddims[2],ddims[1],ddims[0]).T
if __name__ == "__main__":
    # Demo: load a test image, downsample it to 25%, and display both.
    from numpy.random import random_integers
    from time import time
    from pylab import imread,figure,imshow
    from ctypes import c_float,c_double,c_int
    img=imread("test.png").astype(c_double)
    imshow(img)
    img1=resize(img,0.25)
    figure()
    imshow(img1)
|
3,701 | 3089dba0956151bd43e443b679ec0b24da644d08 | import random
# Alphabet of characters a generated password may contain.
s = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890!@#$%^&*()_+=-/.,;'[]{}:<>?"
i = 0
fin = ""
# Keep proposing passwords until the user accepts one.
while i == 0:
    num = int(input("What length do you want? "))
    # random.sample draws unique characters, so num must not exceed len(s).
    password = "".join(random.sample(s, num))
    print(password)
    j = 0
    while(j == 0):
        want = input("Do you this password? (yes or no) ")
        # Fix: str.lower() returns a new string; the original discarded the
        # result, so answers like "Yes"/"NO" were never recognised.
        want = want.lower()
        if want == "yes":
            print("Your Password is " + password)
            break
        elif want == "no":
            break
    if want == "yes":
        fin = input("Do you want a new password. yes or no? ")
        # Fix: assign the lowercased answer (was discarded before).
        fin = fin.lower()
        while j == 0:
            if fin == "yes":
                break
            elif fin == "no":
                break
        if fin == "no":
            print("This is your final password " + password)
            break
3,702 | bc8bf06f1adedeb7b364308591bff09ac42d6c29 | from .dataset_readers import *
from .models import * |
3,703 | 1a09b38838f40c4c6049da8e6a72ba3d56806c07 | import tensorflow as tf
def data_rescale(x):
    """Rescale pixel values from [0, 255] into [-1, 1]."""
    scaled = tf.divide(x, 127.5)
    return tf.subtract(scaled, 1)
def inverse_rescale(y):
    """Map values from [-1, 1] back to rounded pixel values in [0, 255]."""
    shifted = tf.add(y, 1)
    return tf.round(tf.multiply(shifted, 127.5))
|
3,704 | b6dc29ae5661f84273ff91a124420bc10c7b6f6e | from .candles import CandleCallback
from .firestore import FirestoreTradeCallback
from .gcppubsub import GCPPubSubTradeCallback
from .thresh import ThreshCallback
from .trades import (
NonSequentialIntegerTradeCallback,
SequentialIntegerTradeCallback,
TradeCallback,
)
# Public API of this callbacks package, re-exported from the modules above.
__all__ = [
    "FirestoreTradeCallback",
    "GCPPubSubTradeCallback",
    "CandleCallback",
    "TradeCallback",
    "ThreshCallback",
    "SequentialIntegerTradeCallback",
    "NonSequentialIntegerTradeCallback",
]
|
3,705 | 224e13331ad93278f47a5582bbd24208d9ce5dcc | array = [1,2,3,4,5]
# Print each element of the module-level list, one per line.
for value in array:
    print(value)
|
3,706 | e4761c925643417f4fe906e8dd2c9356ae970d52 | # encoding = utf-8
"""
A flask session memcached store
"""
from datetime import timedelta, datetime
from uuid import uuid4
__author__ = 'zou'
import memcache
import pickle
from flask.sessions import SessionMixin, SessionInterface
from werkzeug.datastructures import CallbackDict
class MemcachedSession(CallbackDict, SessionMixin):
    """Dict-like Flask session object backed by memcached.

    Mutations are tracked through CallbackDict's ``on_update`` hook so
    the session interface knows when data must be written back.
    """
    def __init__(self, initial=None, sid=None, new=False):
        def on_update(self):
            # ``self`` here is the CallbackDict being updated, i.e. this
            # session object (the parameter shadows the outer ``self``).
            self.modified = True
        CallbackDict.__init__(self, initial, on_update)
        self.sid = sid  # session id; used as the memcached key suffix
        self.new = new  # True when the session was created this request
        self.modified = False  # flipped by on_update on any mutation
class MemcachedSessionInterface(SessionInterface):
    """Flask SessionInterface storing pickled sessions in memcached.

    Keys are ``<prefix><sid>``; the session id travels either in the
    ``sessionid`` query argument or in the session cookie.
    """
    serializer = pickle
    session_class = MemcachedSession

    def generate_sid(self):
        # Random UUID string used as a fresh session id.
        return str(uuid4())

    def get_memcache_expiration_time(self, app, session):
        # Permanent sessions follow the app's configured lifetime;
        # everything else lives for 8 days.
        if session.permanent:
            return app.permanent_session_lifetime
        return timedelta(days=8)

    def __init__(self, client=None, prefix="session:"):
        if client is None:
            # NOTE(review): memcache.Client() with no server list --
            # presumably configured elsewhere; confirm intended servers.
            client = memcache.Client()
        self.client = client
        self.prefix = prefix

    def open_session(self, app, request):
        """Load the session for this request, creating one if needed."""
        # Query-string id wins over the cookie.
        sid = request.args.get("sessionid", None) or request.cookies.get(app.session_cookie_name)
        if not sid:
            sid = self.generate_sid()
            return self.session_class(sid=sid)
        val = self.client.get(str(self.prefix + sid))
        if val is not None:
            data = self.serializer.loads(val)
            # Touch the entry to slide its expiry forward by 8 days.
            self.client.set(self.prefix + str(sid), val, int(timedelta(days=8).total_seconds()))
            return self.session_class(data, sid=sid)
        # Stored data expired/missing: start over with a new id.
        new_sid = self.generate_sid()
        return self.session_class(sid=new_sid, new=True)

    def save_session(self, app, session, response):
        """Persist (or delete) the session and set the cookie."""
        domain = self.get_cookie_domain(app)
        if not session:
            # Session emptied: drop the stored value and, if it had ever
            # been written, the cookie too.
            self.client.delete(str(self.prefix + session.sid))
            if session.modified:
                response.delete_cookie(app.session_cookie_name, domain=domain)
            return
        memcache_exp = self.get_memcache_expiration_time(app, session)
        cookie_exp = self.get_expiration_time(app, session)
        val = self.serializer.dumps(dict(session))
        self.client.set(self.prefix + str(session.sid), val, int(memcache_exp.total_seconds()))
        response.set_cookie(app.session_cookie_name, session.sid, expires=cookie_exp, httponly=True, domain=domain, max_age= 7*24*60*60)

    def set_cas_ticket_to_session_mapping(self, app, session, ticket):
        # Map a CAS ticket to this session id (for single sign-out).
        memcache_exp = self.get_memcache_expiration_time(app, session)
        val = str(session.sid)
        self.client.set(str(ticket), val, int(memcache_exp.total_seconds()))

    def del_ticket_session_mapping(self, ticket):
        # Destroy both the session referenced by a CAS ticket and the
        # ticket->session mapping itself.
        session_sid = self.client.get(str(ticket))
        if session_sid:
            r = self.client.delete(self.prefix + str(session_sid))
            # if r == 1:
            #     print 'already delete session id= ' + session_sid
        r = self.client.delete(str(ticket))
        # if r == 1:
        #     print 'already delete ticket = ' + ticket
|
3,707 | 1adaca88cf41d4e4d3a55996022278102887be07 | from functools import wraps
from flask import request, abort
# Apply Aspect Oriented Programming to server routes using roles
# e.g. we want to specify the role, perhaps supplied
# by the request or a jwt token, using a decorator
# to abstract away the authorization
# possible decorator implementation
def roles_required(roles):
    """Decorator factory: only run the view if the ``role`` kwarg is allowed.

    :param roles: collection of role names permitted to call the view.
    A request whose ``role`` kwarg is present but not in *roles* is
    rejected with HTTP 401.  A missing/empty role skips the check and
    falls through to the view (preserving the original pass-through
    behaviour).
    """
    def decorator(func):
        # can't skip this @wraps function
        # or error 'View function mapping is overwriting an existing endpoint function
        # stackoverflow.com/questions/19964079
        @wraps(func)
        def wrapper(*args, **kwargs):
            print(roles, 'required')
            print(args, kwargs, 'provided')
            # Fix: kwargs['role'] raised KeyError (-> HTTP 500) whenever the
            # route did not supply a role; .get degrades to the no-role path.
            role = kwargs.get('role')
            if role:
                print(role)
                if role not in roles:
                    print('unauthorised')
                    return abort(401)
                else:
                    print('authorised')
            return func(*args, **kwargs)
            #return abort(401)
            #func()
        return wrapper
    return decorator
# can in theory use jwt token parsing to check role here
|
3,708 | ce69f7b7cf8c38845bfe589c83fdd6e43ab50912 | #
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from abc import ABC, abstractmethod
from bigdl.dllib.utils.common import DOUBLEMAX
from bigdl.orca.learn.optimizers.schedule import Scheduler
from bigdl.dllib.utils.log4Error import invalidInputError
from typing import (Any, Optional, Dict, TYPE_CHECKING)
if TYPE_CHECKING:
from bigdl.dllib.optim import optimizer
import numpy as np
class Optimizer(ABC):
    """Abstract base class for the orca optimizer wrappers below."""

    @abstractmethod
    def get_optimizer(self):
        """Return the underlying dllib optimizer instance."""
        pass
class SGD(Optimizer):
    """
    A plain implementation of SGD

    :param learningrate learning rate
    :param learningrate_decay learning rate decay
    :param weightdecay weight decay
    :param momentum momentum
    :param dampening dampening for momentum
    :param nesterov enables Nesterov momentum
    :param learningrate_schedule optional learning-rate Scheduler; None uses
           the dllib default schedule
    :param learningrates 1D tensor of individual learning rates
    :param weightdecays 1D tensor of individual weight decays

    >>> sgd = SGD()
    creating: createDefault
    creating: createSGD
    """
    def __init__(self,
                 learningrate: float = 1e-3,
                 learningrate_decay: float = 0.0,
                 weightdecay: float = 0.0,
                 momentum: float = 0.0,
                 dampening: float = DOUBLEMAX,
                 nesterov: bool = False,
                 learningrate_schedule: Optional["Scheduler"] = None,
                 learningrates: Optional["np.ndarray"] = None,
                 weightdecays: Optional["np.ndarray"] = None) -> None:
        from bigdl.dllib.optim.optimizer import SGD as BSGD
        # Fix 1: the schedule is optional (see the doctest: SGD() must work),
        # so only validate it when one is actually supplied -- the original
        # unconditional isinstance check raised for the None default.
        # Fix 2: the message is now an f-string so the bad value is shown
        # (the original printed the literal "{learningrate_schedule}").
        if learningrate_schedule is not None:
            invalidInputError(isinstance(learningrate_schedule, Scheduler),
                              "learningrate_schedule should be an "
                              "bigdl.orca.learn.optimizers.schedule.Scheduler,"
                              f" but got {learningrate_schedule}")
        # None is forwarded unchanged; dllib's SGD falls back to its own
        # Default() schedule (hence "creating: createDefault" above).
        schedule = (learningrate_schedule.get_scheduler()
                    if learningrate_schedule is not None else None)
        self.optimizer = BSGD(learningrate,
                              learningrate_decay,
                              weightdecay,
                              momentum,
                              dampening,
                              nesterov,
                              schedule,
                              learningrates,
                              weightdecays,
                              bigdl_type="float")

    def get_optimizer(self) -> "optimizer.SGD":
        """Return the wrapped dllib SGD optimizer."""
        return self.optimizer
class Adagrad(Optimizer):
    """
    Wrapper for the Adagrad optimizer. Original paper:
    http://jmlr.org/papers/volume12/duchi11a/duchi11a.pdf

    :param learningrate learning rate
    :param learningrate_decay learning rate decay
    :param weightdecay weight decay

    >>> adagrad = Adagrad()
    creating: createAdagrad
    """

    def __init__(self,
                 learningrate: float = 1e-3,
                 learningrate_decay: float = 0.0,
                 weightdecay: float = 0.0) -> None:
        from bigdl.dllib.optim.optimizer import Adagrad as BAdagrad
        # Positional order mirrors the dllib constructor signature.
        params = (learningrate, learningrate_decay, weightdecay)
        self.optimizer = BAdagrad(*params, bigdl_type="float")

    def get_optimizer(self) -> "optimizer.Adagrad":
        """Return the wrapped dllib Adagrad optimizer."""
        return self.optimizer
class LBFGS(Optimizer):
    """
    Wrapper for L-BFGS. This implementation relies on a user-provided line
    search function (state.lineSearch); when none is provided a simple
    learningRate produces fixed-size steps. Fixed steps are much cheaper
    than line searches and can be useful for stochastic problems. The
    learning rate applies even when a line search is provided, which helps
    for large-scale stochastic problems where opfunc is a noisy
    approximation of f(x): it lets you reduce confidence in the step size.

    :param max_iter Maximum number of iterations allowed
    :param max_eval Maximum number of function evaluations
    :param tolfun Termination tolerance on the first-order optimality
    :param tolx Termination tol on progress in terms of func/param changes
    :param ncorrection
    :param learningrate
    :param verbose
    :param linesearch A line search function
    :param linesearch_options If no line search provided, then a fixed step size is used

    >>> lbfgs = LBFGS()
    creating: createLBFGS
    """

    def __init__(self,
                 max_iter: int = 20,
                 max_eval: float = DOUBLEMAX,
                 tolfun: float = 1e-5,
                 tolx: float = 1e-9,
                 ncorrection: int = 100,
                 learningrate: float = 1.0,
                 verbose: bool = False,
                 linesearch: Any = None,
                 linesearch_options: Optional[Dict[Any, Any]]=None) -> None:
        from bigdl.dllib.optim.optimizer import LBFGS as BLBFGS
        # Positional order mirrors the dllib constructor signature.
        params = (max_iter, max_eval, tolfun, tolx, ncorrection,
                  learningrate, verbose, linesearch, linesearch_options)
        self.optimizer = BLBFGS(*params, bigdl_type="float")

    def get_optimizer(self) -> "optimizer.LBFGS":
        """Return the wrapped dllib LBFGS optimizer."""
        return self.optimizer
class Adadelta(Optimizer):
    """
    Wrapper for the Adadelta variant of SGD: http://arxiv.org/abs/1212.5701

    :param decayrate interpolation parameter rho
    :param epsilon for numerical stability

    >>> adagrad = Adadelta()
    creating: createAdadelta
    """

    def __init__(self,
                 decayrate: float = 0.9,
                 epsilon: float = 1e-10) -> None:
        from bigdl.dllib.optim.optimizer import Adadelta as BAdadelta
        # Positional order mirrors the dllib constructor signature.
        self.optimizer = BAdadelta(*(decayrate, epsilon), bigdl_type="float")

    def get_optimizer(self) -> "optimizer.Adadelta":
        """Return the wrapped dllib Adadelta optimizer."""
        return self.optimizer
class Adam(Optimizer):
    """
    Wrapper for the Adam optimizer: http://arxiv.org/pdf/1412.6980.pdf

    :param learningrate learning rate
    :param learningrate_decay learning rate decay
    :param beta1 first moment coefficient
    :param beta2 second moment coefficient
    :param epsilon for numerical stability

    >>> adam = Adam()
    creating: createAdam
    """

    def __init__(self,
                 learningrate: float = 1e-3,
                 learningrate_decay: float = 0.0,
                 beta1: float = 0.9,
                 beta2: float = 0.999,
                 epsilon: float = 1e-8) -> None:
        from bigdl.dllib.optim.optimizer import Adam as BAdam
        # Positional order mirrors the dllib constructor signature.
        params = (learningrate, learningrate_decay, beta1, beta2, epsilon)
        self.optimizer = BAdam(*params, bigdl_type="float")

    def get_optimizer(self) -> "optimizer.Adam":
        """Return the wrapped dllib Adam optimizer."""
        return self.optimizer
class ParallelAdam(Optimizer):
    """
    Wrapper for the parallelized Adam optimizer:
    http://arxiv.org/pdf/1412.6980.pdf

    :param learningrate learning rate
    :param learningrate_decay learning rate decay
    :param beta1 first moment coefficient
    :param beta2 second moment coefficient
    :param epsilon for numerical stability
    :param parallel_num degree of parallelism (-1 for the default)

    >>> pAdam = ParallelAdam()
    creating: createParallelAdam
    """

    def __init__(self,
                 learningrate: float = 1e-3,
                 learningrate_decay: float = 0.0,
                 beta1: float = 0.9,
                 beta2: float = 0.999,
                 epsilon: float = 1e-8,
                 parallel_num: int = -1) -> None:
        from bigdl.dllib.optim.optimizer import ParallelAdam as BParallelAdam
        # Positional order mirrors the dllib constructor signature.
        params = (learningrate, learningrate_decay, beta1, beta2,
                  epsilon, parallel_num)
        self.optimizer = BParallelAdam(*params, bigdl_type="float")

    def get_optimizer(self) -> "optimizer.ParallelAdam":
        """Return the wrapped dllib ParallelAdam optimizer."""
        return self.optimizer
class Ftrl(Optimizer):
    """
    Wrapper for the Ftrl optimizer:
    https://www.eecs.tufts.edu/~dsculley/papers/ad-click-prediction.pdf.
    Supports L1 penalty, L2 penalty and shrinkage-type L2 penalty.

    :param learningrate learning rate
    :param learningrate_power double, must be less or equal to zero. Default is -0.5.
    :param initial_accumulator_value double, the starting value for accumulators,
        require zero or positive values.
    :param l1_regularization_strength double, must be greater or equal to zero. Default is zero.
    :param l2_regularization_strength double, must be greater or equal to zero. Default is zero.
    :param l2_shrinkage_regularization_strength double, must be greater or equal to zero.
        Default is zero. This differs from l2RegularizationStrength above. L2 above is a
        stabilization penalty, whereas this one is a magnitude penalty.

    >>> ftrl = Ftrl()
    creating: createFtrl
    >>> ftrl2 = Ftrl(1e-2, -0.1, 0.2, 0.3, 0.4, 0.5)
    creating: createFtrl
    """

    def __init__(self,
                 learningrate: float = 1e-3,
                 learningrate_power: float = -0.5,
                 initial_accumulator_value: float = 0.1,
                 l1_regularization_strength: float = 0.0,
                 l2_regularization_strength: float = 0.0,
                 l2_shrinkage_regularization_strength: float = 0.0) -> None:
        from bigdl.dllib.optim.optimizer import Ftrl as BFtrl
        # Positional order mirrors the dllib constructor signature.
        params = (learningrate, learningrate_power, initial_accumulator_value,
                  l1_regularization_strength, l2_regularization_strength,
                  l2_shrinkage_regularization_strength)
        self.optimizer = BFtrl(*params, bigdl_type="float")

    def get_optimizer(self) -> "optimizer.Ftrl":
        """Return the wrapped dllib Ftrl optimizer."""
        return self.optimizer
class Adamax(Optimizer):
    """
    Wrapper for the Adamax optimizer: http://arxiv.org/pdf/1412.6980.pdf

    :param learningrate learning rate
    :param beta1 first moment coefficient
    :param beta2 second moment coefficient
    :param epsilon for numerical stability

    >>> adagrad = Adamax()
    creating: createAdamax
    """

    def __init__(self,
                 learningrate: float = 0.002,
                 beta1: float = 0.9,
                 beta2: float = 0.999,
                 epsilon: float = 1e-38) -> None:
        from bigdl.dllib.optim.optimizer import Adamax as BAdamax
        # Positional order mirrors the dllib constructor signature.
        params = (learningrate, beta1, beta2, epsilon)
        self.optimizer = BAdamax(*params, bigdl_type="float")

    def get_optimizer(self) -> "optimizer.Adamax":
        """Return the wrapped dllib Adamax optimizer."""
        return self.optimizer
class RMSprop(Optimizer):
    """
    Wrapper for the RMSprop optimizer.

    :param learningrate learning rate
    :param learningrate_decay learning rate decay
    :param decayrate decay rate, also called rho
    :param epsilon for numerical stability

    >>> adagrad = RMSprop()
    creating: createRMSprop
    """

    def __init__(self,
                 learningrate: float = 1e-2,
                 learningrate_decay: float = 0.0,
                 decayrate: float = 0.99,
                 epsilon: float = 1e-8) -> None:
        from bigdl.dllib.optim.optimizer import RMSprop as BRMSprop
        # Positional order mirrors the dllib constructor signature.
        params = (learningrate, learningrate_decay, decayrate, epsilon)
        self.optimizer = BRMSprop(*params, bigdl_type="float")

    def get_optimizer(self) -> "optimizer.RMSprop":
        """Return the wrapped dllib RMSprop optimizer."""
        return self.optimizer
|
3,709 | 5430e1861a6244c25c00699323efa0921a5af940 | import grpc
import time
import json
import sys
import uuid
from arch.api.proto import inference_service_pb2
from arch.api.proto import inference_service_pb2_grpc
import threading
def run(address):
    """Open an insecure gRPC channel to *address* and drive inference
    requests from worker threads, timing the whole batch."""
    ths = []
    with grpc.insecure_channel(address) as channel:
        for i in range(1):  # thread count; raise for load testing
            th = threading.Thread(target=send, args=(channel,))
            ths.append(th)
        st = int(time.time())
        for th in ths:
            th.start()
        for th in ths:
            th.join()
        et = int(time.time())
        # NOTE(review): st/et are measured but never reported.
def process_response(call_future):
    """Completion callback: print the result of a finished async call."""
    result = call_future.result()
    print(result)
def send(channel):
    """Build one sample inference request and print the server's reply."""
    stub = inference_service_pb2_grpc.InferenceServiceStub(channel)
    request = inference_service_pb2.InferenceMessage()
    request_data = dict()
    request_data['serviceId'] = 'xxxxxxxxx'  # placeholder service id
    request_data['applyId'] = ''
    # request_data['modelId'] = 'arbiter-10000#guest-10000#host-10000#model' # You can specify the model id this way
    # request_data['modelVersion'] = 'acd3e1807a1211e9969aacde48001122' # You can specify the model version this way
    request_data['caseid'] = uuid.uuid1().hex  # unique id per request
    # Hard-coded demo feature vector.
    feature_data = dict()
    feature_data['fid1'] = 5.1
    feature_data['fid2'] = 6.2
    feature_data['fid3'] = 7.6
    request_data['featureData'] = feature_data
    request_data['sendToRemoteFeatureData'] = feature_data
    print(json.dumps(request_data, indent=4))
    # The message body carries the whole request as UTF-8 JSON bytes.
    request.body = json.dumps(request_data).encode(encoding='utf-8')
    print(stub.inference(request))
if __name__ == '__main__':
run(sys.argv[1])
|
3,710 | bb198978ffc799bb43acf870467496e1dcc54d4b | # template for "Stopwatch: The Game"
import math
import simplegui
# define global variables
successcount = 0;    # stops that landed exactly on a whole second
totalstopcount = 0;  # total times the running watch was stopped
count = 0;           # elapsed time in tenths of a second
# NOTE(review): flag semantics inferred from start()/stop() -- confirm.
T = True;            # True while a stop may still be scored
F = True;            # True until the watch has been started at least once
# define helper function format that converts time
# in tenths of seconds into formatted string A:BC.D
# define helper function format that converts time
# in tenths of seconds into formatted string A:BC.D
def format(t):
    """Render *t* tenths of a second as "minutes:seconds.tenths"."""
    minutes, remainder = divmod(t, 600)
    seconds, tenths = divmod(remainder, 10)
    return "{}:{:02d}.{}".format(minutes, seconds, tenths)
# define event handlers for buttons; "Start", "Stop", "Reset"
def stop():
    """STOP button: halt the timer and score the attempt.

    NOTE(review): the original indentation was lost; the nesting below is
    a reconstruction -- verify against the original source.
    """
    global successcount, totalstopcount, T;
    timer.stop();
    if (T == True):
        # Only score if the watch was actually running (started and not
        # already stopped).
        if (F == False):
            totalstopcount = totalstopcount + 1;
        T = False;  # prevent double-counting repeated STOP presses
        # A "success" is stopping exactly on a whole second (not at 0).
        if ((count % 10 == 0) and (count != 0)):
            successcount = successcount + 1;
def start():
    """START button: begin (or resume) the stopwatch."""
    global T, F;
    T = True;   # the next stop may be scored again
    F = False;  # the watch has been started at least once
    timer.start();
def reset():
    """RESET button: clear the elapsed time and both scores."""
    global successcount, totalstopcount, count, F;
    count = 0;
    successcount = 0;
    totalstopcount = 0;
    F = True;  # back to the never-started state
# define event handler for timer with 0.1 sec interval
def tick():
    """Timer handler: advance the elapsed time by one tenth of a second."""
    global count
    count += 1
# define draw handler
def draw(canvas):
    """Draw handler: elapsed time centred, score (successes/stops) top-right."""
    global count;
    canvas.draw_text(format(count), [250, 250], 40, "red");
    canvas.draw_text(str(successcount) + "/" + str(totalstopcount), [400, 100], 30, "orange");
# create frame
frame = simplegui.create_frame("Stopwatch", 500, 500);
frame.add_button("START", start);
frame.add_button("STOP", stop);
frame.add_button("RESET", reset);
# register event handlers
frame.set_draw_handler(draw);
timer = simplegui.create_timer(100, tick)  # fires every 100 ms = 0.1 s
# start frame
frame.start();
# Please remember to review the grading rubric
|
3,711 | a5856e12c281ed6a252f499a380f9c51082ea740 | import os
import closet
import unittest
import tempfile
def in_response(response, value):
    """Return True if the UTF-8 encoded *value* occurs in the response body."""
    needle = value.encode()
    return needle in response.data
def is_404(response):
    """Return True if the response carries HTTP status 404.

    Fix: the original compared the status code but never returned the
    result, so the function always returned None (falsy) and every
    ``is_404`` check silently evaluated as "not found == False".
    """
    return response.status_code == 404
class ClosetTestBase(unittest.TestCase):
    """Shared fixture: temporary database plus login/logout helpers."""

    def setUp(self):
        """Set up test environment before each test"""
        # Fresh temp SQLite file per test; fd kept so tearDown can close it.
        self.db_fd, closet.app.config['DATABASE'] = tempfile.mkstemp()
        closet.app.config['TESTING'] = True
        self.app = closet.app.test_client()
        closet.init_db()

    def tearDown(self):
        """Tear down test environment after each test"""
        os.close(self.db_fd)
        os.unlink(closet.app.config['DATABASE'])

    def login(self, username, password):
        """Login to test website as specified user with the specified
        password
        """
        return self.app.post('/login', data=dict(
            username=username,
            password=password
        ), follow_redirects=True)

    def logout(self):
        """Logout of test website"""
        return self.app.get('/logout', follow_redirects=True)

    def authenticate(self):
        """Login to test website as the standard test user"""
        self.login(closet.app.config['USERNAME'],
                   closet.app.config['PASSWORD'])
class ClosetTestCase(ClosetTestBase):
    """Generic smoke tests for the closet app (no model specifics)."""
    # Generic Tests
    def test_empty_db(self):
        """Start with a blank database."""
        rv = self.app.get('/')
        assert b'Your closet is empty.' in rv.data

    def test_login_logout(self):
        """Make sure login and logout works"""
        rv = self.login(closet.app.config['USERNAME'],
                        closet.app.config['PASSWORD'])
        assert b'You were logged in' in rv.data
        rv = self.logout()
        assert b'You were logged out' in rv.data
        # Wrong username, then wrong password, must both be rejected.
        rv = self.login(closet.app.config['USERNAME'] + 'x',
                        closet.app.config['PASSWORD'])
        assert b'Invalid username' in rv.data
        rv = self.login(closet.app.config['USERNAME'],
                        closet.app.config['PASSWORD'] + 'x')
        assert b'Invalid password' in rv.data
class ModelBase(unittest.TestCase):
    """Reusable helpers for testing CRUD views of a model.

    Subclasses configure base_url/name/fields/etc. in __init__ and then
    compose the add/edit/delete helpers below into concrete tests.
    """
    # Model based view test helpers
    def __init__(self, *args, **kwargs):
        super(ModelBase, self).__init__(*args, **kwargs)
        self.base_url = '/'
        self.add_url = 'add'
        self.edit_url = 'edit'
        self.delete_url = 'delete'
        self.name = ''
        self.nice_name = ''
        self.name_field = 'name'
        self.id_field = 'slug'
        self.fields = {}

    def get_url(self, *args):
        """Create a URL from a tuple of strings based on the base url"""
        try:
            url = '/'.join((self.base_url, ) + args)
        except TypeError:
            # A single tuple argument was passed instead of strings.
            url = '/'.join((self.base_url, ) + args[0])
        return url.rstrip('/')

    def get(self, url):
        """Process a GET request to the app"""
        # NOTE(review): get_url is unbound here -- should be self.get_url(url),
        # otherwise this raises NameError at runtime.
        return self.app.get(get_url(url), follow_redirects=True)

    def post(self, url, data):
        """Process a POST request to the app"""
        # NOTE(review): same as get() -- should be self.get_url(url).
        return self.app.post(get_url(url), data=data, follow_redirects=True)

    def verify_object(self, data):
        """Verify the model object data"""
        rv = self.get(data[self.id_field])
        result = not is_404(rv)
        if result:
            # NOTE(review): iterating a dict yields keys only; unpacking
            # "key, value" here needs data.items().
            for key, value in data:
                if not in_response(rv, value):
                    return False
        return result

    def get_add_form(self):
        """Test that the "add" form is accessible and contains all the
        fields
        """
        rv = self.get(self.add_url)
        assert not is_404(rv)
        assert in_response(rv, 'Add {}'.format(self.nice_name))
        # NOTE(review): should be self.fields.items() (see verify_object).
        for field, name in self.fields:
            assert in_response(rv, name)
        return rv

    def get_edit_form(self, data):
        """Test that the edit form is accessible and contains all the
        fields
        """
        self.add_success(data)
        rv = self.get((data[self.id_field], self.edit_url))
        assert not is_404(rv)
        assert in_response(rv, 'Edit {}'.format(data[self.name_field]))
        # NOTE(review): should be self.fields.items().
        for field, name in self.fields:
            assert in_response(rv, name)
        return rv

    def get_delete_confirmation_form(self, data):
        """Test that the delete confirmation form is accessible"""
        self.add_success(data)
        rv = self.get((data[self.id_field], self.delete_url))
        assert not is_404(rv)
        assert in_response(rv, 'Delete {}'.format(data[self.name_field]))
        return rv

    def add_success(self, data):
        """Test that adding a model with the given data succeeds"""
        rv = self.post(self.add_url, data)
        assert not in_response(rv, 'Add {}'.format(self.nice_name))
        assert self.verify_object(data)
        return rv

    def edit_success(self, id_, data):
        """Test that updating a model with the given data succeeds"""
        rv = self.post((id_, self.edit_url), data)
        assert not in_response(rv, 'Edit {}'.format(data[self.name_field]))
        assert self.verify_object(data)
        return rv

    def update_success(self, data, new_data):
        """Test that updating a model with the given data succeeds"""
        self.add_success(data)
        return self.edit_success(data[self.id_field], new_data)

    def delete_success(self, id_):
        """Test that deleting the specified model succeeds"""
        rv = self.post((id_, self.delete_url), dict(post='yes'))
        assert not self.verify_object({self.id_field: id_})
        return rv

    def add_fail(self, data, message):
        """Test that adding a model with the given data fails"""
        rv = self.post(self.add_url, data)
        assert in_response(rv, 'Add {}'.format(self.nice_name))
        assert in_response(rv, message)
        return rv

    def edit_fail(self, id_, data, message):
        """Test that updating a model with the given data fails"""
        rv = self.post((id_, self.edit_url), data)
        assert in_response(rv, 'Edit {}'.format(data[self.name_field]))
        assert in_response(rv, message)
        return rv

    def update_fail(self, data, new_data, message):
        """Test that updating a model with the given data fails"""
        self.add_success(data)
        return self.edit_fail(data[self.id_field], new_data, message)

    def delete_fail(self, id_, message):
        """Test that deleting the specified model fails"""
        rv = self.post((id_, self.delete_url), dict(post='yes'))
        assert in_response(rv, message)
        assert self.verify_object({self.id_field: id_})
        return rv

    def bad_data_fail(self, good_data, bad_data, message):
        """Test that adding and updating a model with the given data
        fails
        """
        self.add_fail(bad_data, message)
        self.update_fail(good_data, bad_data, message)

    def add_required_field_fail(self, field, data):
        """Test that adding a model with a blank or missing required
        field fails
        """
        message = '{} is required'.format(self.fields[field])
        data = data.copy()
        data[field] = ''
        self.add_fail(data, message)
        assert not self.verify_object(data)
        del data[field]
        self.add_fail(data, message)
        assert not self.verify_object(data)

    def update_required_field_fail(self, field, data):
        """Test that updating a model with a blank or missing required
        field fails
        """
        message = '{} is required'.format(self.fields[field])
        data = data.copy()
        id_ = data[self.id_field]
        self.add_success(data)
        data[field] = ''
        self.edit_fail(id_, data, message)
        assert not self.verify_object(data)
        del data[field]
        self.edit_fail(id_, data, message)
        assert not self.verify_object(data)
        # Delete base model?

    def required_field_fail(self, field, data):
        """Test that adding and updating a model with a blank or missing
        required field fails
        """
        self.add_required_field_fail(field, data)
        self.update_required_field_fail(field, data)

    def add_existing_key_fail(self, data):
        """Test that adding a model with an existing key fails"""
        message = 'exists'
        rv = self.add_success(data)
        assert not in_response(rv, message)
        return self.add_fail(data, message)

    def update_existing_key_fail(self, data, new_data):
        """Test that adding a model with an existing key fails"""
        message = 'exists'
        rv = self.add_success(data)
        assert not in_response(rv, message)
        rv = self.add_success(new_data)
        assert not in_response(rv, message)
        # NOTE(review): update_fail takes (data, new_data, message) --
        # this call is missing an argument and will raise TypeError.
        rv = self.update_fail(data, message)
        assert self.verify_object(new_data)
        return rv

    def existing_key_fail(self, data, new_data):
        """Test that adding and updating a model with an existing key
        fails
        """
        message = 'exists'
        rv = self.add_success(data)
        assert not in_response(rv, message)
        self.add_fail(data, message)
        rv = self.add_success(new_data)
        assert not in_response(rv, message)
        # NOTE(review): same arity bug as update_existing_key_fail.
        self.update_fail(data, message)
        assert self.verify_object(new_data)

    def data_sorted(self, before_data, after_data, url):
        """Test that the models will be sorted in the correct order"""
        self.add_success(after_data)
        self.add_success(before_data)
        rv = self.get(url)
        after_index = rv.data.index(after_data[self.name_field].encode())
        before_index = rv.data.index(before_data[self.name_field].encode())
        assert after_index > before_index

    def delete_does_not_exist_fail(self, id_):
        """Test that deleting a model that does not exist fails"""
        assert is_404(self.get((id_, self.delete_url)))
        self.delete_fail(id_, 'does not exist')
class CategoryTestCase(ClosetTestBase, ModelBase):
    """CRUD tests for the Category model, built on ModelBase helpers."""

    def __init__(self, *args, **kwargs):
        # NOTE(review): super(ModelBase, self) skips ModelBase.__init__ in
        # the MRO, so add_url/edit_url/delete_url/name_field/id_field are
        # never initialised; this likely should be
        # super(CategoryTestCase, self).__init__(*args, **kwargs).
        super(ModelBase, self).__init__(*args, **kwargs)
        self.base_url = '/categories'
        self.name = 'category'
        self.nice_name = 'Category'
        self.fields = {
            'name': 'Name',
            'parent': 'Parent'}
        # Fixture payloads; 'hats' intentionally has a bogus parent and
        # 'symbols' an un-sluggable name.
        self.test_data = {
            'pants': {
                'name': 'Pants',
                'slug': 'pants'},
            'shirts': {
                'name': 'Shirts',
                'slug': 'shirts'},
            'jeans': {
                'name': 'Jeans',
                'slug': 'jeans',
                'parent': 'pants'},
            't-shirts': {
                'name': 'T-shirts',
                'slug': 't-shirts',
                'parent': 'shirts'},
            'hats': {
                'name': 'Hats',
                'slug': 'hats',
                'parent': 'spam'},
            'polo-shirts': {
                'name': 'Polo Shirts',
                'slug': 'polo-shirts'},
            'symbols': {
                'name': ':)',
                'slug': ''},
            'keyword': {
                'name': 'Add',
                'slug': 'add-1'}}

    def setUp(self):
        super(CategoryTestCase, self).setUp()
        self.authenticate()

    def test_get_category_forms(self):
        """Test that the category forms are accessible"""
        self.get_add_form()
        self.get_edit_form(self.test_data['pants'])
        self.get_delete_confirmation_form(self.test_data['shirts'])

    def test_add_category(self):
        """Test that adding a category works"""
        self.add_success(self.test_data['pants'])

    def test_update_category(self):
        """Test that updating a category works"""
        self.update_success(self.test_data['pants'], self.test_data['shirts'])

    def test_delete_category(self):
        """Test that deleting a category works"""
        self.add_success(self.test_data['pants'])
        self.delete_success('pants')

    def test_add_child_category(self):
        """Test that adding a child category works"""
        self.add_success(self.test_data['pants'])
        rv = self.get('pants')
        assert in_response(rv, 'This category is empty.')
        self.add_success(self.test_data['jeans'])
        rv = self.get('pants')
        assert not in_response(rv, 'This category is empty.')
        assert in_response(rv, 'Jeans')

    def test_update_child_category(self):
        """Test that updating child categories works"""
        self.add_success(self.test_data['pants'])
        self.add_success(self.test_data['shirts'])
        self.add_success(self.test_data['jeans'])
        rv = self.get('pants')
        assert not in_response(rv, 'This category is empty.')
        assert in_response(rv, 'Jeans')
        self.edit_success('jeans', self.test_data['t-shirts'])
        rv = self.get('pants')
        assert in_response(rv, 'This category is empty.')
        assert not in_response(rv, 'Jeans')
        assert not in_response(rv, 'T-Shirts')
        rv = self.get('shirts')
        assert not in_response(rv, 'This category is empty.')
        assert in_response(rv, 'T-Shirts')
        assert not in_response(rv, 'Jeans')

    def test_name_required(self):
        """Test that adding/updating a category without a name fails"""
        self.required_field_fail('name', self.test_data['pants'])

    def test_parent_does_not_exist(self):
        """Test that adding/updating a category with a non-existent
        parent fails
        """
        self.bad_data_fail(self.test_data['pants'],
                           self.test_data['hats'], 'Parent does not exist')

    def test_category_already_exists(self):
        self.existing_key_fail(
            self.test_data['pants'],
            self.test_data['shirts'])

    def test_categories_are_sorted(self):
        """Test that categories are sorted alphabetically by name"""
        # NOTE(review): data_sorted requires a third ``url`` argument --
        # this call will raise TypeError.
        self.data_sorted(self.test_data['shirts'], self.test_data['pants'])

    def test_delete_category_does_not_exist(self):
        """Test that deleting a category that doesn't exist fails"""
        self.delete_does_not_exist_fail('hats')

    def test_add_category_slug_special(self):
        """Test that adding a category with an incorrect name fails"""
        self.add_success(self.test_data['polo-shirts'])
        assert self.verify_object(dict(name='Polo Shirts', slug='polo-shirts'))
        self.add_fail(self.test_data['symbols'], '')
        # NOTE(review): add_success expects a data dict, not the string
        # 'Add' -- probably meant self.test_data['keyword'].
        self.add_success('Add')

    def test_update_category_slug_special(self):
        """Test that updating a category with an incorrect slug fails"""
        # NOTE(review): get_category_url is not defined anywhere -- these
        # calls presumably should use self.get_url(...).
        rv = self.app.post(self.get_category_url('add'), data=dict(
            name='Pants', slug='pants'
        ), follow_redirects=True)
        rv = self.app.post(self.get_category_url('pants', 'edit'), data=dict(
            name='Polo Shirts', slug='polo shirts'
        ), follow_redirects=True)
        assert b'Edit Pants' in rv.data
        assert b'Slug is formatted incorrectly' in rv.data
        rv = self.app.post(self.get_category_url('pants', 'edit'), data=dict(
            name=':)', slug=':)'
        ), follow_redirects=True)
        assert b'Edit Pants' in rv.data
        assert b'Slug is formatted incorrectly' in rv.data
        rv = self.app.post(self.get_category_url('pants', 'edit'), data=dict(
            name='Add', slug='add'
        ), follow_redirects=True)
        assert b'Edit Pants' in rv.data
        assert b'Slug "add" is not allowed' in rv.data
if __name__ == '__main__':
    # Run the closet test suite when executed directly.
    unittest.main()
|
3,712 | 05764d1cfd9573616fcd6b125280fddf2e5ce7ad | from collections import Counter, defaultdict
from random import randrange
from copy import deepcopy
import sys
def election(votes, message=True, force_forward=False):
    """Run a ranked-choice elimination election over the given ballots.

    votes         -- list of ballots; each ballot is a list of candidate
                     symbols with the MOST preferred candidate LAST (the
                     current first choice is read with v[-1] and exhausted
                     choices are removed with pop()).
    message       -- print per-round progress when True.
    force_forward -- on a deadlocked round, randomly drop one trailing
                     candidate so the election always makes progress.

    Returns the winning candidate symbol; with message=True a deadlocked
    election returns the last round's tally list instead.  May fall off
    the loop and return None when no progress is possible — TODO confirm
    callers rely on that (the recursive run-offs appear to).
    """
    votes = deepcopy(votes)  # never mutate the caller's ballots
    N = len(votes)
    # At most N rounds; each round either returns or trims the ballots.
    for i in range(N):
        # Tally current first choices of all non-exhausted ballots,
        # sorted most-common first.
        obtained = Counter([v[-1] for v in votes if len(v)]).most_common()
        M = len(obtained)
        top = obtained[0]
        if M == 1:
            # Only one candidate still receives votes: they win.
            return top[0]
        # accum[m] = combined votes of the m-th ranked candidate and
        # everyone ranked below (a suffix sum of the tally).
        accum = [0]
        for ob in obtained[::-1]:
            accum.append(accum[-1] + ob[1])
        accum = accum[:0:-1]
        # A candidate stays a contender while the combined votes from
        # their rank downward could still overtake the rank above.
        candidates = {top[0]}
        for m in range(1, M):
            if accum[m] < obtained[m - 1][1]:
                break
            else:
                candidates.add(obtained[m][0])
        else:
            m += 1
        if message:
            print('The {}-th vote: {}'.format(i + 1, obtained))
        if m == 1:
            # Nobody below can ever catch the leader: outright win.
            return top[0]
        elif m >= M:
            # Everyone is still in contention; try to break the tie in the
            # trailing (equal-last) group via hypothetical run-offs.
            l = M - 2
            while l >= 0 and obtained[l][1] == obtained[-1][1]:
                l -= 1
            candidates = {obtained[i][0] for i in range(l + 1)}
            fighting = {obtained[i][0] for i in range(l + 1, M)}
            losers = set()
            for f in fighting:
                # Simulate the election restricted to the safe candidates
                # plus this one trailing candidate; if they cannot win (or
                # at least survive as a list result) they are eliminated.
                tmp_votes = deepcopy(votes)
                tmp_candidates = candidates | {f}
                for n in range(N):
                    while len(tmp_votes[n]) > 0 and not tmp_votes[n][-1] in tmp_candidates:
                        tmp_votes[n].pop()
                tmp_result = election(tmp_votes, message=False)
                if tmp_result != f and not (isinstance(tmp_result, list) and f in dict(tmp_result)):
                    losers.add(f)
            candidates |= fighting
            candidates -= losers
            if losers:
                if message:
                    print(' Candidates {} survived.'.format([obtained[j][0] for j in range(m) if obtained[j][0] in candidates]))
            else:
                # No trailing candidate could be eliminated: deadlock.
                if message:
                    print(' All the candidates survived.')
                if force_forward:
                    # Break the deadlock by dropping a random trailing candidate.
                    drop = obtained[randrange(l + 1, M)][0]
                    candidates.discard(drop)
                    if message:
                        print(' Drop the candidate \'{}\'.'.format(drop))
                elif message:
                    print(' Final winner was not determined.')
                    return obtained
        elif message:
            print(' Candidates {} survived.'.format([obtained[j][0] for j in range(m)]))
        # Remove eliminated candidates from the top of every ballot.
        for n in range(N):
            while len(votes[n]) > 0 and not votes[n][-1] in candidates:
                votes[n].pop()
if __name__ == '__main__':
    args = sys.argv
    # Optional first CLI argument K: number of randomized force_forward runs.
    if len(args) <= 1:
        K = 0
    else:
        K = int(args[1])
    # Read one ballot per stdin line until EOF; the line is reversed so the
    # most-preferred candidate ends up LAST (popped first by election()).
    votes = []
    while True:
        try:
            votes.append(list(input().strip().upper()[::-1]))
        except EOFError:
            break
    if K == 0:
        # Single deterministic election with progress messages.
        winner = election(votes)
        print('---')
        if isinstance(winner, list):
            # A list result means the election deadlocked; show survivors.
            print('The candidates \'{}\' are still surviving.'.format(winner))
        else:
            print('The candidate \'{}\' is the Final Winner !!!'.format(winner))
    else:
        # K randomized runs; tally how often each winner comes up.
        win_times = defaultdict(int)
        for _ in range(K):
            win_times[election(votes, message=False, force_forward=True)] += 1
        result = list(win_times.items())
        if len(result) == 1:
            winner = result[0][0]
            print('The candidate \'{}\' is the Final Winner !!'.format(winner))
        else:
            print('Final winner was not determined.')
            print('The winner distribution is: {}'.format(dict(win_times)))
|
3,713 | 67de51e2a176907fd89793bd3ec52f898130e104 | from django.shortcuts import render,redirect
from django.contrib.auth.decorators import login_required
from .form import UserForm, ProfileForm, PostForm
from django.contrib import messages
from .models import Profile, Projects
from django.contrib.auth.models import User
from django.http import HttpResponseRedirect
# Create your views here.
def home(request):
    """Render the landing page."""
    return render(request, 'home.html')
@login_required(login_url='/accounts/login/')
def profile(request):
    """Show the logged-in user's profile and handle profile updates.

    On POST, validates and saves both the user form and the profile form,
    then redirects back to the profile page.  On GET (or invalid POST)
    renders the profile page with empty forms.
    """
    if request.method == 'POST':
        userform = UserForm(request.POST, instance=request.user)
        profileform = ProfileForm(request.POST, request.FILES, instance=request.user.profile)
        # BUG FIX: ``userform.is_valid`` was referenced without calling it;
        # a bound method is always truthy, so invalid data was saved.
        if userform.is_valid() and profileform.is_valid():
            userform.save()
            profileform.save()
            messages.success(request, 'Profile updated successfully')
            return redirect('profile')
    # NOTE(review): unbound forms are rendered here, so an invalid POST
    # loses its error messages — confirm whether bound forms were intended.
    userform = UserForm()
    profileform = ProfileForm()
    curr_profile = Profile.objects.get(username=request.user)
    curr_projects = Projects.user_projects(request.user)
    params = {'curr_user': curr_profile,
              'curr_project': curr_projects,
              'userform': userform,
              'profileform': profileform,
              }
    return render(request, 'profile/index.html', params)
@login_required(login_url='/accounts/login/')
def postpoject(request):
    """Handle posting a new project; redirect to the profile on success."""
    if request.method == 'POST':
        postform = PostForm(request.POST, request.FILES)
        # BUG FIX: ``postform.is_valid`` was not called, so the (always
        # truthy) bound method let invalid submissions reach save().
        if postform.is_valid():
            pro = postform.save(commit=False)
            pro.projectowner = request.user  # attach the current user as owner
            pro.save()
            return redirect('profile')
    postform = PostForm()
    params = {'postform': postform, }
    return render(request, 'profile/postproject.html', params)
@login_required(login_url='/accounts/login/')
def userprofile(request, id):
    """Display another user's profile; redirect to own profile for self."""
    try:
        userdetail = Profile.objects.get(id=id)
        curr_projects = Projects.user_projects(userdetail.username)
        # Viewing your own profile goes to the editable profile page.
        if request.user.username == str(userdetail.username):
            return redirect('profile')
        else:
            return render(request, 'userprofile.html', {'userdetail': userdetail, 'curr_projects': curr_projects})
    except Profile.DoesNotExist:
        # NOTE(review): HttpResponseRedirect is given a message string, not a
        # URL — this redirects to a bogus location; an Http404 or an error
        # template is probably intended. TODO confirm and fix.
        return HttpResponseRedirect(', Sorry the Page You Looking For Doesnt Exist.')
@login_required(login_url='/accounts/login/')
def projectdetails(request, id):
    """Render the detail page of a single project identified by *id*."""
    project = Projects.objects.get(id=id)
    context = {'specproject': project}
    return render(request, 'profile/projectdetails.html', context)
def search(request):
    """Search projects by the 'search' query parameter.

    Without a (non-empty) search term, fall back to the home page.
    """
    term = request.GET.get('search') if 'search' in request.GET else None
    if not term:
        return redirect('home')
    matches = Projects.searchProjects(term)
    context = {'searchresults': matches, 'search_term': term}
    return render(request, 'search.html', context)
|
3,714 | 02ddf213cd3f455f8d8fbde8621fc4788124d5a9 | from django.db import models
class Building(models.Model):
    """A building imported from OpenStreetMap, with its center point."""

    Number = models.CharField(max_length=60)        # building number/identifier
    Description = models.CharField(max_length=120)
    OSMWAYID = models.DecimalField(decimal_places=0, max_digits=15)  # the OSM way id
    Lat = models.CharField(max_length=20)  # latitude of the building's center
    Lon = models.CharField(max_length=20)  # longitude of the building's center
class BuildingPoint(models.Model):
    """One OSM node on the outline of a Building."""

    # on_delete made explicit: CASCADE matches the pre-Django-2.0 implicit
    # default, and Django >= 2.0 requires the argument.
    parent = models.ForeignKey('Building', null=False, blank=False,
                               related_name='points',
                               on_delete=models.CASCADE)
    OSMNODEID = models.DecimalField(decimal_places=0, max_digits=15)  # the OSM node id
    Lat = models.CharField(max_length=20)  # latitude of the point
    Lon = models.CharField(max_length=20)  # longitude of the point
class Facinet(models.Model):
    """A Facinet data-collector node installed in a Building."""

    # on_delete made explicit: CASCADE matches the pre-Django-2.0 implicit
    # default, and Django >= 2.0 requires the argument.
    Building = models.ForeignKey('Building', null=False, blank=False,
                                 related_name='FacinetNodes',
                                 on_delete=models.CASCADE)
    location = models.IntegerField(unique=True, db_column='Location')
    name = models.TextField(db_column='Name')
    connectionstring = models.TextField(db_column='ConnectionString')
    tapidevice = models.TextField(db_column='TapiDevice', blank=True)
    synctime = models.CharField(max_length=3, db_column='SyncTime')
    online = models.CharField(max_length=3, db_column='Online')
    onlineall = models.CharField(max_length=3, db_column='OnlineAll')
    ## location for display
    Lat = models.CharField(max_length=20)  # latitude of the collector
    Lon = models.CharField(max_length=20)  # longitude of the collector
class Logger(models.Model):
    """A measurement logger attached to a Facinet collector."""

    # on_delete made explicit: CASCADE matches the pre-Django-2.0 implicit
    # default, and Django >= 2.0 requires the argument.
    Facinet = models.ForeignKey('Facinet', null=False, blank=False,
                                related_name='Loggers',
                                on_delete=models.CASCADE)
    loggerindex = models.IntegerField(unique=True, db_column='LoggerIndex')
    name = models.TextField(db_column='Name')
    online = models.IntegerField(db_column='Online')
    ## location for display
    Lat = models.CharField(max_length=20)  # latitude of the logger
    Lon = models.CharField(max_length=20)  # longitude of the logger
class LoggerMeasurement(models.Model):
    """A single timestamped reading produced by a Logger."""

    # on_delete made explicit: CASCADE matches the pre-Django-2.0 implicit
    # default, and Django >= 2.0 requires the argument.
    Logger = models.ForeignKey('Logger', null=False, blank=False,
                               related_name='Measurement',
                               on_delete=models.CASCADE)
    timestamp = models.DateTimeField()
    measurement = models.DecimalField(max_digits=12, decimal_places=4)
|
3,715 | 3a5c8ee49c50820cea201c088acca32e018c1501 | """
BCXYZ company has up to
employees.
The company decides to create a unique identification number (UID) for each of its employees.
The company has assigned you the task of validating all the randomly generated UIDs.
A valid UID must follow the rules below:
It must contain at least 2 uppercase English alphabet characters.
It must contain at least 3 digits.
It should only contain alphanumeric characters
No character should repeat.
There must be exactly 10 characters in a valid UID.
"""
import re
def is_valid_uid(uid):
    """Return True if *uid* is a valid employee UID.

    Rules: exactly 10 alphanumeric characters, at least 2 uppercase
    letters, at least 3 digits, and no repeated character.
    """
    return (
        # BUG FIX: re.search(r'[a-zA-Z0-9]{10}') only required 10 alnum
        # chars SOMEWHERE in the string, so 11+ char or partially-symbolic
        # UIDs passed; fullmatch enforces exactly 10 and alnum-only.
        re.fullmatch(r'[a-zA-Z0-9]{10}', uid) is not None
        and re.search(r'([A-Z].*){2}', uid) is not None   # >= 2 uppercase
        and re.search(r'([0-9].*){3}', uid) is not None   # >= 3 digits
        and re.search(r'(.).*\1', uid) is None            # no repeats
    )


if __name__ == "__main__":
    # First line: number of UIDs; then one UID per line.
    for _ in range(int(input())):
        print("Valid" if is_valid_uid(input()) else "Invalid")
3,716 | 9a665d126d7b48adbd876b48c3d8806eabea1108 | from .entity import EventBase, event_class
from .. import LOG as _LOG
LOG = _LOG.getChild('entity.event')
@event_class()
class FunctionCallEvent(EventBase):
    """
    function call
    """
    # Deferred events may be delayed/reordered by the inspector.
    deferred = True

    def parse_jsondict(self, jsdict):
        # A function-call event is meaningless without the callee's name.
        assert 'func_name' in jsdict['option'], 'func_name required'
        super(FunctionCallEvent, self).parse_jsondict(jsdict)
@event_class()
class PacketEvent(EventBase):
    """
    L7 packet message
    """
    deferred = True

    @classmethod
    def from_message(cls, src_process, dst_process, message):
        """Build a PacketEvent carrying *message* between two processes."""
        event = cls()
        # we do not set event.process here
        event.option = dict(
            src_process=src_process,
            dst_process=dst_process,
            message=message,
        )
        return event
@event_class()
class LogEvent(EventBase):
    """
    syslog (not deferrable)
    """
    # Log lines are delivered immediately; the inspector may not delay them.
    deferred = False
@event_class()
class InspectionEndEvent(EventBase):
    """
    Inspection end (not deferrable)
    """
    # Marks the end of an inspection session; must not be delayed.
    deferred = False
|
3,717 | e6851e86fa86ab2096f059218b2b8a2994642807 | """
You have a map that marks the locations of treasure islands. Some of the map area has jagged rocks and dangerous reefs.
Other areas are safe to sail in. There are other explorers trying to find the treasure.
So you must figure out a shortest route to one of the treasure islands.
Assume the map area is a two dimensional grid, represented by a matrix of characters.
You must start from one of the starting point (marked as S) of the map and can move one block up, down,
left or right at a time. The treasure island is marked as X. Any block with dangerous rocks or reefs will be marked as
D. You must not enter dangerous blocks. You cannot leave the map area. Other areas O are safe to sail in.
Output the minimum number of steps to get to any of the treasure islands.
"""
import math
def find_treasure_util(grid, i, j):
    """Return the minimum number of steps from start (i, j) to any 'X'.

    'D' cells are impassable; every other cell is safe.  Returns -1 when
    no treasure island is reachable.

    BUG FIX: the original used ``queue.pop()`` (LIFO), which performs a
    depth-first search and can return a non-minimal step count; it also
    marked cells visited only at pop time, allowing duplicate queue
    entries.  This version does a level-by-level BFS with visited marked
    on enqueue.
    """
    rows, columns = len(grid), len(grid[0])
    directions = ((0, 1), (0, -1), (1, 0), (-1, 0))
    visited = [[False] * columns for _ in range(rows)]
    visited[i][j] = True
    frontier = [(i, j)]          # all cells at the current BFS depth
    step = 0
    while frontier:
        step += 1
        next_frontier = []
        for x, y in frontier:
            for dx, dy in directions:
                cx, cy = x + dx, y + dy
                if not (0 <= cx < rows and 0 <= cy < columns):
                    continue
                if grid[cx][cy] == 'X':
                    # First time any X is adjacent: 'step' moves suffice,
                    # and BFS guarantees this is minimal.
                    return step
                if grid[cx][cy] != 'D' and not visited[cx][cy]:
                    visited[cx][cy] = True
                    next_frontier.append((cx, cy))
        frontier = next_frontier
    return -1
def find_treasure(grid):
    """Return the minimum steps from any start 'S' to any treasure 'X'.

    Returns -1 for an empty grid, when the grid has no start, or when no
    start can reach a treasure.

    BUG FIX: the original fed the util's -1 "unreachable" sentinel into
    min(), so one unreachable start corrupted the answer (min(5, -1) ==
    -1); it also returned math.inf when no path existed.  Unreachable
    starts are now skipped and -1 is returned consistently.
    """
    if not grid or not grid[0]:
        return -1
    best = math.inf
    for i in range(len(grid)):
        for j in range(len(grid[i])):
            if grid[i][j] == 'S':
                steps = find_treasure_util(grid, i, j)
                if steps != -1:  # ignore starts that cannot reach any X
                    best = min(best, steps)
    return best if best != math.inf else -1
if __name__ == '__main__':
    # Demo map: S = start, X = treasure, D = dangerous, O = open sea.
    grid = [['S', 'O', 'O', 'S', 'S'],
            ['D', 'O', 'D', 'O', 'D'],
            ['O', 'O', 'O', 'O', 'X'],
            ['X', 'D', 'D', 'O', 'O'],
            ['X', 'D', 'D', 'D', 'O']]
    print(find_treasure(grid))
3,718 | bed3d83f682404719a95be360cdd74be9dc87991 | # coding: utf-8
# Scrapy project settings for the 'lg' crawler.
# NOTE(review): Scrapy documents BOT_NAME as a plain string ('lg'); confirm
# the list form here is intentional.
BOT_NAME = ['lg']
SPIDER_MODULES = ['lg.spiders']
NEWSPIDER_MODULE = 'lg.spiders'
DOWNLOAD_DELAY = 0.1  # delay between requests, in seconds
LOG_LEVEL = 'WARNING'
|
3,719 | 0345c3c2049c972370cd7bde5a6e0a1dfa5dfe66 | __path__.append('/cvmfs/cms.cern.ch/slc6_amd64_gcc481/cms/cmssw-patch/CMSSW_7_0_6_patch3/python/ggAnalysis')
|
3,720 | ff65e92699c6c9379ac40397b3318c3f6bf7d49a | # coding=UTF-8
from unittest import TestCase
from fwk.util.rect import Rect
class RectSizeTest(TestCase):
    """Width/height must be derivable from either coordinates or sizes."""

    def test_sizes_from_coords(self):
        # Rect built from 4 edge coordinates reports the derived sizes.
        rect = Rect(top=33, bottom=22, left=10, right=20)
        self.assertEqual(rect.width, 10)
        self.assertEqual(rect.height, 11)

    def test_sizes_from_sizes(self):
        # Rect built from one corner plus explicit sizes echoes them back.
        rect = Rect(top=23, height=48, left=64, width=67)
        self.assertEqual(rect.width, 67)
        self.assertEqual(rect.height, 48)
class RectResizeTest(TestCase):
    """resize()/scale() must anchor the rectangle at the given origin."""

    def setUp(self):
        # 100x100 rect used by every resize test.
        self.rect = Rect(top=200, bottom=100, left=400, right=500)

    def test_resize_center(self):
        # Anchored at the center: both sides grow/shrink symmetrically.
        self.rect.resize(width=200, height=50, origin='center-center')
        self.assertEqual(self.rect.width, 200)
        self.assertEqual(self.rect.height, 50)
        self.assertEqual(self.rect.top, 175)
        self.assertEqual(self.rect.right, 550)

    def test_resize_bottom_left(self):
        # Anchored bottom-left: that corner must not move.
        self.rect.resize(width=253, height=68, origin='bottom-left')
        self.assertEqual(self.rect.width, 253)
        self.assertEqual(self.rect.height, 68)
        self.assertEqual(self.rect.bottom, 100)
        self.assertEqual(self.rect.left, 400)

    def test_resize_top_right(self):
        # Anchored top-right: that corner must not move.
        self.rect.resize(width=253, height=68, origin='top-right')
        self.assertEqual(self.rect.width, 253)
        self.assertEqual(self.rect.height, 68)
        self.assertEqual(self.rect.top, 200)
        self.assertEqual(self.rect.right, 500)

    def test_scale(self):
        # scale(0.5) halves both dimensions around the center.
        self.rect.scale(0.5, origin='center-center')
        self.assertEqual(self.rect.width, 50)
        self.assertEqual(self.rect.height, 50)
        self.assertEqual(self.rect.left, 425)
        self.assertEqual(self.rect.bottom, 125)
class RectMoveTest(TestCase):
    """moveTo() must place the chosen origin at the target, size unchanged."""

    def setUp(self):
        # 2x2 rect centered on the axes' origin.
        self.rect = Rect(top=1, bottom=-1, left=-1, right=1)

    def test_move_center(self):
        self.rect.moveTo(20, 10, 'center-center')
        self.assertEqual(self.rect.top, 11)
        self.assertEqual(self.rect.right, 21)
        self.assertEqual(self.rect.width, 2)
        self.assertEqual(self.rect.height, 2)

    def test_move_bottom_left(self):
        self.rect.moveTo(30, 40, 'bottom-left')
        self.assertEqual(self.rect.bottom, 40)
        self.assertEqual(self.rect.left, 30)
        self.assertEqual(self.rect.width, 2)
        self.assertEqual(self.rect.height, 2)
class RectInsetTest(TestCase):
    """inset() shrinks the rect inward; it must clamp instead of inverting."""

    def setUp(self):
        # 10 wide x 100 tall rect.
        self.rect = Rect(bottom=100, top=200, left=10, right=20)

    def test_inset_separate_values(self):
        # Separate horizontal (1) and vertical (10) insets.
        self.rect.inset(1, 10)
        self.assertEqual(self.rect.bottom, 110)
        self.assertEqual(self.rect.top, 190)
        self.assertEqual(self.rect.left, 11)
        self.assertEqual(self.rect.right, 19)

    def test_inset_single_value(self):
        # A single value insets all four edges equally.
        self.rect.inset(2)
        self.assertEqual(self.rect.bottom, 102)
        self.assertEqual(self.rect.top, 198)
        self.assertEqual(self.rect.left, 12)
        self.assertEqual(self.rect.right, 18)

    def test_inset_with_underflow(self):
        # Inset larger than half the size collapses to a zero-size rect at
        # the center rather than producing negative dimensions.
        self.rect.inset(51)
        self.assertEqual(self.rect.bottom, 150)
        self.assertEqual(self.rect.height, 0)
        self.assertEqual(self.rect.left, 15)
        self.assertEqual(self.rect.width, 0)
class RectCloneAndMagic(TestCase):
    """clone(), equality, and repr() behavior."""

    def test_clone_and_compare(self):
        # A clone compares equal; mutating the clone breaks equality.
        rect1 = Rect(left=10, bottom=30, width=100, height=410)
        rect2 = rect1.clone()
        self.assertEqual(rect1, rect2)
        rect2 = rect1.clone().inset(10)
        self.assertNotEqual(rect1, rect2)

    def test_string_representation(self):
        '''
        Test verifying the clarity and unambiguity of the string
        representation of a rectangle object.

        (Translated from Russian:) This test was written late at night, so
        doubts are quite justified both about the correctness of the method
        underlying it and about the sanity of its author at the time of
        writing.
        '''
        # Distinctive coordinates so each must appear verbatim in repr().
        rect = Rect(left=432548, right=876945, bottom=129543, top=410666)
        srepr = repr(rect)
        self.assertTrue('Rect' in srepr)
        self.assertTrue('432548' in srepr)
        self.assertTrue('876945' in srepr)
        self.assertTrue('129543' in srepr)
        self.assertTrue('410666' in srepr)
|
3,721 | 612a3d168a09fc26530b95d258cbb4de6728419d | #!/usr/bin/env python
import psycopg2
DBNAME = "news"
query1 = """
select title, count(*) as numOfViews from articles,log
where concat('/article/', articles.slug) = log.path
group by title order by numOfViews desc limit 3;
"""
query2 = """
select authors.name, count(*) as numOfViews
from articles, authors, log
where articles.author = authors.id
and concat('/article/', articles.slug) = log.path
group by authors.name order by numOfViews desc ;
"""
query3 = """
select innerQuery.badDay, ROUND((100.0*innerQuery.err/innerQuery.total),3)
as error from (select date_trunc('day', time) as badDay,
count(*) as total,
sum(case when status!='200 OK' then 1 else 0 end) as err
from log
group by badDay) as innerQuery
where round((100.0*innerQuery.err/innerQuery.total),3) >1;
"""
result = ''
def get_data(query):
    """Run *query* against the news database and return all result rows."""
    connection = psycopg2.connect(database=DBNAME)
    cursor = connection.cursor()
    cursor.execute(query)
    rows = cursor.fetchall()
    connection.close()
    return rows
def fileWrite(content):
    """Overwrite ./result.txt with *content*.

    Uses a ``with`` block so the handle is closed even if write() raises
    (the original left the file open on error).
    """
    with open('./result.txt', 'w') as file:
        file.write(content)
def appendToResult(content, isError=False):
    """Format DB rows, append them to the global report, and persist it.

    content -- rows fetched from the database (tuples).
    isError -- True when rows are (day, error-percentage) pairs from the
               error-rate query; otherwise (name, view-count) pairs.
    """
    global result
    if isError:
        for c in content:
            # BUG FIX: error lines had no trailing newline, so multiple
            # error days ran together on a single line.
            result += c[0].strftime("%Y-%m-%d") + ' - ' + str(c[1]) + '% error\n'
    else:
        for c in content:
            result += c[0] + ' - ' + str(c[1]) + ' views \n'
    fileWrite(result)
if __name__ == '__main__':
    # Build the three-question report, print it, and write result.txt.
    result += '\n1. What are the most popular three articles of all time?\n\n'
    appendToResult(get_data(query1))
    result += ' \n2. Who are the most popular article authors of all time?\n\n'
    appendToResult(get_data(query2))
    result += '''\n3. On which days did more than
    1% of requests lead to errors?\n\n'''
    # isError=True: rows are (day, error-percentage) pairs.
    appendToResult(get_data(query3), True)
    print(result)
    fileWrite(result)
|
3,722 | 124ece8f2f4ecc53d19657e2463cc608befb1ce7 | from rest_framework import serializers
from users.models import bills, Userinfo
class billsSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes a user's bill: name, amount, due date, and owner id."""

    class Meta:
        model = bills
        fields = ('bname', 'bamount', 'duedate', 'user_id')
class UserinfoSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes a user's personal/contact details."""

    class Meta:
        model = Userinfo
        fields = ('fname', 'lname', 'address', 'city', 'state', 'zipcode', 'dob', 'phone', 'email', 'author')
3,723 | bc8d3a5e3ed845b4ab2d203bec47881be64ba3f8 | import discord
from discord.ext import commands
from os import path
import os
import datetime as dt
import numpy as np
import math
# client = commands.Bot(command_prefix = '.', case_insensitive=True)
# UTOPIA = 679921845671035034
# SECURITY: real Discord bot tokens were committed here in plain text.
# They have been redacted — revoke the exposed tokens and load replacements
# from an environment variable or an untracked config file instead.
# DEV_BOT_TOKEN = '<redacted>'
# BOT_TOKEN = '<redacted>'
# CREDS = BOT_TOKEN
|
3,724 | 45b20b57a3579c2527c674d0c2af88eedddadcae | from LinkedList import LinkedList
from LinkedListHelper import CreateLinkedList
class LinkedListMod(LinkedList):
    """LinkedList extended with a method that removes every node."""

    def remove_allnode(self):
        """Detach all nodes from the list.

        Dropping the head reference is sufficient in Python: once the
        chain is unreachable the garbage collector reclaims every node,
        so the original node-by-node walk (whose ``del temp`` only
        unbound a local) was unnecessary.
        """
        self.head = None
def main():
    """Demo: build a list, display it, remove all nodes, display again."""
    l1 = LinkedListMod()
    # Helper that populates the list — defined in LinkedListHelper.
    CreateLinkedList(l1)
    l1.display()
    print("Remove the Linked List.....")
    l1.remove_allnode()
    l1.display()


if __name__ == "__main__":
    main()
3,725 | c860c1fa6e7610c60077f0eab1572895a23393fd | #!/usr/bin/python
# Copyright (c) 2020 Maryushi3
import emoji_data_python as edp
import sys
import pyautogui
from Xlib import display
from PyQt5.QtWidgets import QApplication, QGridLayout, QLabel, QLineEdit, QScrollArea, QSizePolicy, QStackedLayout, QVBoxLayout, QWidget
from PyQt5.QtCore import QEvent, QSettings, Qt, pyqtSignal
from PyQt5.QtGui import QFont
from PyQt5 import QtTest
# globals
emojiGridLayout = None
mainWindow = None
emojiGridColumnCount = 5
emojiGridRowCount = 4
emojiToShowCount = 0
fullRowsCount = 0
lastRowEmojiCount = 0
emojiFontSize = 20
selectedEmojiPosition = list((0,0))
willExitOnItsOwn = False
selectedEmojiChar=''
settingsFile = None
historyList = []
foundAnyEmoji = True
layoutStack = None
font = QFont()
font.setPointSize(emojiFontSize)
# quits without a lag
# quits without a lag
def quitNicely():
    """Hide the main window before quitting so the exit appears instant."""
    mainWindow.hide()
    quit()
# gets mouse position from Xlib
def mousePosition():
    """Return the pointer's global (x, y), queried directly via Xlib."""
    root = display.Display().screen().root
    pointer = root.query_pointer()._data
    return pointer["root_x"], pointer["root_y"]
# copies and pastes selected emoji
# copies and pastes selected emoji
def execute_emoji(char):
    """Record *char* in history, copy it to the clipboard, paste, and quit."""
    add_char_to_history(char)
    global willExitOnItsOwn
    # Flag so the focus-out event filter does not treat our own hide() as
    # the user clicking away.
    willExitOnItsOwn = True
    mainWindow.hide()
    QApplication.clipboard().setText(char)
    # Simulate Ctrl+V into whatever window had focus before the picker.
    pyautogui.hotkey("ctrl", "v")
    # Give the paste time to land before the clipboard owner exits.
    QtTest.QTest.qWait(250)
    quit()
# fills grid with given char list and takes care of layout and counting
# fills grid with given char list and takes care of layout and counting
def fill_grid_with_char_list(charList):
    """Populate the emoji grid with *charList*, updating layout bookkeeping.

    Shows at most emojiGridColumnCount * emojiGridRowCount characters, keeps
    the global counters used by the selection wraparound logic in sync, and
    switches the stacked layout to the "no emoji found" page when empty.
    """
    # for wraparound
    global emojiToShowCount
    global fullRowsCount
    global lastRowEmojiCount
    emojiToShowCount = min(len(charList), (emojiGridColumnCount * emojiGridRowCount))
    fullRowsCount = emojiToShowCount // emojiGridColumnCount
    lastRowEmojiCount = emojiToShowCount % emojiGridColumnCount
    global foundAnyEmoji
    if emojiToShowCount > 0:
        foundAnyEmoji = True
        layoutStack.setCurrentIndex(0)   # page 0: the emoji grid
    else:
        foundAnyEmoji = False
        layoutStack.setCurrentIndex(1)   # page 1: "No emoji found" label
    # clear grid (reparenting to None detaches the old labels)
    global emojiGridLayout
    for i in reversed(range(emojiGridLayout.count())):
        emojiGridLayout.itemAt(i).widget().setParent(None)
    # fill with new chars, row-major, stopping after the last visible row
    rowIdx = 0
    colIdx = 0
    for emoji in charList:
        if rowIdx > emojiGridRowCount - 1:
            break
        label = QClickableLabel(emoji)
        label.clicked.connect(execute_emoji)
        label.setFont(font)
        label.setAlignment(Qt.AlignCenter)
        label.setMinimumHeight(49)
        emojiGridLayout.addWidget(label, rowIdx, colIdx)
        emojiGridLayout.setAlignment(label, Qt.AlignTop)
        if colIdx < emojiGridColumnCount - 1:
            colIdx += 1
        else:
            colIdx = 0
            rowIdx += 1
    emojiGridLayout.setContentsMargins(0, 0, 0, 0)
    emojiGridLayout.setHorizontalSpacing(0)
    emojiGridLayout.setVerticalSpacing(0)
    # Pre-select the first emoji so Enter works immediately.
    if emojiToShowCount > 0:
        highlight_emoji([0, 0])
# searches for emoji, and passes them to fill_grid_with_char_list
# searches for emoji, and passes them to fill_grid_with_char_list
def execute_search(text):
    """Search emoji by name for *text* and show the matches in the grid.

    An empty or whitespace-only query falls back to the recently-used
    history instead.
    """
    # NOTE: the original assigned a dead local ``selectedEmoji = (0, 0)``
    # here (the real global is ``selectedEmojiPosition``); removed as it
    # had no effect.
    if not text or text.isspace():
        fill_grid_with_history()
        return
    foundEmoji = edp.find_by_name(text)
    fill_grid_with_char_list([emoji.char for emoji in foundEmoji])
# handles what to do after hovering over a given label
# handles what to do after hovering over a given label
def emoji_hovered(hoveredLabel):
    """Move the selection highlight to the grid cell under the mouse."""
    parentGrid = hoveredLabel.parentWidget().layout()
    # Map the widget back to its (row, column) position in the grid.
    hoveredIndex = parentGrid.indexOf(hoveredLabel)
    hoveredRow, hoveredColumn, _, _ = parentGrid.getItemPosition(hoveredIndex)
    highlight_emoji([hoveredRow, hoveredColumn])
# selects, sets style and handles wraparound
# selects, sets style and handles wraparound
def highlight_emoji(newPosition):
    """Select the emoji at *newPosition* ([row, col]), with wraparound.

    Positions outside the grid are wrapped to the opposite edge.  When the
    last row is only partially filled, the wraparound has to distinguish
    full rows/columns from the short trailing row, hence the case analysis
    below.  Updates the selection highlight stylesheet and the global
    selectedEmojiChar.
    """
    global selectedEmojiPosition
    # grid is filled to a full rectangle (last row fills the window horizontally)
    if lastRowEmojiCount == 0:
        if newPosition[0] < 0:
            newPosition[0] = fullRowsCount - 1
        elif newPosition[1] < 0:
            newPosition[1] = emojiGridColumnCount - 1
        elif newPosition[0] > fullRowsCount - 1:
            newPosition[0] = 0
        elif newPosition[1] > emojiGridColumnCount - 1:
            newPosition[1] = 0
    # last row is not full
    else:
        # horizontal wraparound through RIGHT edge for full rows
        if (newPosition[0] < fullRowsCount) and (newPosition[1] > emojiGridColumnCount - 1):
            newPosition[1] = 0
        # horizontal wraparound through LEFT edge for full rows
        elif (newPosition[0] < fullRowsCount) and (newPosition[1] < 0):
            newPosition[1] = emojiGridColumnCount - 1
        # horizontal wraparound through right edge for NON FULL rows
        elif (newPosition[0] == fullRowsCount) and (newPosition[1] > lastRowEmojiCount - 1) and ((selectedEmojiPosition[0] - newPosition[0]) == 0):
            newPosition[1] = 0
        # horizontal wraparound through LEFT edge for NON FULL rows
        elif (newPosition[0] >= fullRowsCount) and (newPosition[1] < 0):
            newPosition[1] = lastRowEmojiCount - 1
        # vertical wraparound through BOTTOM edge for full cols
        elif (newPosition[0] > fullRowsCount) and (newPosition[1] < lastRowEmojiCount):
            newPosition[0] = 0
        # vertical wraparound through TOP edge for full cols
        elif (newPosition[0] < 0) and (newPosition[1] < lastRowEmojiCount):
            newPosition[0] = fullRowsCount
        # vertical wraparound through BOTTOM edge for NON FULL cols
        elif (newPosition[0] > fullRowsCount - 1) and (newPosition[1] > lastRowEmojiCount - 1):
            newPosition[0] = 0
        # vertical wraparound through TOP edge for NON FULL cols
        elif (newPosition[0] < 0) and (newPosition[1] > lastRowEmojiCount - 1):
            newPosition[0] = fullRowsCount - 1
    # Clear the highlight from the previously selected cell (if any).
    oldPosition = selectedEmojiPosition
    selectedEmojiPosition = newPosition
    widgetToDeselect = emojiGridLayout.itemAtPosition(oldPosition[0], oldPosition[1])
    if widgetToDeselect:
        widgetToDeselect = widgetToDeselect.widget()
        widgetToDeselect.setStyleSheet("")
    # Highlight the new cell and remember its emoji character for Enter.
    global selectedEmojiChar
    widgetToSelect = emojiGridLayout.itemAtPosition(selectedEmojiPosition[0], selectedEmojiPosition[1])
    if widgetToSelect:
        widgetToSelect = widgetToSelect.widget()
        selectedEmojiChar = widgetToSelect.text()
        widgetToSelect.setStyleSheet("QLabel{background-color: palette(highlight);}")
# handles direction where to move emoji selection
# handles direction where to move emoji selection
def move_selection(direction):
    """Move the highlight one cell in *direction* (right/left/up/down)."""
    offsets = {
        "right": (0, 1),
        "left": (0, -1),
        "up": (-1, 0),
        "down": (1, 0),
    }
    delta = offsets.get(direction)
    # Unknown directions are silently ignored, as before.
    if delta is not None:
        highlight_emoji([pos + step for pos, step in zip(selectedEmojiPosition, delta)])
# handles Esc
# handles Esc
def on_key(key):
    """Window-level key handler: Esc closes the picker."""
    # test for a specific key
    if key == Qt.Key_Escape:
        quitNicely()
# adds given emoji to history and saves it to config file
# adds given emoji to history and saves it to config file
def add_char_to_history(char):
    """Move *char* to the front of the recent-emoji history and persist it.

    The history is capped at one full grid's worth of entries.
    """
    global settingsFile
    global historyList
    if not historyList:
        historyList = [char]
    else:
        # Deduplicate: drop the first earlier occurrence, then prepend.
        if char in historyList:
            historyList.remove(char)
        capacity = emojiGridColumnCount * emojiGridRowCount
        historyList = ([char] + historyList)[:capacity]
    settingsFile.setValue('history/history', historyList)
# wrapper around filling the grid
# wrapper around filling the grid
def fill_grid_with_history():
    """Show the recently-used emoji (the history list) in the grid."""
    fill_grid_with_char_list(historyList)
# main app window class with inits
# main app window class with inits
class EmojiPickerWindow(QWidget):
    """Frameless always-on-top popup with an emoji grid and a search box."""

    def __init__(self):
        super().__init__()
        # focus handling: eventFilter() quits when the window loses focus
        self.installEventFilter(self)
        self.title = 'Emoji picker \(^o^)/'
        self.width = 281
        self.height = 251
        # start with text box centered at mouse pointer position
        self.left, self.top = mousePosition()
        self.left -= self.width // 2
        self.top += (24 - self.height)
        self.initSettings()
        self.initUI()

    def initUI(self):
        """Build the widget tree: grid page, 'not found' page, search box."""
        # topmost window layout
        layout = QVBoxLayout()
        global layoutStack
        layoutStack = QStackedLayout()
        layoutStackWidget = QWidget()
        layoutStackWidget.setLayout(layoutStack)
        # scroll area setup shenanigans
        scrollArea = QScrollArea()
        gridWidget = QWidget()
        global emojiGridLayout
        emojiGridLayout = QGridLayout(gridWidget)
        emojiGridLayout.setAlignment(Qt.AlignTop | Qt.AlignLeft)
        # stretch grid to widget
        for col in range(emojiGridColumnCount):
            emojiGridLayout.setColumnStretch(col, 1)
        for row in range(emojiGridRowCount):
            emojiGridLayout.setRowStretch(row, 1)
        scrollArea.setWidget(gridWidget)
        scrollArea.setWidgetResizable(True)
        layoutStack.addWidget(scrollArea)
        # info to show when no emoji has been found (stack page 1)
        noEmojiFoundLabel = QLabel("No emoji found 🙁")
        noEmojiFoundLabel.setAlignment(Qt.AlignCenter | Qt.AlignHCenter | Qt.AlignVCenter)
        layoutStack.addWidget(noEmojiFoundLabel)
        layout.addWidget(layoutStackWidget)
        # fill with a placeholder for now (smiling or smile)
        # execute_search('smil')
        fill_grid_with_history()
        # bottom text entry; its keyPressEvent drives the grid selection
        lineEdit = QLineEditWithArrows()
        lineEdit.textChanged.connect(execute_search)
        layout.addWidget(lineEdit)
        # align it to the bottom, so that it won't stay centered vertically
        layout.setAlignment(lineEdit, Qt.AlignBottom)
        self.setLayout(layout)
        self.setWindowTitle(self.title)
        self.setGeometry(self.left, self.top, self.width, self.height)
        self.setFixedSize(self.width, self.height)
        self.setWindowFlags(Qt.FramelessWindowHint | Qt.WindowStaysOnTopHint)
        # needed for filling the grid out from the outside
        global mainWindow
        mainWindow = self
        # esc handling
        self.keyPressed.connect(on_key)
        self.show()
        lineEdit.setFocus()

    def initSettings(self):
        """Load the persisted recent-emoji history from QSettings."""
        global settingsFile
        global historyList
        settingsFile = QSettings("emoji-picker-qtpy", "history");
        historyList = settingsFile.value('history/history')

    # key handling: re-emit every key press as a signal for on_key()
    keyPressed = pyqtSignal(int)

    def keyPressEvent(self, event):
        super(EmojiPickerWindow, self).keyPressEvent(event)
        self.keyPressed.emit(event.key())

    # focus handling
    global willExitOnItsOwn

    def eventFilter(self, object, event):
        """Quit when the window loses focus, unless we hid it ourselves."""
        if event.type() == QEvent.WindowDeactivate or event.type() == QEvent.FocusOut:
            if (not willExitOnItsOwn):
                quitNicely()
        return False
# clickable label
# clickable label
class QClickableLabel(QLabel):
    """QLabel that emits its text (the emoji) on click and hover."""

    # emitted with the label's text (the emoji character) on mouse press
    clicked = pyqtSignal(str)

    def __init__(self, parent=None):
        QLabel.__init__(self, parent)

    def mousePressEvent(self, ev):
        self.clicked.emit(self.text())

    def enterEvent(self, ev):
        # Hovering moves the selection highlight to this cell.
        emoji_hovered(self)
# keyboard handling override for QlineEdit
# keyboard handling override for QlineEdit
class QLineEditWithArrows(QLineEdit):
    """Search box whose arrow/Enter keys drive the emoji grid selection."""

    def keyPressEvent(self, ev):
        global selectedEmojiChar
        global foundAnyEmoji
        if (ev.key() == Qt.Key_Right):
            move_selection("right")
        if (ev.key() == Qt.Key_Left):
            move_selection("left")
        if (ev.key() == Qt.Key_Up):
            move_selection("up")
        if (ev.key() == Qt.Key_Down):
            move_selection("down")
        if (ev.key() == Qt.Key_Return or ev.key() == Qt.Key_Enter):
            # Enter pastes the highlighted emoji; with no results it just quits.
            if foundAnyEmoji:
                execute_emoji(selectedEmojiChar)
            else:
                quitNicely()
        # Only Tab is swallowed; every other key (including the arrows and
        # Enter handled above) ALSO reaches the base QLineEdit handler, so
        # e.g. arrow keys move both the grid selection and the text cursor.
        # NOTE(review): confirm that double-handling is intentional.
        if (ev.key() == Qt.Key_Tab):
            pass
        else:
            QLineEdit.keyPressEvent(self, ev)
if __name__ == '__main__':
app = QApplication(sys.argv)
ex = EmojiPickerWindow()
sys.exit(app.exec_())
|
3,726 | 1685a2c49bea14e6fcaffb03634f6875f8fa1049 | # encoding:utf-8
import tensorflow as tf
# BUG FIX: ``import p182.py as p182`` tries to import a submodule named
# ``py`` from a package ``p182`` and raises ModuleNotFoundError; the module
# defined in p182.py is imported as a plain module instead.
import p182

# Build the file list and create the input-file queue from it.  Before
# running this input pipeline, all raw data must be converted to a common
# format and stored in TFRecord files; the pattern below should match all
# TFRecord files that provide training data.
files = tf.train.match_filenames_once("/home/shenxj/tf-work/datasets/file_pattern-*")
filename_queue = tf.train.string_input_producer(files, shuffle=False)

# Parse the records in the TFRecord files.  'image' holds the raw image
# bytes, 'label' the example's class label, and height/weigth/channels the
# image dimensions.
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
features = tf.parse_single_example(
    serialized_example,
    features={
        'image': tf.FixedLenFeature([], tf.string),
        'label': tf.FixedLenFeature([], tf.int64),
        'height': tf.FixedLenFeature([], tf.int64),
        # NOTE: 'weigth' (sic) is the key the records were declared with;
        # kept as-is so parsing matches the written data.
        'weigth': tf.FixedLenFeature([], tf.int64),
        'channels': tf.FixedLenFeature([], tf.int64),
    }
)
image, label = features['image'], features['label']
# BUG FIX: the original read features['wigth'], which is not a declared
# feature key and raised KeyError; the declared key is 'weigth'.
height, width = features['height'], features['weigth']
channels = features['channels']

# Decode the raw pixel data and restore the image from its dimensions.
decoded_image = tf.decode_raw(image, tf.uint8)
decoded_image.set_shape([height, width, channels])
# Input image size expected by the network.
image_size = 299
# preprocess_for_train is the image-preprocessing routine from p182.
distorted_image = p182.preprocess_for_train(
    decoded_image, image_size, image_size, None
)

# Batch the preprocessed images and labels with tf.train.shuffle_batch
# into the mini-batches the training loop consumes.
min_after_dequeque = 10000
batch_size = 100
capacity = min_after_dequeque + 3 * batch_size
image_batch, label_batch = tf.train.shuffle_batch(
    [distorted_image, label], batch_size=batch_size,
    capacity=capacity, min_after_dequeue=min_after_dequeque
)

# Define the network and its training step.  image_batch feeds the input
# layer; label_batch provides the ground-truth answers.
# NOTE(review): inference, calc_loss, learning_rate and TRAINING_ROUNDS are
# not defined in this file — they must come from elsewhere before this runs.
logit = inference(image_batch)
loss = calc_loss(logit, label_batch)
train_step = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(loss)

# Run the training loop in a session.
with tf.Session() as sess:
    # Setup: initialize variables and start the queue-runner threads.
    tf.initialize_all_variables().run()
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)
    # Training loop.
    for i in range(TRAINING_ROUNDS):
        sess.run(train_step)
    # Stop all threads.
    coord.request_stop()
    coord.join(threads)
|
3,727 | f0c621583caf6eea6f790649862a03a464f6574b | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import csv
from bookstoscrapy import settings
def write_to_csv(item):
    """Append *item*'s values as one row to the configured CSV file.

    Column order follows ``item.keys()`` iteration order.  The ``with``
    block closes the file handle promptly (the original relied on garbage
    collection to close it).
    """
    with open(settings.csv_file_path, 'a') as f:
        writer = csv.writer(f, lineterminator='\n')
        writer.writerow([item[key] for key in item.keys()])
class WriteToCsv(object):
    """Scrapy item pipeline that appends every scraped item to a CSV file."""

    # kept for compatibility; not used by process_item
    item_counter = 0

    def process_item(self, item, spider):
        """Write *item* to the CSV and pass it through unchanged."""
        write_to_csv(item)
        return item
|
3,728 | 4fea9941defd6703be3cae034d979933262074e3 | with open("out.txt", "w", encoding = "utf_8") as file:
file.write("明日の天気です∖n")
file.write("関西地方はおおむね晴れ.")
file.write("紅葉を見るには絶好の日和でしょう∖n")
file.write(“映像は嵐山の様子です.")
file.write("今年も大変な数の観光客が訪れているようですね.∖n")
|
3,729 | c3967ab15b8278d958fa5ff6ff48bbfb0b086238 | """
app_dist_Tables00.py illustrates use of pitaxcalc-demo release 2.0.0
(India version).
USAGE: python app_dist_Tables00.py
"""
import pandas as pd
from taxcalc import *
import numpy as np
from babel.numbers import format_currency
import matplotlib.pyplot as plt
from matplotlib.gridspec import GridSpec
# Generate Charts
# first merge the files
START_YEAR = 2017
END_YEAR = 2023
BASE_YEAR = 2019
year = START_YEAR
a={}
# For each simulated year, merge the four fixed-width dist tables
# (current-law avg/total, reform avg/total) on Income_Bracket, suffixing each
# data column with its kind and year so the merged frame stays unambiguous.
for year in range(BASE_YEAR, END_YEAR+1):
    filename1='dist-table-all-clp-avg-'+str(year)+'.txt'
    df1 = pd.read_fwf(filename1)
    df1.drop('Unnamed: 0',axis=1,inplace=True)
    col_list = df1.columns[1:] + '_avg_clp_' + str(year)
    col_list = col_list.insert(0, 'Income_Bracket')
    df1.columns = col_list
    filename2='dist-table-all-clp-total-'+str(year)+'.txt'
    df2 = pd.read_fwf(filename2)
    df2.drop('Unnamed: 0',axis=1,inplace=True)
    col_list = df2.columns[1:] + '_total_clp_' + str(year)
    col_list = col_list.insert(0, 'Income_Bracket')
    df2.columns = col_list
    a[year] = pd.merge(df1, df2, how="inner", on="Income_Bracket")
    filename3='dist-table-all-ref-avg-'+str(year)+'.txt'
    df3 = pd.read_fwf(filename3)
    df3.drop('Unnamed: 0',axis=1,inplace=True)
    col_list = df3.columns[1:] + '_avg_ref_' + str(year)
    col_list = col_list.insert(0, 'Income_Bracket')
    df3.columns = col_list
    a[year] = pd.merge(a[year], df3, how="inner", on="Income_Bracket")
    filename4='dist-table-all-ref-total-'+str(year)+'.txt'
    df4 = pd.read_fwf(filename4)
    df4.drop('Unnamed: 0',axis=1,inplace=True)
    col_list = df4.columns[1:] + '_total_ref_' + str(year)
    col_list = col_list.insert(0, 'Income_Bracket')
    df4.columns = col_list
    a[year] = pd.merge(a[year], df4, how="inner", on="Income_Bracket")
# Chain-merge the per-year frames into one wide table and persist it.
df=a[BASE_YEAR]
for year in range(BASE_YEAR+1, END_YEAR+1):
    df = pd.merge(df, a[year], how="inner", on="Income_Bracket")
df.set_index('Income_Bracket', inplace=True)
df.to_csv('dist-table-all-years.csv', index=True)
df = pd.read_csv('dist-table-all-years.csv')
df.set_index('Income_Bracket', inplace=True)
df_pit_totals_clp = df[df.columns[df.columns.str.startswith('pitax_total_clp')]]
df_pit_totals_ref = df[df.columns[df.columns.str.startswith('pitax_total_ref')]]
clp_pitax_list = df_pit_totals_clp.loc['ALL'].tolist()
clp_pitax_list = [float(i.replace(',','')) for i in clp_pitax_list]
clp_pitax_list = [round(elem, 0) for elem in clp_pitax_list ]
ref_pitax_list = df_pit_totals_ref.loc['ALL'].tolist()
ref_pitax_list = [float(i.replace(',','')) for i in ref_pitax_list]
ref_pitax_list = [round(elem, 0) for elem in ref_pitax_list ]
years = [x[-4:] for x in list(df_pit_totals_clp.columns)]
plt.style.use('seaborn-whitegrid')
fig = plt.figure()
"""
ax = plt.axes()
ax.plot(x, np.sin(x))
ax.set(xlim=(0, 10), ylim=(-2, 2),
xlabel='x', ylabel='sin(x)',
title='A Simple Plot')
"""
#plt.axis([2017, 2021, 150000, 400000])
plt.title("Estimated Tax Collection")
plt.xlabel("Year")
plt.ylabel("Tax Collection in lakh Cr.");
"""
print(year)
print(clp_pitax_list)
print(ref_pitax_list)
"""
plt.plot(years, clp_pitax_list, linestyle='-', marker='o', color='b', label='Current Law', linewidth=2.0)
plt.plot(years, ref_pitax_list, linestyle='--', marker='o', color='r', label='Reform', linewidth=2.0)
plt.legend(loc='best')
plt.savefig('Total_collection_PIT.png')
plt.show()
# generating bar chart for difference in average tax burden due to reform
# for 2020 - the first year of reform
year = 2020
df_pitax_diff = df['pitax_diff_avg_ref_'+str(year)]
# Drop the trailing row and the first two rows (presumably the ALL/aggregate
# brackets — verify against the dist-table layout) so only the per-bracket
# rows are charted.
df_pitax_diff = df_pitax_diff[:-1]
df_pitax_diff = df_pitax_diff[2:]
df_pitax_diff = df_pitax_diff.reset_index()
pitax_inc_brac_list = df_pitax_diff['Income_Bracket'].tolist()
pitax_diff_list = df_pitax_diff['pitax_diff_avg_ref_'+str(year)].tolist()
pitax_diff_list = [float(i.replace(',','')) for i in pitax_diff_list]
plt.rcdefaults()
#plt.style.use('seaborn-whitegrid')
fig, ax = plt.subplots(figsize=(8, 5))
# Example data
x_pos = np.arange(len(pitax_inc_brac_list))
ax.bar(x_pos, pitax_diff_list,
        color='green')
ax.set_xticks(x_pos)
ax.set_xticklabels(pitax_inc_brac_list)
#ax.invert_yaxis()  # labels read top-to-bottom
ax.set_ylabel('Rupees')
ax.set_xlabel('Income Bracket')
ax.invert_yaxis()
ax.set_title('Change in Average Tax Burden Due to Reform in 2020')
plt.savefig('Average Tax Burden Change.png')
plt.show()
# generating pie chart for contribution of tax by different income groups
# for 2020 - the first year of reform
year = 2020
df_pitax_tot_clp = df['pitax_total_clp_'+str(year)]
# Keep only the per-bracket rows (drop last row and first two rows).
df_pitax_tot_clp = df_pitax_tot_clp[:-1]
df_pitax_tot_clp = df_pitax_tot_clp[2:]
df_pitax_tot_clp = df_pitax_tot_clp.reset_index()
pitax_inc_brac_list_clp = df_pitax_tot_clp['Income_Bracket'].tolist()
pitax_tot_list_clp = df_pitax_tot_clp['pitax_total_clp_'+str(year)].tolist()
pitax_tot_list_clp = [float(i.replace(',','')) for i in pitax_tot_list_clp]
pitax_tot_list_clp = [round(elem) for elem in pitax_tot_list_clp ]
fig, ax = plt.subplots(figsize=(10, 10))
# only "explode" the 5th slice (contributing to max revenue)
explode = (0, 0, 0, 0, 0.1, 0, 0, 0, 0)
ax.pie(pitax_tot_list_clp, explode=explode, labels=pitax_inc_brac_list_clp, autopct='%1.1f%%',
        shadow=False, startangle=90)
ax.axis('equal')  # Equal aspect ratio ensures that pie is drawn as a circle.
plt.suptitle('Contribution by Income Bracket to total PIT in 2020', fontsize=16, fontweight="bold")
ax.set_title('Current Law', fontsize=16, fontweight="bold")
plt.savefig('Contribution to total PIT.png')
plt.show()
# generating pie chart for comparing contribution of tax by
# different income groups for clp and reform for 2020 - the first year of reform
year = 2020
df_pitax_tot = df['pitax_total_ref_'+str(year)]
# Keep only the per-bracket rows (drop last row and first two rows).
df_pitax_tot = df_pitax_tot[:-1]
df_pitax_tot = df_pitax_tot[2:]
df_pitax_tot = df_pitax_tot.reset_index()
pitax_inc_brac_list = df_pitax_tot['Income_Bracket'].tolist()
pitax_tot_list = df_pitax_tot['pitax_total_ref_'+str(year)].tolist()
pitax_tot_list = [float(i.replace(',','')) for i in pitax_tot_list]
pitax_tot_list = [round(elem) for elem in pitax_tot_list ]
# Side-by-side pies: current law (lists computed in the section above) vs reform.
fig, (ax1, ax2) = plt.subplots(1,2, figsize=(10, 5))
#fig, ax = plt.subplots(figsize=(10, 5))
#the_grid = GridSpec(2, 2)
# only "explode" the 5th slice (contributing to max revenue)
explode = (0, 0, 0, 0, 0.1, 0, 0, 0, 0)
#plt.subplot(the_grid[1, 0], aspect=1)
plt.suptitle('Contribution by Income Bracket to total PIT in 2020', fontsize=16, fontweight="bold")
ax1.pie(pitax_tot_list_clp, explode=explode, labels=pitax_inc_brac_list_clp, autopct='%1.1f%%',
        shadow=False, startangle=90)
ax1.axis('equal')  # Equal aspect ratio ensures that pie is drawn as a circle.
#plt.subplot(the_grid[0, 1], aspect=1)
ax2.pie(pitax_tot_list, explode=explode, labels=pitax_inc_brac_list, autopct='%1.1f%%',
        shadow=False, startangle=90)
ax2.axis('equal')  # Equal aspect ratio ensures that pie is drawn as a circle.
ax1.set_title('Current Law', fontweight="bold")
ax2.set_title('Reform', fontweight="bold")
plt.savefig('Contribution to total PIT - Before and After Reform.png')
plt.show()
|
3,730 | d9f66cc3ba40292c49da08d7573d4c605a2771ae | def solution(record):
answer = []
arr = dict()
history = []
for i in record:
tmp = i.split()
if tmp[0] == "Enter" :
arr[tmp[1]] = tmp[2]
history.append([tmp[1], "님이 들어왔습니다."])
elif tmp[0] == "Leave" :
history.append([tmp[1], "님이 나갔습니다."])
elif tmp[0] == "Change" :
arr[tmp[1]] = tmp[2]
for i in history :
answer.append(arr[i[0]] + i[1])
return answer |
3,731 | 676ccbac9385a4b63d599c3f85f16e28d839e9b8 | import pysftp
import time
import threading
def sftp_connection():
    """Keep an SFTP session to the AVL sandbox alive, reconnecting forever.

    Outer loop: (re)establish the connection; inner loop: poll the remote
    directory every few seconds until the session drops, then reconnect.

    NOTE(review): credentials are hard-coded in source — move them to
    environment variables or a secrets store before this leaves a sandbox.
    """
    while True:
        cnopts = pysftp.CnOpts()
        # Host-key checking disabled: acceptable only against a test box.
        cnopts.hostkeys = None
        try:
            with pysftp.Connection('sb-emea.avl.com', username='abhishek.hingwasia@avl.com', password='AvlAvl2931!!',
                                   cnopts=cnopts) as sftp:
                print('connection has been established')
                remotepath = '/Cummins_CTCI_NB/sftp_image_test/'
                while True:
                    remotepath = '/Cummins_CTCI_NB/sftp_image_test/'
                    try:
                        if sftp.exists(remotepath):
                            print('hi')
                            time.sleep(5)
                            print('hello')
                            time.sleep(5)
                    # FIX: narrowed from a bare `except:` so Ctrl-C/SystemExit
                    # still propagate; any SSH error triggers a reconnect via
                    # the outer loop.
                    except Exception:
                        print('connection/ssherror exception')
                        break
        # FIX: likewise narrowed from a bare `except:`.
        except Exception:
            print('connection has been breaked')
            time.sleep(5)
if __name__ == "__main__":
    # Run the keep-alive loop on a worker thread; the main thread exits and
    # the non-daemon thread keeps the process alive.
    t1 = threading.Thread(target=sftp_connection)
    t1.start()
3,732 | f61e9e8069a0e90506c2f03a0cc4a25a16d71b85 | import pytest
import numpy as np
from dwave_qbsolv import QBSolv
from src.quantumrouting.solvers import partitionqubo
from src.quantumrouting.types import CVRPProblem
from src.quantumrouting.wrappers.qubo import wrap_vrp_qubo_problem
@pytest.fixture
def cvrp_problem():
    """A six-stop, single-vehicle CVRP instance used by the solver tests."""
    # Depot (index 0) followed by five delivery coordinates (lat, lon).
    stop_coords = np.array([
        [-15.6570138544452, -47.802664728268745],
        [-15.65879313293694, -47.7496622016347],
        [-15.651440380492554, -47.75887552060412],
        [-15.651207309372888, -47.755018806591394],
        [-15.648706444367969, -47.758785390289965],
        [-15.66047286919706, -47.75284167302011],
    ])
    return CVRPProblem(
        problem_identifier='bla',
        location_idx=np.arange(6),
        coords=stop_coords,
        vehicle_capacity=100,
        num_vehicles=1,
        max_deliveries=5,
        demands=np.array([0, 10, 10, 7, 3, 10]),
        depot_idx=0,
    )
def test_vrp_partition_full_qubo_solver(cvrp_problem):
    """End-to-end test: k-means partition (1 cluster) + QBSolv on the QUBO.

    With a single forced cluster the whole instance becomes one QUBO, so the
    solver should reproduce the expected single route and its total demand.
    """
    backend_solver = QBSolv()
    # Force one cluster so partitioning is a no-op.
    params = partitionqubo.KmeansPartitionFullQuboParams(fixed_num_clusters=1)
    qubo_problem_fn = wrap_vrp_qubo_problem(params=params)
    solver = partitionqubo.solver_fn(
        params=params, backend_solver=backend_solver,
        qubo_problem_fn=qubo_problem_fn)
    result = solver(problem=cvrp_problem)
    assert result.problem_identifier == 'bla'
    # Route starts and ends at the depot (index 0).
    assert (result.routes == np.array([[0, 5, 1, 3, 2, 4, 0]])).all()
    # Sum of the five delivery demands.
    assert result.total_demands == 40
|
3,733 | 9b4bc7f8f9c96f503a5ed79827430963e21718c4 | from django.conf.urls import url
from .views import LoginView, logout_user, delete_user
from .views import NewUserView
# URL routes for the auth app.
#
# FIX: the original patterns were unanchored (no "^...$"); Django matches
# url() regexes with re.search(), so e.g. "foo-login/" also matched the
# "login/" route. Anchoring makes each route exact and consistent with the
# pre-existing "delete/$" pattern.
urlpatterns = [
    url(r'^newuser/$', NewUserView.as_view(), name='newuser'),
    url(r'^login/$', LoginView.as_view(), name='login'),
    url(r'^logout/$', logout_user, name='logout'),
    url(r'^delete/$', delete_user, name='deleteuser'),
]
|
3,734 | 0eefae7e0d341d74154bbe480f5ed766829e3ce3 | import os
import h5py
import numpy as np
from keras import backend as K
from keras.layers import Activation, BatchNormalization, Conv2D, Dense, Dot, \
Dropout, Flatten, Input, MaxPooling2D, GlobalAveragePooling2D
from keras import regularizers
from keras.layers import Average as KerasAverage
from keras.models import Sequential, Model
from keras.optimizers import Adam, SGD
from keras.engine.topology import Layer
from .layers import LayerNormalization, CustomSoftmax
from .tf_implementations.loss_functions import loss_factory
class TotalReshape(Layer):
    """Reshape layer that may also change the batch dimension.

    Unlike keras.layers.Reshape, K.reshape is applied to the whole tensor,
    so the batch axis can be merged/split ("total" reshape). Entries of -1
    are inferred at runtime and reported as None in the static shape.
    """
    def __init__(self, target_shape, **kwargs):
        # Full target shape, including the batch axis; -1 means "infer".
        self.target_shape = target_shape
        super(TotalReshape, self).__init__(**kwargs)
    def compute_output_shape(self, input_shape):
        # Static shape for Keras: runtime-inferred (-1) axes become None.
        return tuple(
            x if x != -1 else None
            for x in self.target_shape
        )
    def call(self, x):
        return K.reshape(x, self.target_shape)
class BaseReducer(Layer):
    """Base class for layers that collapse the last axis of their input."""
    def __init__(self, **kwargs):
        super(BaseReducer, self).__init__(**kwargs)
    def compute_output_shape(self, input_shape):
        # All reducers drop the final axis.
        return input_shape[:-1]
class Average(BaseReducer):
    """Reduce the last axis by its mean."""
    def call(self, x):
        return K.mean(x, axis=-1)
class Max(BaseReducer):
    """Reduce the last axis by its maximum."""
    def call(self, x):
        return K.max(x, axis=-1)
class TopKAverage(BaseReducer):
    """Reduce the last axis by averaging its k largest values.

    Only implemented for the TensorFlow backend (uses tf.nn.top_k).
    """
    def __init__(self, k, **kwargs):
        # Number of top elements kept before averaging.
        self.k = k
        super(TopKAverage, self).__init__(**kwargs)
    def call(self, x):
        if K.backend() == "tensorflow":
            tf = K.tf
            # Unsorted top-k suffices: only the mean of the values is used.
            x, _ = tf.nn.top_k(x, self.k, sorted=False)
            return K.mean(x, axis=-1)
        else:
            raise NotImplementedError("TopKAverage is not implemented for "
                                      " %s backend" % (K.backend(),))
def reducer_factory(reducer, k=3):
    """Create the reduction layer named by *reducer*.

    :param reducer: "max", "average" or "topK"
    :param k: number of top elements for the "topK" reducer
    :raises ValueError: for an unknown reducer name (the original silently
        returned None, which only failed later when the layer was applied)
    """
    # Set the type of the reducer to be used
    if reducer == "max":
        return Max()
    elif reducer == "average":
        return Average()
    elif reducer == "topK":
        return TopKAverage(k)
    raise ValueError("Unknown reducer: %r" % (reducer,))
def mae(y_true, y_pred):
    """ Implementation of Mean average error
    """
    # Mean absolute difference over all elements (Keras metric).
    return K.mean(K.abs(y_true - y_pred))
def mde(y_true, y_pred):
    """Mean displacement error: mean |argmax(y_true) - argmax(y_pred)|.

    Measures, in class indices (e.g. depth planes), how far the predicted
    class is from the ground-truth class on average.
    """
    return K.mean(K.cast(
        K.abs(K.argmax(y_true, axis=1) - K.argmax(y_pred, axis=1)),
        K.floatx()
    ))
def create_simple_cnn(input_shape, kernel_regularizer=None):
    """Five 3x3/32 Conv+BatchNorm blocks; ReLU between blocks, none after
    the last BatchNorm (raw features)."""
    conv_kwargs = dict(
        filters=32,
        kernel_size=3,
        kernel_regularizer=kernel_regularizer
    )
    # First block carries the input shape; the remaining four are identical.
    layers = [Conv2D(input_shape=input_shape, **conv_kwargs),
              BatchNormalization()]
    for _ in range(4):
        layers.append(Activation("relu"))
        layers.append(Conv2D(**conv_kwargs))
        layers.append(BatchNormalization())
    return Sequential(layers)
def create_simple_cnn_ln(input_shape, kernel_regularizer=None):
    """Same topology as create_simple_cnn, but with LayerNormalization in
    place of BatchNormalization."""
    conv_kwargs = dict(
        filters=32,
        kernel_size=3,
        kernel_regularizer=kernel_regularizer
    )
    layers = [Conv2D(input_shape=input_shape, **conv_kwargs),
              LayerNormalization()]
    # Four more identical blocks, each preceded by a ReLU; no trailing
    # activation after the final normalization.
    for _ in range(4):
        layers.append(Activation("relu"))
        layers.append(Conv2D(**conv_kwargs))
        layers.append(LayerNormalization())
    return Sequential(layers)
def create_dilated_cnn_receptive_field_25(
    input_shape,
    kernel_regularizer=None
):
    """Seven-layer CNN with a 25x25 receptive field.

    Kernel sizes 5, 5, 5 (dilation 2), 3, 3, 3, 3; every conv has 32 filters
    and is followed by BatchNorm, with ReLU between blocks but not after the
    final BatchNorm.
    """
    def conv(**extra):
        # All convs share the filter count and regularizer.
        return Conv2D(filters=32, kernel_regularizer=kernel_regularizer,
                      **extra)
    layers = [
        conv(kernel_size=5, input_shape=input_shape),
        BatchNormalization(), Activation("relu"),
        conv(kernel_size=5),
        BatchNormalization(), Activation("relu"),
        conv(kernel_size=5, dilation_rate=2),
        BatchNormalization(), Activation("relu"),
        conv(kernel_size=3),
        BatchNormalization(), Activation("relu"),
        conv(kernel_size=3),
        BatchNormalization(), Activation("relu"),
        conv(kernel_size=3),
        BatchNormalization(), Activation("relu"),
        conv(kernel_size=3),
        BatchNormalization(),
    ]
    return Sequential(layers)
def create_dilated_cnn_receptive_field_25_with_tanh(
    input_shape,
    kernel_regularizer=None
):
    """Identical topology to create_dilated_cnn_receptive_field_25, but with
    tanh activations instead of ReLU."""
    def conv(**extra):
        # All convs share the filter count and regularizer.
        return Conv2D(filters=32, kernel_regularizer=kernel_regularizer,
                      **extra)
    layers = [
        conv(kernel_size=5, input_shape=input_shape),
        BatchNormalization(), Activation("tanh"),
        conv(kernel_size=5),
        BatchNormalization(), Activation("tanh"),
        conv(kernel_size=5, dilation_rate=2),
        BatchNormalization(), Activation("tanh"),
        conv(kernel_size=3),
        BatchNormalization(), Activation("tanh"),
        conv(kernel_size=3),
        BatchNormalization(), Activation("tanh"),
        conv(kernel_size=3),
        BatchNormalization(), Activation("tanh"),
        conv(kernel_size=3),
        BatchNormalization(),
    ]
    return Sequential(layers)
def create_hartmann_cnn(input_shape, kernel_regularizer=None):
    """Two conv(5)/tanh/max-pool blocks (32 then 64 filters).

    kernel_regularizer is accepted for factory-signature compatibility but
    (as in the original) is not applied to any layer.
    """
    model = Sequential()
    model.add(Conv2D(filters=32, kernel_size=5, input_shape=input_shape))
    model.add(Activation("tanh"))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Conv2D(filters=64, kernel_size=5))
    model.add(Activation("tanh"))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    return model
def cnn_factory(name):
    """Resolve a CNN-builder function by its configuration name.

    Raises KeyError for unknown names (same as the original dict lookup).
    """
    return {
        "simple_cnn": create_simple_cnn,
        "simple_cnn_ln": create_simple_cnn_ln,
        "dilated_cnn_receptive_field_25":
            create_dilated_cnn_receptive_field_25,
        "dilated_cnn_receptive_field_25_with_tanh":
            create_dilated_cnn_receptive_field_25_with_tanh,
        "hartmann_cnn": create_hartmann_cnn,
    }[name]
def optimizer_factory(optimizer, lr, momentum=None, clipnorm=0.0, clipvalue=1):
    """Create a Keras optimizer.

    :param optimizer: "Adam" or "SGD"
    :param lr: learning rate
    :param momentum: SGD momentum (ignored for Adam)
    :param clipnorm: gradient norm clipping (0.0 disables)
    :param clipvalue: per-element gradient value clipping
    :raises ValueError: for an unsupported optimizer name (the original
        silently returned None, which only failed later in model.compile)
    """
    # Set the type of optimizer to be used
    if optimizer == "Adam":
        return Adam(lr=lr, clipnorm=clipnorm, clipvalue=clipvalue)
    elif optimizer == "SGD":
        return SGD(lr=lr, momentum=momentum, clipnorm=clipnorm,
                   clipvalue=clipvalue)
    raise ValueError("Unknown optimizer: %r" % (optimizer,))
def kernel_regularizer_factory(regularizer_factor):
    """Return an L2 kernel regularizer, or None when the factor is 0
    (regularization disabled)."""
    if regularizer_factor == 0.0:
        return None
    return regularizers.l2(regularizer_factor)
def build_simple_cnn(
    input_shape,
    create_cnn,
    optimizer="Adam",
    lr=1e-3,
    momentum=None,
    clipnorm=0.0,
    loss="mse",
    reducer="average",
    merge_layer="dot-product",
    weight_decay=None,
    weight_file=None
):
    """Build and compile the single-stream feature CNN.

    :param input_shape: 5-tuple (D, N, W, H, C); only the channel count C is
        used — spatial dims are left dynamic
    :param create_cnn: factory from cnn_factory() building the network
    :param weight_file: optional HDF5 file; first tried in the training
        checkpoint layout (model_weights/<model>/<weight>), then by layer name
    :return: compiled Keras model
    (reducer/merge_layer are accepted for factory-signature compatibility.)
    """
    # Make sure that we have a proper input shape
    # TODO: Maybe change this to 3, because we finally need only the
    # patch_shape?
    assert len(input_shape) == 5
    # Unpack the input shape to make the code more readable
    D, N, W, H, C = input_shape
    model = create_cnn(
        input_shape=(None, None, C),
        kernel_regularizer=weight_decay
    )
    model.compile(
        optimizer=optimizer_factory(
            optimizer,
            lr=lr,
            momentum=momentum,
            clipnorm=clipnorm
        ),
        loss=loss_factory(loss)
    )
    # If there is a weight file specified load the weights
    if weight_file:
        try:
            f = h5py.File(weight_file, "r")
            keys = [os.path.join(model.name, w.name)
                    for l in model.layers for w in l.weights]
            weights = [f[os.path.join("model_weights", k)][:] for k in keys]
            model.set_weights(weights)
        # FIX: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; narrow the fallback to ordinary exceptions.
        except Exception:
            model.load_weights(weight_file, by_name=True)
    return model
def build_simple_nn_for_training(
    input_shape,
    create_cnn,
    optimizer="Adam",
    lr=1e-3,
    momentum=None,
    clipnorm=0.0,
    loss="emd",
    reducer="average",
    merge_layer="dot-product",
    weight_decay=None,
    weight_file=None
):
    """Build and compile the two-stream similarity network for training.

    Two patch stacks of shape (D, N, W, H, C) — depth planes, patches per
    plane, patch size, channels — are embedded by a shared CNN; per-patch
    similarities are reduced to one score per depth plane and softmaxed.

    :param create_cnn: factory from cnn_factory() building the shared CNN
    :param loss: name understood by loss_factory()
    :param reducer: "max" / "average" / "topK" (see reducer_factory)
    :param merge_layer: "cosine-similarity" L2-normalizes the dot product
    :param weight_file: optional HDF5 weights loaded by layer name
    :return: compiled two-input Keras Model
    """
    # Make sure that we have a proper input shape
    assert len(input_shape) == 5
    # Normalize dims to plain ints, keeping None for dynamic axes.
    # (FIX/idiom: the original converted a list in an index loop; this
    # tuple comprehension is equivalent and side-effect free.)
    input_shape = tuple(
        None if d is None else int(d) for d in input_shape
    )
    D, N, W, H, C = input_shape
    # Create the two stream inputs
    x1_in = Input(shape=input_shape)
    x2_in = Input(shape=input_shape)
    # Collapse (batch, D, N) so every patch runs through the CNN at once
    x1 = TotalReshape((-1, W, H, C))(x1_in)
    x2 = TotalReshape((-1, W, H, C))(x2_in)
    # Create the CNN and extract features from both streams
    cnn = create_cnn(input_shape=(W, H, C), kernel_regularizer=weight_decay)
    x1 = Flatten()(cnn(x1))
    x2 = Flatten()(cnn(x2))
    # Compute a kind of similarity between the features of the two streams
    x = Dot(axes=-1, normalize=(merge_layer == "cosine-similarity"))([x1, x2])
    # Reshape them back into their semantic shape (depth planes, patches, etc)
    x = TotalReshape((-1, D, N))(x)
    # Compute the final similarity scores for each depth plane
    x = reducer_factory(reducer)(x)
    # Compute the final output
    y = Activation("softmax")(x)
    model = Model(inputs=[x1_in, x2_in], outputs=y)
    model.compile(
        optimizer=optimizer_factory(
            optimizer,
            lr=lr,
            momentum=momentum,
            clipnorm=clipnorm
        ),
        loss=loss_factory(loss),
        metrics=["accuracy", mae, mde]
    )
    if weight_file:
        model.load_weights(weight_file, by_name=True)
    return model
def build_hartmann_network(
    input_shape,
    create_cnn=create_hartmann_cnn,
    optimizer="SGD",
    lr=1e-3,
    momentum=None,
    clipnorm=0.0,
    loss=None,
    reducer=None,
    merge_layer=None,
    weight_decay=None,
    weight_file=None
):
    """Build the Hartmann-style patch-similarity network.

    Five input patches are embedded by a shared CNN, averaged, and fed to a
    small conv head ("sim") ending in a 2-way softmax. The loss/reducer/
    merge_layer/weight_decay parameters are accepted only for factory-
    signature compatibility and are ignored.

    :param input_shape: (H, W, C) of one input patch
    :return: compiled joint Model; the feature CNN and similarity head are
        attached as model.cnn / model.sim for standalone predict() use
    """
    # Make sure that we have a proper input shape
    assert len(input_shape) == 3
    # Unpack the input shape to make the code more readable
    H, W, C = input_shape
    # Create the feature extracting CNN
    # NOTE(review): calls create_hartmann_cnn directly, so the create_cnn
    # argument is effectively ignored — confirm whether that is intended.
    cnn = create_hartmann_cnn(input_shape=(None, None, C))
    # Create the similarity CNN
    sim = Sequential([
        Conv2D(
            filters=2048,
            kernel_size=5,
            input_shape=K.int_shape(cnn.output)[1:]
        ),
        Activation("relu"),
        Conv2D(filters=2048, kernel_size=1),
        Activation("relu"),
        Conv2D(filters=2, kernel_size=1),
        Activation("softmax")
    ])
    # Create the joint model for training: five patches -> shared CNN ->
    # element-wise average -> similarity head.
    x_in = [Input(shape=input_shape) for i in range(5)]
    x = [cnn(xi) for xi in x_in]
    x = KerasAverage()(x)
    y = sim(x)
    model = Model(inputs=x_in, outputs=y)
    # Compile all the models
    model.compile(
        optimizer=optimizer_factory(
            optimizer,
            lr=lr,
            momentum=momentum,
            clipnorm=clipnorm
        ),
        loss="categorical_crossentropy",
        metrics=["accuracy"]
    )
    cnn.compile("sgd", "mse")  # Just so that we can run predict()
    sim.compile("sgd", "mse")
    # Attach the cnn and sim to the model in case someone wants to use them
    model.cnn = cnn
    model.sim = sim
    if weight_file:
        model.load_weights(weight_file, by_name=True)
    return model
def get_nn(name):
    """Resolve a model-builder by name: "simple_cnn",
    "simple_nn_for_training" or "hartmann". Raises KeyError otherwise."""
    return {
        "simple_cnn": build_simple_cnn,
        "simple_nn_for_training": build_simple_nn_for_training,
        "hartmann": build_hartmann_network,
    }[name]
|
3,735 | 2e041e33b5c34c2bddc72b36ff641817f1e21db2 | TTTSIZE = 4
def who_win_line(line):
elements = set(line)
if '.' in elements:
return '.'
elements.discard('T')
if len(elements) >= 2:
return 'D'
else:
return elements.pop()
def who_win_tic_tac_toe(original_rows):
    """Judge a 4x4 tic-tac-toe position (GCJ 2013 Qualification A).

    :param original_rows: TTTSIZE sequences; only the first TTTSIZE
        characters of each row are used (trailing newlines are dropped)
    :return: 'X won', 'O won', 'Draw', or 'Game has not completed'
    """
    rows = [row[0:TTTSIZE] for row in original_rows]
    # Build every line to inspect: rows, columns and the two diagonals.
    # (FIX: the original did `lines = rows` and then extend()ed it, mutating
    # `rows` through the alias; it also hard-coded every index.)
    columns = [list(col) for col in zip(*rows)]
    diagonal1 = [rows[i][i] for i in range(TTTSIZE)]
    diagonal2 = [rows[i][TTTSIZE - 1 - i] for i in range(TTTSIZE)]
    lines = rows + columns + [diagonal1, diagonal2]
    board_full = True
    for line in lines:
        winner = who_win_line(line)
        if winner == 'X':
            return 'X won'
        elif winner == 'O':
            return 'O won'
        elif winner == '.':
            # At least one empty cell somewhere on the board.
            board_full = False
    return 'Draw' if board_full else 'Game has not completed'
import sys
#import pdb
if __name__ == '__main__':
    # Usage: script <prefix> — reads "<prefix>.in", writes "<prefix>.out"
    # in the Google Code Jam answer format ("Case #k: <verdict>").
    filename_prefix = sys.argv[1]
    filename_in = filename_prefix + ".in"
    filename_out = filename_prefix + ".out"
    file_in = open(filename_in, 'r')
    lines = file_in.readlines()
    # First line holds the number of test cases.
    testcnt = int(lines[0])
    idx = 1
    file_out = open(filename_out, 'w')
    #pdb.set_trace()
    for test in range(testcnt):
        res = who_win_tic_tac_toe(lines[idx : idx + TTTSIZE])
        file_out.write("Case #{0}: {1}\n".format(test + 1, res))
        # Each case is TTTSIZE board rows plus one blank separator line.
        idx += TTTSIZE + 1
3,736 | 87baaf4a1b48fa248c65d26cc44e819a2ede1140 | # Python library import
import asyncio, asyncssh, logging
# Module logging logger
log = logging.getLogger(__package__)
# Debug level
# NOTE(review): basicConfig at import time configures the *root* logger and
# leaves DEBUG enabled globally — consider moving this into the application
# entry point before production use.
# logging.basicConfig(level=logging.WARNING)
# logging.basicConfig(level=logging.INFO)
logging.basicConfig(level=logging.DEBUG)
asyncssh.set_debug_level(2)
# Declaration of constant values
# Max data to read in read function
MAX_BUFFER_DATA = 65535
import ipaddress

# Dictionary mapping every IPv4 dotted-decimal netmask to its prefix length
# as a string ("0.0.0.0" -> "0" ... "255.255.255.255" -> "32").
# Derived with the stdlib ipaddress module instead of a hand-written 33-entry
# literal, which is easy to typo; insertion order (0..32) is preserved.
ipv4_netmask_list = {
    str(ipaddress.IPv4Network((0, prefix)).netmask): str(prefix)
    for prefix in range(33)
}
class NetworkDevice:
    """
    Base class for network object

    :param ip: IP address of a device
    :type ip: str
    :param username: Username used to connect to a device
    :type username: str
    :param password: Password used to connect to a device
    :type password: str
    :param device_type: Type of device used
    :type device_type: str
    :param port: TCP port used to connect a device. Default value is "22" for SSH
    :type port: int, optional
    :param timeout: Timeout (in seconds) for connection operations. Default value is 10 seconds
    :type timeout: int, optional
    :param _protocol: Protocol used to connect a device. "ssh" or "telnet" are possible options. Default value is "ssh"
    :type _protocol: str, optional
    :param enable_mode: Enable mode for devices requiring it. Default value is "False"
    :type enable_mode: bool, optional
    :param enable_password: Enable password used for enable mode.
    :type enable_password: str, optional
    :param conn: Variable used for the management of the SSH connection
    :type conn: SSHClientConnection object
    :param _writer: Variable used for the management of the Telnet connection and writing channel
    :type _writer: StreamWriter object
    :param _reader: Variable used for the management of the Telnet reading channel
    :type _reader: StreamReader object
    :param possible_prompts: Used by the connect method to list all possible prompts of the device
    :type possible_prompts: list
    :param _connect_first_ending_prompt: Default possible ending prompts. Used only the time after login and password to discover the prompt
    :type _connect_first_ending_prompt: list
    :param list_of_possible_ending_prompts: Different strings at the end of a prompt the device can get. Used for detecting the prompt returned in sent commands
    :type list_of_possible_ending_prompts: list
    :param _telnet_connect_login: Login prompt for Telnet. Used to detect when a login is expected or when login and password access is failed
    :type _telnet_connect_login: str
    :param _telnet_connect_password: Password prompt for Telnet. Used to detect when a login is expected or when login and password access is failed
    :type _telnet_connect_password: list
    :param _telnet_connect_authentication_fail_prompt: Known failing messages or prompts when an authentication has failed. Used to get an answer faster than timeout events
    :type _telnet_connect_authentication_fail_prompt: list
    :param cmd_enable: Enable command for entering into enable mode
    :type cmd_enable: str
    :param cmd_disable_paging: Command used to disable paging on a device. That command is run at connection time
    :type cmd_disable_paging: str
    :param cmd_enter_config_mode: Command used to enter into a configuration mode on a device when this device support that feature.
    :type cmd_enter_config_mode: str
    :param cmd_exit_config_mode: Command used to leave a configuration mode on a device when this device support that feature.
    :type cmd_exit_config_mode: str
    :param cmd_get_version: API command used to get the software version of a device
    :type cmd_get_version: str
    :param cmd_get_hostname: API command used to get the hostname of a device
    :type cmd_get_hostname: str
    :param cmd_get_model: API command used to get the model of a device
    :type cmd_get_model: str
    :param cmd_get_serial_number: API command used to get the serial number of a device
    :type cmd_get_serial_number: str
    :param cmd_get_config: API command used to get the running configuration of a device
    :type cmd_get_config: str
    :param cmd_save_config: API command used to save the running configuration on the device
    :type cmd_save_config: str
    """
def __init__(self, **kwargs):
# Display info message
log.info("__init__")
self.ip = ""
self.username = ""
self.password = ""
self.device_type = ""
self.port = 22
self.timeout = 10
self._protocol = "ssh"
self.enable_mode = False
self.enable_password = ""
self.conn = None
self._writer = None
self._reader = None
self.possible_prompts = []
self._connect_first_ending_prompt = ["#", ">"]
self.list_of_possible_ending_prompts = [
"(config-line)#",
"(config-if)#",
"(config)#",
">",
"#",
]
self._carriage_return_for_send_command = "\n"
self._send_command_error_in_returned_output = []
self._telnet_connect_login = "Username:"
self._telnet_connect_password = "Password:"
self._telnet_connect_authentication_fail_prompt = [":", "%"]
# General commands
self.cmd_enable = "enable"
self.cmd_disable_paging = "terminal length 0"
self.cmd_enter_config_mode = "configure terminal"
self.cmd_exit_config_mode = "exit"
self.cmd_get_version = "show version"
self.cmd_get_hostname = "show version | include uptime"
self.cmd_get_model = "show inventory"
self.cmd_get_serial_number = "show inventory | i SN"
self.cmd_get_config = "show running-config"
self.cmd_save_config = "write memory"
# Layer 1 commands
self.cmd_get_interfaces = [
"interface ethernet print terse without-paging",
"foreach i in=([/interface ethernet find]) do={/interface ethernet monitor $i once without-paging}",
"interface bridge port print terse without-paging",
]
self.cmd_set_interface = [
"interface ethernet enable <INTERFACE>",
"interface ethernet disable <INTERFACE>",
'interface ethernet comment <INTERFACE> "<COMMENT>"',
"interface ethernet set l2mtu=<MAXIMUMFRAMESIZE> <INTERFACE>",
"interface bridge port set frame-types=<MODE> ingress-filtering=<FILTERINGVLAN> [find interface=<INTERFACE>]",
]
# Layer 2 commands
self.cmd_get_mac_address_table = "interface bridge host print without-paging"
self.cmd_get_arp = "ip arp print terse without-paging"
self.cmd_get_lldp_neighbors = "ip neighbor print terse without-paging"
self.cmd_get_vlans = "interface bridge vlan print terse without-paging"
self.cmd_add_vlan = 'interface bridge vlan add vlan-ids=<VLAN> comment="<VLAN_NAME>" bridge=<BRIDGE>'
self.cmd_remove_vlan = "interface bridge vlan remove [find vlan-ids=<VLAN>]"
self.cmd_add_interface_to_vlan = [
"interface bridge vlan print terse",
"interface bridge vlan set [find vlan-ids=<VLAN>] untagged=<INTERFACE>",
"interface bridge vlan set [find vlan-ids=<VLAN>] tagged=<INTERFACE>",
"interface bridge port set [find interface=<INTERFACE>] pvid=<VLAN>",
]
self.cmd_remove_interface_from_vlan = [
"interface bridge vlan print terse",
"interface bridge vlan set [find vlan-ids=<VLAN>] untagged=<INTERFACE>",
"interface bridge vlan set [find vlan-ids=<VLAN>] tagged=<INTERFACE>",
"interface bridge port set [find interface=<INTERFACE>] pvid=<VLAN>",
]
# Layer 3 commands
self.cmd_get_routing_table = "ip route print without-paging terse"
self.cmd_get_interfaces_ip = "ip address print terse without-paging"
self.cmd_add_static_route = "ip route add dst-address=<NETWORK>/<PREFIXLENGTH> gateway=<DESTINATION> distance=<METRIC>"
self.cmd_remove_static_route = (
"ip route remove [find dst-address=<NETWORK>/<PREFIXLENGTH>]"
)
# Display info message
log.debug("__init__: kwargs: " + str(kwargs))
# Get information from dictionary
# "ip" found?
if "ip" in kwargs:
# Save "ip" parameter
self.ip = kwargs["ip"]
# Display info message
log.info("__init__: ip found: " + str(self.ip))
# "username" found?
if "username" in kwargs:
self.username = kwargs["username"]
# Display info message
log.info("__init__: username found: " + str(self.username))
# "password" found?
if "password" in kwargs:
self.password = kwargs["password"]
# Display info message
log.debug("__init__: password found: " + str(self.password))
# "device_type" found?
if "device_type" in kwargs:
self.device_type = kwargs["device_type"]
# Display info message
log.info("__init__: device_type found: " + str(self.device_type))
# "timeout" found?
if "timeout" in kwargs:
self.timeout = kwargs["timeout"]
# Display info message
log.info("__init__: timeout found: " + str(self.timeout))
# "protocol" found?
if "protocol" in kwargs:
self._protocol = kwargs["protocol"].lower()
# Display info message
log.info("__init__: protocol found: " + str(self._protocol))
# By default telnet port is 23
if self._protocol.lower() == "telnet":
self.port = 23
# "port" found?
if "port" in kwargs:
self.port = kwargs["port"]
# Display info message
log.info("__init__: port found: " + str(self.port))
# "enable_mode" found?
if "enable_mode" in kwargs:
self.enable_mode = kwargs["enable_mode"]
# Display info message
log.info("__init__: enable_mode found: " + str(self.enable_mode))
# "enable_password" found?
if "enable_password" in kwargs:
self.enable_password = kwargs["enable_password"]
# Display info message
log.info("__init__: enable_password found: " + str(self.enable_password))
async def __aenter__(self):
"""
Context manager opening connection
"""
try:
# Run an async method to connect a device
await self.connect()
except Exception:
# Disconnection (if needed) in case the connection is done but something failed
await self.disconnect()
# propagate exception if needed
raise
return self
    # async def _aexit_(self, exc_type, exc_value, traceback):
    async def __aexit__(self, exc_type, exc_value, traceback):
        """
        Context manager closing connection
        """
        # Close the connection. Returns None, so any exception raised inside
        # the `async with` body is propagated to the caller.
        await self.disconnect()
def find_prompt(self, text):
"""
Method used to find a prompt inside an output string
This method is used during the first communication with the device.
First it find the prompt then caculate the different forms the prompt
can take. This will be useful later on while finding prompt in other
output stream (read).
:param text: data with a prompt
:type text: str
:return: the prompt found
:rtype: str
"""
# Get last line of the data
prompt = text.split("\n")[-1]
# Remove possible \r in the data
# prompt = prompt.replace("\r", "")
prompt = text.split("\r")[-1]
# Display info message
log.info(f"find_prompt: prompt: '{prompt}'")
# Get the possible prompts for future recognition
self.possible_prompts = self.get_possible_prompts(prompt)
# Return the prompt
return prompt
def get_possible_prompts(self, prompt):
"""
Method used to check if a prompt has one of the expected endings then
create a list with all possible prompts for the device
:param prompt: a prompt with a possible ending prompt (eg. "switch#")
:type prompt: str
:return: the list of prompts
:rtype: list
"""
# By default no prompts are returned
list_of_prompts = []
# Get all the ppossible values of the endings of the prompt
list_of_possible_ending_prompts = self.list_of_possible_ending_prompts
# Temporary variable storing the prompt value
my_prompt = prompt
# Test each possible prompt ending (i.e '#', '>', "(config-if)#", "(config)#")
for ending in list_of_possible_ending_prompts:
# Is this current prompt ending at the end of the prompt?
if my_prompt.endswith(ending):
# Yes
# Then remove the ending
my_prompt = my_prompt[: -len(ending)]
# Break the loop
break
# Prompt should be from "switch#" to "switch"
# Display info message
log.info(f"get_possible_prompts: prompt found: '{my_prompt}'")
# Display info message
log.info(f"get_possible_prompts: prompt found size: '{len(my_prompt)}'")
# Now create all the possible prompts for that device
for ending in list_of_possible_ending_prompts:
# Save the prompt name with a possible ending in the list
list_of_prompts.append(my_prompt + ending)
# Display info message
log.info(f"get_possible_prompts: list of possible prompts: {list_of_prompts}")
# Return the list of prompts
return list_of_prompts
def check_if_prompt_is_found(self, text):
"""
Method used to check if a prompt is detected inside a string
:param text: a string with prompt
:type text: str
:return: the prompt found
:rtype: str
"""
# By default the prompt is not found
prompt_found = False
# Check all possible prompts
for prompt in self.possible_prompts:
# Display info message
log.info(f"check_if_prompt_is_found: prompt: '{prompt}'")
# Is this prompt present in the text?
if prompt in text:
# Yes
prompt_found = True
# Display info message
log.info(f"check_if_prompt_is_found: prompt found: '{prompt}'")
# Leave the for loop
break
# Return the prompt found
return prompt_found
def remove_command_in_output(self, text, cmd):
"""
Method removing the command at the beginning of a string
After sending commands an "echo" of the command sent
is display in the output string. This method removes it.
:param text: the text with the command at the beginning
:type text: str
:param cmd: the command previously sent
:type cmd: str
:return: the output string without the command
:rtype: str
"""
# Display info message
log.info(f"remove_command_in_output: cmd = '{cmd}'")
# Display info message
log.info(f"remove_command_in_output: cmd (hex) = '{cmd.encode().hex()}'")
# Remove the command from the beginning of the output
# output = text.lstrip(cmd + "\n")
output = text.split(cmd + "\n")[-1]
# Display info message
log.info(f"remove_command_in_output: output = '{output}'")
# Return the string without the command
return output
def remove_starting_carriage_return_in_output(self, text):
"""
Method removing the carriage return at the beginning of a string
:param text: the text with the command at the beginning
:type text: str
:return: the output string without the starting carriage return
:rtype: str
"""
# Display info message
log.info("remove_starting_carriage_return_in_output")
# Remove the carriage return at the beginning of the string
output = text.lstrip("\r\n\r")
# Display info message
log.info(f"remove_starting_carriage_return_in_output: output = '{output}'")
# Return the string without the starting carriage return
return output
def remove_ending_prompt_in_output(self, text):
"""
Method removing the prompt at the end of a string
:param text: the text with a prompt at the beginning
:type text: str
:return: the output string without the ending prompt
:rtype: str
"""
# Display info message
log.info("remove_ending_prompt_in_output")
# Check all possible prompts
for prompt in self.possible_prompts:
# Display info message
log.info(f"remove_ending_prompt_in_output: prompt: '{prompt}'")
# Prompt found in the text?
if prompt in text:
# Yes
# Then it is removed from the text
# text = text.rstrip(prompt)
text = text[: -len(prompt)]
# Remove also carriage return
text = text.rstrip("\r\n")
# Leave the loop
break
# output = text.rstrip("\r\n" + self.prompt)
# Display info message
log.info(f"remove_ending_prompt_in_output: text without prompt:\n'{text}'")
# Return the text without prompt at the end
return text
def check_error_output(self, output):
"""
Check if an error is returned by the device ("% Unrecognized command", "% Ambiguous command", etc.)
If an error is found, then an exception is raised
"""
# Display info message
log.info("check_error_output")
# Check if output has some data
if output:
# Yes
# Display info message
log.info("check_error_output: output has some data")
# Check all elements in the list of output
for element in self._send_command_error_in_returned_output:
# Display info message
log.info(f"check_error_output: element: {element}")
# Display info message
log.info(f"check_error_output: output[0]: {output[0]}")
# Check if the output starts with a string with an error message (like "% Invalid input detected at '^' marker.")
# Error message?
if output.startswith(element):
# Yes
# Raise an exception
raise Exception(output)
def remove_ansi_escape_sequence(self, text):
"""
Method removing ANSI escape sequence from a string
Just CSI sequences are removed
:param text: the text with a prompt at the beginning
:type text: str
:return: the output string without the ending prompt
:rtype: str
"""
# By default no string returned
output = ""
# By default no escape sequence found
esc_found = 0
# Read char by char a string
for i in text:
# Display char
# log.info(f"{str(i).encode('ascii')}")
# No escape previously found?
if esc_found == 0:
# No escape sequence currently found
# Escape?
if i == "\x1b":
# Yes
log.info("Esc!")
# Escape found
esc_found = 1
else:
# No
# Then the current char can be saved
output += i
# Escape previously found?
elif esc_found == 1:
# Yes
# Then check if this is a CSI sequence
if i == "[":
# Beginning of CSI sequence
log.info("CSI sequence")
# CSI sequence
esc_found = 2
else:
# Another Escape sequence
# Keep the escape sequence in the string
output += "\x1b" + i
# No escape sequence next
esc_found = 0
else:
# Char between 'a' and 'z' or 'A' and 'Z'?
if (i >= "a" and i <= "z") or (i >= "A" and i <= "Z"):
# Yes
# Then it is the end of CSI escape sequence
log.info("End of escape sequence")
# No escape sequence next
esc_found = 0
# Return a string without ANSI escape sequence
return output
async def disable_paging(self):
"""
Async method disabling paging on a device
Use the "cmd_disable_paging" attribute
"""
# Display info message
log.info("disable_paging")
# Send command to the device to disable paging
await self.send_command(self.cmd_disable_paging)
async def connect(self):
"""
Async method used for connecting a device
Currently supported: SSH and Telnet
"""
# Display info message
log.info("connect")
try:
# SSH?
if self._protocol == "ssh":
# Yes
# Then Connect using SSH
await self.connectSSH()
# Telnet?
elif self._protocol == "telnet":
# Yes
# Then Connect using Telnet
await self.connectTelnet()
else:
# Unsupported protocol
# Raise an exception
raise Exception(f"connect: unsupported protocol: {self._protocol}")
except Exception:
# There was a problem with a connection method
# Display info message
log.info("connect: connection error")
raise
    async def connectSSH(self):
        """
        Async method used for connecting a device using SSH protocol

        Opens the SSH connection, opens a session, accumulates data until
        one of the first expected ending prompts appears, memorizes the
        prompt (self.prompt / self.possible_prompts via find_prompt) and
        disables paging when the device defines a command for it.
        """
        # Display info message
        log.info("connectSSH")
        # Parameters of the connection
        # NOTE(review): asyncssh.encryption._enc_algs is a PRIVATE asyncssh
        # API used to enable legacy algorithms; it may break across
        # asyncssh versions — confirm before upgrading the dependency.
        generator = asyncssh.connect(
            self.ip,
            username=self.username,
            password=self.password,
            known_hosts=None,
            # encryption_algs="*", # Parameter that includes all encryption algorithms (even the old ones disabled by default)
            encryption_algs=[
                algs.decode("utf-8") for algs in asyncssh.encryption._enc_algs
            ],  # Parameter that includes all encryption algorithms (even the old ones disabled by default)
        )
        # Trying to connect to the device, bounded by self.timeout
        try:
            self.conn = await asyncio.wait_for(generator, timeout=self.timeout)
        except asyncio.exceptions.TimeoutError as error:
            # Timeout
            # Display error message
            log.error(f"connectSSH: connection failed: {self.ip} timeout: '{error}'")
            # Exception propagation (re-raised with a clearer message)
            raise asyncio.exceptions.TimeoutError(
                "Connection failed: connection timed out."
            )
        except Exception as error:
            # Connection failed
            # Display error message
            log.error(f"connectSSH: connection failed: {self.ip} '{error}'")
            # Exception propagation
            raise
        # Display info message
        log.info("connectSSH: connection success")
        # Create a session (interactive channel with a pseudo-terminal)
        self.stdinx, self.stdoutx, _ = await self.conn.open_session(term_type="netscud")
        # Display info message
        log.info("connectSSH: open_session success")
        # By default no data has been read
        data = ""
        # By default no prompt found
        prompt_not_found = True
        try:
            # Read data until one of the first expected ending prompts shows up
            while prompt_not_found:
                # Display info message
                log.info("connectSSH: beginning of the loop")
                # Read the prompt (each read appends to the accumulated data)
                data += await asyncio.wait_for(
                    self.stdoutx.read(MAX_BUFFER_DATA), timeout=self.timeout
                )
                # Display info message
                log.info(f"connectSSH: data: '{str(data)}'")
                # Display info message
                log.info(f"connectSSH: data: hex:'{data.encode('utf-8').hex()}'")
                # Check if an initial prompt is found
                for prompt in self._connect_first_ending_prompt:
                    # Ending prompt found?
                    if data.endswith(prompt):
                        # Yes
                        # Display info message
                        log.info(f"connectSSH: first ending prompt found: '{prompt}'")
                        # A ending prompt has been found
                        prompt_not_found = False
                        # Leave the loop
                        break
                # Display info message
                log.info("connectSSH: end of loop")
        except Exception as error:
            # Fail while reading the prompt (typically a timeout)
            # Display error message
            log.error(
                f"connectSSH: timeout while reading the prompt: {self.ip} '{error}'"
            )
            # Exception propagation
            raise
        # Display info message
        log.info(f"connectSSH: end of prompt loop")
        # Remove possible escape sequence so the prompt is matched on clean text
        data = self.remove_ansi_escape_sequence(data)
        # Find prompt (also populates self.possible_prompts)
        self.prompt = self.find_prompt(str(data))
        # Display info message
        log.info(f"connectSSH: prompt found: '{self.prompt}'")
        # Display info message
        log.info(f"connectSSH: prompt found size: '{len(self.prompt)}'")
        # Disable paging command available?
        if self.cmd_disable_paging:
            # Yes
            # Disable paging
            await self.disable_paging()
    async def connectTelnet(self):
        """
        Async method used for connecting a device using Telnet protocol

        Sequence: open the TCP connection, wait for the login or password
        prompt, authenticate (login optional), memorize the prompt, enter
        enable mode when requested, and disable paging when supported.
        """
        # Display info message
        log.info("connectTelnet")
        try:
            # Prepare connection with Telnet (coroutine not awaited yet)
            conn = asyncio.open_connection(self.ip, self.port)
        except Exception as error:
            # Preparation to the connection failed
            # Display error message
            log.error(f"connectTelnet: preparation to the connection failed: '{error}'")
            # Exception propagation
            raise
        # Display info message
        log.info("connectTelnet: preparation to the connection success")
        try:
            # Connection with Telnet, bounded by self.timeout
            self._reader, self._writer = await asyncio.wait_for(
                conn, timeout=self.timeout
            )
        except asyncio.TimeoutError:
            # Time out during connection
            # Display error message
            log.error("connectTelnet: connection: timeout")
            # Exception propagation
            raise
        # Display info message
        log.info("connectTelnet: connection success")
        # Get prompt for the login
        prompt = self._telnet_connect_login
        # Get prompt for the password
        prompt_password = self._telnet_connect_password
        # By default a login is expected
        use_login = True
        # Temporary string variable
        output = ""
        # Temporary bytes variable
        byte_data = b""
        # Read the telnet information and first prompt (for login but a password prompt can be found for IOS for instance)
        while True:
            # Display info message
            log.info(f"connectTelnet: read data for prompt")
            # Read returned prompt
            byte_data += await asyncio.wait_for(
                self._reader.read(MAX_BUFFER_DATA), timeout=self.timeout
            )
            # Display info message
            log.info(f"connectTelnet: byte_data: {byte_data}")
            # Temporary convertion in string. This string has the following form: "b'....'"
            # NOTE(review): prompts are matched against this repr-style string
            # (escaped bytes), not against decoded text — confirm the prompt
            # constants are chosen accordingly.
            output = str(byte_data)
            # Display info message
            log.info(f"connectTelnet: output: {output}")
            # Prompt for the username found?
            if prompt in output:
                # Yes
                # Leave the loop
                break
            # Prompt for the password found?
            elif prompt_password in output:
                # Yes
                # That means only password is required
                use_login = False
                # Leave the loop
                break
        # Display info message
        log.info(f"connectTelnet: login prompt: '{output}'")
        # Login to use?
        if use_login:
            # Yes
            # Display info message
            log.info("connectTelnet: sending login")
            try:
                # Send login and wait for the password prompt instead of the
                # device prompt
                await self.send_command(self.username, prompt_password)
                # Display info message
                log.info("connectTelnet: login sent")
            except Exception:
                # Problem with the login
                # Propagate the exception
                raise
        # Display info message
        log.info("connectTelnet: sending password")
        try:
            # Send password; authentication-failure prompts abort early
            output = await self.telnet_send_command_with_unexpected_pattern(
                self.password,
                self._connect_first_ending_prompt,
                self._telnet_connect_authentication_fail_prompt,
            )
        except Exception:
            # Problem with the password
            # Propagate the exception
            raise
        # Display info message
        log.info("connectTelnet: password sent")
        # Find prompt (also populates self.possible_prompts)
        self.prompt = self.find_prompt(str(output))
        # Display info message
        log.info(f"connectTelnet: prompt found: '{self.prompt}'")
        # Password enable?
        if self.enable_mode:
            # Yes
            # Display info message
            log.info("connectTelnet: enable mode to be activated")
            try:
                # Send enable command and wait for the password prompt
                await self.send_command(self.cmd_enable, prompt_password)
                # Display info message
                log.info("connectTelnet: enable command sent")
                # Display info message
                log.info("connectTelnet: sending enable password")
                # Send enable password
                await self.telnet_send_command_with_unexpected_pattern(
                    self.enable_password,
                    self._connect_first_ending_prompt,
                    self._telnet_connect_authentication_fail_prompt,
                )
                # Display info message
                log.info("connectTelnet: enable password sent")
            except Exception:
                # Problem with the enable password
                # Display info message
                log.info("connectTelnet: enable password failure")
                # Propagate the exception
                raise
        # Disable paging command available?
        if self.cmd_disable_paging:
            # Yes
            # Disable paging
            await self.disable_paging()
async def disconnect(self):
"""
Async method used to disconnect a device
If this method is not used then exceptions will happen
when the program will end
"""
# Debug info message
log.info("disconnect")
# SSH?
if self._protocol == "ssh":
# Yes
# Then disconnect using SSH
await self.disconnectSSH()
# Telnet?
elif self._protocol == "telnet":
# Yes
# Then disconnect using Telnet
await self.disconnectTelnet()
else:
# Unsupported protocol
# Raise an exception
raise Exception(f"Unsupported protocol: {self._protocol}")
async def disconnectSSH(self):
"""
Async method used to disconnect a device in SSH
If this method is not used then exceptions will happen
when the program will end
"""
# Debug info message
log.info("disconnectSSH")
# Connection previously open in SSH?
if self.conn:
# Yes
# Then close the SSH connection
self.conn.close()
# No more connection to disconnect
self.conn = None
async def disconnectTelnet(self):
"""
Async method used to disconnect a device in Telnet
If this method is not used then exceptions will happen
when the program will end
"""
# Debug info message
log.info("disconnectTelnet")
# Connection previously open in Telnet?
if self._writer:
# Yes
# Then close the SSH connection
self._writer.close()
# No more connection to disconnect
self._writer = None
async def send_command(self, cmd, pattern=None, timeout=None):
"""
Async method used to send data to a device
:param cmd: command to send
:type cmd: str
:param pattern: optional, a pattern replacing the prompt when the prompt is not expected
:type pattern: str
:param timeout: optional, a timeout for the command sent. Default value is self.timeout
:type timeout: str
:return: the output of command
:rtype: str
"""
# Debug info message
log.info("send_command")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# SSH?
if self._protocol == "ssh":
# Yes
# Then disconnect using SSH
output = await self.send_commandSSH(cmd, pattern=pattern, timeout=timeout)
# Telnet?
elif self._protocol == "telnet":
# Yes
# Then disconnect using Telnet
output = await self.send_commandTelnet(
cmd, pattern=pattern, timeout=timeout
)
else:
# Unsupported protocol
# Raise an exception
raise Exception(f"send_command: unsupported protocol: {self._protocol}")
# Return the result of the command
return output
    async def send_commandSSH(self, cmd, pattern=None, timeout=None):
        """
        Async method used to send data to a device over SSH

        Writes the command, accumulates output until the prompt (or the
        given pattern) appears, then strips the command echo, the leading
        line breaks and the trailing prompt before returning the result.

        :param cmd: command to send
        :type cmd: str

        :param pattern: optional, a pattern replacing the prompt when the prompt is not expected
        :type pattern: str

        :param timeout: optional, a timeout for the command sent. Default value is self.timeout
        :type timeout: str

        :return: the output of command
        :rtype: str

        :raises Exception: when the device reports a command error
            (see check_error_output)
        """
        # Debug info message
        log.info("send_commandSSH")
        # Default value of timeout variable
        if timeout is None:
            timeout = self.timeout
        # Add carriage return at the end of the command (mandatory to send the command)
        # cmd = cmd + "\n"
        # cmd = cmd + "\r\n"
        # Debug info message
        log.info(f"send_commandSSH: cmd = '{cmd}'")
        # Sending command (line ending is device-specific,
        # see _carriage_return_for_send_command)
        self.stdinx.write(cmd + self._carriage_return_for_send_command)
        # Display message
        log.info("send_commandSSH: command sent")
        # Variable used to gather data
        output = ""
        # Reading data until the prompt (or pattern) shows up
        while True:
            # await asyncio.sleep(1)
            # Read the data received
            output += await asyncio.wait_for(
                self.stdoutx.read(MAX_BUFFER_DATA), timeout=timeout
            )
            # Debug info message
            # log.info(f"send_commandSSH: output hex: '{str(output).encode("utf-8").hex()}'")
            # Remove ANSI escape sequence so prompt matching works on clean text
            output = self.remove_ansi_escape_sequence(output)
            # Remove possible "\r"
            output = output.replace("\r", "")
            # data = ""
            # for i in output:
            #     data += i.encode("utf-8").hex()
            # print(data)
            # Debug info message
            log.info(f"send_commandSSH: output: '{output}'")
            # Is a patten used?
            if pattern:
                # Use pattern instead of prompt
                if pattern in output:
                    # Yes
                    # Leave the loop
                    break
            else:
                # Check if prompt is found
                if self.check_if_prompt_is_found(output):
                    # Yes
                    # Leave the loop
                    break
        # Debug info message
        log.debug(
            f"send_commandSSH: raw output: '{output}'\nsend_commandSSH: raw output (hex): '{output.encode().hex()}'"
        )
        # Remove the command sent from the result of the command
        output = self.remove_command_in_output(output, str(cmd))
        # Remove the carriage return of the output
        output = self.remove_starting_carriage_return_in_output(output)
        # Remove the ending prompt of the output
        output = self.remove_ending_prompt_in_output(output)
        # Debug info message
        log.debug(
            f"send_commandSSH: cleaned output: '{output}'\nsend_commandSSH: cleaned output (hex): '{output.encode().hex()}'"
        )
        # Check if there is an error in the output string (like "% Unrecognized command")
        # and generate an exception if needed
        self.check_error_output(output)
        # Return the result of the command
        return output
    async def send_commandTelnet(self, cmd, pattern=None, timeout=None):
        """
        Async method used to send data to a device over Telnet

        Writes the command, accumulates bytes until the prompt (or the
        given pattern) appears, decodes the bytes and strips the command
        echo, the leading line breaks and the trailing prompt.

        :param cmd: command to send
        :type cmd: str

        :param pattern: optional, a pattern replacing the prompt when the prompt is not expected
        :type pattern: str

        :param timeout: optional, a timeout for the command sent. Default value is self.timeout
        :type timeout: str

        :return: the output of command
        :rtype: str

        :raises Exception: when the device reports a command error
            (see check_error_output)
        """
        # Debug info message
        log.info("send_commandTelnet")
        # Default value of timeout variable
        if timeout is None:
            timeout = self.timeout
        # Add carriage return at the end of the command (mandatory to send the command)
        cmd = cmd + "\n"
        # Sending command
        self._writer.write(cmd.encode())
        # Temporary string variable
        output = ""
        # Temporary bytes variable
        byte_data = b""
        try:
            # Read data until the prompt (or pattern) shows up
            while True:
                # Read returned prompt
                byte_data += await asyncio.wait_for(
                    self._reader.read(MAX_BUFFER_DATA), timeout=timeout
                )
                # Display info message
                log.info(f"send_commandTelnet: byte_data: '{byte_data}'")
                # Temporary convertion in string. This string has the following form: "b'....'"
                # NOTE(review): prompt/pattern matching happens on this
                # repr-style string (escaped bytes), while the returned value
                # below is the utf-8 decoded text — the two differ.
                output = str(byte_data)
                # Display info message
                log.info(f"send_commandTelnet: output: '{output}'")
                # Is a patten used?
                if pattern:
                    # Use pattern instead of prompt
                    if pattern in output:
                        # Yes
                        # Leave the loop
                        break
                else:
                    # Check if prompt is found
                    if self.check_if_prompt_is_found(output):
                        # Yes
                        # Leave the loop
                        break
        except asyncio.TimeoutError:
            # Time out during when reading prompt
            # Display error message
            log.error("send_commandTelnet: connection: timeout")
            # Exception propagation
            raise
        except Exception as error:
            # Error during when reading prompt
            # Display error message
            log.error(f"send_commandTelnet: error: {error}")
            # Exception propagation
            raise
        # Convert data (bytes) into string; undecodable bytes are dropped
        output = byte_data.decode("utf-8", "ignore")
        # Debug info message
        log.debug(
            f"send_commandTelnet: raw output: '{output}'\nsend_commandTelnet: raw output (hex): '{output.encode().hex()}'"
        )
        # Remove the command sent from the result of the command
        # NOTE(review): cmd already ends with "\n" here, so the echo removed
        # is "<cmd>\n\n" — confirm this matches what the device echoes back.
        output = self.remove_command_in_output(output, str(cmd))
        # Remove the carriage return of the output
        output = self.remove_starting_carriage_return_in_output(output)
        # Remove the ending prompt of the output
        output = self.remove_ending_prompt_in_output(output)
        # Debug info message
        log.debug(
            f"send_commandTelnet: cleaned output: '{output}'\nsend_commandTelnet: cleaned output (hex): '{output.encode().hex()}'"
        )
        # Check if there is an error in the output string (like "% Unrecognized command")
        # and generate an exception if needed
        self.check_error_output(output)
        # Return the result of the command
        return output
    async def telnet_send_command_with_unexpected_pattern(
        self, cmd, pattern, error_pattern=None, timeout=None
    ):
        """
        Async method used to send command for Telnet connection to a device with possible unexpected patterns

        send_command can wait till time out if login and password are wrong. This method
        speed up the returned error message when authentication failed is identified.
        This method is limited to authentication whem password is required

        :param cmd: command to send
        :type cmd: str

        :param pattern: optional, a list of patterns located at the very end of the a returned string. Can be used
            to define a custom or unexpected prompt a the end of a string
        :type pattern: str

        :param timeout: optional, a timeout for the command sent. Default value is self.timeout
        :type timeout: str

        :param error_pattern: optional, a list of failed prompts found when the login and password are not correct
        :type error_pattern: str

        :return: the output of command
        :rtype: str

        :raises Exception: when one of the error patterns (authentication
            failure) is found in the device output
        """
        # Debug info message
        log.info("telnet_send_command_with_unexpected_pattern")
        # Default value of timeout variable
        if timeout is None:
            timeout = self.timeout
        # Add carriage return at the end of the command (mandatory to send the command)
        cmd = cmd + self._carriage_return_for_send_command
        # Sending command
        self._writer.write(cmd.encode())
        # Temporary string variable
        output = ""
        # Temporary bytes variable
        byte_data = b""
        # By default pattern is not found
        pattern_not_found = True
        try:
            # Read data until an expected pattern is found or an error
            # pattern aborts the wait
            while pattern_not_found:
                # Read returned prompt
                byte_data += await asyncio.wait_for(
                    self._reader.read(MAX_BUFFER_DATA), timeout=timeout
                )
                # Display info message
                log.info(
                    f"telnet_send_command_with_unexpected_pattern: byte_data: '{byte_data}'"
                )
                # Display debug message
                log.debug(
                    f"telnet_send_command_with_unexpected_pattern: byte_data: hex: '{byte_data.hex()}'"
                )
                # Temporary convertion in string. This string has the following form: "b'....'"
                # NOTE(review): patterns are matched against this repr-style
                # string (escaped bytes), not against decoded text.
                output = str(byte_data)
                # Display info message
                log.info(
                    f"telnet_send_command_with_unexpected_pattern: output: '{output}'"
                )
                # Is a pattern used?
                if pattern:
                    # Check all pattern of prompt in the output
                    for prompt in pattern:
                        # Display info message
                        log.info(
                            f"telnet_send_command_with_unexpected_pattern: checking prompt: '{prompt}'"
                        )
                        # A pattern found?
                        if prompt in output:
                            # Yes
                            # A pattern is found. The main loop can be stopped
                            pattern_not_found = False
                            # Display info message
                            log.info(
                                f"telnet_send_command_with_unexpected_pattern: prompt found: '{prompt}'"
                            )
                            # Leave the loop
                            break
                # Is an unexpected pattern used?
                # (only checked while no expected pattern has matched yet)
                if error_pattern and pattern_not_found:
                    # Check all unexpected pattern of prompt in the output
                    for bad_prompt in error_pattern:
                        # Display info message
                        log.info(
                            f"telnet_send_command_with_unexpected_pattern: checking unexpected prompt: '{bad_prompt}'"
                        )
                        # An error_pattern pattern found?
                        if bad_prompt in output:
                            # Yes
                            # Display error message
                            log.error(
                                "telnet_send_command_with_unexpected_pattern: authentication failed"
                            )
                            # Raise exception (the raise also exits the loop,
                            # which is why the break below stays disabled)
                            raise Exception(
                                "telnet_send_command_with_unexpected_pattern: authentication failed"
                            )
                            # Leave the loop
                            # break
        except asyncio.TimeoutError:
            # Time out during when reading prompt
            # Close the connection in order to not display RuntimeError
            await self.disconnect()
            # Display error message
            log.error(
                "telnet_send_command_with_unexpected_pattern: reading prompt: timeout"
            )
            # Exception propagation
            raise
        except Exception as error:
            # Error during when reading prompt
            # Close the connection in order to not display RuntimeError
            await self.disconnect()
            # Display error message
            log.error(
                f"telnet_send_command_with_unexpected_pattern: reading prompt: error: {error}"
            )
            # Exception propagation
            raise
        # Convert data (bytes) into string; undecodable bytes are dropped
        output = byte_data.decode("utf-8", "ignore")
        # Debug info message
        log.debug(
            f"telnet_send_command_with_unexpected_pattern: raw output: '{output}'\ntelnet_send_command_with_unexpected_pattern: raw output (hex): '{output.encode().hex()}'"
        )
        # Remove the command sent from the result of the command
        output = self.remove_command_in_output(output, str(cmd))
        # Remove the carriage return of the output
        output = self.remove_starting_carriage_return_in_output(output)
        # Remove the ending prompt of the output
        output = self.remove_ending_prompt_in_output(output)
        # Debug info message
        log.debug(
            f"telnet_send_command_with_unexpected_pattern: cleaned output: '{output}'\ntelnet_send_command_with_unexpected_pattern: cleaned output (hex): '{output.encode().hex()}'"
        )
        # Return the result of the command
        return output
async def send_config_set(self, cmds=None, timeout=None):
"""
Async method used to send command in config mode
The commands send can be either a string a list of strings. There are
3 steps:
- Entering configuration mode
- Sending the commands
- Leaving configuration mode
:param cmds: The commands to the device
:type cmds: str or list
:param timeout: optional, a timeout for the command sent. Default value is self.timeout
:type timeout: str
:return: the results of the commands sent
:rtype: list of str
"""
# Display info message
log.info("send_config_set")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# Debug info message
log.info("send_command")
# SSH?
if self._protocol == "ssh":
# Yes
# Then disconnect using SSH
output = await self.send_config_setSSH(cmds, timeout)
# Telnet?
elif self._protocol == "telnet":
# Yes
# Then disconnect using Telnet
output = await self.send_config_setTelnet(cmds, timeout)
else:
# Unsupported protocol
# Raise an exception
raise Exception(f"send_config_set: unsupported protocol: {self._protocol}")
# Return the result of the commands
return output
    async def send_config_setSSH(self, cmds=None, timeout=None):
        """
        Async method used to send command in config mode over SSH

        The commands send can be either a string a list of strings. There are
        3 steps:
        - Entering configuration mode
        - Sending the commands
        - Leaving configuration mode

        :param cmds: The commands to the device
        :type cmds: str or list

        :param timeout: optional, a timeout for the command sent. Default value is self.timeout
        :type timeout: str

        :return: the results of the commands sent
        :rtype: list of str

        NOTE(review): despite the ":rtype: list of str" above, the method
        returns a single concatenated string (returned_output) — confirm
        with callers before changing either the doc or the behavior.
        """
        # Display info message
        log.info("send_config_setSSH")
        # Default value of timeout variable
        if timeout is None:
            timeout = self.timeout
        # Clear returned output
        returned_output = ""
        # Check if cmds is a string
        if isinstance(cmds, str):
            # A string
            # Convert the string into a list
            cmds = [cmds]
        # A list?
        elif not isinstance(cmds, list):
            # Not a list (and not a string)
            # Display error message
            log.error(
                "send_config_setSSH: parameter cmds used in send_config_set is neither a string nor a list"
            )
            # Leave the method (empty string, no exception raised)
            return returned_output
        ##############################
        # Entering configuration mode
        ##############################
        # Display info message
        log.info("send_config_set: entering configuration mode")
        # Clear output
        output = ""
        # Get command for entering in config made
        cmd = self.cmd_enter_config_mode
        # Add carriage return at the end of the command (mandatory to send the command)
        cmd = cmd + self._carriage_return_for_send_command
        # Display info message
        log.info(f"send_config_setSSH: cmd = '{cmd}'")
        # Sending command
        self.stdinx.write(cmd)
        # Display message
        log.info("send_config_setSSH: configuration mode entered")
        # Read until the (config-mode) prompt is seen
        while True:
            # Read the data received
            output += await asyncio.wait_for(
                self.stdoutx.read(MAX_BUFFER_DATA), timeout=timeout
            )
            # Display info message
            log.info(f"send_config_setSSH: output: '{output}'")
            # Check if prompt is found
            if self.check_if_prompt_is_found(output):
                # Yes
                # Leave the loop
                break
        # Debug info message
        log.debug(
            f"send_config_setSSH: raw output: '{output}'\nsend_config_setSSH: raw output (hex): '{output.encode().hex()}'"
        )
        # Add the output to the returned output
        # (raw output, before any cleaning)
        returned_output += output
        # Remove the command sent from the result of the command
        output = self.remove_command_in_output(output, str(cmd))
        # Remove the carriage return of the output
        output = self.remove_starting_carriage_return_in_output(output)
        # Remove the ending prompt of the output
        output = self.remove_ending_prompt_in_output(output)
        # Display info message
        log.debug(
            f"send_config_setSSH: cleaned output: '{output}'\nsend_config_setSSH: cleaned output (hex): '{output.encode().hex()}'"
        )
        # Check if there is an error in the output string (like "% Unrecognized command")
        # and generate an exception if needed
        self.check_error_output(output)
        ##############################
        # Sending commands
        ##############################
        # Display info message
        log.info("send_config_setSSH: sending commands")
        # Clear output
        output = ""
        # Each command
        for cmd in cmds:
            # Add carriage return at the end of the command (mandatory to send the command)
            cmd = cmd + self._carriage_return_for_send_command
            # Display info message
            log.info(f"send_config_setSSH: cmd = '{cmd}'")
            # Sending command
            self.stdinx.write(cmd)
            # Display info message
            log.info("send_config_setSSH: command sent")
            # Read until the prompt is seen again
            # (output accumulates across commands in this loop)
            while True:
                # Read the data received
                output += await asyncio.wait_for(
                    self.stdoutx.read(MAX_BUFFER_DATA), timeout=timeout
                )
                # Display info message
                log.info(f"send_config_setSSH: output: '{output}'")
                # Check if prompt is found
                if self.check_if_prompt_is_found(output):
                    # Yes
                    # Leave the loop
                    break
            # Debug info message
            log.debug(
                f"send_config_setSSH: raw output: '{output}'\nsend_config_setSSH: raw output (hex): '{output.encode().hex()}'"
            )
            # Add the output to the returned output
            returned_output += output
            # Remove the command sent from the result of the command
            output = self.remove_command_in_output(output, str(cmd))
            # Remove the carriage return of the output
            output = self.remove_starting_carriage_return_in_output(output)
            # Remove the ending prompt of the output
            output = self.remove_ending_prompt_in_output(output)
            # Display info message
            log.debug(
                f"send_config_setSSH: cleaned output: '{output}'\nsend_config_setSSH: cleaned output (hex): '{output.encode().hex()}'"
            )
            # Check if there is an error in the output string (like "% Unrecognized command")
            # and generate an exception if needed
            self.check_error_output(output)
        ##############################
        # Leaving configuration mode
        ##############################
        # Display info message
        log.info("send_config_setSSH: leaving configuration mode")
        # Clear output
        output = ""
        # Get command to leave config made
        cmd = self.cmd_exit_config_mode
        # Add carriage return at the end of the command (mandatory to send the command)
        cmd = cmd + self._carriage_return_for_send_command
        # Display info message
        log.info(f"send_config_setSSH: cmd = '{cmd}'")
        # Sending command
        self.stdinx.write(cmd)
        # Display info message
        log.info("send_config_setSSH: command to leave configuration mode sent")
        # Read until the (exec-mode) prompt is seen
        while True:
            # Read the data received
            output += await asyncio.wait_for(
                self.stdoutx.read(MAX_BUFFER_DATA), timeout=timeout
            )
            # Display info message
            log.info(f"send_config_setSSH: output: '{output}'")
            # Check if prompt is found
            if self.check_if_prompt_is_found(output):
                # Yes
                # Leave the loop
                break
        # Debug info message
        log.debug(
            f"send_config_setSSH: raw output: '{output}'\nsend_config_setSSH: raw output (hex): '{output.encode().hex()}'"
        )
        # Add the output to the returned output
        returned_output += output
        # Remove the command sent from the result of the command
        output = self.remove_command_in_output(output, str(cmd))
        # Remove the carriage return of the output
        output = self.remove_starting_carriage_return_in_output(output)
        # Remove the ending prompt of the output
        output = self.remove_ending_prompt_in_output(output)
        # Display info message
        log.debug(
            f"send_config_setSSH: cleaned output: '{output}'\nsend_config_setSSH: cleaned output (hex): '{output.encode().hex()}'"
        )
        # Check if there is an error in the output string (like "% Unrecognized command")
        # and generate an exception if needed
        self.check_error_output(output)
        # Return the result of the commands
        return returned_output
async def send_config_setTelnet(self, cmds=None, timeout=None):
"""
Async method used to send command in config mode
The commands send can be either a string a list of strings. There are
3 steps:
- Entering configuration mode
- Sending the commands
- Leaving configuration mode
:param cmds: The commands to the device
:type cmds: str or list
:param timeout: optional, a timeout for the command sent. Default value is self.timeout
:type timeout: str
:return: the results of the commands sent
:rtype: list of str
"""
# Display info message
log.info("send_config_setTelnet")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# Clear returned output
returned_output = ""
# Check if cmds is a string
if isinstance(cmds, str):
# A string
# Convert the string into a list
cmds = [cmds]
# A list?
elif not isinstance(cmds, list):
# Not a list (and not a string)
# Display error message
log.error(
"send_config_setTelnet: parameter cmds used in send_config_set is neither a string or a list"
)
# Leave the method
return returned_output
##############################
# Entering configuration mode
##############################
# Display info message
log.info("send_config_setTelnet: entering configuration mode")
# Clear output
output = ""
# Get command for entering in config made
cmd = self.cmd_enter_config_mode
# Add carriage return at the end of the command (mandatory to send the command)
cmd = cmd + self._carriage_return_for_send_command
# Display info message
log.info(f"send_config_setTelnet: cmd = '{cmd}'")
# Sending command
self._writer.write(cmd.encode())
# Display message
log.info("send_config_setTelnet: configuration mode entered")
# Temporary string variable
output = ""
# Temporary bytes variable
byte_data = b""
try:
# Read data
while True:
# Read the data received
byte_data += await asyncio.wait_for(
self._reader.read(MAX_BUFFER_DATA), timeout=timeout
)
# Temporary convertion in string. This string has the following form: "b'....'"
output = str(byte_data)
# Display info message
log.info(f"send_config_setTelnet: output: '{output}'")
# Check if prompt is found
if self.check_if_prompt_is_found(output):
# Yes
# Leave the loop
break
except asyncio.TimeoutError:
# Time out during when reading prompt
# Display error message
log.error("send_config_setTelnet: connection: timeout")
# Exception propagation
raise
except Exception as error:
# Error during when reading prompt
# Display error message
log.error(f"send_config_setTelnet: error: {error}")
# Exception propagation
raise
# Convert data (bytes) into string
output = byte_data.decode("utf-8", "ignore")
# Debug info message
log.debug(
f"send_config_setTelnet: raw output: '{output}'\nsend_config_setTelnet: raw output (hex): '{output.encode().hex()}'"
)
# Add the output to the returned output
returned_output += output
# Remove the command sent from the result of the command
output = self.remove_command_in_output(output, str(cmd))
# Remove the carriage return of the output
output = self.remove_starting_carriage_return_in_output(output)
# Remove the ending prompt of the output
output = self.remove_ending_prompt_in_output(output)
# Display info message
log.debug(
f"send_config_setTelnet: cleaned output: '{output}'\nsend_config_setTelnet: cleaned output (hex): '{output.encode().hex()}'"
)
# Check if there is an error in the output string (like "% Unrecognized command")
# and generate an exception if needed
self.check_error_output(output)
##############################
# Sending commands
##############################
# Display info message
log.info("send_config_setTelnet: sending commands")
# Clear output
output = ""
# Each command
for cmd in cmds:
# Add carriage return at the end of the command (mandatory to send the command)
cmd = cmd + self._carriage_return_for_send_command
# Display info message
log.info(f"send_config_setTelnet: cmd = '{cmd}'")
# Sending command
self._writer.write(cmd.encode())
# Display info message
log.info("send_config_setTelnet: command sent")
# Temporary string variable
output = ""
# Temporary bytes variable
byte_data = b""
try:
# Read data
while True:
# Read the data received
byte_data += await asyncio.wait_for(
self._reader.read(MAX_BUFFER_DATA), timeout=timeout
)
# Temporary convertion in string. This string has the following form: "b'....'"
output = str(byte_data)
# Display info message
log.info(f"send_config_setTelnet: output: '{output}'")
# Check if prompt is found
if self.check_if_prompt_is_found(output):
# Yes
# Leave the loop
break
except asyncio.TimeoutError:
# Time out during when reading prompt
# Display error message
log.error("send_config_setTelnet: connection: timeout")
# Exception propagation
raise
except Exception as error:
# Error during when reading prompt
# Display error message
log.error(f"send_config_setTelnet: error: {error}")
# Exception propagation
raise
# Convert data (bytes) into string
output = byte_data.decode("utf-8", "ignore")
# Debug info message
log.debug(
f"send_config_setTelnet: raw output: '{output}'\nsend_config_setTelnet: raw output (hex): '{output.encode().hex()}'"
)
# Add the output to the returned output
returned_output += output
# Remove the command sent from the result of the command
output = self.remove_command_in_output(output, str(cmd))
# Remove the carriage return of the output
output = self.remove_starting_carriage_return_in_output(output)
# Remove the ending prompt of the output
output = self.remove_ending_prompt_in_output(output)
# Display info message
log.debug(
f"send_config_setTelnet: cleaned output: '{output}'\nsend_config_setTelnet: cleaned output (hex): '{output.encode().hex()}'"
)
# Check if there is an error in the output string (like "% Unrecognized command")
# and generate an exception if needed
self.check_error_output(output)
##############################
# Leaving configuration mode
##############################
# Display info message
log.info("send_config_setTelnet: leaving configuration mode")
# Clear output
output = ""
# Get command to leave config made
cmd = self.cmd_exit_config_mode
# Add carriage return at the end of the command (mandatory to send the command)
cmd = cmd + self._carriage_return_for_send_command
# Display info message
log.info(f"send_config_setTelnet: cmd = '{cmd}'")
# Sending command
self._writer.write(cmd.encode())
# Display info message
log.info("send_config_setTelnet: command to leave configuration mode sent")
# Temporary string variable
output = ""
# Temporary bytes variable
byte_data = b""
# Protection against infinite loop
loop = 3
try:
# Read data
while loop:
# Read the data received
byte_data += await asyncio.wait_for(
self._reader.read(MAX_BUFFER_DATA), timeout=timeout
)
# Temporary convertion in string. This string has the following form: "b'....'"
output = str(byte_data)
# Display info message
log.info(f"send_config_setTelnet: output: '{output}'")
await asyncio.sleep(0.5)
# Check if prompt is found
if self.check_if_prompt_is_found(output):
# Yes
# Leave the loop
break
# Protection for "exit" command infinite loop in Cisco when enable is not activated
loop -= 1
except asyncio.TimeoutError:
# Time out during when reading prompt
# Display error message
log.error("send_config_setTelnet: connection: timeout")
# Exception propagation
raise
except Exception as error:
# Error during when reading prompt
# Display error message
log.error(f"send_config_setTelnet: error: {error}")
# Exception propagation
raise
# Convert data (bytes) into string
output = byte_data.decode("utf-8", "ignore")
# Debug info message
log.debug(
f"send_config_setTelnet: raw output: '{output}'\nsend_config_setTelnet: raw output (hex): '{output.encode().hex()}'"
)
# Add the output to the returned output
returned_output += output
# Remove the command sent from the result of the command
output = self.remove_command_in_output(output, str(cmd))
# Remove the carriage return of the output
output = self.remove_starting_carriage_return_in_output(output)
# Remove the ending prompt of the output
output = self.remove_ending_prompt_in_output(output)
# Display info message
log.debug(
f"send_config_setTelnet: cleaned output: '{output}'\nsend_config_setTelnet: cleaned output (hex): '{output.encode().hex()}'"
)
# Check if there is an error in the output string (like "% Unrecognized command")
# and generate an exception if needed
self.check_error_output(output)
# Return the result of the commands
return returned_output
#########################################################
#
# List of API
#
#########################################################
async def get_version(self):
"""
Asyn method used to get the version of the software of the device
:return: Version of the software of the device
:rtype: str
"""
# Display info message
log.info("get_version")
# By default empty string
version = ""
# Run get version on the device
output = await self.send_command(self.cmd_get_version)
# Seek "Version " and "," to get the version in the returned output
version = output.split("Version ")[1].split(",")[0]
# Display info message
log.info(f"get_version: version: {version}")
# Return the version of the software of the device
return version
async def get_hostname(self):
"""
Asyn method used to get the name of the device
:return: Name of the device
:rtype: str
"""
# Display info message
log.info("get_hostname")
# Get hostname
output = await self.send_command(self.cmd_get_hostname)
# Display info message
log.info(f"get_hostname: output: '{output}'")
# Remove the useless information in the returned string
output = output.split()[0]
# Display info message
log.info(f"get_hostname: hostname found: '{output}'")
# Return the name of the device
return output
async def get_model(self):
"""
Asyn method used to get the model of the device
:return: Model of the device
:rtype: str
"""
# Display info message
log.info("get_model")
# Get model
output = await self.send_command(self.cmd_get_model)
# Display info message
log.info(f"get_model: output: '{output}'")
# Remove the useless information in the returned string
output = output.split('"')[3]
# Display info message
log.info(f"get_model: model found: '{output}'")
# Return the model of the device
return output
async def get_serial_number(self):
"""
Get serial number of the switch or the serial number of the first switch of a stack
:return: Serial number of the device
:rtype: str
"""
# Display info message
log.info("get_serial_number")
# Get serial number
output = await self.send_command(self.cmd_get_serial_number)
# Display info message
log.info(f"get_serial_number: output: '{output}'")
# Remove the useless information in the returned string
output = output.splitlines()[0].split()[-1]
# Display info message
log.info(f"get_hostname: hostname found: '{output}'")
# Return the serial number of the device
return output
async def get_config(self, timeout=None):
"""
Asyn method used to get the configuration of the device
:param timeout: optional, a timeout for the command sent. Default value is self.timeout
:type timeout: str
:return: Configuration of the device
:rtype: str
"""
# Display info message
log.info("get_config")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# Get config
output = await self.send_command(self.cmd_get_config, timeout=timeout)
# Return de configuration of the device
return output
async def save_config(self):
"""
Asyn method used to save the current configuration on the device
:return: Commands of the configuration saving process
:rtype: str
"""
# Display info message
log.info("save_config")
# Send command
output = await self.send_command(self.cmd_save_config)
# Return the commands of the configuration saving process
return output
|
3,737 | 03d07f5f4647e904c288e828b8f8e7de35740054 | #!/usr/bin/env python
import errno
import logging
import os
import re
import sys
import argparse
def parse_map(map_str):
    """Parse the map-file body into an ordered list of (find, replace) pairs.

    Each non-empty line has the form "<find> -- <replace>". Only the first
    " -- " separator is significant, so replacements may themselves contain
    " -- ". A non-empty line without the separator raises ValueError.
    """
    def _split_rule(line):
        find, replace = line.split(' -- ', 1)
        return (find, replace)

    return [_split_rule(line) for line in map_str.split('\n') if line]
def map_file(file_map, d, f):
    """Map file *f* in directory *d* to its install destination.

    Rules are tried in order; the first rule whose regex matches wins. A rule
    whose pattern contains '/' is matched against (and substituted into) the
    joined path d/f; otherwise only the bare filename is matched and the
    directory is re-attached to the result. A replacement of '!ignore' means
    "skip this file" and returns None.

    Raises ValueError when no rule matches.
    """
    for find, repl in file_map:
        if '/' in find:
            source = os.path.join(d, f)
            includes_path = True
        else:
            source = f
            includes_path = False
        match = re.match(find, source)
        if not match:
            # BUG FIX: the original raised ValueError here, inside the loop,
            # so any file not matched by the *first* rule aborted instead of
            # falling through to later rules. Keep trying.
            continue
        if repl == '!ignore':
            return None
        ret = re.sub(find, repl, source)
        return ret if includes_path else os.path.join(d, ret)
    raise ValueError('File {} does not match any rules.'.format(f))
def install_file(source, dest):
    """Hard-link *source* to *dest*, creating parent directories as needed.

    Honors the global CONFIG flags: with CONFIG.force an existing dest is
    removed first; with CONFIG.noop nothing is actually linked. Returns True
    when dest is already the same file, False when a different existing file
    blocks the install, and None otherwise.
    """
    dest = os.path.expanduser(dest)
    logging.debug('Processing {}'.format(source))
    try:
        dirname = os.path.dirname(dest)
        if dirname:
            os.makedirs(dirname)
    except OSError as e:
        # Error 'File Exists' is ok, all others are a problem.
        if e.errno != errno.EEXIST:
            raise
    if os.path.exists(dest):
        if CONFIG.force:
            os.unlink(dest)
        elif os.path.samefile(source, dest):
            # Already installed (same inode): nothing to do.
            return True
        else:
            logging.warning('Not replacing existing file {} with {}.'.format(dest, source))
            return False
    logging.info('Linking {} to {}'.format(source, dest))
    if not CONFIG.noop:
        # NOTE(review): this is a hard link, not a symlink -- edits propagate
        # both ways, but the link silently breaks if the repo file is later
        # replaced with a new inode. Confirm this is intended.
        os.link(source, dest)
class ChangeDir(object):
    """Context manager that chdirs into *path* and restores the previous
    working directory on exit."""

    def __init__(self, path):
        self.path = path
        # BUG FIX: the original stored os.path.curdir, which is the literal
        # string '.', so __exit__ chdir'ed to '.' and never actually restored
        # the starting directory. Capture the real cwd instead.
        self.olddir = os.getcwd()

    def __enter__(self):
        # Re-capture at entry time in case the cwd changed since __init__.
        self.olddir = os.getcwd()
        os.chdir(self.path)

    def __exit__(self, *args):
        os.chdir(self.olddir)
def clamp(n, bottom, top):
    """Return *n* limited to the inclusive range [bottom, top]."""
    if n < bottom:
        return bottom
    if n > top:
        return top
    return n
CONFIG = None
def loadConfig():
    """Parse command-line flags into the module-level CONFIG namespace.

    -v and -q accumulate into a verbosity score: 2 is the neutral level and
    the result is clamped to 0-4, the index range of the log-level table used
    in main().
    """
    global CONFIG
    parser = argparse.ArgumentParser(description='Install dotfiles.')
    parser.add_argument('-n', '--noop', action='store_true')
    parser.add_argument('-v', '--verbose', action='append_const', const=1)
    parser.add_argument('-q', '--quiet', action='append_const', dest='verbose', const=-1)
    parser.add_argument('-f', '--force', action='store_true')
    opt = parser.parse_args()
    # opt.verbose is None when neither flag was given, otherwise a list of
    # +1 / -1 entries, one per flag occurrence.
    opt.verbose = clamp(2 + sum(opt.verbose or [0]), 0, 4)
    CONFIG = opt
def main():
    """Entry point: read the `map` rule file and install everything found
    under `configs/` to its mapped destination."""
    loadConfig()
    # Indexed by the clamped verbosity (0-4) computed in loadConfig().
    log_levels = [logging.CRITICAL, logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
    logging.basicConfig(level=log_levels[CONFIG.verbose])
    if CONFIG.noop:
        logging.info('Running in no-op mode')
    try:
        with open('map') as f:
            map_str = f.read()
    except IOError:
        logging.error('Could not open map file.')
        sys.exit(1)
    file_map = parse_map(map_str)
    with ChangeDir('configs'):
        for root, dirs, files in os.walk('.'):
            # Remove leading ./ or .
            root = re.sub(r'^./?', '', root)
            for f in files:
                try:
                    # A None destination means the matching rule said '!ignore'.
                    dest = map_file(file_map, root, f)
                    if dest is not None:
                        install_file(os.path.join(root, f), dest)
                except ValueError:
                    logging.error('File "{}" does not match any rules.'.format(f))
if __name__ == '__main__':
    # os.path.dirname(__file__) is '' when the script is invoked by bare
    # filename from its own directory, and os.chdir('') raises
    # FileNotFoundError; resolve to an absolute path first.
    os.chdir(os.path.dirname(os.path.abspath(__file__)))
    main()
|
3,738 | f614287a2a118484b67f2b16e429a3335416d186 | # Copyright (c) 2008 Johns Hopkins University.
# All rights reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose, without fee, and without written
# agreement is hereby granted, provided that the above copyright
# notice, the (updated) modification history and the author appear in
# all copies of this source code.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS `AS IS'
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, LOSS OF USE, DATA,
# OR PROFITS) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
# @author Razvan Musaloiu-E. <razvanm@cs.jhu.edu>
"""A library that implements the T2 serial communication.
This library has two parts: one that deals with sending and receiving
packets using the serial format from T2 (TEP113) and a second one that
tries to simplifies the work with arbitrary packets.
"""
import sys, struct, time, serial, socket, operator, thread
import Queue
from threading import Lock, Condition
__version__ = "$Id: tos.py,v 1.1 2008/05/17 01:17:03 razvanm Exp $"
__all__ = ['Serial', 'AM',
'Packet', 'RawPacket',
'AckFrame', 'DataFrame', 'NoAckDataFrame',
'ActiveMessage']
def list2hex(v):
    """Render a sequence of byte values as space-separated two-digit hex."""
    return " ".join("%02x" % byte for byte in v)
class Serial:
    """
    A Serial object offers a way to send and receive data using a
    HDLC-like framing (the T2 serial protocol, TEP113).

    A background thread (run) continuously reads frames into a bounded
    queue; write() blocks until the peer acknowledges the frame, retrying
    until acked.
    """
    # HDLC framing bytes: frame delimiter and escape marker.
    HDLC_FLAG_BYTE = 0x7e
    HDLC_CTLESC_BYTE = 0x7d

    TOS_SERIAL_ACTIVE_MESSAGE_ID = 0
    TOS_SERIAL_CC1000_ID = 1
    TOS_SERIAL_802_15_4_ID = 2
    TOS_SERIAL_UNKNOWN_ID = 255

    # Serial protocol dispatch values (TEP113).
    SERIAL_PROTO_ACK = 67
    SERIAL_PROTO_PACKET_ACK = 68
    SERIAL_PROTO_PACKET_NOACK = 69
    SERIAL_PROTO_PACKET_UNKNOWN = 255

    def __init__(self, port, baudrate, flush=False, debug=False, qsize=10):
        self._debug = debug
        self._in_queue = Queue.Queue(qsize)
        self._out_lock = Lock()
        self._out_ack = Condition()
        self._seqno = 0
        self._ack = None
        self._write_counter = 0
        self._write_counter_failures = 0
        self._read_counter = 0
        self._ts = None

        # First open with a short timeout so the optional flush loop below
        # can poll; reopened blocking afterwards for the reader thread.
        self._s = serial.Serial(port, baudrate, rtscts=0, timeout=0.5)
        self._s.flushInput()
        start = time.time();
        if flush:
            print >>sys.stdout, "Flushing the serial port",
            while time.time() - start < 1:
                p = self._read()
                sys.stdout.write(".")
            if not self._debug:
                sys.stdout.write("\n")
        self._s.close()
        self._s = serial.Serial(port, baudrate, rtscts=0, timeout=None)

        thread.start_new_thread(self.run, ())

    def run(self):
        """Reader thread: route ACK frames to the writer, print AM type 100
        (printf debug) messages, queue everything else."""
        while True:
            p = self._read()
            self._read_counter += 1
            if self._debug:
                print "Serial:run: got a packet(%d): %s" % (self._read_counter, p)
            ack = AckFrame(p.data)
            if ack.protocol == self.SERIAL_PROTO_ACK:
                if not self._ack:
                    self._ack = ack
                if self._debug:
                    print "Serial:run: got an ack:", ack
                self._ack = ack
                # Wake up the writer
                self._out_ack.acquire()
                self._out_ack.notify()
                self._out_ack.release()
            else:
                ampkt = ActiveMessage(NoAckDataFrame(p.data).data)
                if ampkt.type == 100:
                    # AM type 100 carries printf-style debug text from the mote.
                    for t in "".join([chr(i) for i in ampkt.data]).strip('\n\0').split('\n'):
                        print "PRINTF:", t.strip('\n')
                else:
                    if self._in_queue.full():
                        # Bounded queue: drop the oldest packet on overflow.
                        print "Warning: Buffer overflow"
                        self._in_queue.get()
                    self._in_queue.put(p, block=False)

    # Returns the next incoming serial packet
    def _read(self):
        """Wait for a packet and return it as a RawPacket."""
        try:
            # Scan forward to an HDLC flag byte (start of frame).
            d = self._get_byte()
            ts = time.time()
            while d != self.HDLC_FLAG_BYTE:
                d = self._get_byte()
                ts = time.time()
            packet = [d]
            # A second flag byte immediately after means we caught the end of
            # the previous frame; skip it and start fresh.
            d = self._get_byte()
            if d == self.HDLC_FLAG_BYTE:
                d = self._get_byte()
                ts = time.time()
            else:
                packet.append(d)
            while d != self.HDLC_FLAG_BYTE:
                d = self._get_byte()
                packet.append(d)
            if self._debug == True:
                print "Serial:_read: unescaped", packet
            packet = self._unescape(packet)
            # CRC covers everything between the flags except the 2 CRC bytes.
            crc = self._crc16(0, packet[1:-3])
            packet_crc = self._decode(packet[-3:-1])
            if crc != packet_crc:
                print "Warning: wrong CRC! %x != %x %s" % (crc, packet_crc, ["%2x" % i for i in packet])
            if self._debug:
                if self._ts == None:
                    self._ts = ts
                else:
                    print "Serial:_read: %.4f (%.4f) Recv:" % (ts, ts - self._ts), self._format_packet(packet[1:-3])
                self._ts = ts
            return RawPacket(ts, packet[1:-3], crc == packet_crc)
        except socket.timeout:
            return None

    def read(self, timeout=None):
        """Return the next queued packet with a valid CRC as a NoAckDataFrame.

        NOTE(review): with the default timeout=None the Python 2 comparison
        `time.time() - start < None` is always False, so the method returns
        None immediately; callers appear to be expected to pass 0 (wait
        indefinitely) or a numeric timeout -- confirm.
        """
        start = time.time();
        done = False
        while not done:
            p = None
            while p == None:
                if timeout == 0 or time.time() - start < timeout:
                    try:
                        p = self._in_queue.get(True, timeout)
                    except Queue.Empty:
                        return None
                else:
                    return None
            if p.crc:
                done = True
            else:
                # Drop corrupted packets and keep waiting.
                p = None
        # In the current TinyOS the packets from the mote are always NoAckDataFrame
        return NoAckDataFrame(p.data)

    def write(self, payload):
        """
        Write a packet. If the payload argument is a list, it is
        assumed to be exactly the payload. Otherwise the payload is
        assume to be a Packet and the real payload is obtain by
        calling the .payload().
        """
        if type(payload) != type([]):
            # Assume this will be derived from Packet
            payload = payload.payload()
        self._out_lock.acquire()
        self._seqno = (self._seqno + 1) % 100
        packet = DataFrame();
        packet.protocol = self.SERIAL_PROTO_PACKET_ACK
        packet.seqno = self._seqno
        packet.dispatch = 0
        packet.data = payload
        packet = packet.payload()
        # Append CRC (little-endian) then HDLC-escape and frame the packet.
        crc = self._crc16(0, packet)
        packet.append(crc & 0xff)
        packet.append((crc >> 8) & 0xff)
        packet = [self.HDLC_FLAG_BYTE] + self._escape(packet) + [self.HDLC_FLAG_BYTE]

        while True:
            self._put_bytes(packet)
            self._write_counter += 1
            if self._debug == True:
                print "Send(%d/%d): %s" % (self._write_counter, self._write_counter_failures, packet)
                print "Wait for ack %d ..." % (self._seqno)
            # Wait (up to 0.2s) for the reader thread to signal an ack.
            self._out_ack.acquire()
            self._out_ack.wait(0.2)
            if self._debug:
                print "Wait for ack %d done. Latest ack:" % (self._seqno), self._ack
            self._out_ack.release()
            if self._ack and self._ack.seqno == self._seqno:
                if self._debug:
                    print "The packet was acked."
                self._out_lock.release()
                if self._debug:
                    print "Returning from Serial.write..."
                return True
            else:
                self._write_counter_failures += 1
                if self._debug:
                    print "The packet was not acked. Try again."
            # break # make only one sending attempt
        self._out_lock.release()
        return False

    def _format_packet(self, payload):
        """Pretty-print a frame for debug output."""
        f = NoAckDataFrame(payload)
        if f.protocol == self.SERIAL_PROTO_ACK:
            rpacket = AckFrame(payload)
            return "Ack seqno: %d" % (rpacket.seqno)
        else:
            rpacket = ActiveMessage(f.data)
            return "D: %04x S: %04x L: %02x G: %02x T: %02x | %s" % \
                   (rpacket.destination, rpacket.source,
                    rpacket.length, rpacket.group, rpacket.type,
                    list2hex(rpacket.data))

    def _crc16(self, base_crc, frame_data):
        """CRC-16 with polynomial 0x1021 over frame_data, seeded with base_crc."""
        crc = base_crc
        for b in frame_data:
            crc = crc ^ (b << 8)
            for i in range(0, 8):
                if crc & 0x8000 == 0x8000:
                    crc = (crc << 1) ^ 0x1021
                else:
                    crc = crc << 1
                crc = crc & 0xffff
        return crc

    def _encode(self, val, dim):
        """Encode val as a list of dim bytes, least-significant byte first."""
        output = []
        for i in range(dim):
            output.append(val & 0xFF)
            val = val >> 8
        return output

    def _decode(self, v):
        """Decode a little-endian byte list into an integer."""
        r = long(0)
        for i in v[::-1]:
            r = (r << 8) + i
        return r

    def _get_byte(self):
        """Read one byte from the serial port; raise socket.timeout on timeout."""
        try:
            r = struct.unpack("B", self._s.read())[0]
            return r
        except struct.error:
            # Serial port read timeout
            raise socket.timeout

    def _put_bytes(self, data):
        """Write a list of byte values to the serial port."""
        #print "DEBUG: _put_bytes:", data
        for b in data:
            self._s.write(struct.pack('B', b))

    def _unescape(self, packet):
        """Undo HDLC byte stuffing: 0x7d x -> x ^ 0x20."""
        r = []
        esc = False
        for b in packet:
            if esc:
                r.append(b ^ 0x20)
                esc = False
            elif b == self.HDLC_CTLESC_BYTE:
                esc = True
            else:
                r.append(b)
        return r

    def _escape(self, packet):
        """Apply HDLC byte stuffing to flag and escape bytes."""
        r = []
        for b in packet:
            if b == self.HDLC_FLAG_BYTE or b == self.HDLC_CTLESC_BYTE:
                r.append(self.HDLC_CTLESC_BYTE)
                r.append(b ^ 0x20)
            else:
                r.append(b)
        return r

    def debug(self, debug):
        """Enable or disable verbose debug printing."""
        self._debug = debug
class SFClient:
def __init__(self, host, port, qsize=10):
self._in_queue = Queue(qsize)
self._s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._s.connect((host, port))
data = self._s.recv(2)
if data != 'U ':
print "Wrong handshake"
self._s.send("U ")
print "Connected"
thread.start_new_thread(self.run, ())
def run(self):
while True:
length = ord(self._s.recv(1))
data = self._s.recv(length)
data = [ord(c) for c in data][1:]
#print "Recv %d bytes" % (length), ActiveMessage(data)
if self._in_queue.full():
print "Warning: Buffer overflow"
self._in_queue.get()
p = RawPacket()
p.crc = 1
p.data = data
self._in_queue.put(p, block=False)
def read(self, timeout=0):
return self._in_queue.get()
def write(self, payload):
print "SFClient: write:", payload
if type(payload) != type([]):
# Assume this will be derived from Packet
payload = payload.payload()
payload = [0] + payload
self._s.send(chr(len(payload)))
self._s.send(''.join([chr(c) for c in payload]))
return True
class AM:
    """Thin adapter that frames reads/writes of an underlying transport
    (Serial or SFClient) as ActiveMessage packets."""

    def __init__(self, s):
        # Underlying transport; must expose read(timeout) and write(packet).
        self._s = s

    def read(self, timeout=None):
        """Return the next ActiveMessage, or the falsy read result
        (e.g. None on timeout) unchanged."""
        raw = self._s.read(timeout)
        return ActiveMessage(raw.data) if raw else raw

    def write(self, packet, amid):
        """Send *packet* wrapped in an ActiveMessage with AM id *amid*."""
        return self._s.write(ActiveMessage(packet, amid=amid))
class Packet:
    """
    The Packet class offers a handy way to build pack and unpack
    binary data based on a given pattern.

    A pattern (desc) is a list of (name, type, size) tuples where type is
    one of 'int' (big-endian integer of `size` bytes), 'bint' (bit field of
    `size` bits), 'string' (`size` bytes as a str) or 'blob' (a raw byte
    list; size None means "the rest", negative means "all but the last -size
    bytes"). Fields are accessible both as attributes and as mapping keys.
    """
    def _decode(self, v):
        # Big-endian byte list -> integer. (Was `long(0)`; plain 0 behaves
        # identically on Python 2, which auto-promotes, and is 3-portable.)
        r = 0
        for i in v:
            r = (r << 8) + i
        return r

    def _encode(self, val, dim):
        # Integer -> big-endian list of `dim` bytes.
        output = []
        for i in range(dim):
            output.append(int(val & 0xFF))
            val = val >> 8
        output.reverse()
        return output

    def __init__(self, desc, packet = None):
        offset = 0
        boffset = 0
        # Resolve a trailing open-ended blob: a None size becomes the
        # negative total of the sizes that follow it. (Renamed the local from
        # `sum`, which shadowed the builtin.)
        total = 0
        for i in range(len(desc)-1, -1, -1):
            (n, t, s) = desc[i]
            if s == None:
                if total > 0:
                    desc[i] = (n, t, -total)
                break
            total += s
        self.__dict__['_schema'] = [(t, s) for (n, t, s) in desc]
        self.__dict__['_names'] = [n for (n, t, s) in desc]
        self.__dict__['_values'] = []
        if type(packet) == type([]):
            # Unpack a byte list according to the schema.
            for (t, s) in self._schema:
                if t == 'int':
                    self._values.append(self._decode(packet[offset:offset + s]))
                    offset += s
                elif t == 'bint':
                    doffset = 8 - (boffset + s)
                    self._values.append((packet[offset] >> doffset) & ((1<<s) - 1))
                    boffset += s
                    if boffset == 8:
                        offset += 1
                        boffset = 0
                elif t == 'string':
                    self._values.append(''.join([chr(i) for i in packet[offset:offset + s]]))
                    offset += s
                elif t == 'blob':
                    if s:
                        if s > 0:
                            self._values.append(packet[offset:offset + s])
                            offset += s
                        else:
                            self._values.append(packet[offset:s])
                            offset = len(packet) + s
                    else:
                        self._values.append(packet[offset:])
        elif type(packet) == type(()):
            # A tuple supplies the field values directly.
            for i in packet:
                self._values.append(i)
        else:
            # No packet: initialize every field to None.
            for v in self._schema:
                self._values.append(None)

    def __repr__(self):
        return self._values.__repr__()

    def __str__(self):
        r = ""
        for i in range(len(self._names)):
            r += "%s: %s " % (self._names[i], self._values[i])
        for i in range(len(self._names), len(self._values)):
            r += "%s" % self._values[i]
        return r

    # Implement the map behavior
    def __getitem__(self, key):
        return self.__getattr__(key)

    def __setitem__(self, key, value):
        self.__setattr__(key, value)

    def __len__(self):
        return len(self._values)

    def keys(self):
        return self._names

    def values(self):
        # BUG FIX: previously returned self._names (copy-paste from keys()).
        return self._values

    # Implement the struct behavior
    def __getattr__(self, name):
        if type(name) == type(0):
            return self._names[name]
        else:
            return self._values[self._names.index(name)]

    def __setattr__(self, name, value):
        if type(name) == type(0):
            self._values[name] = value
        else:
            self._values[self._names.index(name)] = value

    def __ne__(self, other):
        if other.__class__ == self.__class__:
            return self._values != other._values
        else:
            return True

    def __eq__(self, other):
        if other.__class__ == self.__class__:
            return self._values == other._values
        else:
            return False

    def __nonzero__(self):
        return True;

    # Custom
    def names(self):
        return self._names

    def sizes(self):
        return self._schema

    def payload(self):
        """Pack the field values back into a flat byte list."""
        r = []
        boffset = 0
        for i in range(len(self._schema)):
            (t, s) = self._schema[i]
            if t == 'int':
                r += self._encode(self._values[i], s)
                boffset = 0
            elif t == 'bint':
                doffset = 8 - (boffset + s)
                if boffset == 0:
                    r += [self._values[i] << doffset]
                else:
                    r[-1] |= self._values[i] << doffset
                boffset += s
                if boffset == 8:
                    boffset = 0
            elif self._values[i] != []:
                r += self._values[i]
        # Any extra values beyond the schema are appended verbatim.
        for i in self._values[len(self._schema):]:
            r += i
        return r
class RawPacket(Packet):
    """Timestamped raw frame: ts (4 bytes), crc-valid flag (1 byte), payload blob."""
    def __init__(self, ts = None, data = None, crc = None):
        Packet.__init__(self,
                        [('ts' , 'int', 4),
                         ('crc', 'int', 1),
                         ('data', 'blob', None)],
                        None)
        self.ts = ts;
        self.data = data
        self.crc = crc
class AckFrame(Packet):
    """Serial-protocol ACK: protocol id plus the sequence number being acknowledged."""
    def __init__(self, payload = None):
        Packet.__init__(self,
                        [('protocol', 'int', 1),
                         ('seqno', 'int', 1)],
                        payload)
class DataFrame(Packet):
    """Acknowledged serial frame: protocol, seqno, dispatch byte, then payload."""
    def __init__(self, payload = None):
        if payload != None and type(payload) != type([]):
            # Assume is a Packet
            payload = payload.payload()
        Packet.__init__(self,
                        [('protocol', 'int', 1),
                         ('seqno', 'int', 1),
                         ('dispatch', 'int', 1),
                         ('data', 'blob', None)],
                        payload)
class NoAckDataFrame(Packet):
    """Unacknowledged serial frame: protocol, dispatch byte, then payload."""
    def __init__(self, payload = None):
        if payload != None and type(payload) != type([]):
            # Assume is a Packet
            payload = payload.payload()
        Packet.__init__(self,
                        [('protocol', 'int', 1),
                         ('dispatch', 'int', 1),
                         ('data', 'blob', None)],
                        payload)
class ActiveMessage(Packet):
    """TinyOS Active Message: destination, source, length, group, AM type, data.

    Construct either from a raw byte list (parsed by the Packet schema) or
    from a Packet-like object, in which case its payload() becomes `data`
    and the header fields are filled from the keyword arguments.
    """
    def __init__(self, gpacket = None, amid = 0x00, dest = 0xFFFF):
        if type(gpacket) == type([]):
            payload = gpacket
        else:
            # Assume this will be derived from Packet
            payload = None
        Packet.__init__(self,
                        [('destination', 'int', 2),
                         ('source', 'int', 2),
                         ('length', 'int', 1),
                         ('group', 'int', 1),
                         ('type', 'int', 1),
                         ('data', 'blob', None)],
                        payload)
        if payload == None:
            self.destination = dest
            self.source = 0x0000
            self.group = 0x00
            self.type = amid
            self.data = []
            if gpacket:
                self.data = gpacket.payload()
            self.length = len(self.data)
|
3,739 | 9140da0b6c04f39a987a177d56321c56c01586e8 | import torch
import torch.nn as nn
import torch.nn.functional as F
class Encoder(nn.Module):
    """Convolutional feature extractor mapping an image (averaged to one
    channel) to a flat 48*4*4 = 768-dimensional feature vector.

    The flatten at the end assumes 28x28 spatial input -- TODO confirm
    against the dataset used by the training script.
    """

    def __init__(self):
        super(Encoder, self).__init__()
        self.conv1 = nn.Conv2d(1, 32, kernel_size=5, stride=1)
        self.bn1 = nn.BatchNorm2d(32)
        self.conv2 = nn.Conv2d(32, 48, kernel_size=5, stride=1)
        self.bn2 = nn.BatchNorm2d(48)

    def forward(self, x):
        batch, _, height, width = x.size()
        # Collapse the color channels to a single grayscale plane,
        # keeping an explicit channel dimension of 1.
        x = torch.mean(x, 1).view(batch, 1, height, width)
        x = F.relu(self.bn1(self.conv1(x)))
        x = F.max_pool2d(x, kernel_size=2, stride=2, dilation=(1, 1))
        x = F.relu(self.bn2(self.conv2(x)))
        x = F.max_pool2d(x, kernel_size=2, stride=2, dilation=(1, 1))
        # Flatten the 48 x 4 x 4 feature maps per sample.
        return x.view(batch, 48 * 4 * 4)
class Classifier(nn.Module):
    """Three-layer MLP head mapping 768-d Encoder features to 10 class logits.

    Dropout (rate *prob*) precedes every linear layer and is active only in
    training mode. Note that forward() currently uses only self.prob; the
    use_drop/use_bn/use_gumbel flags are stored for external consumers.
    """

    def __init__(self, args, prob=0.5):
        super(Classifier, self).__init__()
        self.fc1 = nn.Linear(48 * 4 * 4, 100)
        self.bn1_fc = nn.BatchNorm1d(100)
        self.fc2 = nn.Linear(100, 100)
        self.bn2_fc = nn.BatchNorm1d(100)
        self.fc3 = nn.Linear(100, 10)
        self.bn_fc3 = nn.BatchNorm1d(10)
        self.prob = prob
        # Experiment flags copied verbatim from the CLI args namespace.
        self.use_drop = args.use_drop
        self.use_bn = args.use_bn
        self.use_gumbel = args.use_gumbel

    def forward(self, x):
        def drop(t):
            # No-op in eval mode (training flag follows the module state).
            return F.dropout(t, training=self.training, p=self.prob)

        x = F.relu(self.bn1_fc(self.fc1(drop(x))))
        x = F.relu(self.bn2_fc(self.fc2(drop(x))))
        return self.fc3(drop(x))
class Generator(nn.Module):
    """DCGAN-style generator: latent (nz x 1 x 1) -> one-channel 28x28 image in [-1, 1]."""
    def __init__(self, nz=100):
        super(Generator, self).__init__()
        self.network = nn.Sequential(
            # input is Z, going into a convolution
            nn.ConvTranspose2d(nz, 512, 4, 1, 0, bias=False),
            nn.BatchNorm2d(512),
            nn.ReLU(True),
            # state size. 512 x 4 x 4
            nn.ConvTranspose2d(512, 256, 3, 2, 1, bias=False),
            nn.BatchNorm2d(256),
            nn.ReLU(True),
            # state size. 256 x 7 x 7
            nn.ConvTranspose2d(256, 128, 4, 2, 1, bias=False),
            nn.BatchNorm2d(128),
            nn.ReLU(True),
            # state size. 128 x 14 x 14
            nn.ConvTranspose2d(128, 1, 4, 2, 1, bias=False),
            nn.Tanh()
            # state size. 1 x 28 x 28
            # (original comments claimed 8x8/16x16/32x32; the ConvTranspose2d
            # arithmetic and the shape note in forward() both give 7/14/28)
        )

    def forward(self, x):
        # print(x.shape) # torch.Size([64, 100, 1, 1])
        x = self.network(x)
        # print(x.shape) # torch.Size([64, 1, 28, 28])
        return x
|
3,740 | 5cb67e5fcedafca4ce124e4094cbd8e1e9d95bb4 | import logging
from unittest.mock import patch, Mock
from intake.tests.base_testcases import ExternalNotificationsPatchTestCase
from intake.tests import mock, factories
from intake.tests.mock_org_answers import get_answers_for_orgs
from intake.management.commands import send_followups
from user_accounts.models import Organization
from project.tests.assertions import assertInLogsCount
class TestCommand(ExternalNotificationsPatchTestCase):
    """Tests for the `send_followups` management command."""
    fixtures = [
        'counties', 'organizations']

    @patch('intake.management.commands.send_followups.is_the_weekend')
    @patch('intake.management.commands.send_followups.FollowupsService')
    def test_doesnt_do_anything_on_the_weekend(
            self, FollowupsService, is_the_weekend):
        """On weekends the command must no-op: the followups service is never touched."""
        is_the_weekend.return_value = True
        command = send_followups.Command()
        command.stdout = Mock()
        command.handle()
        FollowupsService.assert_not_called()

    @patch('intake.management.commands.send_followups.is_the_weekend')
    def test_expected_weekday_run(self, is_the_weekend):
        """On a weekday, old submissions receive email followups and each send is logged."""
        is_the_weekend.return_value = False
        org = Organization.objects.get(slug='ebclc')
        # Five submissions with fixed ids 464-468 and ascending "old" dates.
        dates = sorted([mock.get_old_date() for i in range(464, 469)])
        for date, pk in zip(dates, range(464, 469)):
            factories.FormSubmissionWithOrgsFactory.create(
                id=pk,
                date_received=date,
                organizations=[org],
                answers=get_answers_for_orgs(
                    [org],
                    contact_preferences=[
                        'prefers_email',
                        'prefers_sms'],
                    phone='4445551111',
                    email='test@test.com',
                ))
        command = send_followups.Command()
        command.stdout = Mock()
        with self.assertLogs(
                'project.services.logging_service', logging.INFO) as logs:
            command.handle()
        # NOTE(review): 5 submissions are created but only 4 followups are
        # expected -- presumably one falls outside the followup window;
        # confirm against the command's date-range logic.
        self.assertEqual(
            len(self.notifications.email_followup.send.mock_calls), 4)
        assertInLogsCount(logs, {'event_name=app_followup_sent': 4})
|
3,741 | f0630d248cfa575ee859e5c441deeb01b68c8150 | # import sys
# class PriorityQueue:
# """Array-based priority queue implementation."""
#
# def __init__(self):
# """Initially empty priority queue."""
# self.queue = []
# self.min_index = None
# self.heap_size = 0
#
# def __len__(self):
# # Number of elements in the queue.
# return len(self.queue)
#
# def left(self, i):
# return 2 * i
#
# def right(self, i):
# return 2 * i + 1
#
# def parent(self, i):
# return i // 2
#
# def min_heapify(self, i):
# l = self.left(i)
# r = self.right(i)
# if l <= self.heap_size and self.queue[l-1] < self.queue[i-1]:
# least = l
# else:
# least = i
# if r <= self.heap_size and self.queue[r-1] < self.queue[i-1]:
# least = r
# if least != i:
# temp = self.queue[i-1]
# self.queue[i-1] = self.queue[least-1]
# self.queue[least-1] = temp
# self.min_heapify(least)
#
# # def build_min_heap(self):
# # self.heap_size = len(self.queue)
# # for i in range(len(self.queue) // 2, -1, -1):
# # self.min_heapify(i)
#
# def heap_increase_key(self, i, key):
# if key > self.queue[i-1]:
# raise ValueError("new key is larger than current key")
# self.queue[i-1] = key
# while i > 1 and self.queue[self.parent(i)-1] > self.queue[i-1]:
# tmp = self.queue[self.parent(i)-1]
# self.queue[self.parent(i)-1] = self.queue[i-1]
# self.queue[i-1] = tmp
# i = self.parent(i)
#
# def append(self, key):
# """Inserts an element in the priority queue."""
# if key is None:
# raise ValueError('Cannot insert None in the queue')
# self.heap_size += 1
# self.queue.insert(self.heap_size-1, sys.maxsize)
# self.heap_increase_key(self.heap_size, key)
# self.min_index = None
#
# def min(self):
# """The smallest element in the queue."""
# if self.heap_size == 0:
# return None
# return self.queue[0]
#
# def pop(self):
# """Removes the minimum element in the queue.
#
# Returns:
# The value of the removed element.
# """
# if self.heap_size == 0:
# return None
# self._find_min()
# popped_key = self.queue.pop(self.min_index)
# self.heap_size -= 1
# print(self.queue, self.heap_size)
# if self.heap_size != 0:
# self.queue[0] = self.queue[self.heap_size-1]
# self.min_heapify(0)
# self.min_index = None
# return popped_key
#
# def _find_min(self):
# # Computes the index of the minimum element in the queue.
# #
# # This method may crash if called when the queue is empty.
# if self.min_index is not None:
# return
# self.min_index = 0
class PriorityQueue:
    """Binary min-heap priority queue.

    ``self.heap`` is 1-indexed: index 0 holds an unused placeholder so
    that the children of node ``i`` are at ``2*i`` and ``2*i + 1`` and
    the parent of node ``i`` is at ``i // 2``.
    """

    def __init__(self):
        """Initially empty priority queue."""
        self.heap = [None]

    def __len__(self):
        """Number of elements in the queue."""
        return len(self.heap) - 1

    def append(self, key):
        """Inserts an element in the priority queue.

        Raises:
            ValueError: if ``key`` is None.
        """
        if key is None:
            raise ValueError('Cannot insert None in the queue')
        i = len(self.heap)
        self.heap.append(key)
        # Sift the new key up until its parent is no larger.
        while i > 1:
            parent = i // 2
            if key < self.heap[parent]:
                self.heap[i], self.heap[parent] = self.heap[parent], key
                i = parent
            else:
                break

    def min(self):
        """Returns the smallest element, or None if the queue is empty."""
        if len(self.heap) == 1:
            return None
        return self.heap[1]

    def pop(self):
        """Removes the minimum element in the queue.

        Returns:
            The value of the removed element, or None if the queue is
            empty.
        """
        heap = self.heap
        if len(heap) == 1:
            return None
        popped_key = heap[1]
        if len(heap) == 2:
            return heap.pop()
        # Move the last leaf to the root and sift it down.
        heap[1] = key = heap.pop()
        i = 1
        size = len(heap)
        while True:
            child = i * 2
            if child >= size:
                break
            right = child + 1
            # BUG FIX: pick the smaller child by comparing on index bounds
            # only.  The previous truthiness test
            # (`right_key = right < len(heap) and heap[right]`) skipped a
            # right child whose key was falsy (e.g. 0), violating the heap
            # invariant and producing out-of-order pops.
            if right < size and heap[right] < heap[child]:
                child = right
            if key <= heap[child]:
                break
            heap[i], heap[child] = heap[child], key
            i = child
        return popped_key
# Manual smoke test: interleave appends with prints so the internal
# 1-indexed heap layout (leading None placeholder) can be inspected.
A = PriorityQueue()
A.append(1)
A.append(4)
A.append(3)
print(A.heap)
A.append(2)
print(A.heap)
A.append(0)
print(A.heap)
A.append(7)
A.append(6)
A.append(5)
# print(A.pop())
# print(A.pop())
# print(A.pop())
# print(A.pop())
# print(A.pop())
# print(A.pop())
# print(A.pop())
# print(A.pop())
|
3,742 | f012f862ad064fc168bd5328b97c433164a3a36f | #from skimage import measure
#from svmutil import *
import cv2
import numpy as np
def inside(r, q):
    """Return True if rectangle r lies strictly inside rectangle q.

    Both rectangles are (x, y, w, h) tuples; touching edges count as
    not inside.
    """
    rx, ry, rw, rh = r
    qx, qy, qw, qh = q
    top_left_in = qx < rx and qy < ry
    bottom_right_in = rx + rw < qx + qw and ry + rh < qy + qh
    return top_left_in and bottom_right_in
def draw_detections(img, rects, thickness=1):
    """Draw one green rectangle per (x, y, w, h) detection onto img in place.

    The HOG detector returns slightly oversized boxes, so each box is
    shrunk a little before drawing for a tighter outline.
    """
    for x, y, w, h in rects:
        dx = int(0.15 * w)
        dy = int(0.05 * h)
        top_left = (x + dx, y + dy)
        bottom_right = (x + w - dx, y + h - dy)
        cv2.rectangle(img, top_left, bottom_right, (0, 255, 0), thickness)
if __name__ == '__main__':
    # Pedestrian detection on the default webcam using OpenCV's built-in
    # HOG + linear-SVM people detector.  Press Esc to quit.
    hog = cv2.HOGDescriptor()
    hog.setSVMDetector(cv2.HOGDescriptor_getDefaultPeopleDetector())
    hogParams = {'winStride': (8, 8), 'padding': (32, 32), 'scale': 1.05}
    cap = cv2.VideoCapture(0)
    while(True):
        ret, frame = cap.read()
        if not ret:
            # Camera read failed / stream ended.
            break
        found, w = hog.detectMultiScale(frame, **hogParams)
        # Keep only detections that are not nested inside another detection
        # (for/else: the else runs when the inner loop found no enclosure).
        found_filtered = []
        for ri, r in enumerate(found):
            for qi, q in enumerate(found):
                if ri != qi and inside(r, q):
                    break
            else:
                found_filtered.append(r)
        #draw_detections(frame, found)
        draw_detections(frame, found_filtered, 3)
        print('%d (%d) found' % (len(found_filtered), len(found)))
        key = cv2.waitKey(10)
        if key == 27:
            # Esc pressed: tear down the window and stop capturing.
            cv2.destroyAllWindows()
            break
        cv2.imshow('img', frame)
        # if cv2.waitKey(1) & 0xFF == ord('q'):
        #     break
    cap.release()
    cv2.destroyAllWindows()
|
3,743 | 830ae4b6a6b2c4e1bbe6928b3a4b0be86d2ec7a3 | """
This module contains the class definitions for all types of BankAccount
alongside BankAccountCreator as a supporting class to create an
appropriate bank account for a given user type.
"""
from abc import ABC
from abc import abstractmethod
from transaction import Transaction
from budget import Budget
from budget import BudgetManager
from budget import BudgetCategory
from budget import BudgetCreator
from user import UserType
class BankAccount(ABC):
    """
    An abstract base class that represents a bank account. By default,
    all bank accounts have:
      - a bank account number
      - a bank name
      - a bank balance
      - a budget manager to manage budgets
      - a list of transactions
      - a locked state to determine whether this account is locked.
    """

    def __init__(self, bank_account_no: str, bank_name: str,
                 bank_balance: float, budget_manager: BudgetManager):
        """
        Initializes a bank account.
        :param bank_account_no: a string
        :param bank_name: a string
        :param bank_balance: a float
        :param budget_manager: a BudgetManager
        """
        self.bank_account_no = bank_account_no
        self.bank_name = bank_name
        self.bank_balance = bank_balance
        self.transactions = []
        self.budget_manager = budget_manager
        self._locked = False

    def record_transaction(self, transaction: Transaction) -> bool:
        """
        Records a transaction and returns True if this transaction is
        recorded successfully. A transaction is recorded successfully
        when this bank account is not locked, has enough balance, and
        the budget associated with the transaction is not locked.
        :param transaction: a Transaction, the transaction to record
        :return: a bool, True if record successfully, False otherwise
        """
        if self._locked:
            print('Failed to record transaction! Your account has been locked!'
                  )
            return False
        if transaction.amount > self.bank_balance:
            print('Failed to record transaction! Not enough balance!')
            return False
        budget = self.budget_manager.get_budget(transaction.budget_category)
        if budget.locked:
            print('Failed to record transaction! This budget has been locked!')
            return False
        # Transaction is valid: record it, update balances, then run the
        # account-type-specific warning/locking policy.
        self.transactions.append(transaction)
        self.bank_balance -= transaction.amount
        budget.amount_spent += transaction.amount
        self._warn_and_lock_if_needed(transaction)
        return True

    @abstractmethod
    def _warn_and_lock_if_needed(self, transaction: Transaction) -> None:
        """
        Contains the logic to check if a warning or notification should
        be issued to the user. It also locks a budget or this bank
        account if needed. The exact algorithm would vary bank account
        to bank account.
        :param transaction: a Transaction, the newly recorded
        transaction
        :return: None
        """
        pass

    def print_transactions_for_review(self, budget: Budget) -> None:
        """
        Prints a list of transactions in the given budget for review.
        :param budget: a Budget
        :return: None
        """
        print(f'Please review the following transactions in the {budget.name} '
              f'budget:')
        transactions = self.get_transactions_by_budget(budget.category)
        for transaction in transactions:
            print(transaction)

    def _warn_nearing_exceed_budget(self, budget: Budget,
                                    exceeded_percent: int) -> None:
        """
        Issues a warning to the user that they are about to exceed this
        budget.
        :param budget: a Budget, the budget that they are about to
        exceed
        :param exceeded_percent: an int, the percent that they have
        already exceeded
        :return: None
        """
        print(f'[WARNING] You are about to exceed the {budget.name} budget! '
              f'You went over {exceeded_percent}% of the total '
              f'${budget.total_amount}.')

    def _notify_exceeded_budget(self, budget: Budget) -> None:
        """
        Notifies the user that they've just exceeded this budget.
        :param budget: a Budget, the budget that they've just exceeded
        :return: None
        """
        print(f'[NOTIFICATION] You have exceeded the {budget.name} budget.')

    def _lock_budget(self, budget: Budget) -> None:
        """
        Locks a budget.
        :param budget: a Budget, the budget to be locked
        :return: None
        """
        budget.lock()
        print(f'Your {budget.name} budget has now been locked!')

    def get_transactions_by_budget(self, category: BudgetCategory) -> list:
        """
        Returns a list of transactions for the given budget category.
        :param category: a BudgetCategory
        :return: a list of Transaction, the transactions in that
        category
        """
        return [transaction
                for transaction in self.transactions
                if transaction.budget_category == category]

    def get_budgets(self) -> list:
        """
        Returns a list of budgets.
        :return: a list of Budget objects
        """
        return self.budget_manager.get_budgets()

    def __str__(self):
        # Idiom fix: build the transaction listing with join instead of
        # repeated `+=` string concatenation (quadratic in the number of
        # transactions).  Output is byte-identical to the old loop.
        if self.transactions:
            transactions_info = ''.join(
                f'{transaction}\n' for transaction in self.transactions)
        else:
            transactions_info = "You haven't made any transaction yet.\n"
        return f'*** Bank Account Details ***\n' \
               f'• Bank account number: {self.bank_account_no}\n' \
               f'• Bank name: {self.bank_name}\n' \
               f'• Status: {"Locked" if self._locked else "Available"}\n' \
               f'• Transactions:\n' \
               f'{transactions_info}' \
               f'• Closing balance: ${self.bank_balance}'
class AngelBankAccount(BankAccount):
    """
    This bank account is designed for Angel users. The Angel user
    represents a user who's parents are not worried at all.
    """

    def _warn_and_lock_if_needed(self, transaction: Transaction) -> None:
        """
        Applies the Angel policy after each recorded transaction:
          - never locks a budget or the account;
          - politely notifies the user when a budget is exceeded;
          - warns the user when spending passes 90% of a budget.
        :param transaction: a Transaction, the newly recorded
        transaction
        :return: None
        """
        budget = self.budget_manager.get_budget(transaction.budget_category)
        ratio = budget.exceeded_ratio
        if ratio > 1:
            self._notify_exceeded_budget(budget)
        elif ratio > 0.9:
            self._warn_nearing_exceed_budget(budget, 90)
        else:
            return
        self.print_transactions_for_review(budget)
class TroublemakerBankAccount(BankAccount):
    """
    This bank account is designed for Troublemaker children. These
    children often find themselves in trouble. These are usually minor
    incidents and their parents are concerned but not worried.
    """

    def _warn_and_lock_if_needed(self, transaction: Transaction) -> None:
        """
        Applies the Troublemaker policy after each recorded transaction:
          - warns the user past 75% of a budget;
          - politely notifies the user when a budget is exceeded;
          - locks the budget once spending passes 120% of it.
        :param transaction: a Transaction, the newly recorded
        transaction
        :return: None
        """
        budget = self.budget_manager.get_budget(transaction.budget_category)
        ratio = budget.exceeded_ratio
        if ratio > 1.2:
            self._lock_budget(budget)
        elif ratio > 1:
            self._notify_exceeded_budget(budget)
        elif ratio > 0.75:
            self._warn_nearing_exceed_budget(budget, 75)
        else:
            return
        self.print_transactions_for_review(budget)
class RebelBankAccount(BankAccount):
    """
    This bank account is designed for Rebel children. The Rebel
    represents a child who refuses to follow any rules and believes that
    society should be broken down and restructured. Parents of these
    children are quite worried about them.
    """

    def _warn_and_lock_if_needed(self, transaction: Transaction) -> None:
        """
        Applies the Rebel policy after each recorded transaction:
          - warns on every transaction past 50% of a budget;
          - on exceeding a budget, notifies the user and locks that
            budget immediately;
          - once two or more budgets are locked, locks the whole
            account.
        :param transaction: a Transaction, the newly recorded
        transaction
        :return: None
        """
        budget = self.budget_manager.get_budget(transaction.budget_category)
        ratio = budget.exceeded_ratio
        if ratio <= 0.5:
            return
        if ratio <= 1:
            self._warn_nearing_exceed_budget(budget, 50)
            self.print_transactions_for_review(budget)
            return
        self._notify_exceeded_budget(budget)
        self._lock_budget(budget)
        self.print_transactions_for_review(budget)
        if self.budget_manager.no_locked_budgets >= 2:
            self._locked = True
            print('YOUR BANK ACCOUNT HAS BEEN LOCKED!')
class BankAccountCreator:
    """
    An utility class that helps create a BankAccount.
    """

    _user_type_mapper = {
        UserType.ANGEL: AngelBankAccount,
        UserType.TROUBLEMAKER: TroublemakerBankAccount,
        UserType.REBEL: RebelBankAccount,
    }
    # Maps a UserType enum to the matching BankAccount subclass.

    @staticmethod
    def load_test_account() -> BankAccount:
        """
        Creates and returns a test bank account.
        :return: a BankAccount
        """
        budget_manager = BudgetCreator.load_test_budget_manager()
        return TroublemakerBankAccount('123123', 'HSBC', 1000, budget_manager)

    @classmethod
    def create_bank_account(cls, user_type: UserType) -> BankAccount:
        """
        Prompts the user for bank account details, initializes a Bank
        Account based on the given user type and returns it.
        :param user_type: a UserType
        :return: a BankAccount
        """
        bank_account_no = input('Enter bank account number: ')
        bank_name = input('Enter bank name: ')
        bank_balance = -1
        while bank_balance < 0:
            # Robustness fix: a non-numeric entry used to raise an
            # unhandled ValueError and crash account creation.
            try:
                bank_balance = float(input('Enter bank balance: '))
            except ValueError:
                print('Bank balance must be a number! Please enter again!')
                continue
            if bank_balance < 0:
                print('Bank balance must be greater than or equal to 0! Please'
                      ' enter again!')
        budget_manager = BudgetCreator.create_budget_manager()
        return cls._user_type_mapper[user_type](
            bank_account_no,
            bank_name,
            bank_balance,
            budget_manager,
        )
|
3,744 | 5d9afef2a748782659b82b329ea08d5815162cbc | # 作者:西岛闲鱼
# https://github.com/globien/easy-python
# https://gitee.com/globien/easy-python
# 用蒙特卡洛法计算圆周率,即,往一个正方形里扔豆子,计算有多少比例的豆子扔在了该正方形的内切圆中
import random

# Monte Carlo estimate of pi: throw random points into the unit square and
# measure the fraction that lands inside the inscribed quarter circle.
num_all = 0           # total number of random points drawn so far
num_cir = 0           # points that fell inside the circle
num_halt = 10000000   # recompute and print the estimate after this many points

print("将进行无限计算,请用Ctrl_C或其他方式强制退出!!!")
input("按回车(Enter)键开始...")
print("开始计算...,退出请用Ctrl_C或其他强制退出方式...")
print("\n实验次数 计算结果")
while True:
    for _ in range(num_halt):
        px = random.random()  # x coordinate of the random point
        py = random.random()  # y coordinate of the random point
        if px * px + py * py < 1:  # point lies inside the circle
            num_cir = num_cir + 1
    num_all = num_all + num_halt
    pi = 4 * num_cir / num_all
    print(num_all, "  ", pi)
|
3,745 | 66c71111eae27f6e9fee84eef05cc1f44cc5a477 | from setuptools import setup
from Cython.Build import cythonize

# Build script: compiles the Cython module ``utils.pyx`` into a C extension.
# Typical invocation:  python setup.py build_ext --inplace
setup(
    ext_modules=cythonize("utils.pyx"),
)
3,746 | 7399612f64eb8e500bc676e6d507be5fe375f40f | #!/usr/bin/env python
import vtk
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()

# Probe a PLOT3D combustor dataset along a line and show the samples two
# ways: as a colored tube in the 3D scene (top viewport) and as a
# scalar-warped profile plot (bottom viewport).

# Create the RenderWindow, Renderer and both Actors
#
ren1 = vtk.vtkRenderer()
ren2 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.SetMultiSamples(0)
renWin.AddRenderer(ren1)
renWin.AddRenderer(ren2)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)

# create pipeline
#
# Read the structured-grid sample data shipped with VTK.
pl3d = vtk.vtkMultiBlockPLOT3DReader()
pl3d.SetXYZFileName("" + str(VTK_DATA_ROOT) + "/Data/combxyz.bin")
pl3d.SetQFileName("" + str(VTK_DATA_ROOT) + "/Data/combq.bin")
pl3d.SetScalarFunctionNumber(110)
pl3d.SetVectorFunctionNumber(202)
pl3d.Update()
output = pl3d.GetOutput().GetBlock(0)

# Sample the dataset at 500 points along a straight line.
probeLine = vtk.vtkLineSource()
probeLine.SetPoint1(1, 1, 29)
probeLine.SetPoint2(16.5, 5, 31.7693)
probeLine.SetResolution(500)
probe = vtk.vtkProbeFilter()
probe.SetInputConnection(probeLine.GetOutputPort())
probe.SetSourceData(output)
probe.Update()

# Render the probed line as a thin scalar-colored tube.
probeTube = vtk.vtkTubeFilter()
probeTube.SetInputData(probe.GetPolyDataOutput())
probeTube.SetNumberOfSides(5)
probeTube.SetRadius(.05)
probeMapper = vtk.vtkPolyDataMapper()
probeMapper.SetInputConnection(probeTube.GetOutputPort())
probeMapper.SetScalarRange(output.GetScalarRange())
probeActor = vtk.vtkActor()
probeActor.SetMapper(probeMapper)

# Build the profile plot: merge the probed scalars onto a unit line and
# warp it by scalar value to get an XY-style curve.
displayLine = vtk.vtkLineSource()
displayLine.SetPoint1(0, 0, 0)
displayLine.SetPoint2(1, 0, 0)
displayLine.SetResolution(probeLine.GetResolution())
displayMerge = vtk.vtkMergeFilter()
displayMerge.SetGeometryConnection(displayLine.GetOutputPort())
displayMerge.SetScalarsData(probe.GetPolyDataOutput())
displayMerge.Update()
displayWarp = vtk.vtkWarpScalar()
displayWarp.SetInputData(displayMerge.GetPolyDataOutput())
displayWarp.SetNormal(0, 1, 0)
displayWarp.SetScaleFactor(.000001)
displayWarp.Update()
displayMapper = vtk.vtkPolyDataMapper()
displayMapper.SetInputData(displayWarp.GetPolyDataOutput())
displayMapper.SetScalarRange(output.GetScalarRange())
displayActor = vtk.vtkActor()
displayActor.SetMapper(displayMapper)

# Outline of the full dataset for spatial context.
outline = vtk.vtkStructuredGridOutlineFilter()
outline.SetInputData(output)
outlineMapper = vtk.vtkPolyDataMapper()
outlineMapper.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(outlineMapper)
outlineActor.GetProperty().SetColor(0, 0, 0)

# Top viewport: 3D scene; bottom viewport: the warped profile plot.
ren1.AddActor(outlineActor)
ren1.AddActor(probeActor)
ren1.SetBackground(1, 1, 1)
ren1.SetViewport(0, .25, 1, 1)
ren2.AddActor(displayActor)
ren2.SetBackground(0, 0, 0)
ren2.SetViewport(0, 0, 1, .25)
renWin.SetSize(300, 300)

# Hand-tuned camera for the 3D view.
ren1.ResetCamera()
cam1 = ren1.GetActiveCamera()
cam1.SetClippingRange(3.95297, 50)
cam1.SetFocalPoint(8.88908, 0.595038, 29.3342)
cam1.SetPosition(9.9, -26, 41)
cam1.SetViewUp(0.060772, -0.319905, 0.945498)
ren2.ResetCamera()
cam2 = ren2.GetActiveCamera()
cam2.ParallelProjectionOn()
cam2.SetParallelScale(.15)
iren.Initialize()
# render the image
#
# prevent the tk window from showing up then start the event loop
# --- end of script --
|
def somaSerie(valor):
    """Return the sum of the first ``valor`` terms of (i**2 + 1) / (i + 3),
    for i = 0 .. valor-1 (0 when ``valor`` is 0)."""
    return sum((i ** 2 + 1) / (i + 3) for i in range(valor))
# Read the number of terms from the user, evaluate the series and print it.
a = int(input("Digite o 1º Numero :-> "))
result = somaSerie(a)
print(result)
3,748 | 6b0081e829f9252e44fa7b81fbfcdd4115856373 | import sys
import sys
from collections import Counter

# Card-counting problem: read n owned card values and m query values,
# then print, for each query, how many owned cards carry that value.
n = int(sys.stdin.readline().rstrip())
l = list(map(int, sys.stdin.readline().rstrip().split()))
m = int(sys.stdin.readline().rstrip())
v = list(map(int, sys.stdin.readline().rstrip().split()))

# Counter replaces the manual offset-indexed bucket array: memory is
# proportional to the number of distinct values (not max-min+1), and
# .get() yields 0 for absent/out-of-range queries, removing the
# non-idiomatic bitwise range test `(j>=a)&(j<=b)`.
card = Counter(l)
for j in v:
    print(card.get(j, 0), end=" ")
3,749 | e3119979028d3dd4e1061563db4ec20607e744d1 | #! /usr/bin/python
from bs4 import BeautifulSoup
import requests
import sys
def exit(err):
    """Print an error message and terminate the program.

    NOTE(review): shadows the builtin ``exit``; name kept because callers
    throughout this script use it.  Also exits with status 0 even on
    error — confirm before changing, scripts may depend on it.
    """
    # `print(err)` is valid in both Python 2 and 3 (the py2-only
    # statement form `print err` broke py3 parsing of this file).
    print(err)
    sys.exit(0)
def get_text(node, lower = True):
    """Concatenate all text fragments under ``node``, strip surrounding
    whitespace, and (by default) lower-case the result."""
    text = (''.join(node.findAll(text = True))).strip()
    if lower:
        return text.lower()
    return text
def get_method_signature(tag):
    """BeautifulSoup matcher: True for the <td> cell whose text is
    'Method signature:' (case-insensitive)."""
    label = 'Method signature:'.lower()
    return tag.name == "td" and get_text(tag) == label
def get_returns(tag):
    """BeautifulSoup matcher: True for a <pre> element whose text contains
    'Returns:' (case-insensitive)."""
    label = 'Returns:'.lower()
    return tag.name == "pre" and label in get_text(tag)
def main():
    """Scrape a TopCoder problem page and generate a Java test harness.

    Usage: script.py <srm_number> <class_name>.  Looks the problem up in
    the TopCoder problem archive, extracts the method signature and the
    example test cases, and fills them into template.java, writing
    <ClassName>.java.
    """
    if len(sys.argv) != 3:
        exit("Usage: %s <srm_number> <class_name>" % sys.argv[0])
    srm = sys.argv[1].strip().lower()
    class_name = sys.argv[2].strip().lower()
    domain = "http://community.topcoder.com"
    search_url = "%(domain)s/tc?module=ProblemArchive&class=%(class_name)s"
    data = requests.get(search_url % locals()).text
    # f = open('/tmp/data.html', 'w')
    # f.write(data)
    # f.close()
    # data = open('/tmp/data.html', 'r')
    soup = BeautifulSoup(data)
    # Find the last table containing 'Challenge' — that holds the search
    # results (for/else: else fires only when no table matched).
    result_table = None
    result_table_string = 'Challenge'
    tables = soup.findAll('table')
    tables.reverse()
    for table in tables:
        if result_table_string.lower() in get_text(table):
            result_table = table
            break
    else:
        exit("no problem found, please check class name")
    # Scan result rows for the matching class name and SRM number.
    result_row = None
    actual_class_name = None
    for row in result_table.findAll('tr'):
        cells = row.findAll('td')
        if len(cells) < 3:
            continue
        if get_text(cells[1]) == class_name and srm in get_text(cells[2]):
            actual_class_name = get_text(cells[1], lower = False)
            result_row = row
            break
    else:
        exit("no problem found, please check class name and SRM number")
    # Follow the link from the matched row to the problem statement page.
    problem_url = "%s%s" % (domain, cells[1].find('a').get('href'))
    data = requests.get(problem_url).text
    # f = open('/tmp/problem.html', 'w')
    # f.write(data)
    # f.close()
    #data = open('/tmp/problem.html', 'r')
    soup = BeautifulSoup(data)
    # Walk the problem-statement DOM: the signature cell, then its
    # sibling rows for return type / parameters / method name, then the
    # example test cases (inputs + expected return values).
    try:
        method_signature_text = soup.findAll(get_method_signature)[-1]
        method_signature = method_signature_text.nextSibling.string
        returns_tr = method_signature_text.parent.previousSibling
        return_type = returns_tr.findAll('td')[1].string.strip()
        parameters_tr = returns_tr.previousSibling
        parameters = parameters_tr.findAll('td')[1].string.split(",")
        method_tr = parameters_tr.previousSibling
        method_name = method_tr.findAll('td')[1].string.strip()
        test_cases = soup.findAll(get_returns)
        expected_return_values = []
        inputs = []
        for i in range(len(test_cases)):
            inputs.append([])
        for i, test_case in enumerate(test_cases):
            expected_return_values.append(test_case.string.strip().split(": ")[1])
            input_values = test_case.parent.parent.previousSibling.findAll('pre')
            for input_value in input_values:
                inputs[i].append(input_value.string.strip())
    except:
        raise
        # NOTE(review): unreachable — `raise` above always exits this
        # block, so this fallback error message never runs.
        exit("error getting method signature, no luck")
    # inject test cases into template
    # Java source snippets, filled per test case via `% locals()`.
    spaces = "        "
    input_test_case = "%(parameter)s var_%(index_1)d_%(index_2)d = %(value)s;\n"
    invoke_method = "%(return_type)s expected_%(index_1)d = %(lower_actual_class_name)s.%(method_name)s(%(method_params)s);\n"
    # Strings must be compared with .equals() in Java, primitives with ==.
    if return_type == "String":
        compare_outputs = "System.out.println((expected_%(index_1)d.equals(%(expected_value)s) ? \"Passed\" : \"Failed\") + \" for case %(index_1)d\");"
    else:
        compare_outputs = "System.out.println(((expected_%(index_1)d == %(expected_value)s) ? \"Passed\" : \"Failed\") + \" for case %(index_1)d\");"
    compare_outputs += "\n"
    lower_actual_class_name = actual_class_name.lower()
    test_case_str = ""
    for index_1, input_case in enumerate(inputs):
        # declare the inputs
        method_params_list = []
        for index_2, parameter in enumerate(parameters):
            value = input_case[index_2]
            test_case_str += spaces
            test_case_str += input_test_case % locals()
            method_params_list.append("var_%(index_1)d_%(index_2)d" % locals())
        # invoke the function
        method_params = ','.join(method_params_list)
        test_case_str += spaces
        test_case_str += invoke_method % locals()
        # compare the output
        expected_value = expected_return_values[index_1]
        test_case_str += spaces
        test_case_str += compare_outputs % locals()
    # inject everything else into final template
    template = open('template.java', 'r').read()
    fp = open('%(actual_class_name)s.java' % locals(), 'w')
    fp.write(template % locals())
    fp.close()
    print "done :) generated %(actual_class_name)s.java" % locals()
# Script entry point.
if __name__ == "__main__":
    main()
3,750 | a4ccf373695b7df60039bc8f6440a6ad43d265c1 | # coding: utf-8
"""
MailSlurp API
MailSlurp is an API for sending and receiving emails from dynamically allocated email addresses. It's designed for developers and QA teams to test applications, process inbound emails, send templated notifications, attachments, and more. ## Resources - [Homepage](https://www.mailslurp.com) - Get an [API KEY](https://app.mailslurp.com/sign-up/) - Generated [SDK Clients](https://docs.mailslurp.com/) - [Examples](https://github.com/mailslurp/examples) repository # noqa: E501
The version of the OpenAPI document: 6.5.2
Contact: contact@mailslurp.dev
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from mailslurp_client.api_client import ApiClient
from mailslurp_client.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class FormControllerApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def submit_form(self, **kwargs): # noqa: E501
"""Submit a form to be parsed and sent as an email to an address determined by the form fields # noqa: E501
This endpoint allows you to submit HTML forms and receive the field values and files via email. #### Parameters The endpoint looks for special meta parameters in the form fields OR in the URL request parameters. The meta parameters can be used to specify the behaviour of the email. You must provide at-least a `_to` email address to tell the endpoint where the form should be emailed. These can be submitted as hidden HTML input fields with the corresponding `name` attributes or as URL query parameters such as `?_to=test@example.com` The endpoint takes all other form fields that are named and includes them in the message body of the email. Files are sent as attachments. #### Submitting This endpoint accepts form submission via POST method. It accepts `application/x-www-form-urlencoded`, and `multipart/form-data` content-types. #### HTML Example ```html <form action=\"https://python.api.mailslurp.com/forms\" method=\"post\" > <input name=\"_to\" type=\"hidden\" value=\"test@example.com\"/> <textarea name=\"feedback\"></textarea> <button type=\"submit\">Submit</button> </form> ``` #### URL Example ```html <form action=\"https://python.api.mailslurp.com/forms?_to=test@example.com\" method=\"post\" > <textarea name=\"feedback\"></textarea> <button type=\"submit\">Submit</button> </form> ``` The email address is specified by a `_to` field OR is extracted from an email alias specified by a `_toAlias` field (see the alias controller for more information). Endpoint accepts . You can specify a content type in HTML forms using the `enctype` attribute, for instance: `<form enctype=\"multipart/form-data\">`. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.submit_form(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str to: The email address that submitted form should be sent to.
:param str subject: Optional subject of the email that will be sent.
:param str redirect_to: Optional URL to redirect form submitter to after submission. If not present user will see a success message.
:param str email_address: Email address of the submitting user. Include this if you wish to record the submitters email address and reply to it later.
:param str success_message: Optional success message to display if no _redirectTo present.
:param str spam_check: Optional but recommended field that catches spammers out. Include as a hidden form field but LEAVE EMPTY. Spam-bots will usually fill every field. If the _spamCheck field is filled the form submission will be ignored.
:param str other_parameters: All other parameters or fields will be accepted and attached to the sent email. This includes files and any HTML form field with a name. These fields will become the body of the email that is sent.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: str
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.submit_form_with_http_info(**kwargs) # noqa: E501
def submit_form_with_http_info(self, **kwargs): # noqa: E501
"""Submit a form to be parsed and sent as an email to an address determined by the form fields # noqa: E501
This endpoint allows you to submit HTML forms and receive the field values and files via email. #### Parameters The endpoint looks for special meta parameters in the form fields OR in the URL request parameters. The meta parameters can be used to specify the behaviour of the email. You must provide at-least a `_to` email address to tell the endpoint where the form should be emailed. These can be submitted as hidden HTML input fields with the corresponding `name` attributes or as URL query parameters such as `?_to=test@example.com` The endpoint takes all other form fields that are named and includes them in the message body of the email. Files are sent as attachments. #### Submitting This endpoint accepts form submission via POST method. It accepts `application/x-www-form-urlencoded`, and `multipart/form-data` content-types. #### HTML Example ```html <form action=\"https://python.api.mailslurp.com/forms\" method=\"post\" > <input name=\"_to\" type=\"hidden\" value=\"test@example.com\"/> <textarea name=\"feedback\"></textarea> <button type=\"submit\">Submit</button> </form> ``` #### URL Example ```html <form action=\"https://python.api.mailslurp.com/forms?_to=test@example.com\" method=\"post\" > <textarea name=\"feedback\"></textarea> <button type=\"submit\">Submit</button> </form> ``` The email address is specified by a `_to` field OR is extracted from an email alias specified by a `_toAlias` field (see the alias controller for more information). Endpoint accepts . You can specify a content type in HTML forms using the `enctype` attribute, for instance: `<form enctype=\"multipart/form-data\">`. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.submit_form_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str to: The email address that submitted form should be sent to.
:param str subject: Optional subject of the email that will be sent.
:param str redirect_to: Optional URL to redirect form submitter to after submission. If not present user will see a success message.
:param str email_address: Email address of the submitting user. Include this if you wish to record the submitters email address and reply to it later.
:param str success_message: Optional success message to display if no _redirectTo present.
:param str spam_check: Optional but recommended field that catches spammers out. Include as a hidden form field but LEAVE EMPTY. Spam-bots will usually fill every field. If the _spamCheck field is filled the form submission will be ignored.
:param str other_parameters: All other parameters or fields will be accepted and attached to the sent email. This includes files and any HTML form field with a name. These fields will become the body of the email that is sent.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(str, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'to',
'subject',
'redirect_to',
'email_address',
'success_message',
'spam_check',
'other_parameters'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method submit_form" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'to' in local_var_params and local_var_params['to'] is not None: # noqa: E501
query_params.append(('_to', local_var_params['to'])) # noqa: E501
if 'subject' in local_var_params and local_var_params['subject'] is not None: # noqa: E501
query_params.append(('_subject', local_var_params['subject'])) # noqa: E501
if 'redirect_to' in local_var_params and local_var_params['redirect_to'] is not None: # noqa: E501
query_params.append(('_redirectTo', local_var_params['redirect_to'])) # noqa: E501
if 'email_address' in local_var_params and local_var_params['email_address'] is not None: # noqa: E501
query_params.append(('_emailAddress', local_var_params['email_address'])) # noqa: E501
if 'success_message' in local_var_params and local_var_params['success_message'] is not None: # noqa: E501
query_params.append(('_successMessage', local_var_params['success_message'])) # noqa: E501
if 'spam_check' in local_var_params and local_var_params['spam_check'] is not None: # noqa: E501
query_params.append(('_spamCheck', local_var_params['spam_check'])) # noqa: E501
if 'other_parameters' in local_var_params and local_var_params['other_parameters'] is not None: # noqa: E501
query_params.append(('otherParameters', local_var_params['other_parameters'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['*/*']) # noqa: E501
# Authentication setting
auth_settings = ['API_KEY'] # noqa: E501
return self.api_client.call_api(
'/forms', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='str', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
|
3,751 | 559bd0c1821f405d21cdacba55f129ee5220bb5d | import os
from django.conf import settings
from chamber.importers import BulkCSVImporter, CSVImporter
from .models import CSVRecord
# Both importer variants read the same columns from the same fixture file;
# keep that configuration in one place instead of duplicating it per class.
_IMPORT_FIELDS = ('id', 'name', 'number')
_IMPORT_CSV_PATH = os.path.join(settings.PROJECT_DIR, 'data', 'all_fields_filled.csv')


class BulkCSVRecordImporter(BulkCSVImporter):
    """Bulk importer of CSVRecord rows from the all-fields-filled fixture."""

    model_class = CSVRecord
    fields = _IMPORT_FIELDS
    csv_path = _IMPORT_CSV_PATH

    def clean_number(self, value):
        # Just to test clean methods are called
        return 888


class CSVRecordImporter(CSVImporter):
    """Row-by-row importer of CSVRecord rows from the all-fields-filled fixture."""

    model_class = CSVRecord
    fields = _IMPORT_FIELDS
    csv_path = _IMPORT_CSV_PATH

    def clean_number(self, value):
        # Just to test clean methods are called
        return 888
|
3,752 | 17ba6aaa9009c258136b184ca6a8660cec1cfe40 | from robot.libraries.BuiltIn import BuiltIn
from RoboGalaxyLibrary.utilitylib import logging as logger
import re
def block_no_keyword_warn():
    """Intentional no-op; presumably keeps Robot Framework from warning that
    this module exposes no keywords — confirm against the test-suite setup."""
class Compare_hpMCTP(object):
    """Compare expected vs. actual output of the hpMCTP tool.

    The banner lines of the actual output (everything before the
    '<ISCSI-Boot-Cats>' marker) are discarded before comparison.
    """

    def __init__(self):
        self.fusionlib = BuiltIn().get_library_instance('FusionLibrary')

    def do(self, expect, actual, verbose=False):
        """Compare *expect* against *actual* section by section.

        Logs progress to the console and the log file; raises
        AssertionError on the first mismatching section or if the two
        texts split into a different number of lines.
        """
        def smart_compare(exp, act):
            # Character-level diff of one section: report characters present
            # in one string but missing from the other.
            # Returns 1 (match) or 0 (difference found).
            # Remove leading whitespaces
            exp = re.sub(r'^\s*', '', exp)
            act = re.sub(r'^\s*', '', act)
            if verbose:
                logger._log_to_console_and_log_file("expected after removing leading white space: %s" % exp)
                logger._log_to_console_and_log_file("actual after removing leading white space: %s" % act)
            # BUGFIX: original used `e is not ''` which compares object
            # identity, not value; use `!=` for a value comparison.
            missing = [e for e in exp if (e not in act) and (e != '')]
            extra = [a for a in act if (a not in exp)]
            rc = 1  # True (good, until proven otherwise)
            if extra:
                logger._log_to_console_and_log_file("extra item found: %s" % extra)
                rc = 0
            else:
                logger._log_to_console_and_log_file("No Extra found.")
            if missing:
                logger._log_to_console_and_log_file("missing item: %s" % missing)
                rc = 0
            else:
                logger._log_to_console_and_log_file("No Missing found.")
            return rc

        # Normalise line endings.
        actual = re.sub(r'\n\r', '\n', actual)
        # get rid of the stuff from actual up to the first header. Extra info not compared.
        # for example, the first three lines below.
        # hpMCTP 2.3.0-4
        # Copyright (c) 2015-2016 Hewlett-Packard - All Rights Reserved
        # -------------------------------------------------------------
        # <ISCSI-Boot-Cats>
        headerEnd = actual.index('<ISCSI-Boot-Cats>')
        actual = '\n' + actual[headerEnd:]
        if verbose:
            logger._log_to_console_and_log_file("Actual now: %s" % actual)
            logger._log_to_console_and_log_file("Expect now: %s" % expect)
        # Fast path: exact string equality needs no smart compare.
        if expect == actual:
            return logger._log_to_console_and_log_file("expect == actual. String equal, no further compare needed.")
        logger._log_to_console_and_log_file("expect != actual, will do smart compare")
        # split into single lines.
        eList = expect.split('\n')
        aList = actual.split('\n')
        logger._log_to_console_and_log_file("Split on: %s into %s sections" % ('\n', len(eList) - 1))
        if len(aList) != len(eList):
            errMsg = "aList and eList counts diff. Problem with split. a: %s, e: %s" % (len(aList) - 1, len(eList) - 1)
            logger._log_to_console_and_log_file(errMsg)
            raise AssertionError(errMsg)
        # range() instead of py2-only xrange(); identical iteration behaviour.
        for i in range(1, len(eList)):
            if eList[i] == aList[i]:
                logger._log_to_console_and_log_file("Sections %s are equal." % i)
                if verbose:
                    logger._log_to_console_and_log_file("expect: %s" % eList[i])
                    logger._log_to_console_and_log_file("actual: %s" % aList[i])
            else:
                logger._log_to_console_and_log_file("Section %s requires a smart compare." % i)
                if verbose:
                    logger._log_to_console_and_log_file("expect: %s" % eList[i])
                    logger._log_to_console_and_log_file("actual: %s" % aList[i])
                if not smart_compare(eList[i], aList[i]):
                    errMsg = "Expected: '%s' does not match '%s'" % (eList[i], aList[i])
                    logger._log_to_console_and_log_file(errMsg)
                    raise AssertionError(errMsg)
|
3,753 | e08b7a96c957895068e584a0564f02c52acd48ec | from django.apps import AppConfig
class AdminrequestsConfig(AppConfig):
    """Django application configuration for the ``adminRequests`` app."""

    name = 'adminRequests'
|
3,754 | 1b7b94a0331e2462f83f4f77bcfaefbeefdf24f4 | # -*- coding: utf-8 -*-
"""
Created on Mon Jul 29 20:33:32 2013
@author: ste
"""
#Convert input file for graph from adjacency list version, where each line is
#vertex adjacent adjacent adjacent ...
#to edge representation where each line is
#tail head
def convert_adjacency_to_edges(in_path, out_path):
    """Convert a graph file from adjacency-list format to edge-list format.

    Each input line is ``vertex adjacent adjacent ...``; each output line is
    ``tail head``. One output edge is written per adjacency entry, so an
    undirected edge appears once per endpoint, matching the original script.

    :param in_path: path of the adjacency-list input file
    :param out_path: path of the edge-list output file (overwritten)
    """
    edges = []
    with open(in_path) as v_list_file:
        for line in v_list_file:
            # list() is required: map() returns a lazy iterator on Python 3
            # and node[0]/node[1:] below need indexing/slicing.
            node = [int(tok) for tok in line.split()]
            for adjacent in node[1:]:
                edges.append([node[0], adjacent])
    with open(out_path, "w+") as outfile:
        for edge in edges:
            outfile.write(str(edge[0]) + ' ' + str(edge[1]) + '\n')


if __name__ == "__main__":
    convert_adjacency_to_edges(
        "/Users/ste/Desktop/Ste/Python/AlgorithmsCourse/KargerMinCut.txt",
        "/Users/ste/Desktop/Ste/C++/Programs/AlgorithmCourse/GraphSearch/KargerMinCut(edges).txt")
|
3,755 | d00fa29c502cc0311c54deb657b37c3c3caac7ca | import pygame
import pygame.freetype
import sys
import sqlite3
from data.player_class import Player
from data.explosion_class import Explosion
from data.objects_class import Bullets, Damage
from data.enemy_class import Enemy
from data.enemy_class import Boss
from data.death_animation import Smallexplosions
from data.explosion_class import Miniexplosion
from data.objects_class import Bossbullets
import random
def draw_text(text, font_u, color, surface, x, y):
    """Render *text* with freetype font *font_u* in *color* and blit it onto
    *surface* with its top-left corner at (x, y)."""
    rendered, rect = font_u.render(text, color)
    rect.topleft = (x, y)
    surface.blit(rendered, rect)
def play_sound(sound_p, volume_h=0.5, wait_t=0):
    """Load and play the sound file at *sound_p* at volume *volume_h*,
    honouring the module-level mute flag ``is_sound``, then optionally block
    for *wait_t* milliseconds (the wait happens even when muted)."""
    snd = pygame.mixer.Sound(sound_p)
    snd.set_volume(volume_h)
    if is_sound:
        snd.play()
    pygame.time.wait(wait_t)
pygame.init()
# Background scroll speed in pixels per frame.
speed_bckgd = 2
# Main gameplay-loop flag; cleared elsewhere to abort a run.
running_game = True
# Global mute toggle; read by play_sound() and the music setup.
is_sound = True
menu = True
# Set once the boss has been destroyed in a run.
boss_done = False
# Per-run statistics; reset at the start of game_screen().
game_score = 0
bullets_shot = 0
# Number of characters currently typed into the nickname field (capped at 3
# in options_menu()).
line_counter = 0
enemy_killed = 0
speed = 2
FPS = 100
width = 600
height = 800
# Player nickname; game_screen() substitutes 'NON' if it is left empty.
player_name = ''
# Local high-score database; the connection and cursor are shared module-wide.
con = sqlite3.connect('resources/db/leaderboard.db')
font = pygame.freetype.Font('resources/sprites/font_main.ttf', 45)
font_table = pygame.freetype.Font('resources/sprites/font_main.ttf', 25)
font_space = pygame.freetype.Font('resources/sprites/space.ttf', 20)
font_rating = pygame.freetype.Font('resources/sprites/font_main.ttf', 150)
pygame.display.set_icon(pygame.image.load('resources/images/test_small_logo_1.bmp'))
pygame.display.set_caption('Death or Dishonour')
screen = pygame.display.set_mode((600, 800))
clock = pygame.time.Clock()
cur = con.cursor()
def draw_controls():
    """Draw the control-scheme panel (WASD keys, space bar, mouse) in the
    lower part of the screen."""
    white = (255, 255, 255)
    pygame.draw.rect(screen, white, (0, 420, 600, 380), 4)
    pygame.draw.rect(screen, (0, 0, 0, 1), (3, 422, 595, 376))
    draw_text('controls:', font, white, screen, 20, 430)
    keys_img = pygame.transform.scale(
        pygame.image.load('resources/sprites/controls_1.png'), (243, 100))
    screen.blit(keys_img, (20, 470))
    pygame.draw.rect(screen, white, (20, 646, 130, 25))
    draw_text('SPACE', font_space, (0, 0, 0), screen, 50, 651)
    draw_text(' - movement', font, white, screen, 270, 522)
    mouse_img = pygame.transform.scale(
        pygame.image.load('resources/sprites/controls_2.png'), (90, 100))
    screen.blit(mouse_img, (153, 590))
    draw_text(' - shoot', font, white, screen, 270, 640)
def draw_leaderboard():
    """Fetch the seven highest scores from the database and render the
    leaderboard table (name / score columns)."""
    white = (255, 255, 255)
    rows = list(cur.execute(
        """SELECT * FROM highest_score ORDER BY score DESC LIMIT 7"""))
    # Table frame and column divider.
    pygame.draw.rect(screen, (0, 0, 0), (310, 70, 250, 335))
    pygame.draw.rect(screen, white, (310, 70, 250, 335), 3)
    pygame.draw.line(screen, white, (310, 124), (560, 124), 3)
    pygame.draw.line(screen, white, (435, 124), (435, 405), 3)
    row_height = 40
    y = 124
    # Seven horizontal row separators.
    for _ in range(7):
        y += row_height
        pygame.draw.line(screen, white, (310, y), (560, y), 3)
    draw_text('leaderboard', font_table, white, screen, 362, 80)
    x = 350
    y = 140
    for entry in rows:
        draw_text(str(entry[0]), font_table, white, screen, x, y)
        draw_text(str(entry[1]), font_table, white, screen, x + 100, y)
        y += row_height
def main_menu():
    """Top-level menu loop: title bar, play/options/quit buttons, controls
    panel and leaderboard. Runs forever; leaves only via sys.exit() or by
    entering the game/options screens and returning here afterwards."""
    click = False
    pygame.mixer.stop()
    # PERF: load the static button art once. The original re-read the image
    # from disk on every frame of the 10 FPS menu loop.
    button_play = pygame.image.load('resources/sprites/button.png')
    button_play = pygame.transform.scale(button_play, (222, 105))
    b_play_mask = button_play.get_rect()
    b_play_mask.x = 50
    b_play_mask.y = 70
    button_options = pygame.image.load('resources/sprites/button.png')
    button_options = pygame.transform.scale(button_options, (222, 105))
    b_options_mask = button_options.get_rect()
    b_options_mask.x = 50
    b_options_mask.y = 185
    button_exit = pygame.image.load('resources/sprites/button.png')
    button_exit = pygame.transform.scale(button_exit, (222, 105))
    b_exit_mask = button_exit.get_rect()
    b_exit_mask.x = 50
    b_exit_mask.y = 300
    while True:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        # ------------------------------------------ name zone draw
        pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))
        pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)
        draw_text('Death or Dishonour', font, (255, 255, 255), screen, 85, 20)
        # ------------------------------------------ play button
        screen.blit(button_play, (b_play_mask.x, b_play_mask.y))
        draw_text('play', font, (255, 255, 255), screen, 113, 100)
        # ------------------------------------------ options button
        screen.blit(button_options, (b_options_mask.x, b_options_mask.y))
        draw_text('options', font, (255, 255, 255), screen, 78, 215)
        # ------------------------------------------ quit button
        screen.blit(button_exit, (b_exit_mask.x, b_exit_mask.y))
        draw_text('quit', font, (255, 255, 255), screen, 113, 330)
        # ------------------------------------------ draw
        draw_controls()
        draw_leaderboard()
        # ------------------------------------------ collide (hover highlight + click)
        if b_play_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 70, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                game_screen()
        if b_options_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                options_menu()
        if b_exit_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                pygame.quit()
                sys.exit()
        # ------------------------------------------ events
        click = False
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_ESCAPE:
                    pygame.quit()
                    sys.exit()
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        # ------------------------------------------ update
        pygame.display.update()
        clock.tick(10)
def options_menu():
    """Options screen: edit the (three-character) player nickname, toggle
    audio, or go back. Mutates the module-level ``player_name``,
    ``line_counter`` and ``is_sound`` globals."""
    global player_name, line_counter, is_sound
    running = True
    click = False
    numlock = False
    # PERF: load the static button art once. The original re-read the image
    # from disk on every frame of the 10 FPS menu loop.
    button_1 = pygame.image.load('resources/sprites/button.png')
    button_1 = pygame.transform.scale(button_1, (222, 105))
    b_1_mask = button_1.get_rect()
    b_1_mask.x = 50
    b_1_mask.y = 70
    button_2 = pygame.image.load('resources/sprites/button.png')
    button_2 = pygame.transform.scale(button_2, (222, 105))
    b_2_mask = button_2.get_rect()
    b_2_mask.x = 50
    b_2_mask.y = 185
    button_back = pygame.image.load('resources/sprites/button.png')
    button_back = pygame.transform.scale(button_back, (222, 105))
    b_back_mask = button_back.get_rect()
    b_back_mask.x = 50
    b_back_mask.y = 300
    while running:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        # ------------------------------------------ name zone draw
        pygame.draw.rect(screen, (0, 0, 0), (52, 10, 508, 50))
        pygame.draw.rect(screen, (255, 255, 255), (52, 10, 508, 50), 3)
        draw_text('Options', font, (255, 255, 255), screen, 215, 20)
        # ------------------------------------------ button nick
        screen.blit(button_1, (b_1_mask.x, b_1_mask.y))
        draw_text(player_name, font, (255, 255, 255), screen, 125, 100)
        # ------------------------------------------ button sound
        screen.blit(button_2, (b_2_mask.x, b_2_mask.y))
        # ------------------------------------------ button back
        screen.blit(button_back, (b_back_mask.x, b_back_mask.y))
        draw_text('back', font, (255, 255, 255), screen, 113, 330)
        # ------------------------------------------ draw
        draw_controls()
        draw_text('audio:', font, (255, 255, 255), screen, 60, 195)
        if is_sound:
            draw_text('on', font, (255, 255, 255), screen, 190, 245)
        else:
            draw_text('off', font, (255, 255, 255), screen, 175, 230)
        if line_counter == 0 or player_name == 'NON':
            draw_text('ENTER', font, (255, 0, 0), screen, 280, 90)
            draw_text('NICKNAME', font, (255, 0, 0), screen, 280, 120)
        if numlock:
            draw_text('OFF', font, (255, 0, 0), screen, 500, 90)
            draw_text('NUM', font, (255, 0, 0), screen, 500, 120)
            draw_text('LOCK', font, (255, 0, 0), screen, 500, 150)
        # ------------------------------------------ collide
        if b_2_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 185, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                if is_sound:
                    is_sound = not is_sound
                    pygame.mixer.pause()
                else:
                    is_sound = not is_sound
                    pygame.mixer.unpause()
        if b_back_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (50, 300, 222, 105), 5)
            if click:
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                running = False
        # ------------------------------------------ events
        click = False
        for event in pygame.event.get():
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_BACKSPACE:
                    player_name = player_name[:-1]
                    if line_counter != 0:
                        line_counter -= 1
                elif player_name == 'NON':
                    pass
                elif event.key == pygame.K_SPACE:
                    pass
                elif event.key == pygame.K_UP:
                    pass
                elif event.key == pygame.K_DOWN:
                    pass
                elif event.key == pygame.K_LEFT:
                    pass
                elif event.key == pygame.K_RIGHT:
                    pass
                elif event.key == pygame.K_RETURN:
                    pass
                elif event.key == pygame.K_NUMLOCK:
                    numlock = True
                elif event.key == pygame.K_ESCAPE:
                    running = False
                elif event.mod == pygame.KMOD_NONE and event.key != pygame.K_TAB:
                    # Nickname is capped at three characters.
                    if line_counter != 3:
                        line_counter += 1
                        player_name += str(event.unicode).upper()
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
        # ------------------------------------------ update
        pygame.display.update()
        clock.tick(10)
def game_screen():
    """Main gameplay loop.

    Spawns enemies on a 1-second timer, handles movement/shooting input,
    runs all collision checks (player vs enemies, bullets vs enemies, boss
    vs everything), manages the boss phases and scoring, and on death or
    level end records the score in the database and shows the death screen.
    Reads and mutates the per-run module-level globals declared below.
    """
    global game_score, player_name, running_game, enemy_killed, bullets_shot, boss_done
    game_score = 0
    enemy_killed = 0
    bullets_shot = 0
    boss_done = False
    if player_name == '':
        player_name = 'NON'
    track_count = 0
    battle_tracks = ['resources/sounds/music/battle_music_1.mp3', 'resources/sounds/music/battle_music_2.mp3',
                     'resources/sounds/music/battle_music_3.mp3', 'resources/sounds/music/battle_music_4.mp3',
                     'resources/sounds/music/battle_music_5.mp3', 'resources/sounds/music/battle_music_6.mp3']
    ingame_music = pygame.mixer.Sound(battle_tracks[track_count])
    ingame_music.stop()
    ingame_music_sound = 0.1
    if not is_sound:
        ingame_music_sound = 0
    ingame_music.set_volume(ingame_music_sound)
    ingame_music.play()
    bs = False  # becomes True once the boss has been spawned
    running_game = True
    # USEREVENT fires every second and drives spawning / end-of-run checks.
    pygame.time.set_timer(pygame.USEREVENT, 1000)
    enemies = pygame.sprite.Group()
    death = False
    p = Player()
    window_holes = pygame.sprite.Group()
    bullets_count = pygame.sprite.Group()
    boss_bullets_count = pygame.sprite.Group()
    booms = pygame.sprite.Group()
    small_booms = pygame.sprite.Group()
    mini_booms = pygame.sprite.Group()
    # One-shot flags so each boss phase awards its score bonus only once.
    phase1_score = True
    phase2_score = True
    phase3_score = True
    battle_music = True
    phase4_score = True
    col_check = 1
    boss_death = False
    level_bckgd_pos = -23800
    current_player_sprite = 'stay'
    current_level_background = pygame.image.load('resources/level_pictures/first_level_bckgd.jpg')
    screen.blit(current_level_background, (0, 0))
    wait = 0
    last = pygame.time.get_ticks()
    last_2 = pygame.time.get_ticks()
    boss_cooldown = 1000
    cooldown = 100
    while running_game:
        # ---------------------------------------- input handling
        for event in pygame.event.get():  # process the messages sent by the user
            # '=' key: skip to the next battle track.
            if event.type == pygame.KEYDOWN and event.key == pygame.K_EQUALS:
                ingame_music.stop()
                track_count += 1
                if track_count > 5:
                    track_count = 0
                ingame_music = pygame.mixer.Sound(battle_tracks[track_count])
                ingame_music.set_volume(ingame_music_sound)
                ingame_music.play()
            # Numpad +/-: music volume up/down.
            if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_PLUS:
                ingame_music_sound += 0.05
                if ingame_music_sound >= 1.5:
                    ingame_music_sound = 1.4
                ingame_music.set_volume(ingame_music_sound)
            if event.type == pygame.KEYDOWN and event.key == pygame.K_KP_MINUS:
                ingame_music_sound -= 0.05
                if ingame_music_sound < 0:
                    ingame_music_sound = 0
                ingame_music.set_volume(ingame_music_sound)
            # Horizontal movement (A/D or arrow keys); opposite direction
            # must not already be active.
            if event.type == pygame.KEYDOWN and (
                    event.key == pygame.K_a or event.key == pygame.K_LEFT) and not p.moving_right:
                current_player_sprite = 'left'
                p.moving_right = False
                p.moving_left = True
            elif event.type == pygame.KEYDOWN and (
                    event.key == pygame.K_d or event.key == pygame.K_RIGHT) and not p.moving_left:
                current_player_sprite = 'right'
                p.moving_left = False
                p.moving_right = True
            if event.type == pygame.KEYUP and (event.key == pygame.K_a or event.key == pygame.K_LEFT):
                current_player_sprite = 'stay'
                p.moving_right = False
                p.moving_left = False
            if event.type == pygame.KEYUP and (event.key == pygame.K_d or event.key == pygame.K_RIGHT):
                current_player_sprite = 'stay'
                p.moving_right = False
                p.moving_left = False
            # Vertical movement (W/S or arrow keys).
            if event.type == pygame.KEYDOWN and (
                    event.key == pygame.K_w or event.key == pygame.K_UP) and not p.moving_down:
                p.moving_down = False
                p.moving_up = True
            elif event.type == pygame.KEYDOWN and (
                    event.key == pygame.K_s or event.key == pygame.K_DOWN) and not p.moving_up:
                p.moving_up = False
                p.moving_down = True
            if event.type == pygame.KEYUP and (event.key == pygame.K_w or event.key == pygame.K_UP):
                current_player_sprite = 'stay'
                p.moving_down = False
                p.moving_up = False
            if event.type == pygame.KEYUP and (event.key == pygame.K_s or event.key == pygame.K_DOWN):
                current_player_sprite = 'stay'
                p.moving_down = False
                p.moving_up = False
            # shot handling (left mouse button)
            if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1 and p.health_count > 0:
                now = pygame.time.get_ticks()
                if now - last >= cooldown:
                    last = now
                    # Two bullets, one from each wing.
                    Bullets(bullets_count).shot((p.x + 21, p.y - 25))
                    Bullets(bullets_count).shot((p.x + 76, p.y - 25))
                    if is_sound:
                        play_sound('resources/sounds/shot_sound.mp3', 0.1)
                    Bullets.shooting = True
                    bullets_shot += 2
            # shot handling, but for the space bar
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE and p.health_count > 0:
                now = pygame.time.get_ticks()
                if now - last >= cooldown:
                    last = now
                    Bullets(bullets_count).shot((p.x + 21, p.y - 25))
                    Bullets(bullets_count).shot((p.x + 76, p.y - 25))
                    if is_sound:
                        play_sound('resources/sounds/shot_sound.mp3', 0.1)
                    Bullets.shooting = True
                    bullets_shot += 2
            # enemy spawning: regular enemies until the background has
            # scrolled to -8500, then the boss is spawned once.
            if event.type == pygame.USEREVENT and level_bckgd_pos >= -8500 and not bs:
                bs = True
                b = Boss()
            if event.type == pygame.USEREVENT and level_bckgd_pos < -8500:
                Enemy(enemies)
            # End of run: player has been dead for 2 s, or the level has
            # scrolled out. NOTE(review): 'and' binds tighter than 'or'
            # here, so the level-end check alone also triggers this branch.
            if event.type == pygame.USEREVENT and death and pygame.time.get_ticks()\
                    - wait > 2000 or level_bckgd_pos > -801:
                ingame_music.stop()
                death_screen()
                # Left-pad the score with zeros to six characters.
                while True:
                    if len(str(game_score)) < 6:
                        game_score = '0' + str(game_score)
                    else:
                        break
                # NOTE(review): SQL is built via str.format; a parameterized
                # query ("INSERT ... VALUES (?, ?)") would be safer.
                var = "INSERT INTO highest_score VALUES ('{}', '{}')".format(player_name, game_score)
                cur.execute(var)
                con.commit()
            # quit the game when the user closes the window
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            # escape: pause screen; if it cleared running_game, save score
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                pause_screen()
                if not running_game:
                    ingame_music.stop()
                    while True:
                        if len(str(game_score)) < 6:
                            game_score = '0' + str(game_score)
                        else:
                            break
                    var = "INSERT INTO highest_score VALUES ('{}', '{}')".format(player_name, game_score)
                    cur.execute(var)
                    con.commit()
        # background scrolling
        level_bckgd_pos += speed_bckgd
        if level_bckgd_pos >= 0:
            screen.fill((0, 0, 0))
        screen.blit(current_level_background, (0, level_bckgd_pos))
        if level_bckgd_pos > -805:
            death = True
        # player movement and combat while alive
        if p.health_count > 0:
            # collision checks between enemies, the player and bullets
            for i in enemies:
                collision = pygame.sprite.collide_rect(p, i)
                if collision:
                    Explosion(booms).boom((i.rect.x, i.rect.y))
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                    # Ramming an enemy deals it 2 damage.
                    if i.health_count - 2 <= 0:
                        game_score += 10
                        i.kill()
                        Explosion(booms).boom((i.rect.x, i.rect.y))
                        if is_sound:
                            play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                        enemy_killed += 1
                    else:
                        i.health_count -= 2
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3', 0.03)
                    p.health_count -= 1
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.05)
                    if p.health_count > 0:
                        # Cosmetic "cracked window" effect on taking damage.
                        Damage(window_holes).taking_damage((random.randint(50, 550), random.randint(50, 750)))
                        if is_sound:
                            play_sound('resources/sounds/window_crashed.mp3', 1)
                            play_sound('resources/sounds/explosion_stun.mp3', 0.02)
                # Bullets deal the enemy 1 damage each.
                for j in bullets_count:
                    collision = pygame.sprite.collide_rect(j, i)
                    if collision:
                        if i.health_count - 1 <= 0:
                            game_score += 5
                            i.kill()
                            Explosion(booms).boom((i.rect.x, i.rect.y))
                            if is_sound:
                                play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                            enemy_killed += 1
                        else:
                            i.health_count -= 1
                            Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y))
                            if is_sound:
                                play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3', 0.03)
                        j.kill()
            # Boss combat (only while the boss is alive).
            if bs and not boss_death:
                # Player ramming the boss damages both.
                collision = pygame.sprite.collide_rect(b, p)
                if collision and b.y > 0:
                    b.health_count -= 0.3
                    if is_sound:
                        play_sound('resources/sounds/collision_sound.mp3', 0.03)
                    p.health_count -= 0.2
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.05)
                    # Flip the boss to the "damaged" sprite of its phase.
                    if b.body == b.stay1 or b.body == b.stay2:
                        b.body = b.stay2
                    if b.body == b.stay3 or b.body == b.stay4:
                        b.body = b.stay4
                    if b.body == b.stay5 or b.body == b.stay6:
                        b.body = b.stay6
                    col_check += 1
                    if p.health_count > 0:
                        Damage(window_holes).taking_damage((random.randint(50, 550), random.randint(50, 750)))
                        if is_sound:
                            play_sound('resources/sounds/window_crashed.mp3', 0.1)
                            play_sound('resources/sounds/explosion_stun.mp3', 0.02)
                # Player bullets hitting the boss.
                for j in bullets_count:
                    collision = pygame.sprite.collide_rect(b, j)
                    if collision and b.y > 0:
                        if b.body == b.stay1 or b.body == b.stay2:
                            b.body = b.stay2
                        if b.body == b.stay3 or b.body == b.stay4:
                            b.body = b.stay4
                        if b.body == b.stay5 or b.body == b.stay6:
                            b.body = b.stay6
                        col_check += 1
                        b.health_count -= 0.2
                        Miniexplosion(mini_booms).boom((j.rect.x, j.rect.y))
                        if is_sound:
                            play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3', 0.03)
                        j.kill()
                # Boss bullets hitting the player.
                for h in boss_bullets_count:
                    collision = pygame.sprite.collide_rect(p, h)
                    if collision:
                        p.health_count -= 1
                        Miniexplosion(mini_booms).boom((h.rect.x, h.rect.y))
                        if p.health_count > 0:
                            Damage(window_holes).taking_damage((random.randint(50, 550), random.randint(50, 750)))
                            if is_sound:
                                play_sound('resources/sounds/window_crashed.mp3', 0.1)
                                play_sound('resources/sounds/explosion_stun.mp3', 0.01)
                        if is_sound:
                            play_sound('resources/sounds/collision_sound.mp3', 0.03)
                        h.kill()
            p.update(FPS)
            # switch player sprite (animation frame toggles each iteration)
            if current_player_sprite == 'left':
                sprite = p.anim_left()
                screen.blit(sprite, (p.x, p.y))
                p.left_1 = not p.left_1
            elif current_player_sprite == 'right':
                sprite = p.anim_right()
                screen.blit(sprite, (p.x, p.y))
                p.right_1 = not p.right_1
            elif current_player_sprite == 'stay':
                sprite = p.anim_stay()
                screen.blit(sprite, (p.x, p.y))
                p.stay_1 = not p.stay_1
            if bs:
                # Swap to the boss theme once when the boss appears.
                if battle_music:
                    ingame_music.stop()
                    ingame_music = pygame.mixer.Sound('resources/sounds/music/wagner_main_theme.mp3')
                    ingame_music.set_volume(ingame_music_sound)
                    ingame_music.play()
                    battle_music = False
                b.update()
                # One-time score bonuses when the boss enters a new phase.
                if b.body == b.stay3 and phase1_score:
                    game_score += 100
                    phase1_score = False
                if b.body == b.stay5 and phase2_score:
                    game_score += 100
                    phase2_score = False
                if b.body == b.stay7 and phase3_score:
                    game_score += 200
                    phase3_score = False
                # Boss fires on its own cooldown while on screen.
                now = pygame.time.get_ticks()
                if now - last_2 >= boss_cooldown and b.y > 0 and b.body != b.stay7:
                    last_2 = now
                    play_sound('resources/sounds/boss_shot.mp3', 0.05)
                    Bossbullets(boss_bullets_count).shot((b.x + 170, b.y + 155))
                # Advance the boss sprite every 40 registered hits.
                if col_check % 40 == 0:
                    b.change_sprite()
                else:
                    col_check += 1
                if b.health_count > 0:
                    screen.blit(b.body, (b.x, b.y))
                elif b.health_count <= 0 and phase4_score:
                    # Boss defeated: final bonus and explosion burst.
                    boss_done = True
                    phase4_score = False
                    game_score += 350
                    if is_sound:
                        play_sound('resources/sounds/boss_defeated.mp3', 0.2)
                    Explosion(booms).boom((b.rect.x + 75, b.rect.y + 25))
                    Explosion(booms).boom((b.rect.x, b.rect.y))
                    Explosion(booms).boom((b.rect.x + 200, b.rect.y + 34))
                    Explosion(booms).boom((b.rect.x + 250, b.rect.y + 25))
                    Explosion(booms).boom((b.rect.x + 150, b.rect.y + 56))
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                    boss_death = True
        else:
            # Player is dead: play the crash animation, then explode.
            if p.minimize == 0:
                if is_sound:
                    ingame_music.stop()
                    play_sound('resources/sounds/plane_crash.mp3', 0.05)
                p.minimize += 1
            if not death:
                if p.minimize <= 320:
                    p.death()
                    screen.blit(p.death_sp, (p.x, p.y))
                else:
                    death = True
                    wait = pygame.time.get_ticks()
                    Smallexplosions(small_booms).boom((p.rect.x + 3, p.rect.y + 25))
                    Smallexplosions(small_booms).boom((p.rect.x, p.rect.y))
                    Smallexplosions(small_booms).boom((p.rect.x - 22, p.rect.y + 7))
                    if is_sound:
                        play_sound('resources/sounds/explosion_sound.mp3', 0.1)
                    p.kill()
            # A surviving boss speeds up and "wins".
            if bs and b.health_count > 0:
                b.speed += 0.02
                b.win = True
                screen.blit(b.body, (b.x, b.y))
                b.update()
        # enemy movement
        window_holes.update()
        window_holes.draw(screen)
        enemies.update(FPS)
        # draw enemies
        enemies.draw(screen)
        # bullet movement
        bullets_count.update()
        bullets_count.draw(screen)
        boss_bullets_count.update()
        boss_bullets_count.draw(screen)
        small_booms.update()
        small_booms.draw(screen)
        mini_booms.update()
        mini_booms.draw(screen)
        # player nickname
        draw_text('Player: {}'.format(player_name), font, (255, 255, 255), screen, 20, 20)
        # player score, zero-padded to six digits
        if len(str(game_score)) < 2:
            draw_text('00000' + str(game_score), font, (255, 255, 255), screen, 430, 20)
        elif len(str(game_score)) < 3:
            draw_text('0000' + str(game_score), font, (255, 255, 255), screen, 430, 20)
        elif len(str(game_score)) < 4:
            draw_text('000' + str(game_score), font, (255, 255, 255), screen, 430, 20)
        elif len(str(game_score)) < 5:
            draw_text('00' + str(game_score), font, (255, 255, 255), screen, 430, 20)
        elif len(str(game_score)) < 6:
            draw_text('0' + str(game_score), font, (255, 255, 255), screen, 430, 20)
        elif len(str(game_score)) >= 6:
            draw_text("Max score", font, (255, 255, 255), screen, 510, 20)
        # explosion where an enemy was killed
        booms.update()
        booms.draw(screen)
        pygame.display.flip()
        clock.tick(FPS)
def death_screen():
    """Show the end-of-game results screen.

    Reveals the player's stats one line per second (driven by USEREVENT),
    computes a letter rating from kills/shots/boss progress, and waits for
    the player to click "menu" (or press Escape) to return to the menu.
    """
    global running_game, game_score
    running = True
    click = False
    draw_counter = 0   # number of stat lines revealed so far (grows 1/sec)
    color_counter = 0  # cycles 0..2 to flash the top "SSS" rating colour
    pygame.time.set_timer(pygame.USEREVENT, 1000)
    # rating: 1 point per 10 kills, +1 for firing fewer than 800 bullets,
    # +2 (below) for defeating the boss
    rating_kills = enemy_killed // 10
    rating_shots = 1 if bullets_shot < 800 else 0
    rating = rating_kills + rating_shots
    # one music-setup path for both outcomes (was duplicated per branch)
    theme = 'victory_theme.mp3' if boss_done else 'loose_theme.mp3'
    death_music = pygame.mixer.Sound('resources/sounds/music/' + theme)
    death_music.stop()
    death_music.set_volume(0.1 if is_sound else 0)
    death_music.play()
    if boss_done:
        rating += 2
    # left-pad the score with zeros up to six characters
    while True:
        if len(str(game_score)) < 6:
            game_score = '0' + str(game_score)
        else:
            break
    while running:
        mx, my = pygame.mouse.get_pos()
        screen.fill((0, 0, 0))
        # ------------------------------------------ name zone draw
        pygame.draw.rect(screen, (0, 0, 0), (50, 10, 500, 50))
        pygame.draw.rect(screen, (255, 255, 255), (50, 10, 500, 50), 3)
        draw_text('End of your way', font, (255, 255, 255), screen, 120, 15)
        # ------------------------------------------ button menu
        button_menu = pygame.image.load('resources/sprites/button.png')
        button_menu = pygame.transform.scale(button_menu, (200, 70))
        b_menu_mask = button_menu.get_rect()
        b_menu_mask.x = 195
        b_menu_mask.y = 700
        screen.blit(button_menu, (b_menu_mask.x, b_menu_mask.y))
        draw_text('menu', font, (255, 255, 255), screen, 245, 730)
        # ------------------------------------------ draw (stats reveal)
        if draw_counter >= 1:
            draw_text('Player: {}'.format(player_name), font, (255, 255, 255), screen, 50, 150)
        if draw_counter >= 2:
            draw_text('Score: {}'.format(game_score), font, (255, 255, 255), screen, 50, 230)
        if draw_counter >= 3:
            draw_text('Enemies killed: {}'.format(enemy_killed), font, (255, 255, 255), screen, 50, 310)
        if draw_counter >= 4:
            draw_text('Bullets fired: {}'.format(bullets_shot), font, (255, 255, 255), screen, 50, 390)
        if draw_counter >= 5:
            draw_text('Rating:', font, (255, 255, 255), screen, 50, 470)
        if draw_counter >= 6:
            if rating <= 6:
                draw_text('F', font_rating, (100, 100, 100), screen, 300, 470)
            elif rating == 7:
                draw_text('D', font_rating, (29, 173, 23), screen, 300, 470)
            elif rating == 8:
                draw_text('C', font_rating, (20, 20, 255), screen, 300, 470)
            elif rating == 9:
                draw_text('B', font_rating, (200, 0, 255), screen, 300, 470)
            elif rating == 10:
                draw_text('A', font_rating, (255, 200, 0), screen, 300, 470)
            elif rating == 11:
                draw_text('S', font_rating, (255, 100, 0), screen, 300, 470)
            elif rating <= 13:
                draw_text('SS', font_rating, (255, 0, 0), screen, 300, 470)
            else:
                # top rating flashes red/green/blue once per second
                if color_counter == 0:
                    draw_text('SSS', font_rating, (255, 0, 0), screen, 300, 470)
                elif color_counter == 1:
                    draw_text('SSS', font_rating, (0, 255, 0), screen, 300, 470)
                else:
                    draw_text('SSS', font_rating, (0, 0, 255), screen, 300, 470)
        # ------------------------------------------ collide
        if b_menu_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (195, 700, 200, 70), 4)
            if click:
                # single sound check (was a duplicated nested `if is_sound`
                # with a dead `else: pass` branch)
                if is_sound:
                    play_sound('resources/sounds/click_sound.mp3', 0.2)
                running = False
                running_game = False
        # ------------------------------------------ events
        click = False
        for event in pygame.event.get():
            if event.type == pygame.USEREVENT:
                draw_counter += 1
                color_counter += 1
                if color_counter == 3:
                    color_counter = 0
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                running = False
                running_game = False
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        # ------------------------------------------ update
        pygame.display.update()
        clock.tick(10)
    death_music.stop()
def pause_screen():
    """Modal pause menu: 'resume' returns to gameplay, 'menu' ends the run."""
    global running_game
    running = True
    click = False
    # load and scale the shared button sprite once, instead of loading the
    # same file twice per frame inside the loop
    button_image = pygame.image.load('resources/sprites/button.png')
    button_image = pygame.transform.scale(button_image, (200, 70))
    b_menu_mask = button_image.get_rect()
    b_menu_mask.x = 195
    b_menu_mask.y = 410
    b_resume_mask = button_image.get_rect()
    b_resume_mask.x = 195
    b_resume_mask.y = 300
    while running:
        screen.fill((0, 0, 0))
        mx, my = pygame.mouse.get_pos()
        pygame.draw.rect(screen, (0, 0, 0), (175, 160, 240, 340))
        pygame.draw.rect(screen, (255, 255, 255), (175, 160, 240, 340), 3)
        # ------------------------------------------ name zone draw
        pygame.draw.rect(screen, (0, 0, 0), (185, 180, 220, 80))
        pygame.draw.rect(screen, (255, 255, 255), (185, 180, 220, 80), 3)
        draw_text('Pause', font, (255, 255, 255), screen, 235, 205)
        # ------------------------------------------ button menu
        screen.blit(button_image, (b_menu_mask.x, b_menu_mask.y))
        draw_text('menu', font, (255, 255, 255), screen, 245, 440)
        # ------------------------------------------ button resume
        screen.blit(button_image, (b_resume_mask.x, b_resume_mask.y))
        draw_text('resume', font, (255, 255, 255), screen, 225, 330)
        # ------------------------------------------ collide
        if b_menu_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (195, 410, 200, 70), 4)
            if click:
                running = False
                running_game = False
        if b_resume_mask.collidepoint((mx, my)):
            pygame.draw.rect(screen, (255, 0, 100), (195, 300, 200, 70), 4)
            if click:
                running = False
        # ------------------------------------------ events
        click = False
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                running = False
            if event.type == pygame.MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
        # ------------------------------------------ update
        pygame.display.update()
        clock.tick(10)
if __name__ == '__main__':
    # Entry point: all screens are reached from the main menu loop.
    main_menu()
    # Shut pygame down once the menu loop returns.
    pygame.quit()
|
3,756 | 6d359d987c50fd0d5e963d467a379eb245e3eb40 | import cv2 as cv
'''Colour space overview'''
'''
RGB: a cube-shaped colour space with three channels — red, green, blue —
     each quantised to 255 grey levels.
HSV: a cylindrical colour space with three channels — value (height),
     hue (angle) and saturation (radius).
HIS
YCrCb
YUV
'''
'''Common colour-space conversion function: cvtColor'''
def colorSpaceConvert(image):
    """Show *image* converted to grayscale, HSV and YUV, one window each."""
    conversions = (
        ("gray", cv.COLOR_BGR2GRAY),
        ("hsv", cv.COLOR_BGR2HSV),
        ("yuv", cv.COLOR_BGR2YUV),
    )
    for window_name, conversion_code in conversions:
        cv.imshow(window_name, cv.cvtColor(image, conversion_code))
# Load the sample image and display it in each colour space.
image = cv.imread("../girl.jpg")
colorSpaceConvert(image)
# Block until any key is pressed, then destroy every OpenCV window.
cv.waitKey(0)
cv.destroyAllWindows() |
3,757 | 8c0bae9e49c5ea9fbdee7c5c864afff16cc9f8b8 | import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from sklearn import linear_model
from features import calculateTargets
# Currency pair and bar interval (minutes) selecting the history file to load.
currency = 'EURUSD'
interval = '1440'

# OHLCV history exported from MetaTrader; columns are positional, no header.
df = pd.read_csv(
    r'../data/' + currency.upper() + interval + '.csv',
    names=['date', 'time', 'open', 'high', 'low', 'close', 'volume'],
    dtype={'open': 'float', 'high': 'float', 'low': 'float', 'close': 'float', 'volume': 'int'},
    #parse_dates=[[0, 1]],
    # index_col=0,
)
#print df.head()
#print df.tail()
# Use only one feature
#diabetes_X = diabetes.data[:, np.newaxis]
#diabetes_X_temp = diabetes_X[:, :, 2]
## Split the data into training/testing sets
#diabetes_X_train = diabetes_X_temp[:-20]
#diabetes_X_test = diabetes_X_temp[-20:]
# Split the targets into training/testing sets
calculateTargets(df)
# print as a function call works on both Python 2 and Python 3
print('targets calculated')
#diabetes_y_train = diabetes.target[:-20]
#diabetes_y_test = diabetes.target[-20:]
## Create linear regression object
#regr = linear_model.LinearRegression()
#
## Train the model using the training sets
#regr.fit(diabetes_X_train, diabetes_y_train)
#
## The coefficients
#print('Coefficients: \n', regr.coef_)
## The mean square error
#print("Residual sum of squares: %.2f"
# % np.mean((regr.predict(diabetes_X_test) - diabetes_y_test) ** 2))
## Explained variance score: 1 is perfect prediction
#print('Variance score: %.2f' % regr.score(diabetes_X_test, diabetes_y_test))
#
## Plot outputs
#plt.scatter(diabetes_X_test, diabetes_y_test, color='black')
#plt.plot(diabetes_X_test, regr.predict(diabetes_X_test), color='blue',
# linewidth=3)
#
#plt.xticks(())
#plt.yticks(())
#
#plt.show() |
3,758 | 62bc8fec6833c5e8bc1598941eaad141ab6c9d5a | #
# -*- coding: utf-8 -*-
# Copyright 2019 Fortinet, Inc.
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""
The fortios firewall monitor class
It is in this file the runtime information is collected from the device
for a given resource, parsed, and the facts tree is populated
based on the configuration.
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import re
from copy import deepcopy
from ansible.module_utils.network.common import utils
from ansible.module_utils.network.fortios.argspec.firewall.firewall import FirewallArgs
# Monitor API subsets this module can fetch; each name encodes a FortiOS
# "system" monitor path with '_' separators (converted to '/' when queried).
FACT_SYSTEM_SUBSETS = frozenset([
    'system_current-admins_select',
    'system_firmware_select',
    'system_fortimanager_status',
    'system_ha-checksums_select',
    'system_interface_select',
    'system_status_select',
    'system_time_select',
])
class FirewallFacts(object):
    """ The fortios firewall fact class
    """

    def __init__(self, module, fos, uri=None, subspec='config', options='options'):
        self._module = module
        self._fos = fos
        self._uri = uri

    def populate_facts(self, connection, ansible_facts, data=None):
        """Collect monitor facts for the configured uri and merge them into
        the ansible_facts tree.

        :param connection: the device connection (fallback when no fos given)
        :param ansible_facts: facts dictionary, updated in place
        :param data: previously collected conf (unused)
        :rtype: dictionary
        :returns: the updated ansible_facts
        """
        fos = self._fos or connection
        vdom = self._module.params['vdom']
        ansible_facts['ansible_network_resources'].pop('system', None)

        facts = {}
        if self._uri.startswith(tuple(FACT_SYSTEM_SUBSETS)):
            # 'system_time_select' -> monitor path 'time/select'
            monitor_path = self._uri[len('system_'):].replace('_', '/')
            facts[self._uri] = fos.monitor('system', monitor_path, vdom=vdom)

        ansible_facts['ansible_network_resources'].update(facts)
        return ansible_facts
|
3,759 | 18e0ece7c38169d2de91a07dddd4f40b7427848f | # 4. Пользователь вводит целое положительное число.
# Найдите самую большую цифру в числе. Для решения используйте цикл while и арифметические операции.
income_number = int(input('Введите, пожалуйста, целое положительное число '))
max_number = 0
# Strip digits from the left one at a time, tracking the maximum seen.
while income_number != 0:  # loop until the number is fully consumed
    num_exp = 10 ** (len(str(income_number)) - 1)  # place value of the leftmost digit
    # floor division keeps everything in exact integer arithmetic;
    # int(income_number / num_exp) went through float and loses precision
    # for numbers longer than ~15 digits
    deleted_number = income_number // num_exp  # leftmost digit
    if max_number < deleted_number:  # update the running maximum if needed
        max_number = deleted_number
    income_number = income_number - deleted_number * num_exp  # drop the leftmost digit
print(f'Самая большая цифра в числе {max_number}')
|
3,760 | a6d409b806dbd1e174cac65a26c5e8106a8b93ea | #!/usr/bin/env python3
"""Initiates connection to AWSIoT and provides helper functions
deviceshadowhandler.py
by Darren Dunford
"""
import json
import logging
import queue
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTShadowClient
LOGGER = logging.getLogger(__name__)


class DeviceShadowHandler:
    """Maintains the AWS IoT device-shadow connection for one thing.

    Delta callbacks convert shadow changes into items on ``event_queue`` for
    the main loop to consume; the ``post_*``/``status_post`` helpers publish
    reported state back to the shadow.
    """

    def status_post(self, status, state=None):
        """Post status message and device state to AWSIoT and LOGGER

        :param status: status string
        :param state: optional dictionary merged into the shadow reported state
        :return:
        """
        # create new JSON payload to update device shadow; "desired": None
        # clears any pending desired state
        new_payload = {"state": {"reported": {"status": str(status)}, "desired": None}}

        if state:
            # merge extra state into the "reported" section; the previous
            # new_payload.update(...) replaced the entire "state" object,
            # dropping both the status and the "desired": None reset
            new_payload["state"]["reported"].update(state)

        # update shadow
        self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)

        # log to syslog
        LOGGER.info(status)
        LOGGER.debug(json.dumps(new_payload))

    # constructor
    def __init__(self, thingname: str, host: str, root_ca_path: str, private_key_path: str, certificate_path: str):
        """Initiate AWS IoT connection

        :param thingname: AWSIoT thing name
        :param host: AWSIoT endpoint FQDN
        :param root_ca_path: local file path to Amazon root certificate
        :param private_key_path: local file path to device private key
        :param certificate_path: local file path to device certificate
        """
        # Init Shadow Client MQTT connection
        self.shadow_client = AWSIoTMQTTShadowClient(thingname)
        self.shadow_client.configureEndpoint(host, 8883)
        self.shadow_client.configureCredentials(root_ca_path, private_key_path, certificate_path)

        # AWSIoTMQTTShadowClient configuration
        self.shadow_client.configureAutoReconnectBackoffTime(1, 32, 20)
        self.shadow_client.configureConnectDisconnectTimeout(20)  # 20 sec
        self.shadow_client.configureMQTTOperationTimeout(20)  # 20 sec

        # force shadow client to use offline publish queueing
        # overriding the default behaviour for shadow clients in the SDK
        mqtt_client = self.shadow_client.getMQTTConnection()
        mqtt_client.configureOfflinePublishQueueing(-1)

        # Connect to AWS IoT with a 300 second keepalive
        self.shadow_client.connect(300)

        # Create a deviceShadow with persistent subscription and register delta handler
        self.shadow_handler = self.shadow_client.createShadowHandlerWithName(thingname, True)
        self.shadow_handler.shadowRegisterDeltaCallback(self.custom_shadow_callback_delta)

        # initial status post
        self.status_post('STARTING')

        # dictionary to hold callback responses
        self._callbackresponses = {}

        # callbacks in this class post events on to this queue
        self.event_queue = queue.SimpleQueue()

        self.settings = {}

    # Custom shadow callback for delta -> remote triggering
    def custom_shadow_callback_delta(self, payload: str, response_status, token):
        """Handle a shadow delta: queue any received command/settings for the
        main loop and acknowledge them on the shadow.

        :param payload: JSON string ready to be parsed using json.loads(...)
        :param response_status: ignored
        :param token: ignored
        """
        # DEBUG dump payload in to syslog
        LOGGER.debug(payload)

        # create JSON dictionary from payload
        payload_dict = json.loads(payload)
        new_payload = {}

        # check for command, if received push event on to queue and clear
        # the desired command so the delta is not re-delivered
        if payload_dict.get('state').get('command'):
            self.event_queue.put_nowait({"command": payload_dict.get('state').get('command')})
            new_payload.update({"state": {"desired": {"command": None}}})

        # check for settings, if received push event on to queue
        if payload_dict.get('state').get('settings'):
            self.event_queue.put_nowait({"settings": payload_dict.get('state').get('settings')})
            new_payload.update({"state": {"desired": {"settings": payload_dict.get('state').get('settings')}}})

        LOGGER.info("Shadow update: " + json.dumps(new_payload))

        # update shadow instance status
        self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 5)

    def custom_shadow_callback_get(self, payload, response_status, token):
        """Callback function records response from get shadow operation

        :param payload: JSON shadow document string
        :param response_status: stored alongside the parsed payload
        :param token: request token, used as the lookup key
        :return:
        """
        self._callbackresponses.update({token: {"payload": json.loads(payload), "responseStatus": response_status}})

    def get_response(self, token):
        """Return prior get shadow operation response

        note each response is deleted when returned, i.e. can only be returned once

        :param token: request token previously passed to a get operation
        :return: stored {"payload": ..., "responseStatus": ...} dict
        """
        return self._callbackresponses.pop(token)

    # post all parameters as a shadow update
    def post_param(self):
        """Report the current settings dict on the shadow and clear desired state."""
        new_payload = {"state": {"reported": {"settings": self.settings}, "desired": None}}
        self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 5)

    # post state update to device shadow and, if enabled, syslog
    def post_state(self, state):
        """Report an arbitrary status value on the shadow and log it."""
        # create new JSON payload to update device shadow
        new_payload = {"state": {"reported": {"status": state}, "desired": None}}
        self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)

        # log to syslog
        LOGGER.info("New state" + json.dumps(state))

    def post_temperature(self, temp):
        """Report the device CPU temperature on the shadow."""
        # create new JSON payload to send device temperature to shadow
        new_payload = {"state": {"reported": {"cputemp": temp}}}
        self.shadow_handler.shadowUpdate(json.dumps(new_payload), None, 20)

        # log to syslog on debug only
        LOGGER.debug("New temp payload " + json.dumps(new_payload))
|
3,761 | 65b5db0bc6f23c342138060b7a006ff61e2dcf45 | # -*- coding: utf-8 -*-
"""Test(s) for static files
:copyright: Copyright (c) 2019 RadiaSoft LLC. All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from __future__ import absolute_import, division, print_function
import pytest
import os
_TEST_ID = '__NO_SUCH_STRING_IN_PAGE__'


def setup_module(module):
    """Point the server config at a sentinel GTM id before the app loads."""
    os.environ['SIREPO_SERVER_GOOGLE_TAG_MANAGER_ID'] = _TEST_ID
def test_injection(fc):
    """The GTM id must appear on static landing pages, never in app pages."""
    from pykern import pkcompat, pkunit
    from pykern.pkdebug import pkdc, pkdp, pkdlog
    from pykern.pkunit import pkeq, pkok, pkre
    import re

    # app pages must not carry the tag manager snippet
    resp = fc.get('myapp')
    body = pkcompat.from_bytes(resp.data)
    pkok(
        re.search(r'googletag', body) is None,
        'Unexpected injection of googletag data={}',
        resp.data
    )

    # the static landing page must carry the sentinel id
    resp = fc.get('/en/landing.html')
    pkre(_TEST_ID, pkcompat.from_bytes(resp.data))
|
3,762 | e75fb023e2e3d3fd258a316a6827b2601c9f4b2d | from selenium import selenium
class SharedSeleniumExecutionContext:
    """Shared Selenium RC session plus the state test steps pass between
    each other (last location, current option, drag source).

    Every mutator mirrors its value onto the class attributes so all
    instances observe the same shared session and state.
    """

    host = None
    port = None
    browserStartCommand = None
    url = None
    seleniumInstance = None
    isInitialized = False
    lastVisitedLocation = None
    optionBeingHandled = None
    itemToDrag = None

    def __init__(self, host, port, browserStartCommand, url):
        # create the shared selenium session only on first construction
        if SharedSeleniumExecutionContext.seleniumInstance is None:
            SharedSeleniumExecutionContext.seleniumInstance = selenium(host, port, browserStartCommand, url)
        self.seleniumInstance = SharedSeleniumExecutionContext.seleniumInstance
        self.isInitialized = SharedSeleniumExecutionContext.isInitialized
        self.setBrowserStartCommand(browserStartCommand)
        self.setPort(port)
        self.setURL(url)
        self.setHost(host)
        self.setLastVisitedLocation()
        self.setOptionBeingHandled()
        self.setItemToDrag()

    def setPort(self, port):
        """Sync the port on the instance, the class and the selenium session."""
        self.port = port
        SharedSeleniumExecutionContext.port = port
        SharedSeleniumExecutionContext.seleniumInstance.port = port

    def setHost(self, host):
        """Sync the host on the instance, the class and the selenium session."""
        self.host = host
        SharedSeleniumExecutionContext.host = host
        SharedSeleniumExecutionContext.seleniumInstance.host = host

    def setBrowserStartCommand(self, browserStartCommand):
        """Sync the browser start command everywhere.

        Bug fix: this previously assigned
        ``SharedSeleniumExecutionContext.__browserStartCommand``; inside the
        class body name mangling turned that into the private attribute
        ``_SharedSeleniumExecutionContext__browserStartCommand``, so the
        public class attribute stayed None forever.
        """
        self.browserStartCommand = browserStartCommand
        SharedSeleniumExecutionContext.browserStartCommand = browserStartCommand
        SharedSeleniumExecutionContext.seleniumInstance.browserStartCommand = browserStartCommand

    def setURL(self, url):
        """Sync the base URL; the selenium session calls it browserURL."""
        self.url = url
        SharedSeleniumExecutionContext.url = url
        SharedSeleniumExecutionContext.seleniumInstance.browserURL = url

    def setLastVisitedLocation(self, location=None):
        self.lastVisitedLocation = location
        SharedSeleniumExecutionContext.lastVisitedLocation = location

    def setOptionBeingHandled(self, option=None):
        self.optionBeingHandled = option
        SharedSeleniumExecutionContext.optionBeingHandled = option

    def setItemToDrag(self, item=None):
        self.itemToDrag = item
        SharedSeleniumExecutionContext.itemToDrag = item

    def initialize(self):
        """Start the shared browser session exactly once."""
        if not SharedSeleniumExecutionContext.isInitialized and self.seleniumInstance:
            self.seleniumInstance.start()
            SharedSeleniumExecutionContext.isInitialized = True

    def destroy(self):
        """Tear the shared session down if it was ever started."""
        if SharedSeleniumExecutionContext.isInitialized:
            SharedSeleniumExecutionContext.resetAll()

    def __del__(self):
        if self.isInitialized:
            self.seleniumInstance.stop()

    @staticmethod
    def resetAll():
        """Stop the session (if running) and reset all shared state."""
        if SharedSeleniumExecutionContext.isInitialized and SharedSeleniumExecutionContext.seleniumInstance:
            SharedSeleniumExecutionContext.seleniumInstance.stop()
        SharedSeleniumExecutionContext.host = None
        SharedSeleniumExecutionContext.port = None
        SharedSeleniumExecutionContext.browserStartCommand = None
        SharedSeleniumExecutionContext.url = None
        SharedSeleniumExecutionContext.seleniumInstance = None
        SharedSeleniumExecutionContext.isInitialized = False
3,763 | da751e96c225ebc2d30f3cce01ba2f64d0a29257 | # Chris DeBoever
# cdeboeve@ucsd.edu
import sys, argparse, pdb, glob, os, re
import numpy as np
from bisect import bisect_left
from scipy.stats import binom
### helper functions ###
def find_lt(a, x):
    """Return the rightmost element of sorted list *a* strictly less than *x*.

    Raises ValueError when no such element exists.
    """
    pos = bisect_left(a, x)
    if not pos:
        raise ValueError
    return a[pos - 1]
def find_ge(a, x):
    """Return the leftmost element of sorted list *a* greater than or equal to *x*.

    Raises ValueError when no such element exists.
    """
    pos = bisect_left(a, x)
    if pos == len(a):
        raise ValueError
    return a[pos]
def get_altL(fn):
    """Parse a variant tsv into (total_reads, alt_allele_frequency) tuples.

    Input: tsv file whose 5th/6th columns are reference/alternate read counts
    Output: iterable of tuples (total reads, alternate allele frequency)
    """
    with open(fn, 'r') as handle:
        rows = [line.strip().split('\t') for line in handle.readlines()]
    # drop a leading header row (first field starts with '#')
    if rows[0][0][0] == '#':
        rows = rows[1:]
    for row in rows:
        # if the reference-supporting count is 0, bump it to 1 to avoid
        # numeric issues downstream
        if row[4] == '0':
            row[4] = '1'
    totals = [int(r[4]) + int(r[5]) for r in rows]
    freqs = [float(r[5]) / (float(r[4]) + float(r[5])) for r in rows]
    return zip(totals, freqs)  # each tuple is (num_reads, alt freq)
# def generate_cancer_possible_freqL(pL,sL,er):
# I want to make a function which generates the likely frequencies seen in a cancer sample. This would exclude double-hit mutations (i.e. a single site gains somatic mutations on both chromosomes). This simplifications can only be made in the diploid case, however, because ploidy-variable populations might be weird...
def generate_possible_freqL(pL, sL, er):
    """
    Generate the sorted list of possible alternate allele frequencies.

    Input: ploidy list pL, subpopulation frequency list sL, sequencing error rate er
    Output: sorted list of distinct expected alternate allele frequencies,
            including er (for 0% sites) and 1-er (for 100% sites)
    """
    h = sum(pL)  # number of different haplotypes
    # all genotypes except all-reference (0) and all-alternate (2**h-1),
    # because we don't want 0% or 100% allele freq here
    L = [bin(x)[2:] for x in range(1, 2 ** h - 1)]
    M = ['0' * (len(L[-1]) - len(x)) + x for x in L]  # zero-pad to equal width
    # per-haplotype contribution to the population alternate allele frequency
    p_freqL = []
    for i in range(len(pL)):
        p_freqL += [sL[i] / pL[i]] * pL[i]
    # removed unused p_freqA / sA arrays from the original
    aL = []
    for g in M:
        aL.append(sum(np.array([int(x) for x in list(g)]) * p_freqL))
    return sorted(list(set(aL + [er, 1 - er])))
def freq_to_genotype(pL, sL, er):
    """
    Map each expected alternate allele frequency to its consistent genotypes.

    Input: ploidy list pL, subpopulation frequency list sL, sequencing error rate er
    Output: dict {expected alt allele frequency: [genotypes]} where each
            genotype is a binary string ordered according to the ploidy list
    """
    h = sum(pL)  # number of different haplotypes
    # skip the all-reference and all-alternate genotypes here; they are
    # added explicitly below under the er / 1-er keys
    L = [bin(x)[2:] for x in range(1, 2 ** h - 1)]
    M = ['0' * (len(L[-1]) - len(x)) + x for x in L]  # zero-pad to equal width
    p_freqL = []
    for i in range(len(pL)):
        p_freqL += [sL[i] / pL[i]] * pL[i]
    # removed unused p_freqA / sA arrays from the original
    aD = {}  # expected alt allele frequency -> list of consistent genotypes
    for g in M:
        alt_freq = sum(np.array([int(x) for x in list(g)]) * p_freqL)
        # dict.has_key() was removed in Python 3; "in" works in both 2 and 3
        if alt_freq in aD:
            aD[alt_freq].append(g)
        else:
            aD[alt_freq] = [g]
    aD[er] = ['0' * (len(L[-1]) - 1) + bin(0)[2:]]  # genotype for 0% alt freq
    aD[1 - er] = [bin(2 ** h - 1)[2:]]  # genotype for 100% alt freq
    return aD
def collapse_genotypes(pL, gL):
    """
    Reduce genotypes to distinct canonical forms given ploidy.

    Input: ploidy list pL and genotype list gL; each genotype is a binary
           string ordered according to the ploidy list
    Output: list of canonical genotypes (alleles sorted within each
            subpopulation's slice), duplicates removed, first-seen order
    """
    if len(gL) < 2:
        return gL
    uniqueL = []  # canonical genotypes seen so far
    for g in gL:
        s = ''
        # canonicalise: sort the alleles within each subpopulation's slice
        # (range, not the Python-2-only xrange, keeps this Python 3 safe)
        for i in range(len(pL)):
            s += ''.join(sorted(g[0:pL[i]]))
            g = g[pL[i]:]
        if s not in uniqueL:
            uniqueL.append(s)
    return uniqueL
def grid_search_parameters(step):
    """
    Build the grid of candidate subpopulation frequency pairs.

    Input: step size between candidate frequencies
    Output: list of (f, g) pairs covering (step, 1-step) against its reverse,
            so each pair's entries are complementary candidates
    """
    f1 = list(np.arange(step, 1, step))
    f2 = list(np.arange(step, 1, step))
    f2.reverse()
    # list() preserves the Python 2 behaviour (zip returned a list) when
    # running under Python 3, where zip is a one-shot iterator
    return list(zip(f1, f2))
def estimate_genotype(alt_freq, exp_freqL):
    """
    Snap an observed alternate allele frequency to the nearest expected one.

    Input: observed alternate frequency and sorted list of expected frequencies
    Output: the expected frequency closest to the observation (ML estimate)
    """
    try:
        below = find_lt(exp_freqL, alt_freq)  # rightmost value < alt_freq
    except ValueError:
        below = float("-inf")
    try:
        above = find_ge(exp_freqL, alt_freq)  # leftmost value >= alt_freq
    except ValueError:
        above = float("inf")
    # pick whichever neighbour is closer (ties go to the upper neighbour)
    return below if alt_freq - below < above - alt_freq else above
def main():
    """Command-line entry point: estimate subpopulation frequencies via a
    grid search over a binomial likelihood, then print ML genotypes per site."""
    ### magic variables ###
    # these variables can be set at the command line as well
    ploidyL = [2,2] # the entries in this list are the expected ploidy of each subpopulation. Default is two diploid subpopulations
    error_rate = 0.001 # sequencing error rate
    cov_cutoff = 4 # coverage cutoff for variant sites

    ### gather command line arguments ###
    parser = argparse.ArgumentParser(description='This script determines the relative frequencies of different populations and estimates the genotypes.')
    parser.add_argument('infile', help='Input tsv file. Columns should be: chrom, position, ref base, alt base, number of reads supporting reference, number of reads supporting alternate.')
    parser.add_argument('-o', nargs='?', type=argparse.FileType('w'),default=sys.stdout, help='Output file. Default: standard out')
    parser.add_argument('-pL', default=ploidyL, type=int, nargs='+', help='A list of ploidies. Each entry in the list represents the anticipated ploidy of a subpopulation. For instance, if you expect two diploid subpopulations and one triploid subpopulation, enter 2 2 3. Default: {0}'.format(' '.join([str(x) for x in ploidyL])))
    parser.add_argument('-er', default=error_rate, type=float, help='Sequencing error rate. For instance, 0.01 means that 1/100 base calls will be incorrect. Default: {0}'.format(error_rate))
    parser.add_argument('-cc', default=cov_cutoff, type=int, help='Coverage cutoff. If the coverage of either the alternate or reference allele is less than or equal to this value, the site will not be considered as a variant site. Default: {0}'.format(cov_cutoff))
    parser.add_argument('-d', action='store_true', help='Enable python debugger.')
    args = parser.parse_args()
    inN = args.infile
    outF = args.o
    ploidyL = args.pL
    error_rate = args.er
    debug = args.d

    inN = os.path.realpath(inN) # get the input file path

    if len(ploidyL) > 2:
        # .write() works under both Python 2 and 3 (print >> does not)
        sys.stderr.write('Sorry, only two subpopulations are currently supported.\n')
        sys.exit(1)

    altL = list(get_altL(inN)) # a list of number of reads and alternate allele frequencies

    # read the variant lines up front so that the coverage filter below can
    # drop the same rows from both lists; the original filtered altL only and
    # later indexed the unfiltered file lines, misaligning every site after
    # the first filtered-out row
    inF = open(inN,'r')
    linesL = inF.readlines()
    inF.close()
    if linesL[0][0] == '#':
        linesL = linesL[1:]

    filteredL = []
    filtered_linesL = []
    for a, line in zip(altL, linesL):
        # require more than cov_cutoff reads on each allele and in total
        if a[0]*a[1] > cov_cutoff and a[0]*(1-a[1]) > cov_cutoff and a[0] > cov_cutoff:
            filteredL.append(a)
            filtered_linesL.append(line)
    altL = filteredL
    linesL = filtered_linesL

    ### find population frequencies ###
    parL = grid_search_parameters(0.01) # grid search
    best_par = []
    best_ll = float("-inf")
    for par in parL:
        exp_freqL = generate_possible_freqL(ploidyL,par,error_rate)
        ll = 0 # log-likelihood
        for alt in altL:
            exp_freq = estimate_genotype(alt[1],exp_freqL)
            ll += np.log(binom.pmf(round(alt[0]*alt[1]),alt[0],exp_freq))
            # round(alt[0]*alt[1]) is the number of reads we saw supporting alternate allele (i.e. the number of successes under the binomial test)
            # alt[0] is the total number of reads covering this site (i.e. the number of attempts in our binomial test)
            # exp_freq is our probability of success (i.e. observing a read supporting alternate) from our ML estimation (see estimate_genotype)
        if ll > best_ll:
            best_ll = ll
            best_par = par

    ### determine genotypes ###
    altD = freq_to_genotype(ploidyL,best_par,error_rate) # dict whose keys are alternate allele frequencies and whose values are lists of consistent genotypes
    for k in altD.keys():
        altD[k] = collapse_genotypes(ploidyL,altD[k])
    exp_freqL = sorted(altD.keys())
    outF.write('#log-likelihood\t{0}\n#population frequencies\t{1}\n'.format(best_ll,'\t'.join([ str(x) for x in best_par ])))
    for i in range(len(altL)):
        alt = altL[i]
        [chr,pos,refbase,altbase,refcov,altcov] = linesL[i].strip().split('\t')
        genotypeL = altD[estimate_genotype(alt[1],exp_freqL)]
        for g in genotypeL:
            g = re.sub('0',refbase,g)
            g = re.sub('1',altbase,g)
            tempL = [] # each element of this list is the genotype of a population
            # use j here: the original reused i, shadowing the site index
            for j in range(len(ploidyL)):
                tempL.append(g[0:ploidyL[j]])
                g = g[ploidyL[j]:]
            outF.write('\t'.join([chr,pos] + tempL) + '\n')
    # use best population frequency parameters and walk through sites, assign genotypes, p-values or scores maybe?
if __name__ == '__main__':
    # Script entry point: everything happens in main().
    main()
|
3,764 | 98bc6e0552991d7de1cc29a02242b25e7919ef82 | # Generated by Django 3.0.4 on 2020-07-20 00:05
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: removes the Report model and relaxes the
    Registered fields (Email, Password, Username) to allow NULL values."""

    dependencies = [
        ('users', '0004_auto_20200720_0154'),
    ]

    operations = [
        # Report is dropped entirely; its data is not migrated anywhere.
        migrations.DeleteModel(
            name='Report',
        ),
        migrations.AlterField(
            model_name='registered',
            name='Email',
            field=models.EmailField(max_length=254, null=True),
        ),
        migrations.AlterField(
            model_name='registered',
            name='Password',
            field=models.CharField(max_length=50, null=True),
        ),
        migrations.AlterField(
            model_name='registered',
            name='Username',
            field=models.CharField(max_length=70, null=True),
        ),
    ]
|
3,765 | 4afb556ceca89eb90ba800db4f383afad1cd42a5 | # Turn off bytecode generation
import sys
from asgiref.sync import sync_to_async
from django.core.wsgi import get_wsgi_application
# Keep .pyc files from being written while scripting against the ORM.
sys.dont_write_bytecode = True
# Django specific settings
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
# Bootstrap Django so the model imports below work outside manage.py.
import django
django.setup()
from db import models
def print_all_models():
    # NOTE(review): despite the name, this returns the queryset rather than
    # printing it — verify callers expect a return value.
    return models.Sample.objects.all()
@sync_to_async
def _create_record(name):
    """Synchronous ORM insert, wrapped so it can be awaited from async code."""
    return models.Sample.objects.create(name=name)
async def create_record(name=None):
    """Create a Sample named *name* from an async context."""
    await _create_record(name)
|
3,766 | a29f89750ef3a55116959b217b8c9100b294c66c | from nose.tools import *
from packt_offer import *
from bs4 import BeautifulSoup
class TestPacktOffer:
def setUp(self):
self.proper_soup = BeautifulSoup(
""""
<div id="deal-of-the-day" class="cf">
<div class="dotd-main-book cf">
<div class="section-inner">
<div class="dotd-main-book-image float-left">
<a href="/application-development/github-essentials">
<noscript><img src="//serv.cloudfront.net/sites/imagecache/9781783553716.png" class="bookimage imagecache imagecache-dotd_main_image" itemprop="url"/>
</noscript><img src="//serv.cloudfront.net/sites/imagecache/9781783553716.png" data-original="//d1ldz4te4covpm.cloudfront.net/sites/default/files/imagecache/dotd_main_image/9781783553716.png" class="bookimage imagecache imagecache-dotd_main_image" itemprop="url" style="opacity: 1;">
</a>
</div>
<div class="dotd-main-book-summary float-left">
<div class="dotd-title">
<h2>Example title</h2>
</div>
<br>
<div>
An example description of book offered by Packtpub.
<ul>
<li>First reason why you should read this book.</li>
<li>Second reason why you should read this book.</li>
</ul>
</div>
<div class="dotd-main-book-form cf">
<div class="dots-main-book-price float-left"></div>
<div class="float-left free-ebook"></div>
</div>
</div>
</div>
</div>
</div>""", "html.parser")
for linebreak in self.proper_soup.find_all('br'):
linebreak.extract()
self.improper_soup = BeautifulSoup("""
<div id="deal-of-the-day" class="cf">
<div class="dotd-main-book cf">
<div class="section-inner">
<div class="dotd-main-book-summary float-left">
<div class="dotd-title">
</div>
<br>
</div>
</div>
</div>
</div>""", "html.parser")
for linebreak in self.improper_soup.find_all('br'):
linebreak.extract()
def test_offer_image_url_extracter_proper(self):
result = offer_image_url_extracter(self.proper_soup)
assert_equals(result,
'http://serv.cloudfront.net/sites/imagecache/9781783553716.png')
def test_offer_image_url_extracter_no_content(self):
"""Case when <div> with a given image class is not present in a given page."""
result = offer_image_url_extracter(self.improper_soup)
assert_equals(result, '')
def test_offer_title_extracter_proper(self):
result = offer_title_extracter(self.proper_soup)
assert_equals(result, 'Example title')
def test_offer_title_extracter_no_content(self):
result = offer_title_extracter(self.improper_soup)
assert_equals(result, '')
def test_offer_description_extracter_proper(self):
result = offer_description_extracter(self.proper_soup)
assert_equals(result, """<div>
An example description of book offered by Packtpub.
<ul>
<li>First reason why you should read this book.</li>
<li>Second reason why you should read this book.</li>
</ul>
</div>
""")
    def test_offer_description_extracter_no_content(self):
        """Missing description markup yields an empty string, not an error."""
        result = offer_description_extracter(self.improper_soup)
        assert_equals(result, '')
def test_message_creator_all_proper(self):
msg = message_creator(b'000000', 'www.image.com/image.jpg', 'Offer title', 'Offer description',
'sender@mail.com', ['receiver@mail.com'])
assert_in(
"""\
MIME-Version: 1.0
Subject: Packt offer: Offer title
From: sender@mail.com
To: receiver@mail.com
This is a multi-part message in MIME format.""", msg)
assert_in(
"""\
<div><h2>New Packtpub offer:</h2></div>
</br>
<div>
<img src="cid:image1">
</div>
<div><h2>Offer title</h2></div>
</br>
<div>Offer description</div>
</br>
<a href="https://www.packtpub.com/packt/offers/free-learning">Get it!</a>""", msg)
assert_in(
"""\
Content-Type: image/jpeg
MIME-Version: 1.0
Content-Transfer-Encoding: base64
Content-ID: <image1>
Content-Disposition: inline; filename="www.image.com/image.jpg"\
""", msg)
@raises(AttributeError)
def test_message_creator_wrong_image_url(self):
msg = message_creator(b'000000', 'www.image.com', 'Offer title', 'Offer description',
'sender@mail.com', ['receiver@mail.com'])
|
3,767 | a6ae4324580a8471969e0229c02ea1670728f25b | import rpy2.robjects as robjects
from rpy2.robjects.packages import importr
# print(robjects.__file__)
import sys
sys.path.append('./')
import importlib
import json
import os
from web_app.function.WordCould import word_img
# importlib.reload(sys)
# #sys.setdefaultencoding('gbk')
class Ubiquitination():
def __init__(self,disease,path):
self.disease=disease
path=path
self.num=11
# print('泛素化',self.disease,path)
self.datas_path=self.data_path(self.disease)
self.save=self.save_path(path,self.disease)
# self.analysis(self.datas_path,self.disease,self.save)
def load_R(self):
pass
def data_path(self,name):
exp_path='./web_app/data/disease/exp_data/{}.txt'.format(name)
clinical_path='./web_app/data/disease/clinical/{}.txt'.format(name)
ubiquitina_path='./web_app/data/data/ubiq/UbiqGene.txt'
# print(exp_path)
return (exp_path,clinical_path,ubiquitina_path)
def save_path(self,path,disease):
path=path
disease=disease
sp=path+'/Ubiquitination/'
if not os.path.exists(sp):
os.makedirs(sp)
sp=sp+disease+'/'
if not os.path.exists(sp):
os.makedirs(sp)
# print(sp)
return sp
def analysis(self,data,name,save_path):
data_path=data
name=name
save_path=save_path
# print(data_path[0],'TCGA-BRCA',save_path)
lime_all='./web_app/data/Difference/{}/limma_DEG_all.csv'.format(name)
lime_n='./web_app/data/Difference/{}/limma_DEG_0.05.csv'.format(name)
ubiq='./web_app/data/data/ubiq/UbiqGene.txt'
pheno='./web_app/data/Difference/{}/pheno.csv'.format(name)
exp_data='./web_app/data/disease/exp_data/{}.csv'.format(name)
cli='./web_app/data/disease/clinical/{}.csv'.format(name)
return (lime_all,lime_n,ubiq,pheno,exp_data,cli,save_path)
fig1_result=self.fig1(lime_all,lime_n,ubiq,pheno,exp_data,cli,usa,save_path)
# print(multiple[0])
# print(single[0],single[1])
    def fig1(self,lime_all,lime_n,ubiq,pheno,exp_data,cli,save_path):
        """Run the Figure-1 R pipeline and collect its output paths.

        Sources four R scripts, then chains: differential expression ->
        single/multi-factor Cox -> survival analysis -> correlation
        bubbles, plus a Python word cloud.  Outputs go to
        ``<save_path>/Fig1/``; returns a dict of result path lists with
        ``'code': 1``.  The intermediate R return values are positional
        vectors — the index accesses below (``difference[0]`` etc.) depend
        on the R scripts' output order.
        """
        # No-op self-assignments kept from the original.
        lime_all=lime_all
        lime_n=lime_n
        ubiq=ubiq
        pheno=pheno
        exp_data=exp_data
        cli=cli
        save_path=save_path+'Fig1/'
        if not os.path.exists(save_path):
            os.makedirs(save_path)
        r=robjects.r
        # Load the R analysis scripts.
        r.source('./web_app/script/Conversion_Difference.r')
        r.source('./web_app/script/Single.r')
        r.source('./web_app/script/Survival_.r')
        r.source('./web_app/script/RelatedBubbles.r')
        # Differential-expression analysis: builds the DE gene set and
        # draws the volcano plot and heatmap.
        difference=r.Difference(lime_all,lime_n,ubiq,pheno,exp_data,save_path)
        # print(difference[0],difference[1])
        # Single-/multi-factor Cox analysis.
        single=r.SingleFactor(cli,exp_data,difference[0],save_path)
        # # print([i for i in single])
        survival=r.Survival_(single[0],single[1],difference[0],pheno,cli,save_path)
        # survival=r.Survival_(single[0],single[1],difference[0],pheno,save_path)
        # # # print([i for i in survival])
        # Correlation bubble plot.
        bubble=r.RelatedBubbles(survival[0],cli,save_path)
        word_img(single[1],save_path)
        # print([i for i in bubble])
        result={
            'code':1,
            'difference':[i for i in difference],
            'single':[i for i in single],
            'survival':[i for i in survival],
            'bubble':[i for i in bubble],
        }
        return result
def fig2(self,save_path):
# save_path=save_path+'Fig2/'
# if not os.path.exists(save_path):
# os.makedirs(save_path)
r=robjects.r
# 加载差异分析文件
r.source('./web_app/script/GeneSurvivalModel/Heatmap.r')
result={
'code':2,
}
return result
# 条带热图
# r.source('./web_app/script/Heatmap.r')
# r.Heatmap('./web_app/temp/Arsz/Ubiquitination/TCGA-BRCA/train.csv',
# "./web_app/temp/Arsz/Ubiquitination/TCGA-BRCA/Cox_genes_OS_pValue.csv",
# './web_app/temp/Arsz/Ubiquitination/TCGA-BRCA/')
# Lasso折线图
# r.source('./web_app/script/LineLasso.r')
# r.LineLasso(
# './web_app/temp/Arsz/Ubiquitination/TCGA-BRCA/train.csv',
# './web_app/temp/Arsz/Ubiquitination/TCGA-BRCA/CoxSingle_train.csv',
# './web_app/temp/Arsz/Ubiquitination/TCGA-BRCA/'
# )
# # 发散曲线
# r.source('./web_app/script/CurveLasso.r')
# r.CurveLasso('./web_app/temp/Arsz/Ubiquitination/TCGA-BRCA/train.csv',
# './web_app/temp/Arsz/Ubiquitination/TCGA-BRCA/signature_gene.txt',
# './web_app/temp/Arsz/Ubiquitination/TCGA-BRCA/')
# # 随机生存森林
# r.source('./web_app/script/RandomSurvivalForest.r')
# r.Rsf('./web_app/temp/Arsz/Ubiquitination/TCGA-BRCA/train.csv',
# './web_app/temp/Arsz/Ubiquitination/TCGA-BRCA/signature_gene.txt',
# './web_app/temp/Arsz/Ubiquitination/TCGA-BRCA/')
def epath(self):
return self.save
def progress(self):
return 1
if __name__ == "__main__":
a=Ubiquitination('TCGA-LIHC','./web_app/temp/Arsz')
x=a.analysis(a.datas_path,a.disease,a.save)
f1=a.fig1(x[0],x[1],x[2],x[3],x[4],x[5],x[6])
|
3,768 | e34e1e220c6d0fe2dc3d42caaefb04b178cdd120 | #!/usr/bin/python
import sys
import random
def has_duplicates(items):
    """Return True if *items* contains any repeated element.

    Elements must be hashable (birthdays here are ints).  Uses a set for
    an O(n) check instead of the original copy-sort-and-scan, and no
    longer shadows the built-in name ``list``.
    """
    return len(set(items)) != len(items)
def gen_birthdays(n):
    """Return a list of n random birthdays, each a day-of-year in [1, 365]."""
    return [random.randint(1, 365) for _ in range(n)]
def num_matches(students, samples):
    """Simulate *samples* classes of *students* random birthdays and
    count how many classes contained at least one shared birthday."""
    return sum(
        1 for _ in range(samples)
        if has_duplicates(gen_birthdays(students))
    )
# Classic birthday-paradox demo: 23 students gives ~50% odds that two
# share a birthday; with 10,000 trials expect roughly 5,000 matches.
num_students = 23;
num_simulations = 10000
count = num_matches(num_students, num_simulations)
# Python 2 print statements — this script targets Python 2.
print 'Students: %d' % num_students
print 'Simulations: %d' % num_simulations
print 'Matches: %d' % count
|
3,769 | 10cb4b59d1e1e823c56ae5ceea0514b1c1904292 | ALPACA_KEY = 'Enter your apaca key here'
# Placeholder credentials for the Alpaca trading API and the Twilio SMS
# API.  Fill these in locally; never commit real secrets to version control.
ALPACA_SECRET_KEY = 'Enter your apaca secret key here'
ALPACA_MARKET = 'enter alpaca market link here'
TWILIO_KEY = 'enter your twilio key here'
TWILIO_SECRET_KEY = 'enter your twilio secret key here'
YOUR_PHONE_NUMBER = 'Enter your phone number'
YOUR_TWILIO_NUMBER = 'Enter your twilio phone number' |
3,770 | e33aca56e4c9f82779278e836308c2e22d3356e2 | # coding=utf-8
class HtmlDownload(object):
    """Fetch one page of Zhaopin (zhaopin.com) job-search results."""

    @staticmethod
    def html_download(city, keyWords, pages):
        """Download a search-result page and return its HTML.

        Fixes two defects in the original: the method had neither ``self``
        nor ``@staticmethod`` (so calling it on an instance raised
        TypeError), and it used ``urlencode`` / ``requests`` without ever
        importing them.

        Returns the page text on HTTP 200, otherwise None.
        """
        # Imported locally so the module stays importable without these
        # dependencies; urlencode moved packages between Python 2 and 3.
        try:
            from urllib.parse import urlencode  # Python 3
        except ImportError:
            from urllib import urlencode  # Python 2
        import requests

        paras = {
            'jl': city,
            'kw': keyWords,
            'pages': pages,
            'isadv': 0
        }
        url = "http://sou.zhaopin.com/jobs/searchresult.ashx?" + urlencode(paras)
        response = requests.get(url)
        if response.status_code == 200:
            return response.text
        return None
3,771 | 21dd3d1deb00e9bc09803d01f1c05673ea8d25d2 | from os import getenv
# Runtime configuration pulled from the environment.
# NOTE(review): int(getenv('API_PORT')) raises TypeError when API_PORT is
# unset — this module assumes both variables are always defined (fail-fast
# at import time).  Confirm that is intentional before adding defaults.
config_env = {
    'api_port': int(getenv('API_PORT')),
    'psg_uri': getenv('PSG_URI')
}
3,772 | 09fb99a15c2727da2ef96028aca5513337449f62 | # Author: Lijing Wang (lijing52@stanford.edu), 2021
import numpy as np
import pandas as pd
import gstools as gs
import matplotlib.pyplot as plt
from matplotlib import patches
import seaborn as sns
plt.rcParams.update({'font.size': 15})
import os
path = os.path.dirname(os.getcwd())
subpath = '/examples/case2_nonlinear_forward_pumping_test/'
num_prior_sample = 5000
num_x = 100
num_y = 100
def print_theta(theta, name='theta'):
    """Pretty-print a 6-parameter theta vector as a one-row labelled table."""
    cols = ['mean', 'variance', 'max_range', 'min_range', 'anisotropy', 'head_west']
    as_row = theta.reshape(1, -1)
    print(pd.DataFrame(as_row, index=[name], columns=cols))
def visualize_d_2D(d):
    """Paint block-averaged observations *d* back onto the 2-D model grid.

    NOTE(review): this relies on module-level names ``num_m`` and ``G``
    that are not defined anywhere in this file — calling it as-is raises
    NameError.  Presumably ``G`` is a (num_block*num_block*2, num_m)
    block-selection matrix defined elsewhere in the project; confirm.
    Cells not covered by any block remain NaN.
    """
    num_block = 3
    d_vis = np.zeros(num_m)
    d_vis[:] = np.nan
    for i in range(num_block*num_block*2):
        # assign block i's value to every grid cell G maps to that block
        d_vis[np.where(G[i,:]>0)[0]] = d[i]
    d_vis = d_vis.reshape(num_x,num_y)
    return d_vis
def visualize_one_d(d):
plt.plot(np.arange(70)/10, d.reshape(70,1)[:,0],label = 'pumping well')
plt.xlabel('Days')
plt.ylabel('Head')
plt.legend()
def visualize_one_m(m, vmin = -4, vmax = 0, cmap = 'viridis',title = 'True spatial field, m'):
fig, ax = plt.subplots(figsize = [6,6])
m_show = ax.imshow(m.T, origin = 'lower', cmap = cmap, vmin = vmin, vmax = vmax)
ax.set_xticks([])
ax.set_yticks([])
if title:
ax.set_title(title,fontsize = 13)
well_location = [49,49]
direct_data_loc = [30,70]
ax.scatter(well_location[0],well_location[1],s = 100, color = 'black', label = 'indirect pumping well')
ax.scatter(direct_data_loc[0],direct_data_loc[1],s = 100, color = 'red', label = 'direct logK')
ax.legend()
fig.colorbar(m_show, ax = ax, shrink = 0.6)
def print_theta_multiple(theta, name = 'theta',head = 8):
theta_pd = pd.DataFrame(theta, index = ['theta_'+str(i) for i in np.arange(1,theta.shape[0]+1)], columns = ['mean','variance','max_range','min_range','anisotropy','head_west'])
print(theta_pd.head(head))
def visualize_multiple_m(m, head = 4, vmin = -4, vmax = 0, cmap = 'viridis', theta = None):
plt.figure(figsize = [20,8])
for i in np.arange(head):
ax = plt.subplot(1, 4, i+1)
ax.imshow(m[i,:,:].T, origin = 'lower', cmap = cmap, vmin = vmin, vmax = vmax)
ax.set_xticks([])
ax.set_yticks([])
well_location = [49,49]
direct_data_loc = [30,70]
ax.scatter(well_location[0],well_location[1],s = 50, color = 'black', label = 'pumping well')
ax.scatter(direct_data_loc[0],direct_data_loc[1],s = 50, color = 'red', label = 'direct logK')
if theta is not None:
ax.set_title('\u03B8 = '+str(tuple(np.round(theta[i,:],1))))
def visualize_multiple_pc(m, PCA, head = 8, vmin = -4, vmax = 0, cmap = 'viridis',rect = False):
    """Plot the first *head* principal-component maps in one row.

    ``m`` holds flattened component maps (one per row, reshaped with the
    module-level ``num_x``/``num_y``); ``PCA`` is a mapping with an
    'explained_variance' sequence of fractions.  Titles show the percent
    variance explained per component.
    """
    plt.figure(figsize = [25,10])
    for i in np.arange(head):
        ax = plt.subplot(1, 10, i+1)
        ax.imshow(m[i,:].reshape(num_x,num_y).T, origin = 'lower', cmap = cmap, vmin = vmin, vmax = vmax)
        if rect:
            rect = patches.Rectangle((32,32),36, 36, linewidth=2,linestyle = 'dashed', edgecolor='black',facecolor='None')
            ax.add_patch(rect)
        ax.set_xticks([])
        ax.set_yticks([])
        # np.int was deprecated in NumPy 1.20 and removed in 1.24; the
        # builtin int() is the drop-in replacement (truncation semantics).
        ax.set_title('PCA '+str(i+1)+': '+str(int(PCA['explained_variance'][i]*100))+'%')
def visualize_multiple_d(d, head = 4):
plt.figure(figsize = [25,3])
for i in np.arange(head):
ax = plt.subplot(1, 4, i+1)
ax.plot(np.arange(70)/10, d[:,i].reshape(70,1)[:,0],label = 'pumping well')
#ax.plot(np.arange(70)/10, d[:,i].reshape(70,5)[:,1],label = 'obs well: SW')
#ax.plot(np.arange(70)/10, d[:,i].reshape(70,5)[:,2],label = 'obs well: NE')
##ax.plot(np.arange(70)/10, d[:,i].reshape(70,5)[:,3],label = 'obs well: NW')
#ax.plot(np.arange(70)/10, d[:,i].reshape(70,5)[:,4],label = 'obs well: SE')
ax.set_xlabel('Days')
ax.set_ylabel('Head')
#ax.legend()
def colors_from_values(values, palette_name):
    """Map each value to a color from *palette_name*, ordered by magnitude.

    Values are min-max normalised to [0, 1], converted to indices into an
    sns palette of ``len(values)`` colors, and the corresponding colors
    are returned as an array aligned with *values*.
    """
    values = np.asarray(values)
    spread = max(values) - min(values)
    # Guard the constant-input case: the original divided by zero here.
    if spread == 0:
        normalized = np.zeros(len(values), dtype=float)
    else:
        normalized = (values - min(values)) / spread
    # convert to indices
    indices = np.round(normalized * (len(values) - 1)).astype(np.int32)
    # use the indices to get the colors
    palette = sns.color_palette(palette_name, len(values))
    return np.array(palette).take(indices, axis=0)
def visualize_mean_var(mu, covariance, vmin = 20, vmax = 40, cmap = 'viridis'):
var = np.diag(covariance)
plt.figure(figsize = [18,4])
ax = plt.subplot(2, 4, 1)
ax.imshow(mu.reshape(num_x,num_y).T, origin = 'lower', cmap = cmap, vmin = vmin, vmax = vmax)
rect = patches.Rectangle((start_loc_x,start_loc_y),num_grid, num_grid, linewidth=2,linestyle = 'dashed', edgecolor='black',facecolor='None', label = 'pilot area')
ax.add_patch(rect)
rect = patches.Rectangle((start_loc_x+num_grid*2,start_loc_y),num_grid,num_grid, linewidth=2,linestyle = 'dashed', edgecolor='black',facecolor='None', label = 'pilot area')
ax.add_patch(rect)
ax.set_xticks([])
ax.set_yticks([])
ax = plt.subplot(2, 4, 2)
ax.imshow(var.reshape(num_x,num_y).T, origin = 'lower', cmap = cmap, vmin = 0, vmax = 16)
rect = patches.Rectangle((start_loc_x,start_loc_y),num_grid, num_grid, linewidth=2,linestyle = 'dashed', edgecolor='black',facecolor='None', label = 'pilot area')
ax.add_patch(rect)
rect = patches.Rectangle((start_loc_x+num_grid*2,start_loc_y),num_grid,num_grid, linewidth=2,linestyle = 'dashed', edgecolor='black',facecolor='None', label = 'pilot area')
ax.add_patch(rect)
ax.set_xticks([])
ax.set_yticks([])
def visualize_mean_var_MC(m, start_loc, num_grid,vmin = -3, vmax = 1,vmin_var = 0, vmax_var = 0.2, cmap = 'viridis', rect = False):
mu = np.mean(m,axis = 0)
var = np.var(m,axis = 0)
plt.figure(figsize = [10,4])
ax = plt.subplot(1, 2, 1)
ax.imshow(mu.reshape(num_x,num_y).T, origin = 'lower', cmap = cmap, vmin = vmin, vmax = vmax)
if rect:
rect = patches.Rectangle((start_loc,start_loc),num_grid, num_grid, linewidth=2,linestyle = 'dashed', edgecolor='black',facecolor='None', label = 'pilot area')
ax.add_patch(rect)
ax.set_xticks([])
ax.set_yticks([])
well_location = [49,49]
ax.scatter(well_location[0],well_location[1],s = 20, color = 'black', label = 'pumping well')
direct_data_loc = [30,70]
ax.scatter(direct_data_loc[0],direct_data_loc[1],s = 50, color = 'red', label = 'direct logK')
ax = plt.subplot(1, 2, 2)
ax.imshow(var.reshape(num_x,num_y).T, origin = 'lower', cmap = 'magma', vmin = vmin_var, vmax = vmax_var)
if rect:
rect = patches.Rectangle((start_loc,start_loc),num_grid, num_grid, linewidth=2,linestyle = 'dashed', edgecolor='black',facecolor='None', label = 'pilot area')
ax.add_patch(rect)
ax.set_xticks([])
ax.set_yticks([])
def visualize_ensemble_d(d,d_obs,ymin = None,ymax = 11.5):
plt.plot(np.arange(70)/10, d,color = 'C0')
plt.plot(np.arange(70)/10, d_obs,color = 'C1',linewidth = 2,label = 'observed data')
plt.xlabel('Days')
plt.ylabel('Head')
plt.legend()
plt.ylim(ymin,ymax)
# Visualization: updating theta
def pos_pairplot(theta_pos, theta_name):
sns.pairplot(pd.DataFrame(theta_pos.T,columns = theta_name),kind="hist")
def prior_pos_theta(theta, theta_pos, theta_true, theta_name):
num_theta = theta.shape[1]
plt.figure(figsize=[25,10])
for i in np.arange(num_theta):
ax = plt.subplot(2, 3, i+1)
ax.hist(theta[:,i],density=True, bins = 1,label = 'prior',alpha = 0.7)
y_, _, _ = ax.hist(theta_pos[i,:],density=True, bins = 20,label = 'posterior',alpha = 0.7)
ax.vlines(x = theta_true[i], ymin = 0, ymax = np.max(y_),linestyles='--',label = 'true',color = 'black')
ax.legend()
ax.set_title(theta_name[i])
ax.set_ylabel('pdf')
def ML_dimension_reduction_vis(pred_train, y_train, pred_test, y_test, S_d_obs, theta_name):
fig = plt.figure(figsize=[24,10])
num_theta = len(theta_name)
for i in np.arange(num_theta):
ax = plt.subplot(2, 3, i+1)
ax.plot(pred_train[:,i], y_train[:,i],'.',label = 'train')
ax.plot(pred_test[:,i], y_test[:,i],'.',label = 'test')
ax.vlines(x = S_d_obs[0,i],ymin = -1, ymax = 1, linestyles='--',color = 'black',zorder = 100)
ax.plot([-1.2,1.2],[-1.2,1.2])
ax.legend()
ax.set_xlabel('S(d_'+str(i+1)+')')
ax.set_ylabel(theta_name[i]+'_rescaled')
ax.set_xlim(-1.2,1.2)
ax.set_ylim(-1.2,1.2)
def history_plot(history):
# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()
|
3,773 | 24813e03de05058925a42847042157fa65450d21 | #!/usr/bin/env python
import os, sys, json
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', 'python_task_helper', 'files'))
from task_helper import TaskHelper
hosts_file = open("/etc/hosts", "r").read()
resolv_file = open("/etc/resolv.conf", "r").read()
output = hosts_file + resolv_file
class Generate(TaskHelper):
    """Bolt task returning this host's /etc/hosts + /etc/resolv.conf.

    NOTE(review): the files are read once at module import time (see the
    module-level ``open`` calls above), not per invocation, so the result
    is frozen when the module loads — confirm that is intended.
    """
    def task(self, args):
        # ``args`` is unused; ``output`` is the module-level concatenation.
        return {'result': output}
if __name__ == '__main__':
Generate().run() |
3,774 | b0f92b5e4cc972aca84a29b4568e85836f155273 | from app import db
class OrgStaff(db.Model):
    """Association row marking a user as staff of an organisation.

    Also records who invited the user (``invited_by``) and the usual
    created/updated timestamps.  All foreign keys cascade on delete.
    """
    __tablename__ = 'org_staff'
    id = db.Column(db.Integer, primary_key=True)
    # The staff member.
    user_id = db.Column(db.Integer, db.ForeignKey('users.id', ondelete="CASCADE"))
    # The user who sent the invitation.
    invited_by = db.Column(db.Integer, db.ForeignKey('users.id', ondelete="CASCADE"))
    org_id = db.Column(db.Integer, db.ForeignKey('organisations.id', ondelete="CASCADE"))
    # Explicit primaryjoins are required because two FKs point at users.id.
    user = db.relationship("User", primaryjoin="User.id==OrgStaff.user_id")
    referer = db.relationship("User", primaryjoin="User.id==OrgStaff.invited_by")
    org = db.relationship("Organisation", primaryjoin="Organisation.id==OrgStaff.org_id", backref='staff')
    created_at = db.Column(db.DateTime, default=db.func.now())
    updated_at = db.Column(db.DateTime, default=db.func.now(), onupdate=db.func.now())
|
3,775 | 864e9063ec1ed80cd1da3128a38633cbeb2f8bba | #-*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.files import File as DjangoFile
from django.core.management.base import BaseCommand, NoArgsCommand
from filer.models.filemodels import File
from leonardo.module.media.models import *
from filer.settings import FILER_IS_PUBLIC_DEFAULT
from filer.utils.compatibility import upath
from optparse import make_option
import os
MEDIA_MODELS = [Image, Document, Vector, Video]
class FileImporter(object):
def __init__(self, * args, **kwargs):
self.path = kwargs.get('path')
self.base_folder = kwargs.get('base_folder')
self.verbosity = int(kwargs.get('verbosity', 1))
self.file_created = 0
self.image_created = 0
self.folder_created = 0
def import_file(self, file_obj, folder):
"""
Create a File or an Image into the given folder
"""
created = False
for cls in MEDIA_MODELS:
if cls.matches_file_type(file_obj.name):
obj, created = cls.objects.get_or_create(
original_filename=file_obj.name,
file=file_obj,
folder=folder,
is_public=FILER_IS_PUBLIC_DEFAULT)
if created:
self.image_created += 1
if not created:
obj, created = File.objects.get_or_create(
original_filename=file_obj.name,
file=file_obj,
folder=folder,
is_public=FILER_IS_PUBLIC_DEFAULT)
if created:
self.file_created += 1
if self.verbosity >= 2:
print("file_created #%s / image_created #%s -- file : %s -- created : %s" % (self.file_created,
self.image_created,
obj, created))
return obj
def get_or_create_folder(self, folder_names):
"""
Gets or creates a Folder based the list of folder names in hierarchical
order (like breadcrumbs).
get_or_create_folder(['root', 'subfolder', 'subsub folder'])
creates the folders with correct parent relations and returns the
'subsub folder' instance.
"""
if not len(folder_names):
return None
current_parent = None
for folder_name in folder_names:
current_parent, created = Folder.objects.get_or_create(
name=folder_name, parent=current_parent)
if created:
self.folder_created += 1
if self.verbosity >= 2:
print("folder_created #%s folder : %s -- created : %s" % (self.folder_created,
current_parent, created))
return current_parent
def walker(self, path=None, base_folder=None):
"""
This method walk a directory structure and create the
Folders and Files as they appear.
"""
path = path or self.path
base_folder = base_folder or self.base_folder
# prevent trailing slashes and other inconsistencies on path.
path = os.path.normpath(upath(path))
if base_folder:
base_folder = os.path.normpath(upath(base_folder))
print("The directory structure will be imported in %s" % (base_folder,))
if self.verbosity >= 1:
print("Import the folders and files in %s" % (path,))
root_folder_name = os.path.basename(path)
for root, dirs, files in os.walk(path):
rel_folders = root.partition(path)[2].strip(os.path.sep).split(os.path.sep)
while '' in rel_folders:
rel_folders.remove('')
if base_folder:
folder_names = base_folder.split('/') + [root_folder_name] + rel_folders
else:
folder_names = [root_folder_name] + rel_folders
folder = self.get_or_create_folder(folder_names)
for file_obj in files:
dj_file = DjangoFile(open(os.path.join(root, file_obj)),
name=file_obj)
self.import_file(file_obj=dj_file, folder=folder)
if self.verbosity >= 1:
print(('folder_created #%s / file_created #%s / ' +
'image_created #%s') % (
self.folder_created, self.file_created,
self.image_created))
class Command(NoArgsCommand):
"""
Import directory structure into the filer ::
manage.py --path=/tmp/assets/images
manage.py --path=/tmp/assets/news --folder=images
"""
option_list = BaseCommand.option_list + (
make_option('--path',
action='store',
dest='path',
default=False,
help='Import files located in the path into django-filer'),
make_option('--folder',
action='store',
dest='base_folder',
default=False,
help='Specify the destination folder in which the directory structure should be imported'),
)
def handle_noargs(self, **options):
file_importer = FileImporter(**options)
file_importer.walker()
|
3,776 | 1568cf544a4fe7aec082ef1d7506b8484d19f198 | #exceptions.py
#-*- coding:utf-8 -*-
#exceptions
try:
print u'try。。。'
r = 10/0
print 'result:',r
except ZeroDivisionError,e:
print 'except:',e
finally:
print 'finally...'
print 'END'
try:
print u'try。。。'
r = 10/int('1')
print 'result:',r
except ValueError,e:
print 'ValueError:',e
except ZeroDivisionError,e:
print 'ZeroDivisionError:',e
else:
print 'no error!'
finally:
print 'finally...'
print 'END'
def foo(s):
return 10/int(s)
def bar(s):
return foo(s)*2
def main():
try:
bar('0')
except StandardError,e:
print 'Error!'
finally:
print 'finally...'
main()
def foo(s):
return 10/int(s)
def bar(s):
return foo(s)*2
def main():
bar('0')
#main()
import logging
def foo(s):
return 10/int(s)
def bar(s):
return foo(s)*2
def main():
try:
bar('0')
except StandardError,e:
logging.exception(e)
finally:
print 'finally...'
main()
print 'END'
class FooError(StandardError):
"""docstring for FooError"""
pass
def foo(s):
n = int(s)
if n == 0:
raise FooError('invalid value: %s'%s)
return 10/n
#foo(0)
def foo(s):
n = int(s)
return 10/n
def bar(s):
try:
return foo(s)*2
except StandardError,e:
print 'Log error and raise'
raise
def main():
bar('0')
#main()
import logging
import pdb
logging.basicConfig(level=logging.INFO)
s = '0'
n = int(s)
#pdb.set_trace()
logging.info('n=%d'%n)
#print 10/n
#python -m pdb exceptions.py
#l,n,p,q
|
3,777 | cf7bd8aa9c92d1c3acb9ccc1658d66fa0e7a142d | class Job:
    def __init__(self, id, duration, tickets):
        """A schedulable job.

        :param id: job identifier (note: shadows the ``id`` builtin locally).
        :param duration: total remaining run time, in scheduler time units.
        :param tickets: ticket count — presumably lottery-scheduling weight;
            confirm against the scheduler that consumes this class.
        """
        self.id = id
        self.duration = duration
        self.tickets = tickets
def run(self, time_slice):
self.duration -= time_slice
def done(self):
return self.duration <= 0 |
3,778 | 29e54a9ec0d65965645ac4aabf8c247a8857a25f | from translit import convert_input
def openfile(name):
    """Read the UTF-8 text file *name* and return its lines.

    Uses a ``with`` block so the handle is closed even if reading raises
    (the original leaked the handle on error).
    """
    with open(name, 'r', encoding='utf-8') as f:
        return f.readlines()
def makedict(text):
    """Parse lexicon lines into [lemma, gramm..., translation] entries.

    A ``lex:`` line starts a new entry, ``gramm:`` extends it, and
    ``trans_ru:`` completes it — only entries that reach a ``trans_ru:``
    line are emitted.  The fixed slice offsets (6/8/11) strip the
    ``" lex: "`` / ``" gramm: "`` / ``" trans_ru: "`` prefixes.
    """
    entries = []
    for raw in text:
        value = raw.replace('\n', '')
        if 'lex:' in raw:
            current = [value[6:]]
        elif 'gramm:' in raw:
            current.append(value[8:])
        elif 'trans_ru:' in raw:
            current.append(value[11:])
            entries.append(current)
    return entries
def writefile(name, text):
    """Write *text* to file *name* as UTF-8.

    Uses a ``with`` block so the handle is flushed and closed even if the
    write raises (the original leaked the handle on error).
    """
    with open(name, 'w', encoding='utf-8') as fw:
        fw.write(text)
#alf = 'абвгдежзийклмнопрстуфхцчшыьёюяӧӝӟӵ'
#trans = list('abvgdežzijklmnoprstufxcčšə')
#trans.append('ə̂')
#trans.append('ə̈əɤ')
def dictionary():
A = []
for i in ['ADJ', 'IMIT', 'N', 'N_persn', 'NRel', 'PRO', 'unchangeable', 'V']:
A += makedict(openfile('udm_lexemes_{}.txt'.format(i)))
transl = []
for el in A:
a = []
a.append(convert_input(el[0], 'cyr'))
a += el
transl.append(a)
return transl
def dict_split(transl):
    """Bucket dictionary entries into POS groups 'N', 'V' and 'IMIT'.

    *transl* is a list of ``[translit, cyrillic_lex, gramm, trans_ru]``
    rows as produced by ``dictionary()``.  Each row is rendered as a TSV
    line and appended to every group its gramm tags map to: nominals and
    most closed classes land in 'N', verbs/predicatives in 'V',
    ideophones in 'IMIT'.

    Bug fixed: the original ignored *transl* entirely and called
    ``dictionary()`` again, re-reading the lexicon files from disk; it
    now iterates the argument (same content when called from ``main``).
    """
    D = {k: [] for k in ['N', 'IMIT', 'V']}
    row = '%s\t%s\t%s\t%s\n'
    for line in transl:
        parts = []
        # Primary POS: keep the full gramm string for plain N/ADJ,
        # otherwise collapse to a single tag.
        if line[2] == 'N' or 'ADJ' in line[2]:
            parts.append(line[2])
        elif 'N-persn' in line[2] or 'N,' in line[2]:
            parts.append('N')
        elif 'V,' in line[2]:
            parts.append('V')
        # Secondary tags accumulate alongside the primary one.
        if 'ADV' in line[2]:
            parts.append('ADV')
        if 'POST' in line[2]:
            parts.append('POST')
        if 'PRO' in line[2]:
            parts.append('PRO')
        if 'NUM' in line[2]:
            parts.append('NUM')
        if 'INTRJ' in line[2]:
            parts.append('INTRJ')
        if 'CNJ' in line[2]:
            parts.append('CNJ')
        if 'IMIT' in line[2]:
            parts.append('IMIT')
        if 'PART' in line[2]:
            parts.append('PART')
        if 'N' in parts or 'ADJ' in parts or 'ADV' in parts or 'POST' in parts or 'PRO' in parts or 'NUM' in parts or 'PRAED' in parts or 'INTRJ' in parts or 'CNJ' in parts or 'PART' in parts:
            D['N'].append(row % (line[0], line[1], ', '.join(parts), line[3]))
        if 'V' in parts or 'PRAED' in parts:
            D['V'].append(row % (line[0], line[1], ', '.join(parts), line[3]))
        if 'IMIT' in parts:
            D['IMIT'].append(row % (line[0], line[1], ', '.join(parts), line[3]))
    return D
def main():
    """Split the merged dictionary by POS and write one TSV file per class.

    Rows are de-duplicated via ``set`` before writing, so the line order
    in each output file is arbitrary (matching the original behavior).
    Files are opened with ``with`` so handles close on error.
    """
    D = dict_split(dictionary())
    for k in D:
        rows = set(D[k])
        with open('udmlex_' + k + '.tsv', 'w', encoding='utf-8') as fw:
            fw.write(''.join(rows))
|
3,779 | 08b13069020696d59028003a11b0ff06014a4c68 | from datetime import datetime, timedelta
from request.insider_networking import InsiderTransactions
from db import FinanceDB
from acquisition.symbol.financial_symbols import Financial_Symbols
class FintelInsiderAcquisition():
def __init__(self, trading_date=None):
self.task_name = 'FintelInsiderAcquisition'
self.trading_date = trading_date
self.symbols = Financial_Symbols.get_all()
self.finance_db = None
self._reset_counters()
def _reset_counters(self):
self.found = 0
self.not_found = 0
self.symbols = Financial_Symbols.get_all()
def _log(self, msg, level='info'):
pass
def get_incomplete_insider_tasks(self):
if not self.finance_db or not self.trading_date:
return []
found = set(list(map(lambda x: x['symbol'], self.finance_db.find({"trading_date": str(self.trading_date.date())}, {"symbol": 1}))))
return list(set(self.symbols) - found)
def get_complete_insider_tasks(self):
symbols = []
if not self.finance_db or not self.trading_date:
return symbols
found = set(map(lambda x: x['symbol'], list(self.finance_db.find({"trading_date": str(self.trading_date.date())}, {"symbol": 1}))))
return list(found)
def start(self):
self._reset_counters()
if self.trading_date.weekday() > 4:
self._log('Not running {} on weekend'.format(self.task_name))
elif self.trading_date.weekday() <= 4 and self.trading_date.hour < 16:
self._log('Trading day has not finished yet, {}'.format(self.trading_date.time()))
else:
self.finance_db = FinanceDB('stock_insider')
incomplete = self.get_incomplete_insider_tasks()
insider_transactions = InsiderTransactions(incomplete, batching=True)
for insider_data in insider_transactions.generate():
documents = []
for symbol, data in insider_data.items():
if data:
data['trading_date'] = str(self.trading_date.date())
data['symbol'] = symbol
documents.append(data)
self.found += 1
else:
self.not_found += 1
if documents:
self.finance_db.insert_many(documents)
self._log('{}/{} found/not_found'.format(self.found, self.not_found))
# incomplete = len(self.get_incomplete_insider_tasks())
# complete = len(self.get_complete_insider_tasks())
# self._log('{}/{} complete/incomplete'.format(complete, incomplete))
    def sleep_time(self):
        """Seconds to sleep before the next acquisition attempt.

        Heuristic based on the last run's counters:
        - nothing attempted yet: wait until the next weekday 16:00
          (market close), skipping weekends;
        - symbols queried but none found: wait until today's (or
          tomorrow's) 16:00;
        - some data found: poll again in 15 minutes.
        """
        now = datetime.now()
        if self.found + self.not_found == 0:
            if now.weekday() > 4:
                # Weekend: sleep until Monday 16:00.
                next_trading = now + timedelta(days=7-now.weekday())
                tomorrow = datetime(year=next_trading.year, month=next_trading.month, day=next_trading.day, hour=16, minute=0, second=0)
                return (tomorrow - now).total_seconds()
            elif now.weekday() <= 4 and now.hour < 16:
                # Weekday before close: sleep until 16:00 today.
                later = datetime(year=now.year, month=now.month, day=now.day, hour=16, minute=0, second=0)
                return (later - now).total_seconds()
            else:
                return 900
        elif self.found == 0 and self.not_found > 0:
            # Everything came back empty: wait for the next close.
            if now.hour < 16:
                later = datetime(year=now.year, month=now.month, day=now.day, hour=16, minute=0, second=0)
                return (later - now).total_seconds()
            else:
                tomorrow = now + timedelta(days=1)
                tomorrow = datetime(year=tomorrow.year, month=tomorrow.month, day=tomorrow.day, hour=16, minute=0, second=0)
                return (tomorrow - now).total_seconds()
        else:
            # Some data was found: retry in 15 minutes.
            return 900
if __name__ == "__main__":
FintelInsiderAcquisition(datetime.now()).start() |
3,780 | 4689ee7f7178cef16ac1f5375481a9ee8a48f924 | import json
import sys
from pkg_resources import resource_string
# Load a package data file resource as a string. This
_conf = json.loads(resource_string(__name__, 'conf.json'))
# Load a data file specified in "package_data" setup option for this pkg.
_pkg_data = resource_string(__name__, 'data/pkg1.dat')
# Load a data file included in "data_files" setup option.
# FIXME
try:
_sys_data = open(sys.prefix + '/data/data1.dat').read()
except Exception as exc:
print(exc)
_sys_data = '(In editable mode?) Unable to load data file: data/data1.dat'
def hello():
print(_conf['greeting'])
print(_pkg_data)
print(_sys_data)
if __name__ == '__main__':
hello()
|
3,781 | 795936dad7a9e51edf0df66207a43ac4d97e9023 | import pathlib
import shutil
import os
import glob
import pandas as pd
import sqlalchemy as sqla
"""
SCRIPT TO FILL THE DATABASE FROM CSV ON MEGA IF LOSE DATA IN PARTICULAR DATE
"""
PATH = "/home/thomas/Documents/TER/AJOUTER_CSV_BDD/"
folder = "test/"
files_used = []
totalFiles = 0
contents = pathlib.Path(PATH+folder).iterdir()
for path in sorted(contents): # utiliser .stem -> nom sans extension fichier / .name -> nom fichier complet
files_used.append(path.name)
totalFiles+=1
print(files_used)
print(totalFiles)
li = []
for filename in files_used:
df = pd.read_csv(PATH+folder+filename,sep=';',skiprows=range(1,6),index_col=0)
li.append(df)
frame = pd.concat(li)
frame.to_csv("merged.csv",sep=';')
print('FINISH MERGING FILES!')
#Move all files used in folder dest
folder_dest = 'dest'
for file in files_used:
shutil.move(PATH+folder+file, PATH+folder_dest)
print('FINISH MOVING MERGED FILES!')
df = pd.read_csv('merged.csv',sep=';')
df['Date'] = df['Date'].str[0:10] +' '+df['Date'].str[11:19]
df = df.rename(columns={'Date': 'horodatage','Nom parking': 'nom','Type de parc': 'type_parking',"Horaires d'accès au public (pour les usagers non abonnés)": 'horaires','Code parking': 'code_parking','Type de compteur': 'type_compteur', 'Places disponibles': 'places_disponibles'})
df['horodatage'] = pd.to_datetime(df['horodatage'])
df = df.loc[: ,['code_parking','type_compteur','horodatage','places_disponibles']]
print('FINISH CLEAN DF!')
print(df)
df.info()
host = ''
port = ''
db = ''
user = ''
psw = ''
name_table = ''
# dialect+driver://username:password@host:port/database
engine = sqla.create_engine('mysql://'+user+':'+psw+'@'+host+':'+port+'/'+db)
print('CONNECTED!')
"""
df.to_sql(name_table,engine,if_exists='append',index=False,chunksize=1024,dtype={'id': sqla.Integer,'code_parking': sqla.String(255),'type_compteur': sqla.String(255),'horodatage': sqla.DateTime,'places_disponibles': sqla.Integer})
print('Finished export to Database!')
"""
|
3,782 | 874ca60749dba9ca8c8ebee2eecb1b80da50f11f | from sqlalchemy.orm import Session
from fastapi import APIRouter, Depends, File
from typing import List
from ..models.database import ApiSession
from ..schemas.images_schema import ImageReturn
from . import image_service
router = APIRouter()
@router.get("/", response_model=List[ImageReturn])
def get_all_images(db: Session = Depends(ApiSession)):
return image_service.get_all_images(db)
@router.get("/{image_id}", response_model=ImageReturn)
def get_image_by_id(image_id: int, db: Session = Depends(ApiSession)):
return image_service.get_image_by_id(image_id, db)
@router.post("/name/{image_name}", response_model=List[ImageReturn])
def create_images(image_name: str, files: List[bytes] = File(...), db: Session = Depends(ApiSession)):
return image_service.create_images(image_name, files, db)
@router.delete("/{image_id}", response_model=None)
def delete_image_by_id(image_id: int, db: Session = Depends(ApiSession)):
return image_service.delete_image_by_id(image_id, db)
@router.delete("/", response_model=None)
def delete_images_by_ids(image_ids: List[int], db: Session = Depends(ApiSession)):
return image_service.delete_images_by_ids(image_ids, db) |
3,783 | 6475fd59ba2414ea9a174297a8d94e5a2e0a7d8f | from django.contrib import admin
from .models import StoreId
# Register your models here.
class StoreIdAdmin(admin.ModelAdmin):
    """Admin configuration for StoreId: list the per-store account ids and date range."""
    # Columns shown in the change list.
    list_display = ('userid', 'aladin_id', 'yes24_id', 'ridibooks_id', 'start_date', 'end_date')
    # Fields searchable from the admin search box.
    search_fields = ['userid', 'aladin_id', 'yes24_id', 'ridibooks_id']
admin.site.register(StoreId, StoreIdAdmin)
|
3,784 | 734561c2f127418bdc612f84b3b1ba125b6a2723 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import Common.Common.GeneralSet as GeneralSet
import TestExample.Test as Test
from Common.Common.ProcessDefine import *
def MainRun():
    """Program entry: parse the -serverid command-line argument, then run the tests."""
    Cmd()
    Test.TestGo()
def Cmd():
    """Parse `-serverid <int>` from sys.argv and store it in GeneralSet.gServerId.

    Prints an error message and returns early on any malformed input.
    """
    args = sys.argv
    if len(args) != 3:
        print('error cmdargument count!')
        return
    flag = args[1]
    if flag != '-serverid':
        print('error cmdargument!')
        return
    value = args[2]
    if not value.isdigit():
        print('error cmdargument type!')
        return
    GeneralSet.gServerId = int(value)
    print(GeneralSet.gServerId)
|
3,785 | a8f52772522d1efc097c3d17d9c08199816f1168 | class IndividualStack:
def __init__(self):
self.stack=[None]*5
class StackwithStacks:
    """Stack-of-stacks: elements live in fixed 5-slot IndividualStack segments."""
    def __init__(self):
        self.stacks = []       # allocated segments, oldest first
        self.stackcount = -1   # index of the segment currently being filled
        self.count = 0         # number of used slots in the current segment (0..5)
        self.st = None         # alias of the current segment
    def push(self, element):
        """Append *element*, allocating a fresh 5-slot segment when the current one is full."""
        # count % 5 == 0 means the current segment is full (or none exists yet).
        if self.count%5==0:
            self.stackcount = self.stackcount+1
            self.count=0
            self.st=IndividualStack()
            self.stacks.append(self.st)
            self.st.stack[self.count]=element
            self.count = self.count+1
        else:
            self.st.stack[self.count] = element
            self.count = self.count + 1
    def pop(self):
        """Remove and return the most recently pushed element.

        NOTE(review): popping from an empty structure underflows (IndexError
        on self.stacks) — callers are assumed never to do that.
        """
        # count == 1: we are removing the last element of the top segment, so the
        # segment is discarded and the previous (full, 5-slot) one becomes current.
        if self.count == 1:
            self.count=self.count-1
            returnval= self.stacks[self.stackcount].stack[self.count]
            self.stacks.pop()
            self.stackcount=self.stackcount-1
            self.count=5
            return returnval
        else:
            self.count = self.count - 1
            return self.stacks[self.stackcount].stack[self.count]
# Demo: push 14 values (spanning three 5-slot segments), then pop 8 of them.
# print() calls (valid in both Python 2 and 3) replace the Python-2-only
# `print x` statements; the loops replace the copy-pasted call list while
# producing byte-identical output: 6, 1, 8, 1, 7, 4, 1, 13.
st = StackwithStacks()
for value in (1, 1, 1, 1, 1, 12, 13, 1, 4, 7, 1, 8, 1, 6):
    st.push(value)
for _ in range(8):
    print(st.pop())
3,786 | 452f35fe2ae9609949a3f92ad7768fc37094a2f1 | import numpy as np
import pytest
import torch
from ignite.contrib.metrics.regression import MeanNormalizedBias
from ignite.engine import Engine
from ignite.exceptions import NotComputableError
def test_zero_sample():
    """compute() must raise before any example has been seen."""
    metric = MeanNormalizedBias()
    expected_msg = r"MeanNormalizedBias must have at least one example before it can be computed"
    with pytest.raises(NotComputableError, match=expected_msg):
        metric.compute()
def test_zero_gt():
    """update() must reject an all-zero ground truth (division by zero)."""
    predictions = np.random.randn(4)
    gt = np.zeros(4)
    metric = MeanNormalizedBias()
    with pytest.raises(NotComputableError, match=r"The ground truth has 0."):
        metric.update((torch.from_numpy(predictions), torch.from_numpy(gt)))
def test_wrong_input_shapes():
    """update() must reject prediction/target pairs with mismatched shapes."""
    metric = MeanNormalizedBias()
    mismatched_pairs = [
        (torch.rand(4, 1, 2), torch.rand(4, 1)),
        (torch.rand(4, 1), torch.rand(4, 1, 2)),
        (torch.rand(4, 1, 2), torch.rand(4)),
        (torch.rand(4), torch.rand(4, 1, 2)),
    ]
    for y_pred, y in mismatched_pairs:
        with pytest.raises(ValueError, match=r"Input data shapes should be the same, but given"):
            metric.update((y_pred, y))
def test_mean_error():
    """Incremental metric updates must track the running numpy computation.

    Replaces four copy-pasted update/assert stanzas (a, b, c, d) with a loop;
    after every batch the metric must equal sum((gt - pred)/gt) / n_examples.
    """
    ground_truth = np.random.randn(4)
    m = MeanNormalizedBias()
    np_sum = 0.0
    np_len = 0
    for _ in range(4):
        batch = np.random.randn(4)
        m.update((torch.from_numpy(batch), torch.from_numpy(ground_truth)))
        np_sum += ((ground_truth - batch) / ground_truth).sum()
        np_len += len(batch)
        assert m.compute() == pytest.approx(np_sum / np_len)
def test_integration():
    """End-to-end check: the metric attached to an Engine matches numpy."""
    def _test(y_pred, y, batch_size):
        # The engine feeds fixed-size slices of the precomputed arrays.
        def update_fn(engine, batch):
            idx = (engine.state.iteration - 1) * batch_size
            y_true_batch = np_y[idx : idx + batch_size]
            y_pred_batch = np_y_pred[idx : idx + batch_size]
            return torch.from_numpy(y_pred_batch), torch.from_numpy(y_true_batch)
        engine = Engine(update_fn)
        m = MeanNormalizedBias()
        m.attach(engine, "mnb")
        np_y = y.numpy()
        np_y_pred = y_pred.numpy()
        data = list(range(y_pred.shape[0] // batch_size))
        mnb = engine.run(data, max_epochs=1).metrics["mnb"]
        # Reference value computed directly with numpy.
        np_sum = ((np_y - np_y_pred) / np_y).sum()
        np_len = len(np_y_pred)
        np_ans = np_sum / np_len
        assert np_ans == pytest.approx(mnb)
    def get_test_cases():
        # (predictions, targets, batch_size) triples; sizes divide evenly.
        test_cases = [
            (torch.rand(size=(100,)), torch.rand(size=(100,)), 10),
            (torch.rand(size=(200,)), torch.rand(size=(200,)), 10),
            (torch.rand(size=(100,)), torch.rand(size=(100,)), 20),
            (torch.rand(size=(200,)), torch.rand(size=(200,)), 20),
        ]
        return test_cases
    for _ in range(10):
        # check multiple random inputs as random exact occurencies are rare
        test_cases = get_test_cases()
        for y_pred, y, batch_size in test_cases:
            _test(y_pred, y, batch_size)
|
3,787 | d03f87b7dfa8fe2c63500effda1bea5e41f17ffc | #=======================================================================
__version__ = '''0.0.01'''
__sub_version__ = '''20130714221105'''
__copyright__ = '''(c) Alex A. Naanou 2011'''
#-----------------------------------------------------------------------
import os
import sha
import md5
import base64
import time
import pyexiv2 as metadata
#-----------------------------------------------------------------------
# XXX need a strategy to check if two files that have the same GID are
# identical, and if so, need to destinguish them in the GID...
# might be a good idea to add a file hash
# XXX not yet sure if this is unique enough to avoid conflicts if one
# photographer has enough cameras...
# XXX also might be wise to add a photographer ID into here...
##!!! add gid info section to identify the options used to greate a gid, e.g. EXIF date vs. ctime, etc.
##!!! do a general revision and remove leacy...
def image_gid(path, date=None,
		format='%(artist)s-%(date)s-%(name)s',
		date_format='%Y%m%d-%H%M%S',
		default_artist='Unknown',
		use_ctime=False,
		hash_func=lambda s: sha.sha(s).hexdigest()):
	'''
	Calculate image GID.

	Main gid criteria:
		- unique
		- calculable from the item (preferably any sub-item)
		- human-readable

	Default format:
		<artist>-<datetime>-<filename>

	Example:
		Alex_A.Naanou-20110627-195706-DSC_1234

	If hash_func is not None, then the function will be used to generate
	a hex hash from the above string.

	Supported fields:
		%(artist)s - Exif.Image.Artist field, stripped and spaces replaced
				with underscores.
				If no artist info is set this will be set to default_artist.
		%(date)s - Exif.Photo.DateTimeOriginal formated to date_format argument.
		%(name)s - file name.

	NOTE: date and time are the date and time the image was made ('Exif.Image.DateTime')
	NOTE: need EXIF data to generate a GID
	NOTE: falls back to the file ctime when use_ctime is set or EXIF is unreadable.
	'''
	# get the filename...
	data = {
		'name': os.path.splitext(os.path.split(path)[-1])[0],
	}
	##!!! this might fail...
	i = metadata.ImageMetadata('%s' % path)
	try:
		i.read()
	except IOError:
		# can't read exif...
		i = None
	# check if we need a date in the id...
	if '%(date)s' in format:
		if date is not None:
			# explicit date argument wins; interpreted as a UNIX timestamp.
			data['date'] = time.strftime(date_format, time.gmtime(date))
		elif use_ctime or i is None:
			# fall back to filesystem ctime when requested or EXIF is unavailable.
			date = os.path.getctime(path)
			data['date'] = time.strftime(date_format, time.gmtime(date))
		else:
			date = i['Exif.Photo.DateTimeOriginal'].value
			data['date'] = date.strftime(date_format)
	# check if we need an artist...
	if '%(artist)s' in format:
		data['artist'] = default_artist
		if i is not None:
			try:
				# set the artist if in EXIF...
				a = i['Exif.Image.Artist'].value.strip().replace(' ', '_')
				if a != '':
					data['artist'] = a
			except KeyError:
				pass
	if hash_func is not None:
		return hash_func(format % data)
	return format % data
#--------------------------------------------------handle_commandline---
def handle_commandline():
	"""Parse command-line options and print the GID of the single path argument.

	Options select the output encoding: plain text (-t), base64 (-b) or the
	default sha hex digest (-s).
	"""
	from optparse import OptionParser
	parser = OptionParser()
	##!!! need to define the path so that it shows up in -h
	# NOTE: the -t/-b help strings were swapped in the original; fixed here.
	parser.add_option('-t', '--text',
			dest='format',
			action='store_const',
			const='text',
			default='sha',
			help='output GUID in text format.')
	parser.add_option('-b', '--base64',
			dest='format',
			action='store_const',
			const='base64',
			default='sha',
			help='output GUID in base64 format.')
	parser.add_option('-s', '--sha',
			dest='format',
			action='store_const',
			const='sha',
			default='sha',
			help='output GUID in sha format.')
	options, args = parser.parse_args()
	if len(args) != 1:
		parser.print_usage()
	else:
		IN_PATH = args[0]
		# normalize Windows path separators...
		IN_PATH = IN_PATH.replace('\\', '/')
		if options.format == 'text':
			# no hashing: emit the human-readable GID string.
			print(image_gid(IN_PATH, hash_func=None))
		elif options.format == 'base64':
			# also remove the trailing \n...
			print(image_gid(IN_PATH, hash_func=lambda s: base64.encodestring(s).strip()))
		else:
			print(image_gid(IN_PATH))
#-----------------------------------------------------------------------
# Script entry point: compute and print the GID for the given path.
if __name__ == '__main__':
	handle_commandline()
#=======================================================================
# vim:set ts=4 sw=4 nowrap :
|
3,788 | 937711546271c145d0f0df2981bdd7d1e9297e3a | """
Test /cohort/:id/user/:id
"""
import re
from unittest.mock import patch
from django.urls.base import reverse_lazy
from rest_framework import status
from breathecode.tests.mocks import (
GOOGLE_CLOUD_PATH,
apply_google_cloud_client_mock,
apply_google_cloud_bucket_mock,
apply_google_cloud_blob_mock,
)
from ..mixins import AdmissionsTestCase
class CohortIdUserIdTestSuite(AdmissionsTestCase):
    """Test /cohort/:id/user/:id"""
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_without_auth(self):
        """Test /cohort/:id/user/:id without auth"""
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1})
        response = self.client.get(url)
        json = response.json()
        self.assertEqual(
            json, {
                'detail': 'Authentication credentials were not provided.',
                'status_code': status.HTTP_401_UNAUTHORIZED
            })
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_bad_cohort_id(self):
        """PUT with a cohort_id that does not exist returns 400 'invalid cohort_id'."""
        model = self.generate_models(authenticate=True)
        url = reverse_lazy('admissions:cohort_id_user_id', kwargs={'cohort_id': 1, 'user_id': 1})
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'invalid cohort_id'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_bad_user_id(self):
        """PUT with a user_id that does not exist returns 400 'invalid user_id'."""
        model = self.generate_models(authenticate=True, cohort=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': 999
                           })
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'invalid user_id'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_bad_id(self):
        """PUT when cohort and user exist but no CohortUser links them returns 400."""
        model = self.generate_models(authenticate=True, cohort=True, user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'Specified cohort not be found'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_id_but_without_user(self):
        """PUT with an existing cohort but no explicit user model returns 400."""
        # NOTE(review): model.user presumably comes from authenticate=True — confirm in the mixin.
        model = self.generate_models(authenticate=True, cohort=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'Specified cohort not be found'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_id_but_with_user(self):
        """PUT with cohort and user but still no CohortUser relation returns 400."""
        model = self.generate_models(authenticate=True, cohort=True, user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {}
        response = self.client.put(url, data)
        json = response.json()
        expected = {'status_code': 400, 'detail': 'Specified cohort not be found'}
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_id(self):
        """Happy-path PUT: updates the existing CohortUser and returns 200."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     specialty_mode=True,
                                     profile_academy=True,
                                     cohort_user=True)
        model_dict = self.get_cohort_user_dict(1)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.put(url, data)
        json = response.json()
        expected = {
            'id': model.cohort_user.id,
            'role': model.cohort_user.role,
            'educational_status': model.cohort_user.educational_status,
            'finantial_status': model.cohort_user.finantial_status,
        }
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # The database row count and contents are unchanged by this PUT.
        self.assertEqual(self.count_cohort_user(), 1)
        self.assertEqual(self.get_cohort_user_dict(1), model_dict)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_delete_with_id_with_bad_user_id(self):
        """DELETE with a nonexistent user_id returns 400."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     specialty_mode=True,
                                     profile_academy=True,
                                     cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': 9999
                           })
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.delete(url, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_delete_with_id_with_bad_cohort_id(self):
        """DELETE with a nonexistent cohort_id returns 400."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     specialty_mode=True,
                                     profile_academy=True,
                                     cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': 9999,
                               'user_id': model.user.id
                           })
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.delete(url, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_delete_with_id(self):
        """Happy-path DELETE: removes the CohortUser and returns 204."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     specialty_mode=True,
                                     profile_academy=True,
                                     cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {'specialty_mode': model.specialty_mode.id}
        response = self.client.delete(url, data)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(self.count_cohort_user(), 0)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_unsuccess_task(self):
        """PUT cannot set GRADUATED while the user still has pending project tasks."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     profile_academy=True,
                                     cohort_user=True,
                                     task=True,
                                     task_status='PENDING',
                                     task_type='PROJECT')
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {
            'educational_status': 'GRADUATED',
        }
        response = self.client.put(url, data)
        json = response.json()
        expected = {
            'status_code': 400,
            'detail': 'User has tasks with status pending the educational status cannot be GRADUATED',
        }
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    @patch(GOOGLE_CLOUD_PATH['client'], apply_google_cloud_client_mock())
    @patch(GOOGLE_CLOUD_PATH['bucket'], apply_google_cloud_bucket_mock())
    @patch(GOOGLE_CLOUD_PATH['blob'], apply_google_cloud_blob_mock())
    def test_cohort_id_user_id_put_with_unsuccess_finantial_status(self):
        """PUT cannot combine GRADUATED with a LATE financial status."""
        model = self.generate_models(authenticate=True,
                                     cohort=True,
                                     user=True,
                                     profile_academy=True,
                                     cohort_user=True)
        url = reverse_lazy('admissions:cohort_id_user_id',
                           kwargs={
                               'cohort_id': model.cohort.id,
                               'user_id': model.user.id
                           })
        data = {
            'educational_status': 'GRADUATED',
            'finantial_status': 'LATE',
        }
        response = self.client.put(url, data)
        json = response.json()
        expected = {
            'status_code': 400,
            'detail': 'Cannot be marked as `GRADUATED` if its financial status is `LATE`',
        }
        self.assertEqual(json, expected)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
3,789 | 99154212d8d5fdb92cd972c727791158d09e3e2c | # -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-07-10 02:52
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds ProjectRelationship.introduction_text."""
    dependencies = [
        ('civictechprojects', '0036_auto_20200708_2251'),
    ]
    operations = [
        migrations.AddField(
            model_name='projectrelationship',
            name='introduction_text',
            field=models.CharField(blank=True, max_length=10000),
        ),
    ]
|
3,790 | 7088f7233b67dcb855482a76d304aacc1a26abad | import json
import unittest
from music_focus.workflows.weibo_online import WeiboOnline
class Test(unittest.TestCase):
    """Smoke test for the WeiboOnline workflow."""
    def setUp(self):
        pass
    def test(self):
        # NOTE(review): no assertions — this only checks that the workflow runs
        # and prints its JSON result; presumably requires network access.
        workflow_input = {
            'result_type': 'posts'
        }
        wf = WeiboOnline()
        r = wf.run(workflow_input)
        print(json.dumps(r, ensure_ascii=False, indent=2))
    def tearDown(self):
        pass
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
3,791 | a4f2ca3155f2bb4c17be5bb56dd889abb5d20293 | # Generated by Django 3.0.4 on 2020-04-04 11:07
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: tightens Cost/Product field definitions."""
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('product', '0003_cost'),
    ]
    operations = [
        migrations.AlterField(
            model_name='cost',
            name='name',
            field=models.CharField(max_length=50, unique=True),
        ),
        migrations.AlterField(
            model_name='product',
            name='author',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AlterField(
            model_name='product',
            name='description',
            field=models.TextField(default=''),
        ),
        migrations.AlterField(
            model_name='product',
            name='name',
            field=models.CharField(max_length=100, unique=True),
        ),
        migrations.AlterField(
            model_name='product',
            name='passport_link',
            field=models.CharField(default='', max_length=200),
        ),
        migrations.AlterField(
            model_name='product',
            name='site_link',
            field=models.CharField(default='', max_length=200),
        ),
    ]
|
3,792 | 086ee4de1d74654ef85bd0a169fdf49c8f52bef2 | import argparse
from figure import Figure
from figure.Circle import Circle
from figure.Square import Square
class FCreator(object):
    """Factory for Figure objects driven by argparse-style option strings."""
    __types = ['square', 'circle']
    def createParser(self, line: str):
        """Parse a `create` command line (-t/-n required; -w/-r default to 20)."""
        cli = argparse.ArgumentParser()
        cli.add_argument('-t', '--type', required=True, choices=self.__types)
        cli.add_argument('-n', '--name', required=True)
        cli.add_argument('-w', '--width', default=20, type=float)
        cli.add_argument('-r', '--radius', default=20, type=float)
        return cli.parse_args(line.split())
    def editParser(self, line: str):
        """Parse an `edit` command line (-n required; -w/-r optional, no defaults)."""
        cli = argparse.ArgumentParser()
        cli.add_argument('-n', '--name', required=True)
        cli.add_argument('-w', '--width', type=float)
        cli.add_argument('-r', '--radius', type=float)
        return cli.parse_args(line.split())
    def create(self, line: str) -> Figure:
        """Create a new figure described by *line*; None for an unknown type."""
        opts = self.createParser(line)
        if opts.type == 'square':
            return self.createSquare(opts.name, opts.width)
        if opts.type == 'circle':
            return self.createCircle(opts.name, opts.radius)
    def edit(self, params, figure: Figure):
        """Rebuild *figure* from *params*, keeping its current type."""
        builders = {
            'square': lambda: self.createSquare(params.name, params.width),
            'circle': lambda: self.createCircle(params.name, params.radius),
        }
        builder = builders.get(figure.type)
        if builder is not None:
            return builder()
    def createSquare(self, name: str, width: float):
        """Instantiate a named Square and apply its width."""
        shape = Square(name)
        shape.width(width)
        return shape
    def createCircle(self, name: str, radius: float):
        """Instantiate a named Circle and apply its radius."""
        shape = Circle(name)
        shape.radius(radius)
        return shape
    def getTypes(self) -> str:
        """Return the supported type names, one per line (trailing newline kept)."""
        return "".join("{}\n".format(kind) for kind in self.__types)
# Shared module-level factory instance.
fcreator = FCreator()
|
3,793 | c5d92ec592250d5bc896d32941364b92ff1d21e9 | #! py -3
# -*- coding: utf-8 -*-
import requests
from urllib.parse import quote
import logging
from urllib.parse import urlparse
logger = logging.getLogger(__name__)
# NOTE(review): the line below immediately rebinds `logger` to the root logger,
# making the __name__ logger above unused — confirm which one was intended.
logger = logging.getLogger()
# Configure the log level. If not set explicitly it defaults to WARNING, meaning
# everything below WARNING is dropped and the attached handlers only receive
# WARNING and above; think of this as the master switch.
logger.setLevel(logging.INFO)
formatter = logging.Formatter(fmt="%(asctime)s %(filename)s[line:%(lineno)d]%(levelname)s - %(message)s",
                              datefmt="%m/%d/%Y %I:%M:%S %p")  # create a formatter object
console = logging.StreamHandler()  # send log output to the console
console.setLevel(logging.INFO)  # minimum level for console output
console.setFormatter(formatter)  # attach the format
logger.addHandler(console)
# TODO: move these configuration values into a separate config file.
userAgent = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36"
# serverUrl = "http://192.168.60.125:19000/"
serverUrl = "http://epm.huaxinglu72hao.com/"
host = "epm.huaxinglu72hao.com"
# Define the current session. The session uses unified headers and cookies:
# the code below updates both, so every subsequent request made through `s`
# automatically uses the updated values — which is why the helper functions
# in this module have no return value.
s = requests.Session()
s.headers.update({"User-Agent": userAgent})
s.headers.update({"Referer": serverUrl})
s.headers.update({"Host": host})
def workspaceLogon(account, password):
    """Log in to EPM Workspace and install the returned tokens on the shared session.

    Side effects: updates the module-level session headers with the SSO token,
    asserter token, CSRF token and DMS ECID extracted from the login response.
    Raises RuntimeError when the server is unreachable.
    """
    # Log in.
    logger.info("logon: 开始模拟登录workspace")
    postUrl = "%sworkspace/logon" % serverUrl
    postData = {
        "sso_username": account,
        "sso_password": password,
    }
    try:
        responseRes = s.post(postUrl, data = postData)
    except Exception as e:
        logger.error(e)
        raise RuntimeError("登录失败: 网络异常, 请检查服务器地址配置")
    logger.info("登录返回: " + responseRes.text)
    # Whether or not the login succeeded, the status code is generally 200.
    # NOTE(review): positional split('[')[n] parsing of the response body is
    # brittle — it breaks silently if the server changes its payload format.
    sso_token = responseRes.text.split('[')[2].split(']')[0]
    assertertoken = responseRes.text.split('[')[6].split(']')[0]
    assertertoken_ = {"ora_epm_ctg": assertertoken}
    updateHeaders(assertertoken_)
    token = {"_sso_token": sso_token}
    updateHeaders(token)
    CSRF = responseRes.headers.get("X-ORACLE-BPMUI-CSRF")
    csrf_ = {"X-ORACLE-BPMUI-CSRF": CSRF}
    updateHeaders(csrf_)
    ECID = responseRes.headers.get("X-ORACLE-DMS-ECID")
    h = {"X-ORACLE-DMS-ECID": ECID}
    updateHeaders(h)
def updateHeaders(h):
    """Merge the mapping *h* into the shared session headers, logging the change."""
    logger.info(f"更新请求头: {h}")
    s.headers.update(h)
def request_dyn():
    """POST the workspace /raframework/browse/dyn test request using the session tokens."""
    logger.info ("dyn: 开始测试请求")
    postUrl = "%s/raframework/browse/dyn" % serverUrl
    postData={
        "page": "/conf/CDSConfig.jsp",
        "amp":"",
        "action": "returnXML",
        "LOCALE_LANGUAGE": "en_US",
        "rightToLeft": "false",
        "accessibilityMode": "false",
        "themeSelection": "Skyros",
        # SSO token previously stored on the session by workspaceLogon().
        "sso_token": s.headers.get("_sso_token")
    }
    responseRes = s.post(postUrl, data=postData)
    # logger.info(f"dyn: 响应text:{responseRes.text}")
    logger.info(f"dyn: 响应header:{responseRes.headers}")
def request_planning_session(plan_name):
    """Open a Hyperion Planning session for *plan_name* and seed the MRU cookies.

    Sample payload captured from a browser session (kept for reference):

    "accessibilityMode": "false",
    "bpm.contentheight": "621",
    "bpm.contentwidth": "1314",
    "bpm.objectpaletteheight": "648",
    "bpm.objectpalettewidth": "207",
    "cluster": "PLANNING_LWA",
    "instance": "7",
    "LOCALE_LANGUAGE": "zh_CN",
    "mru_id": "PLANNING_LWA_JSTI1:application",
    "repository_token": "59d9b714b22a35fb616dd3c05c5850d56b12522a9561499e9ea22afd918b6d36ea703f19668538504f86305c84f95441a1daf4cac09725703738d5073524871af0489411df16d2bb8f5d4726acdcc389b45e9e6ff00482249c53c1886ca68bfc090fcfbd365243996436f5bbe3affd9c87f6d5e8b7626e59adaeebcc4a89a66ef9725d8d4218c8b0c15912455a2690fcd2391a71806767f05fe66b395dda3e74b75ffa16e80c7814c47657dbc5d652da1044edc74ff20d6e604bdd733542457c3befca52c0700d758445b00ad519d0e8dee43e40cb78e070caca6b7c7a56008b2cbad75e83c7c8454f93177992f9166721331db1e11e48a113a51b3ebc2a79f1d74199127183d7708c47a3ff71663d9d",
    "rightToLeft": "false",
    "sourceApp": "JSTI1",
    "sso_token": "sJIsuVcoOMtHD5CgUaVLmuo4SfCatQy4dowOxaTF0cj1CDqPrPW8YYKvn4nU5rsBYG1yLUChBU/ndO+3pDhwFcRbHJZmaiUOnyFhEh97A5xDXatOpkhIPx4CW+ygHNQlmKrbgUZEmJBgwNT4lcBuDPCZiodPZBo3zCkrSMLQcq0R8qoX6nHvfSVW3ep86WHDyJ859v9OCxcbo4FD4tSv4fTdHGdkGtQaRpdMtuSGtvY2hB+Z7MPEHqkhkIVAt0WWVplND5rUdF5yrLVsywYLWq7I2GH3/UVnwSgsmQy/psjChbnHkzqAcxNg837XRFI1EZBVxaGvdJw6U2mu3qlD29oYi2C/UqwODIjXGtj/st29j6fvd3lJHpsneutkVoG0E/mohFU+JzQaCnopeA+L3A8pORvkfwSyqhURqiLLHS0=",
    "themeSelection": "Skyros",
    """
    logger.info ("planning_session: 开始测试请求")
    postUrl = "%s/HyperionPlanning/modules/com/hyperion/planning/Adf.jsp" % serverUrl
    postData={
        "accessibilityMode": "false",
        "bpm.contentheight": "621",
        "bpm.contentwidth": "1314",
        "bpm.objectpaletteheight": "648",
        "bpm.objectpalettewidth": "207",
        "cluster": "PLANNING_LWA",
        "instance": "7",
        "LOCALE_LANGUAGE": "zh_CN",
        "mru_id": f"PLANNING_LWA_{plan_name}:application",
        # The repository token is the workspace session cookie set at login.
        "repository_token": s.cookies.get("ORA_EPMWS_session"),
        "rightToLeft": "false",
        "sourceApp": plan_name,
        "sso_token": s.headers.get("_sso_token"),
        "themeSelection": "Skyros",
    }
    responseRes = s.post(postUrl, data=postData)
    # logger.info(f"dyn: 响应text:{responseRes.text}")
    logger.info(f"planning_session: 响应cookie:{responseRes.cookies}")
    # Manually add the two most-recently-used cookies the UI would normally set.
    s.cookies.set("ORA_HP_MRUApplication", plan_name, path="/HyperionPlanning/", domain=host)
    s.cookies.set("ORA_HP_MRUUsername", s.cookies.get("ORA_EPMWS_User"), path="/HyperionPlanning/", domain=host)
    logger.info("当前的header为: " + str(s.headers))
    logger.info("当前的cookie为: " + str(s.cookies))
    # logger.info(f"planning_session: 响应:{responseRes.text}")
# 访问一个具体的表单
import re
def request_planning_table():
    """Load one specific Planning data form (hard-coded to JSTI -> form 55532).

    The payload below is a raw ADF request captured from a browser; the
    `_adf.ctrl-state` values are session-specific, so this request is expected
    to be redirected when replayed (see the note below).
    """
    # All of the data below is hard-coded and only valid for JSTI -> "A0000主要指标表".
    s.headers["Adf-Ads-Page-Id"] = "2"
    s.headers["Adf-Rich-Message"] = "true"
    url = serverUrl + "/HyperionPlanning/faces/PlanningCentral?_adf.ctrl-state=9gxaes0ha_55?_adf.ctrl-state="
    response = s.post(url + "14hssan6gi_4",
                      data="p:r:0:pc1:searchName=&org.apache.myfaces.trinidad.faces.FORM=f1&javax.faces.ViewState=!-9xx6pbfv3&oracle.adf.view.rich.DELTAS=%7Bp%3Ar%3A0%3Apc1%3AformTbl%3D%7BviewportSize%3D27%2Crows%3D33%2CscrollTopRowKey%7Cp%3D0%7D%2Cp%3AformTbl%3D%7BselectedRowKeys%3D0%7D%7D&event=p%3AloadBtn&event.p:loadBtn=%3Cm+xmlns%3D%22http%3A%2F%2Foracle.com%2FrichClient%2Fcomm%22%3E%3Ck+v%3D%22_custom%22%3E%3Cb%3E1%3C%2Fb%3E%3C%2Fk%3E%3Ck+v%3D%22module%22%3E%3Cs%3Eenterdata%3C%2Fs%3E%3C%2Fk%3E%3Ck+v%3D%22folderId%22%3E%3Cs%3E%3C%2Fs%3E%3C%2Fk%3E%3Ck+v%3D%22formId%22%3E%3Cn%3E55532%3C%2Fn%3E%3C%2Fk%3E%3Ck+v%3D%22searchFormName%22%3E%3Cs%3EA0000%E4%B8%BB%E8%A6%81%E6%8C%87%E6%A0%87%E8%A1%A8%3C%2Fs%3E%3C%2Fk%3E%3Ck+v%3D%22adhocSessionIdForForm%22%3E%3Cs%3E%3C%2Fs%3E%3C%2Fk%3E%3Ck+v%3D%22artifactType%22%3E%3Cs%3E%3C%2Fs%3E%3C%2Fk%3E%3Ck+v%3D%22tlArtifactType%22%2F%3E%3Ck+v%3D%22tlArtifactId%22%2F%3E%3Ck+v%3D%22immediate%22%3E%3Cb%3E1%3C%2Fb%3E%3C%2Fk%3E%3Ck+v%3D%22type%22%3E%3Cs%3EloadModule%3C%2Fs%3E%3C%2Fk%3E%3C%2Fm%3E&oracle.adf.view.rich.PROCESS=p%3AloadBtn")
    # Judging from the output, this request gets redirected.
    logger.info(response.content)
    # m = re.search(r"_adf\.ctrl-state=.+?&", response.text)
    # current = m.group(0).split("=")[1].replace("&", "")
    #
    # response = s.post(url + current,
    #                   data="p:r:0:pc1:searchName=&org.apache.myfaces.trinidad.faces.FORM=f1&javax.faces.ViewState=!-9xx6pbfv3&oracle.adf.view.rich.DELTAS=%7Bp%3Ar%3A0%3Apc1%3AformTbl%3D%7BviewportSize%3D27%2Crows%3D33%2CscrollTopRowKey%7Cp%3D0%7D%2Cp%3AformTbl%3D%7BselectedRowKeys%3D0%7D%7D&event=p%3AloadBtn&event.p:loadBtn=%3Cm+xmlns%3D%22http%3A%2F%2Foracle.com%2FrichClient%2Fcomm%22%3E%3Ck+v%3D%22_custom%22%3E%3Cb%3E1%3C%2Fb%3E%3C%2Fk%3E%3Ck+v%3D%22module%22%3E%3Cs%3Eenterdata%3C%2Fs%3E%3C%2Fk%3E%3Ck+v%3D%22folderId%22%3E%3Cs%3E%3C%2Fs%3E%3C%2Fk%3E%3Ck+v%3D%22formId%22%3E%3Cn%3E55532%3C%2Fn%3E%3C%2Fk%3E%3Ck+v%3D%22searchFormName%22%3E%3Cs%3EA0000%E4%B8%BB%E8%A6%81%E6%8C%87%E6%A0%87%E8%A1%A8%3C%2Fs%3E%3C%2Fk%3E%3Ck+v%3D%22adhocSessionIdForForm%22%3E%3Cs%3E%3C%2Fs%3E%3C%2Fk%3E%3Ck+v%3D%22artifactType%22%3E%3Cs%3E%3C%2Fs%3E%3C%2Fk%3E%3Ck+v%3D%22tlArtifactType%22%2F%3E%3Ck+v%3D%22tlArtifactId%22%2F%3E%3Ck+v%3D%22immediate%22%3E%3Cb%3E1%3C%2Fb%3E%3C%2Fk%3E%3Ck+v%3D%22type%22%3E%3Cs%3EloadModule%3C%2Fs%3E%3C%2Fk%3E%3C%2Fm%3E&oracle.adf.view.rich.PROCESS=p%3AloadBtn")
    #
    # logger.info(response.content)
# Script entry point: log in, then replay the captured Planning requests in order.
if __name__ == "__main__":
    # Judging from the returned result, the login succeeds.
    workspaceLogon("admin", "welcome1")
    request_dyn()
    request_planning_session("JSTI")
    request_planning_table()
    # logger.info("sso_token = %s" % sso_token)
    # logger.info("sso_token = %s" % sso_token)
    # logger.info("assertertoken = %s" % assertertoken)
    # request_dyn(sso_token,assertertoken)
    # requestHSS("tYy6FOvH4ZhJR1CUTy83Q9ZJxiNnYbnAt8fjWcMBII4rEmQlYjth+/M4MLIXVuXp7Hi3xQS4+QRySoxvNuFibcGbxbIYRVLFVKogwyhtIAcvtIXMvfhxd8svcLZgIXyTklurCsTarP9KtRgc26B3XRWlDG/QAzVLWyGH26ROffQpUj+bW6yRrj7A0udq1PbqGFXjDZ9iNW0ALbg0Z5NC7g3pBgjtetBohXRmpV32DCw4tI1Y7j7tLnHtSFk/NtdNri5AAFCTqTPd6HYdBzbCDqfP7ZEdfeXJFsfatRE5Pcgqm36hV1U7HeDENhTvNBtZiiQ9OfMdopyHQQvPnBQsyfKzSKTq1O5bSHH9HzQfCJdvq/nkSbalctY2SxIb0vtefJ9fUZ2y4bMAm/g95EZLiKZ5aouVrzOKjt8sl1zVctk+Ivg141wUPqtTULOYdBoi")
|
3,794 | cd49230be3c418853aa2986ed727204e51a6b6ae | import numpy as np
import pandas as pd
from pathlib import Path
import matplotlib as mpl
from matplotlib import pyplot as plt
plt.style.use('seaborn-muted')
#from IPython import get_ipython
from IPython.display import HTML, Markdown
import air_cargo_problems as acp
# Human-readable names of the four benchmark air-cargo planning problems.
problems = ['Air Cargo Problem 1',
            'Air Cargo Problem 2',
            'Air Cargo Problem 3',
            'Air Cargo Problem 4']
# Search algorithm (+ heuristic) labels, in canonical report order; the
# 1-based position in this list is used as the searcher index elsewhere.
SEARCHES = ['breadth_first_search',
            'depth_first_graph_search',
            'uniform_cost_search',
            'greedy_best_first_graph_search h_unmet_goals',
            'greedy_best_first_graph_search h_pg_levelsum',
            'greedy_best_first_graph_search h_pg_maxlevel',
            'greedy_best_first_graph_search h_pg_setlevel',
            'astar_search h_unmet_goals',
            'astar_search h_pg_levelsum',
            'astar_search h_pg_maxlevel',
            'astar_search h_pg_setlevel']
def get_prob_specs():
    """Build a DataFrame summarizing the size of each air-cargo problem.

    Instantiates the four problems and tabulates their cargo/plane/airport
    counts and goal sizes, one row per problem.
    """
    instances = [acp.air_cargo_p1(), acp.air_cargo_p2(),
                 acp.air_cargo_p3(), acp.air_cargo_p4()]
    spec_columns = {
        'Problem': list(problems),
        'Air cargo problem': list(range(1, len(problems) + 1)),
        'Cargos': [len(prob.cargos) for prob in instances],
        'Planes': [len(prob.planes) for prob in instances],
        'Airports': [len(prob.airports) for prob in instances],
        'Goal': [len(prob.goal) for prob in instances],
    }
    return pd.DataFrame(spec_columns)
# Problem-size summary table, computed once at import time and merged into
# every per-problem result frame.
specs = get_prob_specs()
def df2tsv(df, fname, replace=False):
    """Write *df* to *fname* as a tab-separated file.

    An existing file is left untouched unless ``replace=True``.
    """
    target = Path(fname)
    if target.exists() and not replace:
        # Keep the file already on disk.
        return
    df.to_csv(fname, sep='\t')
    return
def get_problem_data_df(file_stem, problem, raw_dir, out_dir, file_as_tsv=False, replace=False):
    """
    Combine all processed files of a problem found in Path(data_dir) with given stem.
    The file to be saved to/retrieved from out_dir is passed in file_as_tsv, tab separated csv.
    Input example:
        file_stem = 'prob_2'
        problem = 'Air Cargo Problem 2'
    Output: a dataframe, saved to tsv if file_as_tsv=True and not replace; saved as file_stem+'_df.csv'.
    Returns None (after printing a message) when arguments are missing or no
    raw files match the stem.
    """
    if file_stem is None or problem is None:
        print('file_stem and problem must have a value.')
        return
    t = '\t'
    # input/output file suffixes: raw files end in '.csv', cached output in '_df.csv'
    sfx = ['.csv', '_df.csv']
    # Try retrieving the cached frame from out_dir if not replacing it:
    fout = None
    if file_as_tsv:
        fout = Path(out_dir).joinpath(file_stem + sfx[1])
        if fout.exists() and not replace:
            df = pd.read_csv(fout, sep=t)
            try:
                # Drop the unnamed index column a round-tripped to_csv leaves behind.
                return df.drop('Unnamed: 0', axis=1)
            except KeyError:
                pass
    # else: (re)process every raw file whose name starts with the stem
    pfiles = list(Path(raw_dir).glob(file_stem + '*'))
    if len(pfiles) == 0:
        print(f'No raw files with stem: {file_stem}')
        return
    dflist = []
    for f in pfiles:
        df, err = get_results_df(f, problem)
        if df is not None:
            # Attach the problem-size columns and index rows by searcher id (1-based).
            df = df.merge(specs)
            df['index'] = df['Searcher'].apply(lambda x: SEARCHES.index(x)+1)
            df['index'] = df['index'].astype(int)
            df.set_index('index', drop=True, inplace=True)
            dflist.append(df)
            del df
        else:
            print(f'Error from get_results_df:\n\t{err}')
    # Stack all files for this problem and order by searcher id.
    dfout = pd.concat(dflist, ignore_index=False)
    dfout.sort_index(inplace=True)
    if file_as_tsv:
        # Cache the combined frame for the fast path above.
        df2tsv(dfout, fout, replace=replace)
    return dfout
def get_results_df(fname, problem):
    """Process a raw results csv into a tidy dataframe.

    The raw file is assumed to hold, for each searcher, a 'Searcher' label row
    followed by six value rows (Actions..ElapsedSeconds) in that order
    — TODO confirm against the file producer.

    Returns (df, '') on success, or (None, error_message) when the file has
    fewer rows than the expected value columns.
    """
    t = '\t'
    # Cols to add, in the exact order the value rows appear after each label row:
    val_cols = ['Actions','Expansions','GoalTests','NewNodes','PlanLength','ElapsedSeconds']
    err = ''
    df = pd.read_csv(fname, sep=t)
    if df.shape[0] < len(val_cols):
        err = f'Data for {fname.name} is incomplete.'
        return None, err
    # Rename cols: c (temp) -> Searcher
    df.columns = ['c', 'Searcher']
    # Add new (empty) value cols & reindex
    df = df.reindex(columns = df.columns.tolist() + val_cols)
    # Populate new cols according to row with search name:
    # each label row at idx pulls its values from the following len(val_cols) rows.
    sr = df.loc[df.c == 'Searcher', 'Searcher']
    for (idx, sr_row) in sr.items():
        j = idx
        for c in df.columns[2:].tolist():
            j += 1
            if c == 'ElapsedSeconds':
                df.loc[idx, c] = float(df.loc[j, 'Searcher'])
            else:
                df.loc[idx, c] = int(df.loc[j, 'Searcher'])
    # Only the label rows received values; dropping NaN removes the value rows.
    df.dropna(inplace=True)
    # Add a minute column:
    df['Minutes'] = np.round(df.ElapsedSeconds/60, 3)
    # Replace values of 1st col with problem name & update col name:
    df['c'] = problem
    df.rename(columns={'c': 'Problem'}, inplace=True)
    df.reset_index(drop=True, inplace=True)
    return df, ''
def concat_all_dfs(dflist):
    """
    Output combined df for complete runs, Actions>0.

    dflist: per-problem frames indexed by searcher id (as produced by
    get_problem_data_df). Returns a reduced frame with one row per
    completed (problem, searcher) run.
    """
    dfall = pd.concat(dflist, ignore_index=False)
    # Keep the searcher id as a regular column named 'id'.
    dfall.reset_index(drop=False, inplace=True)
    dfall.rename(columns={'index': 'id'}, inplace=True)
    # reduced: drop the three problem-spec columns (positional slice -4:-1,
    # i.e. Cargos/Planes/Airports — depends on merge column order) plus labels
    # not needed for cross-problem comparison.
    drop_cols = dfall.columns[-4:-1].tolist() + ['Problem','Minutes','GoalTests']
    dfa = dfall.drop(drop_cols, axis=1)
    del dfall
    # add col for the bare function name (text before the heuristic suffix)
    dfa['search_fn'] = dfa.Searcher.str.partition(' ')[0]
    # reorder cols
    dfa = dfa[['Air cargo problem','id','search_fn','Searcher','Actions',
               'PlanLength', 'NewNodes','Expansions','ElapsedSeconds']]
    # complete runs only (Actions == 0 marks a run that did not finish):
    return dfa[dfa['Actions'].values > 0]
def plans_length(dfa, which):
    """
    dfa: frame of concatenated df1 to df4.
    Analysis of plan length for which in ['double', 'single']:
    PlanLength is double(single)-digit.

    Returns (html table of per-function frequencies, summary text, filtered df).
    """
    if which == 'double':
        msk = dfa.PlanLength >= 10
        col2 = 'Frequency where PlanLength >=10'
    else:
        msk = dfa.PlanLength < 10
        col2 = 'Frequency where PlanLength <10'
    dfa_rows = dfa.shape[0]
    dfout = dfa[msk].sort_values(['PlanLength'], ascending=False)
    uniq_probs = dfout['Air cargo problem'].unique()
    n_plans = dfout.shape[0]
    # NOTE(review): searcher_cnt is computed but never used below.
    searcher_cnt = dfout['Searcher'].value_counts()
    fn_cnt = dfout['search_fn'].value_counts()
    # get the html string:
    df_fn = fn_cnt.to_frame()
    df_fn.reset_index(drop=False, inplace=True)
    df_fn.columns = ['Search function', col2]
    df_fn_html = df_fn.to_html(index=False, justify='center')
    # Move the centering style from the header row onto the table tag:
    replace_str1 = ' style="text-align: center;"'
    replace_str2 = 'class="dataframe"'
    df_fn_html = df_fn_html.replace(replace_str1, '')
    df_fn_html = df_fn_html.replace(replace_str2, replace_str1)
    pct_plans = n_plans/dfa_rows
    # Share of the filtered runs owned by the two most frequent functions:
    top2_fn = fn_cnt[0:2].sum()
    pct_top2_fn = top2_fn/n_plans
    text = f"Out of {dfa_rows} completed searches, {pct_plans:.0%} ({n_plans}), have {which}-digit or longer PlanLength.<br>"
    text += f"In that subset, {top2_fn:d} ({pct_top2_fn:.0%}) involve the search functions `{fn_cnt.index[0]}` and `{fn_cnt.index[1]}`."
    if len(uniq_probs) < 4:
        text += " And this occurs only for Problems: "
        pro = ",".join('{}' for p in uniq_probs) +'.<br>'
        text += pro.format(*uniq_probs)
    else:
        text += " And this occurs for all Problems."
    text += "<br>"
    return df_fn_html, text, dfout
def make_bar_plots(df_list,
                   x_col, y_col,
                   problems,
                   legend_bbox=(.05, .95),
                   to_file='',
                   show=False,
                   excluded=None):
    """
    Draw two side-by-side bar plots of y_col, one per problem.

    Args:
        df_list: two dataframes (one per problem) with identical Searcher rows.
        x_col: column holding the per-problem action count (used in x labels).
        y_col: column to plot; 'ElapsedSeconds' is swapped for 'Minutes' on the
            more complex problems (3 and 4).
        problems: pair of problem display names for the axis labels.
        legend_bbox: bbox_to_anchor for the legend on the right-hand axes.
        to_file: if non-empty, save the figure to this path.
        show: if True, return the axes array for interactive display.
        excluded: optional list of (1-based index, searcher name) tuples that
            were skipped for these problems; marked with an 'X' in the legend.
    """
    import matplotlib.patches as mpatches

    def despine(ax):
        # Hide the top/right spines for a cleaner look.
        ax.spines['top'].set_visible(False)
        ax.spines['right'].set_visible(False)

    # Each problem has exactly one positive action count; extract it for labels.
    a1 = df_list[0][x_col].unique().astype(int)
    a1 = a1[a1>0]
    a2 = df_list[1][x_col].unique().astype(int)
    a2 = a2[a2>0]
    assert len(a1) == len(a2) == 1
    action_nums = [a1[0], a2[0]]
    p1 = df_list[0]['Air cargo problem'].iloc[0]
    p2 = df_list[1]['Air cargo problem'].iloc[0]
    # Search function names should be common to all dfs:
    search = df_list[0].Searcher.tolist()
    # Sample cmap evenly so each searcher keeps a stable color across figures:
    s_len = len(search)
    cmap = plt.get_cmap('viridis')
    m = cmap.N // s_len
    colors = [cmap.colors[i*m] for i in range(s_len)]
    fig, axs = plt.subplots(nrows=1, ncols=2, figsize=(12,5))
    # Use the minutes column for the more complex problems:
    if y_col == 'ElapsedSeconds':
        ty_col = 'Elapsed time'
        # BUG FIX: original tested the undefined name `p` (`p1 == 3 or p == 4`),
        # raising NameError whenever p1 != 3; intent per the comment was
        # "applies to problems 3/4".
        if p1 == 3 or p2 == 4:  # applies to problems 3/4
            y_col = 'Minutes'
    else:
        ty_col = y_col
    plt.title(f'{ty_col} vs. {x_col} for Problems {p1} & {p2}',
              y = 1.05, fontsize=14)
    for i, df in enumerate(df_list):
        ylog = False
        ylab = f'{y_col}'
        # log scale on NewNodes for df2, df3, df4:
        if (i == 1 or p1 == 3) and y_col == 'NewNodes':
            ylog = True
            ylab += ' (log)'
        axs[i].set_ylabel(ylab, fontsize=12)
        df[y_col].plot.bar(ax=axs[i], logy=ylog,
                           color=colors,
                           legend=False)
        t = '{}, {} = {:d}'.format(problems[i], x_col, action_nums[i])
        axs[i].set_xlabel(t, fontsize=12)
        axs[i].set_xticks([])
        despine(axs[i])
    legt = 'Searchers'
    # On problems 3/4 some searches were excluded; mark them in the legend
    # (the colormap stays identical to figures 1/2, but some runs have no data).
    new_lgd = p1 == 3 and excluded is not None
    if new_lgd:
        legt += ' (X :: excluded)'
        # 0-based SEARCHES indices of the excluded runs:
        x_idx = [tup[0] - 1 for tup in excluded]
    legend_patches = []
    for i, c in enumerate(colors):
        # ' + ' visually separates the function name from its heuristic.
        lab = search[i].replace(' ', ' + ')
        if new_lgd and SEARCHES.index(search[i]) in x_idx:
            lab += ' X'
        legend_patches.append(mpatches.Patch(color=c, label=lab))
    axs[1].legend(handles=legend_patches,
                  title=legt,
                  title_fontsize='14',
                  fontsize='medium',
                  bbox_to_anchor=legend_bbox,
                  loc='upper left',
                  labelspacing=0.6,
                  fancybox=True)
    plt.tight_layout()
    if to_file:
        plt.savefig(to_file)
    if show:
        return axs
def format_multiples(multi):
    """Render a pandas Series of strings as a bracketed list, e.g. '[a, b]'."""
    placeholders = ', '.join('{' + str(k) + ':s}' for k in range(len(multi)))
    return '[' + placeholders.format(*multi.values) + ']'
def order_analysis(df2, df1, column_to_compare):
    """
    df2: has the large values.

    Returns (average magnitude, Searcher names of rows whose magnitude
    exceeds that average), where magnitude is the rounded log of the
    df2/df1 ratio for column_to_compare.

    NOTE(review): np.log is the natural log, so these "orders of magnitude"
    are base-e, not base-10 — confirm this is intended.
    """
    colA_larger_values = df2[column_to_compare]
    colA_smaller_values = df1[column_to_compare]
    # orders of magnitude difference btw dfB and dfA (min, max):
    mag = np.round(np.log(colA_larger_values/colA_smaller_values), 0)
    mag.sort_values(ascending=False, inplace=True)
    mag_aver = int(np.round(mag.mean(), 0))
    # get the indices of values above average:
    ma = mag[mag > mag_aver].index.tolist()
    # get the names of all searchers corresponding to the ma:
    above_multiples = (mag_aver, df2.loc[ma, 'Searcher'])
    return above_multiples
def comparison_paragraph(df2, df1, heading, column_to_compare, return_html=False):
    """Build an HTML paragraph comparing column_to_compare across two problems.

    Returns the raw HTML string when return_html=True, else an IPython
    Markdown object for notebook display.
    """
    # Problem numbers are the last character of the 'Problem' display names.
    p1 = df1.loc[0,'Problem'][-1]
    p2 = df2.loc[0,'Problem'][-1]
    order_aver, searches_above = order_analysis(df2, df1, column_to_compare)
    above = format_multiples(searches_above)
    headinglc = heading.lower()
    text = f"""<h3>* {heading}</h3><p style="font-size:110%;">For Problems {p1} and {p2}, """
    text += f"the <i>average</i> order of magnitude difference in {headinglc} is "
    text += f"<b>{order_aver:d}</b>, which is surpassed by these searches: {above}.</p>"
    if return_html:
        return text
    else:
        return Markdown(text)
def get_elim_candidates(df2, df1):
    """
    For the analysis of problems 1 & 2.
    List the costliest searches: candidates for elimination on more complex problems.

    Returns a list of (1-based SEARCHES index, searcher name) tuples, or None
    when df1 is not Problem 1's frame.
    """
    # Guard: this analysis only applies to the Problem 1 vs Problem 2 pair.
    if df1.loc[1,'Problem']!= problems[0]:
        return
    nodes_order_av, nodes_above = order_analysis(df2, df1, 'NewNodes')
    time_order_av, time_above = order_analysis(df2, df1, 'ElapsedSeconds')
    # Candidates are costly on BOTH node creation and elapsed time.
    elim_candidates = set(nodes_above[:nodes_order_av]).intersection(set(time_above[:time_order_av]))
    # return their 1-base index also:
    out = [(SEARCHES.index(c)+1, c) for c in elim_candidates]
    return out
def paragraph_p12(candidates_tup, return_html=False):
    """
    For displaying the analysis of problems 1 & 2.

    candidates_tup: iterable of (index, searcher name) pairs to list as
    elimination candidates. Returns raw HTML when return_html=True,
    otherwise an IPython Markdown object.
    """
    items = "".join(f"<dt><b>{idx:>2}: {name}</b></dt>"
                    for idx, name in candidates_tup)
    text = """<h3>* Insights from Problems 1 and 2</h3><p style="font-size:110%;">"""
    text += """On the basis of Figures 1 and 2, which show the number of new nodes created,
    and the time spent by each search function, respectively, the searches that are candidates
    for elimination for more complex problems are those at the intersection of the average-ranked
    costliest sets viz new nodes creation and search time.<br>These searches are:</p><pre><dl>"""
    text += "<dl>" + items + "</dl></p></pre>"
    if return_html:
        return text
    else:
        return Markdown(text)
def add_div_around_html(div_html_text, output_string=False, div_style="{width: 80%}"):
    """
    Wrap an html code str inside a div.
    div_style: whatever follows style= within the <div>
    Behaviour with `output_string=True`:
        The cell is overwritten with the output string (but the cell mode is still in 'code' not 'markdown')
        The only thing to do is change the cell mode to Markdown.
    If `output_string=False`, the HTML/md output is displayed in an output cell.
    """
    wrapped = f"""<div style="{div_style}">{div_html_text}</div>"""
    if not output_string:
        #get_ipython().set_next_input(wrapped, 'markdown')
        return Markdown(wrapped)
    return wrapped
3,795 | 4ef4e302304ccf2dc92cdebe134e104af47aae20 | from django.contrib import admin
from Evaluacion.models import Evaluacion
admin.site.register(Evaluacion)
|
3,796 | 92bbccfbfebf905965c9cb0f1a85ffaa7d0cf6b5 | # -*- coding: utf-8 -*-
"""
Created on Fri Jul 19 13:42:09 2019
@author: Administrator
"""
from config.path_config import *
import GV
def ReadTxtName(rootdir):
    """Read a text file and return its lines as a list of strings.

    Each entry has its trailing newline removed (other whitespace is kept,
    matching the original strip('\\n') behavior). Empty lines are preserved
    as empty strings.

    Args:
        rootdir: path of the text file to read.

    Returns:
        list[str]: one entry per line of the file.
    """
    # Iterate the file object directly instead of a manual readline loop.
    with open(rootdir, 'r') as file_to_read:
        return [line.rstrip('\n') for line in file_to_read]
def project_query_lz_main(question):
    """Check whether *question* mentions a known project name.

    On a match: sets GV.SHOW and returns a (Chinese) reply string saying the
    project was recognized but no details are recorded yet. Otherwise sets
    GV.FLAG/GV.SHOW and returns the question unchanged so the next module
    can handle it.
    """
    # Look for any known project name inside the question text.
    txt_line = ReadTxtName(PROJECT_NAMES)
    for project_name in txt_line:
        if project_name in question:
            # print('We think you want to query info about ' + project_name)
            GV.SHOW = True
            return ('我们觉得您是想查' + project_name +
                    '项目的信息,但是我们还没有记录项目详细信息')
    # No project matched: flag it and fall through to the other modules.
    GV.FLAG = 3
    GV.SHOW = False
    #state = False
    # print('Unrelated to any project; hand off to the other modules')
    return question
#project_query_lz_main('工银天梭项目进度怎么样了',2) |
3,797 | 9f36b846619ca242426041f577ab7d9e4dad6a43 | import pandas as pd
import numpy as np
import geopandas as gp
from sys import argv
import os
import subprocess
# Bounding box in whole degrees (north, east, south, west) plus output dir.
n, e, s, w = map(int, argv[1:5])
output_dir = argv[5]
print(f'{(n, e, s, w)=}')
# Download every 1x1-degree USGS 1/3 arc-second DEM tile inside the box.
for lat in range(s, n + 1):
    for lon in range(w, e + 1):
        # Tile names encode hemisphere + absolute degrees, e.g. 'n40w105'.
        latdir = 'n' if lat >= 0 else 's'
        londir = 'e' if lon >= 0 else 'w'
        fname = f'{latdir}{abs(lat):02d}{londir}{abs(lon):03d}'
        print(fname)
        url = f'https://prd-tnm.s3.amazonaws.com/StagedProducts/Elevation/13/TIFF/{fname}/USGS_13_{fname}.tif'
        print(url)
        outf = os.path.join(output_dir, f'{fname}.tif')
        # Shell out to curl; one GeoTIFF per tile lands in output_dir.
        subprocess.run(['curl', '--output', outf, url])
3,798 | 9dddae5e85bda67bdbb6f0336a29949cb1f4d59e | """A twitter bot that retweets positive tweets from cool lists."""
import sys
import os
from random import choice
from random import shuffle
import twitter
import unirest
# Authenticated API client. NOTE(review): this rebinds the imported `twitter`
# module name to the Api instance — works, but shadows the module.
twitter = twitter.Api(
    consumer_key=os.environ['TWITTER_CONSUMER_KEY'],
    consumer_secret=os.environ['TWITTER_CONSUMER_SECRET'],
    access_token_key=os.environ['TWITTER_ACCESS_TOKEN_KEY'],
    access_token_secret=os.environ['TWITTER_ACCESS_TOKEN_SECRET'])
lists = twitter.GetListsList()
# Per-list fetch size: split a 200-tweet budget evenly across the lists
# (integer division under Python 2, which this file targets — see the
# print statements below).
count = 200 / len(lists)
num_tweets = 25        # max tweets sampled per run
min_tweet_len = 120    # only consider reasonably long tweets
max_neg = 0.32         # sentiment thresholds defining a "positive" tweet
min_pos = 0.6
# ===================== functions =====================
def grab_tweets():
    """Returns a list of up to `num_tweets` shuffled tweets, of length at
    least `min_tweet_len`, drawn from the authenticated user's lists."""
    tweets = []
    long_tweets = []
    # Pull `count` recent tweets (including retweets) from every list.
    for each in lists:
        tweets = tweets + twitter.GetListTimeline(list_id=each.id,
                                                  count=count,
                                                  include_rts=True)
    # Keep only sufficiently long tweets.
    for tweet in tweets:
        if len(tweet.text) >= min_tweet_len:
            long_tweets.append(tweet)
    # Shuffle so the final sample is random, then cap at num_tweets.
    shuffle(long_tweets)
    if len(long_tweets) >= num_tweets:
        return long_tweets[:num_tweets]
    else:
        return long_tweets
def filter_pos_tweets(tweets):
    """Returns the subset of `tweets` scored positive by the Mashape
    text-processing sentiment API (neg <= max_neg and pos >= min_pos)."""
    pos_tweets = []
    for tweet in tweets:
        # One HTTP call per tweet; response body carries label + probabilities.
        sentiment = unirest.post("https://japerk-text-processing.p.mashape.com/sentiment/",
                                 headers={
                                     "X-Mashape-Key": os.environ['X_MASHAPE_KEY'],
                                     "Content-Type": "application/x-www-form-urlencoded",
                                     "Accept": "application/json"
                                 },
                                 params={
                                     "language": "english",
                                     "text": tweet.text
                                 }
                                 )
        # NOTE(review): `&` is bitwise here; works on bools but `and` is clearer.
        if (sentiment.body['probability']['neg'] <= max_neg) & (sentiment.body['probability']['pos'] >= min_pos):
            pos_tweets.append(tweet)
            log_sentiment(tweet, sentiment)
    return pos_tweets
def log_sentiment(tweet, sentiment):
    # Dump the sentiment scores for one tweet to stdout and flush so the
    # output appears immediately in hosted logs. (Python 2 print statements.)
    print "TEXT"
    print tweet.id
    print "label: " + str(sentiment.body['label'])
    print "neg: " + str(sentiment.body['probability']['neg'])
    print "pos: " + str(sentiment.body['probability']['pos'])
    print "neutral: " + str(sentiment.body['probability']['neutral'])
    print " "
    sys.stdout.flush()
    return
def choose_tweet(pos_tweets):
    """Returns a single randomly selected tweet."""
    return choice(pos_tweets)
def like_tweets(pos_tweets):
    """Authenticated user likes (favorites) every tweet in pos_tweets."""
    for tweet in pos_tweets:
        twitter.CreateFavorite(status_id=tweet.id)
    return
def retweet(tweet):
    """Authenticated user retweets tweet (keeping full user objects)."""
    twitter.PostRetweet(tweet.id, trim_user=False)
    return
def run():
    """Runs the bot: sample tweets, keep the positive ones, retweet one
    and like them all."""
    # Returns a list of 25 random tweets from the authenticated user's lists.
    tweets = grab_tweets()
    # Returns a list of positive tweets after running sentiment analysis.
    pos_tweets = filter_pos_tweets(tweets)
    # Returns a single randomly selected positive tweet.
    # NOTE(review): raises IndexError if pos_tweets is empty.
    tweet = choose_tweet(pos_tweets)
    # Authenticated user retweets randomly selected positive tweet.
    retweet(tweet)
    # Authenticated user likes all positive tweets.
    like_tweets(pos_tweets)
# ===================== run =====================
run()
|
3,799 | 29304bdbf93b0b1308025db1d35a92346c6dcbe0 | def sum_string(string):
list_chars = [zerone for zerone in string if zerone in ["0", "1"]]
return list_chars
def check_triads(trio, final_str):
    """Count (overlapping) occurrences of trio+'0' and trio+'1' in final_str.

    Returns [count followed by '0', count followed by '1'].
    """
    followed_by_zero = sum(1 for i in range(len(final_str))
                           if final_str.startswith(trio + '0', i))
    followed_by_one = sum(1 for i in range(len(final_str))
                          if final_str.startswith(trio + '1', i))
    return [followed_by_zero, followed_by_one]
# Interactive driver: collect at least 100 binary symbols from the user,
# then report how often each 3-bit pattern is followed by a 0 or a 1.
number_str = ""
list_str = []
list_triads = ['000', '001', '010', '011', '100', '101', '110', '111']
while len(list_str) < 100:
    print('Print a random string containing 0 or 1:')
    number_str = input()
    # sum_string filters the input down to '0'/'1' characters only.
    list_str.extend(sum_string(number_str))
    if len(list_str) < 100:
        print(f'Current data length is {len(list_str)}, {(100 - len(list_str))} symbols left')
print("\nFinal data string:")
final_st = ''.join(list_str)
print(f"{final_st}\n")
# For each triad, show how many times it is followed by '0' vs '1'.
for tri in list_triads:
    values = check_triads(tri, final_st)
    print(f"{tri}: {values[0]},{values[1]}")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.