index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
19,800 | 666eb95e364a5ebf335647e499f3eca1727ef3d7 | '''
Napisz program, który odczytuje od użytkownika wiele liczb.
Program powinien wyliczyć i na końcu wypisać następujące statystyki:
- liczba podanych liczb (ile sztuk),
- suma,
- średnia,
- minimum
- maksimum
NIE używaj funkcji wbudowanych!
'''
# Read numbers until the user types "koniec", then report count / sum /
# mean / min / max, computed manually per the exercise's "no builtins" rule.
liczby = []
while True:
    dane = input("Podaj liczbę lub powiedz koniec: ")
    try:
        liczby.append(float(dane))  # convert once (original converted twice)
    except ValueError:
        if dane.lower() == "koniec":
            break
        print("Złe dane, sprobuj ponownie")
print(liczby)
ilosc_liczb = 0
suma_liczb = 0
# Bug fix: min/max were initialised to 0 and updated in an if/elif chain, so
# a number that updated the minimum could never update the maximum (for
# descending input the maximum stayed 0), and all-negative input kept min=0.
# Track both independently, seeded from the first element via None.
min_liczb = None
max_liczb = None
for liczba in liczby:
    ilosc_liczb += 1
    suma_liczb += liczba
    if min_liczb is None or liczba < min_liczb:
        min_liczb = liczba
    if max_liczb is None or liczba > max_liczb:
        max_liczb = liczba
if ilosc_liczb > 0:
    srednia_liczb = suma_liczb / ilosc_liczb
    print(f"Ilość liczb wynosi {ilosc_liczb}")
    print(f"Suma liczb wynosi {suma_liczb}")
    print(f"Średnia liczb wynosi {srednia_liczb}")
    print(f"Najmniejsza liczba to {min_liczb}")
    print(f"Największa liczba to {max_liczb}")
else:
    # Bug fix: the original divided by zero when no numbers were entered.
    print("Nie podano żadnych liczb")
|
19,801 | 34d57dbd94323f722a1a91ef53f72a7bde83b33b | #Steph Version 1 Artistic Merit, no lights only music
#Programmer: Steph Perez
#email: sperez33@cnm.edu or steph.perez1994@gmail.com
# purpose: have the option to play music while competing in the various challenges at Pi Wars 2020
# we would like to eventually add in code to control LED lights as well.
# we hope to get additional points for artistic merit.
import csv
import pygame
from pygame import *
import sys
import os |
19,802 | 572f7a2b9aec02117bf6419fcfb9892a6d7f7fa2 | from pygooglechart import QRChart
import re
import uuid
import shutil
import os
import logging
HEIGHT = 210
WIDTH = 210
vCardTemplate = """BEGIN:VCARD
N:{SURNAME};{FIRSTNAME}
TEL;CELL:{TELMOBILE}
TEL;HOME:{TELHOME}
EMAIL:{EMAIL}
URL:{URL}
END:VCARD"""
class QRMaker:
    """Renders vCard data as QR-code PNG images via the Google Charts API."""

    def __init__(self):
        # Pristine template; must never be mutated (see createVCard).
        self._templateData = vCardTemplate

    def createVCard(self, data):
        """Fill the vCard template from ``data`` and download a QR PNG.

        data: dict mapping placeholder names (SURNAME, FIRSTNAME, ...) to values.
        Returns the uuid (stem of the cached PNG file), or None on failure.
        """
        try:
            logging.debug("In create vCard")
            chart = QRChart(HEIGHT, WIDTH)
            # Bug fix: the original restarted from self._templateData on every
            # iteration (so only the LAST key's substitution survived) and then
            # stored the result back on self, corrupting all subsequent calls.
            # Apply the substitutions cumulatively on a local copy instead.
            templateData = self._templateData
            for k, v in data.items():
                templateData = templateData.replace('{%s}' % k, v)
            # Strip any placeholders that were not supplied.
            match = re.sub(r'{\w*\w}', '', templateData)
            chart.add_data(match)
            chart.set_ec('H', 0)  # highest error-correction level
            uid = uuid.uuid1()
            filePath = '%s/../static/cache/%s.png' % (os.path.dirname(__file__), uid)
            logging.debug("Creating image: " + filePath)
            chart.download(filePath)
            return uid
        except Exception:
            # Bug fix: the original `except ex:` raised a NameError instead of
            # handling anything.
            logging.exception('Unhandled exception')

    def generatePermalink(self, id):
        """Copy the cached QR image for ``id`` into the permalinked images dir.

        Returns the relative path of the permalinked PNG.
        """
        logging.debug("From: " + '%s/../static/cache/%s.png' % (os.path.dirname(__file__), id))
        logging.debug("To: " + '%s/../static/images/permalinked/%s.png' % (os.path.dirname(__file__), id))
        shutil.copyfile(
            '%s/../static/cache/%s.png' % (os.path.dirname(__file__), id),
            '%s/../static/images/permalinked/%s.png' % (os.path.dirname(__file__), id)
        )
        logging.debug('./static/images/permalinked/%s.png' % id)
        return './static/images/permalinked/%s.png' % id
|
19,803 | 806bc41af9b6e681d8486368847403e5f7369e01 | # coding=utf-8
# Notations from: http://www.cs.cornell.edu/courses/cs412/2008sp/lectures/lec12.pdf
# Symbol-kind tags stored in SymbolObject.type_of_object.
# NOTE(review): the original inline examples looked shuffled relative to the
# names (e.g. 'func' carried a boolean-expression example) — examples below
# are hedged guesses, confirm against the parser that produces these tags.
TYPE_VARIABLE = 'var'      # e.g. var a;
TYPE_FUNCTION = 'func'     # presumably int pow(int n, int m) — TODO confirm
TYPE_PROCEDURE = 'pro'
TYPE_EXPRESSIONS = 'expr'  # presumably (a + 1) == 2 — TODO confirm
TYPE_STATEMENTS = 'stat'   # presumably a = 1.0; — TODO confirm
TYPE_PARAMETER = 'par'
TYPE_ARRAY = 'arr'
class SymbolObject(object):
    """
    Key for symbol table hashmap

    static int k;
    name: k
    data_type: int
    attribute: static
    kind: var
    """

    def __init__(self, name, type_of_object, data_type, dp=None, attribute=None, others=None):
        """
        :param name: str
        :param type_of_object: str (one of the TYPE_* tags)
        :param data_type: str
        :param dp: depth/position marker — presumably scope depth, TODO confirm
        :param attribute: dict of extra attributes set directly on the object
        :param others: list
        """
        self.name = name
        self.type_of_object = type_of_object
        self.data_type = data_type
        if attribute is not None:
            # .items() instead of py2-only .iteritems() (works on both 2 and 3)
            for attr, value in attribute.items():
                setattr(self, attr, value)
        self.dp = dp
        # Bug fix: the original condition was inverted — it kept None when no
        # list was given and DISCARDED a caller-supplied list.
        if others is None:
            self.others = []
        else:
            self.others = others

    def __unicode__(self):
        # %s instead of %i so a dp of None no longer crashes the repr
        # (identical output for integer dp values).
        return '<%s, %s, %s, %s>' % (self.name, self.type_of_object, self.dp, self.data_type)

    def __repr__(self):
        return '<%s, %s, %s, %s>' % (self.name, self.type_of_object, self.dp, self.data_type)
|
19,804 | 0ea97becb8bae54ecd065ce5cc11d463baed74e1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Learns features of inputs.
Principal Author: Matthew Alger
"""
from __future__ import division
import time
import numpy
import theano
from theano.tensor.shared_randomstreams import RandomStreams
class Denoising_Autoencoder(object):
    """
    It like learns how to take noisy versions of the input to unnoisy versions
    of the input like back to the original versions of the input. -- Buck

    Denoising autoencoder with tied weights plus a logistic-regression label
    head on the hidden layer, built on Theano symbolic graphs.
    """

    def __init__(self, input_dimension, hidden_dimension, output_dimension,
                 input_batch=None, output_batch=None, symbolic_input=None,
                 rng=None, theano_rng=None, learning_rate=0.1, corruption=0.3):
        """
        input_dimension: The dimension of the input vectors.
        hidden_dimension: How many hidden nodes to map to.
        input_batch: Optional. Input data.
        output_batch: Optional. A vector of labels corresponding to each input vector.
        output_dimension: How many labels there are.
        symbolic_input: Optional. A symbolic input value.
        rng: Optional. A NumPy RandomState.
        theano_rng: Optional. A Theano RandomStream.
        learning_rate: Optional. How large gradient descent jumps are.
        corruption: Optional. How much to corrupt the input when learning.
        """
        self.input_dimension = input_dimension
        self.hidden_dimension = hidden_dimension
        self.output_batch = output_batch
        self.output_dimension = output_dimension
        if symbolic_input is None:
            self.initialise_symbolic_input()
        else:
            self.symbolic_input = symbolic_input
        self.initialise_symbolic_output()
        if rng is None:
            self.initialise_rng()
        else:
            self.rng = rng
        if theano_rng is None:
            # Relies on self.rng being set just above.
            self.initialise_theano_rng()
        else:
            self.theano_rng = theano_rng
        self.corruption = corruption
        self.input_batch = input_batch
        self.activation = theano.tensor.nnet.sigmoid
        self.learning_rate = learning_rate
        # Order matters: the corrupted input and the parameters must exist
        # before the Theano functions are compiled.
        self.initialise_corrupted_input()
        self.initialise_parameters()
        self.initialise_theano_functions()

    def initialise_corrupted_input(self):
        """
        Mask the symbolic input with binomial noise: each component is kept
        with probability (1 - corruption) and zeroed otherwise.
        """
        self.symbolic_corrupted_input = self.theano_rng.binomial(
            size=self.symbolic_input.shape,
            n=1,
            p=1 - self.corruption,
            dtype=theano.config.floatX) * self.symbolic_input

    def initialise_theano_rng(self):
        """
        Initialise and store a Theano RandomStream.
        """
        self.theano_rng = RandomStreams(self.rng.randint(2**30))

    def initialise_parameters(self):
        """
        Initialises and subsequently stores a weight matrix, bias vector,
        reverse bias vector, label weight matrix, and label bias vector.
        """
        # Glorot-style bound for the uniform weight initialisation.
        low = -numpy.sqrt(6/(self.input_dimension + self.hidden_dimension))
        high = numpy.sqrt(6/(self.input_dimension + self.hidden_dimension))
        if self.activation is theano.tensor.nnet.sigmoid:
            # We know the optimum distribution for tanh and sigmoid, so we
            # assume that we're using tanh unless we're using sigmoid.
            low *= 4
            high *= 4
        self.weights = theano.shared(
            value=numpy.asarray(
                self.rng.uniform( # This distribution is apparently optimal for tanh.
                    low=low,
                    high=high,
                    size=(self.input_dimension, self.hidden_dimension)),
                dtype=theano.config.floatX),
            name="W",
            borrow=True)
        self.bias = theano.shared(
            value=numpy.zeros((self.hidden_dimension,),
                dtype=theano.config.floatX),
            name="b",
            borrow=True)
        self.reverse_bias = theano.shared(
            value=numpy.zeros((self.input_dimension,),
                dtype=theano.config.floatX),
            name="b'",
            borrow=True)
        self.reverse_weights = self.weights.T # Tied weights, so the reverse weight
                                              # matrix is just the transpose.
        self.label_weights = theano.shared(
            value=numpy.zeros((self.hidden_dimension, self.output_dimension),
                dtype=theano.config.floatX),
            name="lW",
            borrow=True)
        self.label_bias = theano.shared(
            value=numpy.zeros((self.output_dimension,),
                dtype=theano.config.floatX),
            name="lb",
            borrow=True)

    def initialise_rng(self):
        """
        Initialises and subsequently stores a NumPy RandomState.
        """
        self.rng = numpy.random.RandomState()

    def initialise_symbolic_input(self):
        """
        Initialises and subsequently stores a symbolic input value.
        """
        self.symbolic_input = theano.tensor.dmatrix("x")

    def initialise_symbolic_output(self):
        """
        Initialises and subsequently stores a symbolic output value.
        """
        self.symbolic_output = theano.tensor.ivector("y")

    def get_hidden_output(self):
        """
        Get the values output by the hidden layer.
        """
        return self.activation(
            theano.tensor.dot(self.symbolic_corrupted_input, self.weights) +
            self.bias)

    def get_reconstructed_input(self):
        """
        Get the reconstructed input.
        """
        return self.activation(
            theano.tensor.dot(self.get_hidden_output(), self.reverse_weights) +
            self.reverse_bias)

    def error_rate(self):
        """
        Get the rate of incorrect prediction.
        """
        return theano.tensor.mean(theano.tensor.neq(
            self.get_symbolic_predicted_labels(),
            self.symbolic_output))

    def get_cost(self):
        """
        Get the symbolic cost for the weight matrix and bias vectors.
        """
        x = self.symbolic_input
        y = self.get_reconstructed_input()
        # Binary cross-entropy between the clean input and its reconstruction,
        # summed per example, then averaged over the minibatch.
        negative_log_loss = -theano.tensor.sum(x*theano.tensor.log(y) +
            (1-x)*theano.tensor.log(1-y), axis=1)
        mean_loss = theano.tensor.mean(negative_log_loss)
        return mean_loss

    def get_lr_cost(self):
        """
        Get the symbolic cost for the logistic regression matrix and bias vector.
        """
        # Negative mean log-probability of the correct label.
        labels = self.get_symbolic_expected_rewards()
        return -theano.tensor.mean(
            theano.tensor.log(labels)[
                theano.tensor.arange(self.symbolic_output.shape[0]),
                self.symbolic_output])

    def get_symbolic_predicted_labels(self):
        """
        Predict labels of a minibatch.
        """
        return theano.tensor.argmax(self.get_symbolic_expected_rewards(), axis=1)

    def get_symbolic_expected_rewards(self):
        """
        Get probabilities of the input values being each label.
        """
        prob_matrix = theano.tensor.nnet.softmax(
            theano.tensor.dot(self.get_hidden_output(),
                self.label_weights) + self.label_bias)
        return prob_matrix

    def get_updates(self):
        """
        Get a list of updates to make when the model is trained.
        """
        # Plain SGD on both the autoencoder cost and the label (LR) cost.
        da_cost = self.get_cost()
        weight_gradient = theano.tensor.grad(da_cost, self.weights)
        bias_gradient = theano.tensor.grad(da_cost, self.bias)
        reverse_bias_gradient = theano.tensor.grad(da_cost, self.reverse_bias)
        lr_cost = self.get_lr_cost()
        lr_weight_gradient = theano.tensor.grad(lr_cost, self.label_weights)
        lr_bias_gradient = theano.tensor.grad(lr_cost, self.label_bias)
        updates = [
            (self.weights, self.weights - self.learning_rate*weight_gradient),
            (self.bias, self.bias - self.learning_rate*bias_gradient),
            (self.reverse_bias, self.reverse_bias -
                self.learning_rate*reverse_bias_gradient),
            (self.label_weights, self.label_weights -
                self.learning_rate*lr_weight_gradient),
            (self.label_bias, self.label_bias -
                self.learning_rate*lr_bias_gradient)]
        return updates

    def initialise_theano_functions(self):
        """
        Compile Theano functions for symbolic variables.
        """
        index = theano.tensor.lscalar("i")
        batch_size = theano.tensor.lscalar("b")
        validation_images = theano.tensor.matrix("vx")
        validation_labels = theano.tensor.ivector("vy")
        input_matrix = theano.tensor.matrix("ix")
        # train_model_once is only compiled when training data was provided.
        if (self.input_batch is not None and
                self.output_batch is not None):
            self.train_model_once = theano.function([index, batch_size],
                outputs=self.get_cost(),
                updates=self.get_updates(),
                givens={
                    self.symbolic_input: self.input_batch[index*batch_size:
                        (index+1)*batch_size],
                    self.symbolic_output: self.output_batch[index*batch_size:
                        (index+1)*batch_size]})
        self.validate_model = theano.function(inputs=[validation_images, validation_labels],
            outputs=self.error_rate(),
            givens={
                self.symbolic_input: validation_images,
                self.symbolic_output: validation_labels},
            allow_input_downcast=True)
        self.get_expected_rewards = theano.function([input_matrix],
            outputs=self.get_symbolic_expected_rewards(),
            givens={self.symbolic_input: input_matrix})

    def get_weight_matrix(self):
        """
        Get the weight matrix.
        """
        return self.weights.get_value(borrow=True)

    def train_model(self, epochs=100, minibatch_size=20,
                    yield_every_iteration=False):
        """
        Train the model against the given data.

        epochs: How long to train for.
        minibatch_size: How large each minibatch is.
        yield_every_iteration: When to yield.

        Generator: yields (iteration, cost) per minibatch when
        yield_every_iteration is true, otherwise (epoch, mean cost) per epoch.
        """
        if self.input_batch is None:
            raise ValueError("Denoising autoencoder must be initialised with "
                "input data to train model independently.")
        if self.output_batch is None:
            raise ValueError("RMI denoising autoencoder must be initialised "
                "with output data to train model independently.")
        batch_count = self.input_batch.get_value(
            borrow=True).shape[0]//minibatch_size
        for epoch in xrange(epochs):  # Python 2 module (see print statements below)
            costs = []
            for index in xrange(batch_count):
                cost = self.train_model_once(index, minibatch_size)
                costs.append(cost)
                if yield_every_iteration:
                    yield (index, cost)
            if not yield_every_iteration:
                yield (epoch, numpy.mean(costs))
def test_DA(DA, epochs=15):
    """
    Train the given denoising-autoencoder class on MNIST, report the
    validation error rate, and save a PNG of the learned weight filters.
    Python 2 script; requires the project-local lib.mnist / lib.dlt_utils
    and PIL.
    """
    from sys import argv
    import lib.mnist as mnist
    print "loading training images"
    images = mnist.load_training_images(format="theano", validation=False, div=256.0)
    labels = mnist.load_training_labels(format="theano", validation=False)
    print "loading test images"
    # NOTE(review): these also load *training* data but with validation=True —
    # presumably the project's held-out validation split; confirm in lib.mnist.
    validation_images = mnist.load_training_images(format="numpy", validation=True)
    validation_labels = mnist.load_training_labels(format="numpy", validation=True)
    print "instantiating denoising autoencoder"
    corruption = 0.3
    learning_rate = 0.1
    hiddens = 500
    # 784 = 28x28 MNIST pixels, 10 output classes.
    da = DA(784, hiddens,
        input_batch=images,
        output_batch=labels,
        output_dimension=10,
        corruption=corruption,
        learning_rate=learning_rate)
    print "training..."
    for epoch, cost in da.train_model(epochs):
        print epoch, cost
    print "wrong {:.02%} of the time".format(
        float(da.validate_model(validation_images, validation_labels)))
    print "done."
    import PIL
    import lib.dlt_utils as utils
    import random
    # Tile the learned weight columns as 28x28 filter images and save the
    # sheet under a random name encoding the hyperparameters.
    image = PIL.Image.fromarray(utils.tile_raster_images(
        X=da.weights.get_value(borrow=True).T,
        img_shape=(28, 28), tile_shape=(50, 10),
        tile_spacing=(1, 1)))
    image.save('../plots/{:010x}_{}_{}_{}_{}_{}.png'.format(
        random.randrange(16**10), argv[0].replace("/", "-"), corruption, learning_rate, epochs, hiddens))
if __name__ == '__main__':
test_DA(Denoising_Autoencoder, 10)
|
19,805 | da0a8a098d192cab18fbebe541cbe75fb8f4f868 | from PyQt5 import QtWidgets,QtGui,QtCore
from interfance import Ui_Form_interfance
from add import Ui_Form_add
import sys
class Main(QtWidgets.QMainWindow, Ui_Form_interfance):
    """Main window built from the Ui_Form_interfance designer form."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
        # Clicking the button opens the secondary "add" window.
        self.pushButton.clicked.connect(self.add_show)

    def add_show(self):
        """Create and show the add-window, keeping a reference on self so it
        is not garbage-collected (and closed) immediately."""
        self.add = Main_add()
        self.add.show()
class Main_add(QtWidgets.QMainWindow, Ui_Form_add):
    """Secondary 'add' window built from the Ui_Form_add designer form."""

    def __init__(self):
        super().__init__()
        self.setupUi(self)
# Enable high-DPI scaling before the QApplication is constructed
# (translated from the original Chinese comment: "make the window display
# as designed in Qt").
QtCore.QCoreApplication.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling)
app = QtWidgets.QApplication(sys.argv)
main = Main()
main.show()
sys.exit(app.exec_())
|
19,806 | e3d162ef61767efb34004d3bc8442e1878646f9c | import unittest
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
class Challenge2(unittest.TestCase):
    """Search copart.com for 'exotics' and verify PORSCHE appears in the
    results table's Make column."""

    def setUp(self):
        self.driver = webdriver.Chrome("../chromedriver.exe")

    def tearDown(self):
        self.driver.close()

    def test_challenge2(self):
        self.driver.get("https://www.copart.com")
        # self.driver.set_window_size(1128, 648)
        porscheFlag = False
        searchText = "PORSCHE"
        element = None
        ### Search exotics in search tab and hit Enter
        # Locate the search box once instead of three separate lookups.
        search_box = self.driver.find_element(By.ID, "input-search")
        search_box.click()
        search_box.send_keys("exotics")
        search_box.send_keys(Keys.ENTER)
        ### Wait for 5 seconds for the table to load on page
        try:
            element = WebDriverWait(self.driver, 5).until(
                EC.presence_of_element_located((By.XPATH, ".//table[@id='serverSideDataTable']")))
        except Exception:  # was a bare `except:` — don't swallow SystemExit/KeyboardInterrupt
            print("Data table did not load")
        ### If table is found, search for PORSCHE in every row's 'Make' column
        # Bug fix: `element != None` replaced by identity test, and the case
        # where the table never loaded (element is None) previously fell
        # through silently, letting the test PASS — now it fails explicitly.
        if element is not None and element.is_displayed():
            table_tbody = self.driver.find_elements_by_xpath(".//table[@id='serverSideDataTable']/tbody")
            for row in table_tbody[0].find_elements_by_xpath(".//tr"):
                for make in row.find_elements_by_xpath("//td[5]/span"):
                    if make.text.casefold() == searchText.casefold():
                        porscheFlag = True
            self.assertTrue(porscheFlag, "Couldn't find " + searchText)
        else:
            # self.fail instead of `assert False` (asserts vanish under -O)
            self.fail("Couldn't find " + searchText)
if __name__ == '__main__':
unittest.main()
|
19,807 | 20e885e3493b66c3964efbde7dc834d617b3a34c | # import Python's libraries
import socket
import time
from binascii import hexlify

# Timing-based covert-channel sender: the visible TCP stream carries `msg`,
# while the *delays between characters* encode the bits of `covert`.
# create a socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# bind the socket to all interfaces on some port
port = 1337
s.bind(("", port))
# listen for connections:
s.listen(0)
# accept a connecting client, addr is address of connecting client
c, addr = s.accept()
# inter-character delay encodes one bit: short sleep = 0, long sleep = 1
ZERO = 0.025
ONE = 0.1
msg = "Some message message message message message message message message ..."
n = 0
# convert message to binary
covert = "secret" + "EOF"
covert_bin = ""
for i in covert:
    # convert each character to a full byte (8-bit, zero-padded)
    covert_bin += bin(int(hexlify(i.encode()), 16))[2:].zfill(8)
# send a message, one character at a time with a delay in between characters:
for i in msg:
    c.send(i.encode())
    if (covert_bin[n] == "0"):
        time.sleep(ZERO)
    else:
        time.sleep(ONE)
    # wrap around if the cover message is longer than the covert bitstring
    n = (n + 1) % len(covert_bin)
c.send("EOF".encode())
# close the connection
c.close()
|
19,808 | 5e63dff61bec227ad3fdd9bfaba53c018b5dfa34 | class Sql:
    # SQL statement templates filled in with Python '%' string formatting.
    # NOTE(review): building SQL by string interpolation is vulnerable to SQL
    # injection when fed user input — callers should migrate to parameterized
    # queries (e.g. cursor.execute(sql, params)). Flagged only; unchanged here.
    userlist = 'SELECT * FROM usertb'
    userlistone = "SELECT * FROM usertb WHERE id = '%s'"
    # presumably columns are (id, pwd, name) — matches userupdate; TODO confirm
    userinsert = "Insert INTO usertb VALUES('%s','%s','%s')"
    userdelete = "DELETE FROM usertb WHERE id = '%s'"
    userupdate = "UPDATE usertb SET pwd = '%s', name = '%s' WHERE id = '%s'"
    itemlist = 'SELECT * FROM itemtb'
    itemlistone = "SELECT * FROM itemtb WHERE id = %d"
    # auto-increment id (null), then name, price, creation date, image name
    iteminsert = "Insert INTO itemtb VALUES (null,'%s',%d,CURRENT_DATE(),'%s')"
    itemdelete = "DELETE FROM itemtb WHERE id = %d"
    itemupdate = "UPDATE itemtb SET name = '%s', price = %d, imgname = '%s' WHERE id = %d"
19,809 | 87c0f41a06313570e3c5e3956fd21557111c376b | # Ví dụ áp dụng gradient descend (GD) với bài toán hồi quy tuyến tính
import numpy as np
from sklearn.linear_model import LinearRegression
import matplotlib.pyplot as plt

np.random.seed(21)
# Generate 1000 data points near the line y = 4 + 3x
X = np.random.rand(1000)
y = 4 + 3*X + 0.25*np.random.randn(1000)  # noise added
# Reference solution via the closed form (through scikit-learn)
model = LinearRegression()
model.fit(X.reshape(-1, 1), y.reshape(-1, 1))
w, b = model.coef_[0][0], model.intercept_[0]
sol_sklearn = np.array([b, w])
print("coef: ", model.coef_)
print("intercept: ", model.intercept_)
print("Solution found by sklearn: ", sol_sklearn)
# --------------------------------
# Plot the raw data
plt.plot(X, y, "b.", label="Input data")
# Plot the fitted line over [0, 1]
x0 = np.array([0, 1])
y0 = b + w*x0
plt.plot(x0, y0, color="r", linewidth=2, label="Sklearn solution")
plt.axis([0, 1, 0, 10])
plt.legend()
plt.show()
# --------------------------------
# Solve with gradient descent: w holds the intercept b as its first component
one = np.ones((X.shape[0], 1))
Xbar = np.concatenate((one, X.reshape(-1, 1)), axis=1)  # prepend a ones column
# Gradient of the loss function
def grad(w):
    """MSE gradient (1/N) * Xbar^T (Xbar.w - y), over the module-level Xbar, y."""
    N = Xbar.shape[0]
    return 1/N * Xbar.T.dot(Xbar.dot(w) - y)
# Value of the loss function
def cost(w):
    """Mean-squared-error loss 0.5/N * ||y - Xbar.w||^2, over the module-level data."""
    N = Xbar.shape[0]
    return 0.5/N * np.linalg.norm(y - Xbar.dot(w))**2
# Gradient-descent driver
def myGD(w_init, grad, eta):
    """Iterate w <- w - eta*grad(w), at most 100 steps.

    Stops as soon as the candidate step's gradient norm per component drops
    below 1e-3 (the converged candidate itself is not recorded, matching the
    stopping rule). Returns (list of iterates, index of the last iteration).
    """
    history = [w_init]
    it = 0
    for it in range(100):
        candidate = history[-1] - eta * grad(history[-1])
        if np.linalg.norm(grad(candidate)) / len(candidate) < 1e-3:
            break
        history.append(candidate)
    return history, it
w_init = np.array([2, 1])  # starting point [b, w]
w1, it1 = myGD(w_init, grad, 1)
print("Solution found by GD: w = ", w1[-1].T, ", after %d iterations." % it1)
|
19,810 | 05d4e6307e937cab54e04f34f8522cc551c7d6f1 | from pyffs.core import State
from pyffs.automaton_generation import generate_universal_automaton
def test_generate_automaton_0():
    """Universal automaton for parameter 0 — presumably max edit distance 0;
    confirm against pyffs. Pins the single-row matrix, bit vectors and
    max_i_minus_e."""
    expected = [
        [State(-1, 0), State(-1, 0), State(0, 1)],
    ]
    automaton = generate_universal_automaton(0)
    assert automaton.matrix == expected
    assert automaton.bit_vectors == [(), (0,), (1,)]
    assert automaton.max_i_minus_e == [0]
def test_generate_automaton_1():
    """Universal automaton for parameter 1 — pins the full 5x15 transition
    matrix and max_i_minus_e as a golden snapshot (State(id, min_boundary))."""
    expected = [
        [State(id=1, min_boundary=0),
         State(id=2, min_boundary=0),
         State(id=0, min_boundary=1),
         State(id=2, min_boundary=0),
         State(id=4, min_boundary=0),
         State(id=0, min_boundary=1),
         State(id=0, min_boundary=1),
         State(id=2, min_boundary=0),
         State(id=2, min_boundary=0),
         State(id=4, min_boundary=0),
         State(id=4, min_boundary=0),
         State(id=0, min_boundary=1),
         State(id=0, min_boundary=1),
         State(id=0, min_boundary=1),
         State(id=0, min_boundary=1)],
        [State(id=-1, min_boundary=0),
         State(id=-1, min_boundary=0),
         State(id=1, min_boundary=1),
         State(id=-1, min_boundary=0),
         State(id=-1, min_boundary=0),
         State(id=1, min_boundary=1),
         State(id=1, min_boundary=1),
         State(id=-1, min_boundary=0),
         State(id=-1, min_boundary=0),
         State(id=-1, min_boundary=0),
         State(id=-1, min_boundary=0),
         State(id=1, min_boundary=1),
         State(id=1, min_boundary=1),
         State(id=1, min_boundary=1),
         State(id=1, min_boundary=1)],
        [State(id=-1, min_boundary=0),
         State(id=-1, min_boundary=0),
         State(id=1, min_boundary=1),
         State(id=-1, min_boundary=0),
         State(id=1, min_boundary=2),
         State(id=1, min_boundary=1),
         State(id=2, min_boundary=1),
         State(id=-1, min_boundary=0),
         State(id=-1, min_boundary=0),
         State(id=1, min_boundary=2),
         State(id=1, min_boundary=2),
         State(id=1, min_boundary=1),
         State(id=1, min_boundary=1),
         State(id=2, min_boundary=1),
         State(id=2, min_boundary=1)],
        [State(id=-1, min_boundary=0),
         State(id=-1, min_boundary=0),
         State(id=1, min_boundary=1),
         State(id=-1, min_boundary=0),
         State(id=-1, min_boundary=0),
         State(id=1, min_boundary=1),
         State(id=1, min_boundary=1),
         State(id=-1, min_boundary=0),
         State(id=1, min_boundary=3),
         State(id=-1, min_boundary=0),
         State(id=1, min_boundary=3),
         State(id=1, min_boundary=1),
         State(id=3, min_boundary=1),
         State(id=1, min_boundary=1),
         State(id=3, min_boundary=1)],
        [State(id=-1, min_boundary=0),
         State(id=-1, min_boundary=0),
         State(id=1, min_boundary=1),
         State(id=-1, min_boundary=0),
         State(id=1, min_boundary=2),
         State(id=1, min_boundary=1),
         State(id=2, min_boundary=1),
         State(id=-1, min_boundary=0),
         State(id=1, min_boundary=3),
         State(id=1, min_boundary=2),
         State(id=2, min_boundary=2),
         State(id=1, min_boundary=1),
         State(id=3, min_boundary=1),
         State(id=2, min_boundary=1),
         State(id=4, min_boundary=1)]
    ]
    automaton = generate_universal_automaton(1)
    assert automaton.matrix == expected
    assert automaton.max_i_minus_e == [0, -1, 0, 1, 1]
|
19,811 | 05e989a9bcf3d793fc05ccbdb7ad73b437c5b5bf | #!/usr/bin/env python3
import unittest
import os
import tempfile
import json
import simulator.sim_api as sim_api
import config.config_api as config_api
class TestSimAPI(unittest.TestCase):
    """Tests for simulator.sim_api's JSON dump entry points."""

    def setUp(self):
        # Create an empty temp .json next to this test file as the dump target.
        self.curr_dir = os.path.dirname(os.path.realpath(__file__))
        _, self.json_file = tempfile.mkstemp(suffix=".json", dir=self.curr_dir)
        self.config = config_api.load_hardware_config()
        return

    def tearDown(self):
        os.remove(self.json_file)

    def test_json_exists(self):
        """The temp file created in setUp exists on disk."""
        self.assertTrue(os.path.isfile(self.json_file))

    def test_dump_perf_metrics(self):
        """dump_perf_metrics writes a JSON object whose only key is 'hardware'."""
        hardware = sim_api.init_hardware(self.config)
        hardware.dump_perf_metrics(self.json_file)
        with open(self.json_file, "r") as f:
            perf_metrics = json.load(f)
        self.assertIn("hardware", perf_metrics)
        self.assertEqual(len(perf_metrics), 1)

    def test_dump_timeline(self):
        """dump_timeline writes a trace-style JSON with an empty traceEvents list."""
        hardware = sim_api.init_hardware(self.config)
        hardware.dump_timeline(self.json_file)
        with open(self.json_file, "r") as f:
            timeline = json.load(f)
        self.assertIn("traceEvents", timeline)
        self.assertEqual(len(timeline["traceEvents"]), 0)
if __name__ == "__main__":
unittest.main()
|
19,812 | 474bede37a2fb82e5097a3f6534fbc53a33a8876 | from pyspark.sql import SparkSession, functions as F
from pyspark.sql.types import DoubleType,IntegerType
import plotly.graph_objects as go
import pandas as pd
from typing import Dict
from pyspark.sql.types import DataType
class Employment():
    """Helper around a Spark DataFrame of unemployment data
    (a CountryName column plus one column per year)."""

    def __init__(self, data, sparkSession):
        self.spark = sparkSession
        self.df_unemployment = data

    def extract_info(self, years: list, countries: list) -> DataType:
        """Select CountryName plus the year columns in [years[0], years[-1]],
        restricted to the given countries (nulls filled with 0)."""
        years = [str(year) for year in range(years[0], years[-1]+1)]
        # Bug fix: the original chain ended with a stray trailing backslash
        # that continued the statement into the following `def` line — a
        # SyntaxError. Parenthesized continuation removes the hazard.
        return (self.df_unemployment
                .fillna(0)
                .select([c for c in self.df_unemployment.columns if c in ['CountryName'] + years])
                .filter(self.df_unemployment.CountryName.isin(countries)))

    def add_groups(self, data: DataType, geo_zone: dict) -> DataType:
        """Add a GroupName column mapping each country to its zone via geo_zone."""
        dataExtended = data.withColumn('GroupName', data.CountryName)
        return dataExtended.replace(geo_zone, 1, 'GroupName')

    def group_data(self, data: DataType, years: list) -> DataType:
        """Sum each year column per GroupName, normalising ',' decimal commas
        to '.' in every year column first."""
        dicc = {str(year): 'sum' for year in range(years[0], years[-1]+1)}
        for column in data.columns[1:-1]:
            data = data.withColumn(column, F.regexp_replace(column, ',', '.'))
        return data.groupby('GroupName').agg(dicc).orderBy('GroupName')

    def plot_unemployment(self, data: DataType, years: list, colorSettings) -> None:
        """Plot one unemployment trace per zone keyed by colorSettings."""
        pandaDataframe = data.toPandas()
        years = [str(year) for year in range(years[0], years[-1]+1)]
        fig = go.Figure()
        for i, zone in enumerate(list(colorSettings.keys())):
            unemployment = pandaDataframe.iloc[i][1:-1]
            fig.add_trace(go.Scatter(x=years,
                                     y=unemployment.iloc[::-1],
                                     name=zone,
                                     line_color=colorSettings[zone]))
        # Bug fix: the title used years[1] (the second year) instead of the
        # last year in the requested range.
        fig.update_layout(title_text='Unemployment percentage from {} to {}'.format(years[0], years[-1]),
                          xaxis_rangeslider_visible=True)
        fig.show()
|
19,813 | 76f257920977f33b93f7a6f74d858509b0f97bd1 | with open('D-large.in', 'r') as fh:
data = [row for row in fh.read().split('\n') if row]
# Python 2 Code Jam solver: for each case, count the positions whose value is
# not already its 1-based index and print it with six decimal places.
cases = int(data[0])
result = ''
for case in range(1, cases + 1):
    num_values = int(data[2*case - 1])
    row = [int(val) for val in data[2*case].split() if val]
    if len(row) != num_values:
        raise Exception("Bad data")
    # start from "all incorrect" and subtract the fixed points
    count_incorrect = num_values
    for index in range(num_values):
        if row[index] == index + 1:
            count_incorrect -= 1
    print 'Case #%s: %s.000000' % (case, count_incorrect)
    result += 'Case #%s: %s.000000\n' % (case, count_incorrect)
result = result.strip()
with open('D-large.out', 'w') as fh:
    fh.write(result)
|
19,814 | 81408828917a9eb0f571d4eb913258d03d95bb6d | # -*- coding: utf-8 -*-
import scrapy
from scrapy import Request
from spider_execute.items import SpiderFansItem
import json
import time
import traceback
class BilibiliSpider(scrapy.Spider):
    """bilibili follower spider: pages through each uid's follower list,
    50 per page, at most 5 pages per uid."""
    name = "fans"
    # user-id space to crawl
    start_urls = range(1, 400000000)
    url = 'https://api.bilibili.com/x/relation/followers?vmid=%s&pn=%s&ps=50&order=desc&jsonp=jsonp'
    # override settings
    custom_settings = {
        'AUTOTHROTTLE_ENABLED': False
    }
    # override headers
    head = {
        'Host': 'api.bilibili.com'
    }

    def start_requests(self):
        """Seed one page-1 request per uid.

        Bug fix: the original stored mid/page on self, which concurrent
        requests overwrite — per-request state now travels in request.meta.
        """
        for mid in self.start_urls:
            yield Request(
                url=self.url % (mid, 1),
                headers=self.head,
                meta={'mid': mid, 'page': 1},
                callback=self.parse
            )

    def parse(self, response):
        """Yield one item per follower, then follow to the next page while full."""
        try:
            mid = response.meta['mid']
            page = response.meta['page']
            data = json.loads(response.body_as_unicode())
            followers = data['data']['list']
            for jsData in followers:
                item_dict = SpiderFansItem()
                item_dict['source'] = 'fans'
                item_dict['mid'] = mid
                item_dict['fmid'] = jsData['mid']
                item_dict['mtime'] = jsData['mtime']
                item_dict['uname'] = jsData['uname']
                item_dict['official_verify_type'] = jsData['official_verify']['type']
                item_dict['official_verify_desc'] = jsData['official_verify']['desc']
                item_dict['sign'] = jsData['sign']
                item_dict['insert_time'] = int(time.time())
                # Bug fix: items were only yielded in the non-recursing branch,
                # so every follower on a full page was dropped.
                yield item_dict
            # Bug fix: the original tested len(data['data']) — the number of
            # keys in the response dict, never 50 — and re-issued the next-page
            # request once per row instead of once per page.
            if len(followers) == 50 and page < 5:
                yield Request(
                    url=self.url % (mid, page + 1),
                    headers=self.head,
                    meta={'mid': mid, 'page': page + 1},
                    callback=self.parse
                )
        except Exception:
            # was a bare `except:`; keep best-effort logging without
            # swallowing KeyboardInterrupt/SystemExit
            print(traceback.format_exc())
|
19,815 | 4791585fdf3ff2b14f6ecc4a3f9ee2b548148e48 | def basic():
for i in range(251):
print(i)
#basic()
def mult_5():
    """Print every multiple of 5 from 5 through 1000, one per line."""
    value = 5
    while value <= 1000:
        print(value)
        value += 5
# mult_5()
def dojo_count():
    """Print 1..100, saying 'Coding Dojo' for multiples of 10 and
    'Coding' for the remaining multiples of 5."""
    for number in range(1, 101):
        divisible_by_ten = number % 10 == 0
        divisible_by_five = number % 5 == 0
        if divisible_by_ten:
            print("Coding Dojo")
        elif divisible_by_five:
            print("Coding")
        else:
            print(number)
# dojo_count()
def huge():
    """Print the sum of the integers from 0 through 500000."""
    # sum(range(...)) replaces the manual accumulator loop, which also
    # shadowed the builtin `sum`.
    print(sum(range(0, 500001)))
# huge()
def countdown_4():
    """Count down from 2018 in steps of 4, stopping while still positive."""
    year = 2018
    while year > 0:
        print(year)
        year -= 4
# countdown_4()
def flex_counter(lowNum, highNum, mult):
    """Print every multiple of ``mult`` in the inclusive range
    [lowNum, highNum], one per line."""
    multiples = (n for n in range(lowNum, highNum + 1) if n % mult == 0)
    for n in multiples:
        print(n)
# flex_counter(2,9,3)
|
19,816 | 4b87a8fdf16abc62b982c66f28161311bc5acea3 | #!/usr/bin/env python3
"""
Uniprot
=======
For a given protein file, fetches from Uniprot the accession, keywords and go
of each protein.
Usage:
$ ./uniprot proteins -o uniprot.csv
"""
import argparse
import csv
import sys
import re
import requests
from Bio import ExPASy
from Bio import SeqIO
from Bio import SwissProt
PROTEINS = 'data/homologous-proteins.txt'
def get_info(proteins):
    """Fetch (accession, keywords, GO terms) from UniProt for each protein id.

    proteins: iterable of protein ids (trailing whitespace/newlines tolerated).
    Returns a list of (ac, kw, go) tuples; records without an <accession>
    element are skipped with a message on stderr.
    """
    info = []
    for protein in proteins:
        record = requests.get("http://www.uniprot.org/uniprot/" +
                              protein.rstrip() + ".xml")
        try:
            ac = re.findall(r'<accession>(.*?)</accession>', record.text)[0]
            kw = ','.join(re.findall(r'<keyword id=".*">(.*)</.*>',
                                     record.text))
            go = ','.join(re.findall(r'<property type="term" value="(.*)"/>',
                                     record.text))
            info.append((ac, kw, go))
        except IndexError:
            # Was `except Exception as e: print(e)` — only the [0] lookup can
            # raise inside the try, so catch that specifically and report it
            # usefully on stderr.
            print("no accession found for", protein.rstrip(), file=sys.stderr)
    return info
def write_table(csvfile, info):
    """Write a header row plus one CSV row per (accession, keywords, go) tuple."""
    writer = csv.writer(csvfile, delimiter=',', quotechar='"',
                        quoting=csv.QUOTE_MINIMAL)
    writer.writerow(['Accession', 'Keywords', 'Go'])
    writer.writerows([ac, kw, go] for (ac, kw, go) in info)
def main():
    """Parse CLI arguments, fetch UniProt info, and write the CSV table.

    Usage: uniprot [proteins-file] [-o outfile]; defaults to PROTEINS and
    stdout respectively.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("proteins", type=argparse.FileType('r'),
                        default=PROTEINS, nargs='?',
                        help="File containing a list of proteins")
    parser.add_argument("-o", "--outfile", type=argparse.FileType('w'),
                        default=sys.stdout,
                        help="output file to write the table")
    args = parser.parse_args()
    info = get_info(args.proteins)
    write_table(args.outfile, info)
if __name__ == "__main__":
main()
|
19,817 | 8480dba44b51c823ed242222ba0d5dd5f1839af1 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 7 15:04:25 2016
@author: takanori.hasebe
"""
"""
このプログラムは
指定したa href='...'
の...の部分をとってくるプログラム
つまり判例のpdfのurlをダウンロードしてくる
プログラムをここでは書いている
"""
from bs4 import BeautifulSoup
import urllib.request
from urllib.request import urlopen
#家庭的
#html = urlopen('http://www.courts.go.jp/app/hanrei_jp/list1?filter%5BcourtName%5D=&filter%5BcourtType%5D=&filter%5BbranchName%5D=&filter%5BjikenGengo%5D=&filter%5BjikenYear%5D=&filter%5BjikenCode%5D=&filter%5BjikenNumber%5D=&filter%5BjudgeGengoFrom%5D=&filter%5BjudgeYearFrom%5D=&filter%5BjudgeMonthFrom%5D=&filter%5BjudgeDayFrom%5D=&filter%5BjudgeGengoTo%5D=&filter%5BjudgeYearTo%5D=&filter%5BjudgeMonthTo%5D=&filter%5BjudgeDayTo%5D=&filter%5Btext1%5D=%E5%AE%B6%E5%BA%AD%E7%9A%84&filter%5Btext2%5D=&filter%5Btext3%5D=&filter%5Btext4%5D=&filter%5Btext5%5D=&filter%5Btext6%5D=&filter%5Btext7%5D=&filter%5Btext8%5D=&filter%5Btext9%5D=&action_search=%E6%A4%9C%E7%B4%A2')
#運転
#html = urlopen('http://www.courts.go.jp/app/hanrei_jp/list5?filter%5BcourtName%5D=&filter%5BcourtType%5D=&filter%5BbranchName%5D=&filter%5BjikenGengo%5D=&filter%5BjikenYear%5D=&filter%5BjikenCode%5D=&filter%5BjikenNumber%5D=&filter%5BjudgeGengoFrom%5D=&filter%5BjudgeYearFrom%5D=&filter%5BjudgeMonthFrom%5D=&filter%5BjudgeDayFrom%5D=&filter%5BjudgeGengoTo%5D=&filter%5BjudgeYearTo%5D=&filter%5BjudgeMonthTo%5D=&filter%5BjudgeDayTo%5D=&filter%5BjikenName%5D=&filter%5Btext1%5D=%E9%81%8B%E8%BB%A2&filter%5Btext2%5D=&filter%5Btext3%5D=&filter%5Btext4%5D=&filter%5Btext5%5D=&filter%5Btext6%5D=&filter%5Btext7%5D=&filter%5Btext8%5D=&filter%5Btext9%5D=&action_search=%E6%A4%9C%E7%B4%A2')
#人工知能
#html = urlopen('http://www.courts.go.jp/app/hanrei_jp/list1?action_search=%E6%A4%9C%E7%B4%A2&filter%5BcourtName%5D=&filter%5BcourtType%5D=&filter%5BbranchName%5D=&filter%5BjikenGengo%5D=&filter%5BjikenYear%5D=&filter%5BjikenCode%5D=&filter%5BjikenNumber%5D=&filter%5BjudgeGengoFrom%5D=&filter%5BjudgeYearFrom%5D=&filter%5BjudgeMonthFrom%5D=&filter%5BjudgeDayFrom%5D=&filter%5BjudgeGengoTo%5D=&filter%5BjudgeYearTo%5D=&filter%5BjudgeMonthTo%5D=&filter%5BjudgeDayTo%5D=&filter%5Btext1%5D=%E4%BA%BA%E5%B7%A5%E7%9F%A5%E8%83%BD&filter%5Btext2%5D=&filter%5Btext3%5D=&filter%5Btext4%5D=&filter%5Btext5%5D=&filter%5Btext6%5D=&filter%5Btext7%5D=&filter%5Btext8%5D=&filter%5Btext9%5D=')
#交通
# html = urlopen('http://www.courts.go.jp/app/hanrei_jp/list1?page=351&sort=1&filter%5Btext1%5D=%E4%BA%A4%E9%80%9A')
# 交通事故かつ損害賠償
# html = urlopen('http://www.courts.go.jp/app/hanrei_jp/list1?page=1&sort=1&filter%5Btext1%5D=%E9%81%93%E8%B7%AF%E4%BA%A4%E9%80%9A%E6%B3%95&filter%5Btext4%5D=%E8%B3%A0%E5%84%9F%E8%AB%8B%E6%B1%82&filter%5Btext7%5D=%E4%BA%A4%E9%80%9A%E4%BA%8B%E6%95%85')
# url = 'http://www.courts.go.jp/app/hanrei_jp/list1?page=1&sort=1&filter%5Btext1%5D=%E9%81%93%E8%B7%AF%E4%BA%A4%E9%80%9A%E6%B3%95&filter%5Btext4%5D=%E8%B3%A0%E5%84%9F%E8%AB%8B%E6%B1%82&filter%5Btext7%5D=%E4%BA%A4%E9%80%9A%E4%BA%8B%E6%95%85'
# 懲役
# html = urlopen('http://www.courts.go.jp/app/hanrei_jp/list1?filter%5BcourtName%5D=&filter%5BcourtType%5D=&filter%5BbranchName%5D=&filter%5BjikenGengo%5D=&filter%5BjikenYear%5D=&filter%5BjikenCode%5D=&filter%5BjikenNumber%5D=&filter%5BjudgeDateMode%5D=2&filter%5BjudgeGengoFrom%5D=%E6%98%AD%E5%92%8C&filter%5BjudgeYearFrom%5D=40&filter%5BjudgeMonthFrom%5D=&filter%5BjudgeDayFrom%5D=&filter%5BjudgeGengoTo%5D=%E5%B9%B3%E6%88%90&filter%5BjudgeYearTo%5D=30&filter%5BjudgeMonthTo%5D=&filter%5BjudgeDayTo%5D=&filter%5Btext1%5D=%E9%81%93%E8%B7%AF%E4%BA%A4%E9%80%9A%E6%B3%95&filter%5Btext2%5D=&filter%5Btext3%5D=&filter%5Btext4%5D=%E6%87%B2%E5%BD%B9&filter%5Btext5%5D=%E7%A6%81%E9%8C%AE&filter%5Btext6%5D=%E7%95%99%E7%BD%AE&filter%5Btext7%5D=%E6%99%82%E9%80%9F&filter%5Btext8%5D=&filter%5Btext9%5D=&action_search=%E6%A4%9C%E7%B4%A2')
# url = 'http://www.courts.go.jp/app/hanrei_jp/list1?filter%5BcourtName%5D=&filter%5BcourtType%5D=&filter%5BbranchName%5D=&filter%5BjikenGengo%5D=&filter%5BjikenYear%5D=&filter%5BjikenCode%5D=&filter%5BjikenNumber%5D=&filter%5BjudgeDateMode%5D=2&filter%5BjudgeGengoFrom%5D=%E6%98%AD%E5%92%8C&filter%5BjudgeYearFrom%5D=40&filter%5BjudgeMonthFrom%5D=&filter%5BjudgeDayFrom%5D=&filter%5BjudgeGengoTo%5D=%E5%B9%B3%E6%88%90&filter%5BjudgeYearTo%5D=30&filter%5BjudgeMonthTo%5D=&filter%5BjudgeDayTo%5D=&filter%5Btext1%5D=%E9%81%93%E8%B7%AF%E4%BA%A4%E9%80%9A%E6%B3%95&filter%5Btext2%5D=&filter%5Btext3%5D=&filter%5Btext4%5D=%E6%87%B2%E5%BD%B9&filter%5Btext5%5D=%E7%A6%81%E9%8C%AE&filter%5Btext6%5D=%E7%95%99%E7%BD%AE&filter%5Btext7%5D=%E6%99%82%E9%80%9F&filter%5Btext8%5D=&filter%5Btext9%5D=&action_search=%E6%A4%9C%E7%B4%A2'
# 猶予
# html = urlopen('http://www.courts.go.jp/app/hanrei_jp/list1?filter%5BcourtName%5D=&filter%5BcourtType%5D=&filter%5BbranchName%5D=&filter%5BjikenGengo%5D=&filter%5BjikenYear%5D=&filter%5BjikenCode%5D=&filter%5BjikenNumber%5D=&filter%5BjudgeDateMode%5D=2&filter%5BjudgeGengoFrom%5D=%E6%98%AD%E5%92%8C&filter%5BjudgeYearFrom%5D=40&filter%5BjudgeMonthFrom%5D=&filter%5BjudgeDayFrom%5D=&filter%5BjudgeGengoTo%5D=%E5%B9%B3%E6%88%90&filter%5BjudgeYearTo%5D=30&filter%5BjudgeMonthTo%5D=&filter%5BjudgeDayTo%5D=&filter%5Btext1%5D=%E9%81%93%E8%B7%AF%E4%BA%A4%E9%80%9A%E6%B3%95&filter%5Btext2%5D=&filter%5Btext3%5D=&filter%5Btext4%5D=%E7%8C%B6%E4%BA%88&filter%5Btext5%5D=&filter%5Btext6%5D=&filter%5Btext7%5D=%E6%99%82%E9%80%9F&filter%5Btext8%5D=&filter%5Btext9%5D=&action_search=%E6%A4%9C%E7%B4%A2')
# url = 'http://www.courts.go.jp/app/hanrei_jp/list1?filter%5BcourtName%5D=&filter%5BcourtType%5D=&filter%5BbranchName%5D=&filter%5BjikenGengo%5D=&filter%5BjikenYear%5D=&filter%5BjikenCode%5D=&filter%5BjikenNumber%5D=&filter%5BjudgeDateMode%5D=2&filter%5BjudgeGengoFrom%5D=%E6%98%AD%E5%92%8C&filter%5BjudgeYearFrom%5D=40&filter%5BjudgeMonthFrom%5D=&filter%5BjudgeDayFrom%5D=&filter%5BjudgeGengoTo%5D=%E5%B9%B3%E6%88%90&filter%5BjudgeYearTo%5D=30&filter%5BjudgeMonthTo%5D=&filter%5BjudgeDayTo%5D=&filter%5Btext1%5D=%E9%81%93%E8%B7%AF%E4%BA%A4%E9%80%9A%E6%B3%95&filter%5Btext2%5D=&filter%5Btext3%5D=&filter%5Btext4%5D=%E7%8C%B6%E4%BA%88&filter%5Btext5%5D=&filter%5Btext6%5D=&filter%5Btext7%5D=%E6%99%82%E9%80%9F&filter%5Btext8%5D=&filter%5Btext9%5D=&action_search=%E6%A4%9C%E7%B4%A2'
# 損害賠償
# html = urlopen('http://www.courts.go.jp/app/hanrei_jp/list1?filter%5BcourtName%5D=&filter%5BcourtType%5D=&filter%5BbranchName%5D=&filter%5BjikenGengo%5D=&filter%5BjikenYear%5D=&filter%5BjikenCode%5D=&filter%5BjikenNumber%5D=&filter%5BjudgeDateMode%5D=2&filter%5BjudgeGengoFrom%5D=%E6%98%AD%E5%92%8C&filter%5BjudgeYearFrom%5D=40&filter%5BjudgeMonthFrom%5D=&filter%5BjudgeDayFrom%5D=&filter%5BjudgeGengoTo%5D=%E5%B9%B3%E6%88%90&filter%5BjudgeYearTo%5D=30&filter%5BjudgeMonthTo%5D=&filter%5BjudgeDayTo%5D=&filter%5Btext1%5D=%E9%81%93%E8%B7%AF%E4%BA%A4%E9%80%9A%E6%B3%95&filter%5Btext2%5D=&filter%5Btext3%5D=&filter%5Btext4%5D=%E6%90%8D%E5%AE%B3%E8%B3%A0%E5%84%9F%E8%AB%8B%E6%B1%82&filter%5Btext5%5D=%E6%94%AF%E6%89%95%E3%81%88&filter%5Btext6%5D=&filter%5Btext7%5D=%E6%99%82%E9%80%9F&filter%5Btext8%5D=&filter%5Btext9%5D=&action_search=%E6%A4%9C%E7%B4%A2')
# url = 'http://www.courts.go.jp/app/hanrei_jp/list1?filter%5BcourtName%5D=&filter%5BcourtType%5D=&filter%5BbranchName%5D=&filter%5BjikenGengo%5D=&filter%5BjikenYear%5D=&filter%5BjikenCode%5D=&filter%5BjikenNumber%5D=&filter%5BjudgeDateMode%5D=2&filter%5BjudgeGengoFrom%5D=%E6%98%AD%E5%92%8C&filter%5BjudgeYearFrom%5D=40&filter%5BjudgeMonthFrom%5D=&filter%5BjudgeDayFrom%5D=&filter%5BjudgeGengoTo%5D=%E5%B9%B3%E6%88%90&filter%5BjudgeYearTo%5D=30&filter%5BjudgeMonthTo%5D=&filter%5BjudgeDayTo%5D=&filter%5Btext1%5D=%E9%81%93%E8%B7%AF%E4%BA%A4%E9%80%9A%E6%B3%95&filter%5Btext2%5D=&filter%5Btext3%5D=&filter%5Btext4%5D=%E6%90%8D%E5%AE%B3%E8%B3%A0%E5%84%9F%E8%AB%8B%E6%B1%82&filter%5Btext5%5D=%E6%94%AF%E6%89%95%E3%81%88&filter%5Btext6%5D=&filter%5Btext7%5D=%E6%99%82%E9%80%9F&filter%5Btext8%5D=&filter%5Btext9%5D=&action_search=%E6%A4%9C%E7%B4%A2'
# 無罪
# Fetch the first results page of the courts.go.jp case-law search:
# Road Traffic Act judgements, Showa 40 - Heisei 30, whose text contains
# "the defendant is not guilty" (被告人は無罪).
html = urlopen('http://www.courts.go.jp/app/hanrei_jp/list1?filter%5BcourtName%5D=&filter%5BcourtType%5D=&filter%5BbranchName%5D=&filter%5BjikenGengo%5D=&filter%5BjikenYear%5D=&filter%5BjikenCode%5D=&filter%5BjikenNumber%5D=&filter%5BjudgeDateMode%5D=2&filter%5BjudgeGengoFrom%5D=%E6%98%AD%E5%92%8C&filter%5BjudgeYearFrom%5D=40&filter%5BjudgeMonthFrom%5D=&filter%5BjudgeDayFrom%5D=&filter%5BjudgeGengoTo%5D=%E5%B9%B3%E6%88%90&filter%5BjudgeYearTo%5D=30&filter%5BjudgeMonthTo%5D=&filter%5BjudgeDayTo%5D=&filter%5Btext1%5D=%E9%81%93%E8%B7%AF%E4%BA%A4%E9%80%9A%E6%B3%95&filter%5Btext2%5D=&filter%5Btext3%5D=&filter%5Btext4%5D=%E8%A2%AB%E5%91%8A%E4%BA%BA%E3%81%AF%E7%84%A1%E7%BD%AA&filter%5Btext5%5D=&filter%5Btext6%5D=&filter%5Btext7%5D=%E6%99%82%E9%80%9F&filter%5Btext8%5D=&filter%5Btext9%5D=&action_search=%E6%A4%9C%E7%B4%A2')
# Same query URL kept around so the pagination logic below can rewrite it.
url = 'http://www.courts.go.jp/app/hanrei_jp/list1?filter%5BcourtName%5D=&filter%5BcourtType%5D=&filter%5BbranchName%5D=&filter%5BjikenGengo%5D=&filter%5BjikenYear%5D=&filter%5BjikenCode%5D=&filter%5BjikenNumber%5D=&filter%5BjudgeDateMode%5D=2&filter%5BjudgeGengoFrom%5D=%E6%98%AD%E5%92%8C&filter%5BjudgeYearFrom%5D=40&filter%5BjudgeMonthFrom%5D=&filter%5BjudgeDayFrom%5D=&filter%5BjudgeGengoTo%5D=%E5%B9%B3%E6%88%90&filter%5BjudgeYearTo%5D=30&filter%5BjudgeMonthTo%5D=&filter%5BjudgeDayTo%5D=&filter%5Btext1%5D=%E9%81%93%E8%B7%AF%E4%BA%A4%E9%80%9A%E6%B3%95&filter%5Btext2%5D=&filter%5Btext3%5D=&filter%5Btext4%5D=%E8%A2%AB%E5%91%8A%E4%BA%BA%E3%81%AF%E7%84%A1%E7%BD%AA&filter%5Btext5%5D=&filter%5Btext6%5D=&filter%5Btext7%5D=%E6%99%82%E9%80%9F&filter%5Btext8%5D=&filter%5Btext9%5D=&action_search=%E6%A4%9C%E7%B4%A2'
cnt = 0  # PDFs downloaded so far
n = 0    # result pages processed so far
# Crawl page by page until the last page (or until the quota below is hit).
while True:
    print(str(n + 1)+'ページ目')
    print(url)
    # Parsed HTML of the whole results page.
    bsObj = BeautifulSoup(html.read(), 'lxml')
    # First <a href=''> of the page.
    result = bsObj.a['href']
    # All <a> tags of the page (returned as a list).
    bsObj.find_all('a')  # NOTE(review): result discarded; the loop below re-runs the query
    # Save every link target in a list.
    hrefResult = list()
    for link in bsObj.find_all('a'):
        #print(link.get('href'))  # show every link target
        hrefResult.append(link.get('href'))
    # Keep only the links that download a judgement PDF.
    result = list()
    for i in range(len(hrefResult)):
        if hrefResult[i] == None:
            pass
        elif '/app/files' in hrefResult[i]:
            #print(i)
            result.append(hrefResult[i])
    #print('len(result): '+str(len(result)))
    #print(result)
    #len(result)
    # Download each judgement PDF linked from this page.
    topStr = 'http://www.courts.go.jp/'
    for i in range(len(result)):
        print('cnt: '+str(cnt))
        url = topStr + result[i]
        #print(url)
        urllib.request.urlretrieve(url, 'notguilty/notguilty'+str(cnt)+'.pdf')
        cnt += 1
    # Pagination links already start with '/', so drop the trailing slash.
    topStr = 'http://www.courts.go.jp'
    print('cnt:: '+str(cnt))
    """""""""""""""""""""""""""""""""""
    ###ここでダウンロードしたい件数を必ず指定する
    """""""""""""""""""""""""""""""""""
    # Stop after the download quota; otherwise follow the "next page" link,
    # which is assumed to sit at hrefResult[-3] -- TODO confirm against the
    # live page layout.
    if cnt == int(35):
        break
    elif '/app/hanrei_jp/list1' in hrefResult[-3]:
        url = topStr + hrefResult[-3]
        #print(url)
    elif '/app/hanrei_jp/list5' in hrefResult[-3]:
        url = topStr + hrefResult[-3]
        #print(url)
    elif '/app/hanrei_jp/list0' in hrefResult[-3]:
        url = topStr + hrefResult[-3]
        #print(url)
    else:
        break
    html = urlopen(url)
    n += 1
print('クローリング終了')
|
19,818 | 9aad67b6084451c369ab8b01b4ad73efd223f2d4 | #!/usr/bin/python
import os
import sys
import bopy.gen3pp.toaststudies as tstudy
if __name__ == '__main__':
    # Python 2 CLI wrapper around bopy.gen3pp.toaststudies.tsfit.
    try:
        ddir = sys.argv[1]      # data directory
        meas = sys.argv[2]      # measurement name
        cfgfile = sys.argv[3]   # configuration file path
        w = int(sys.argv[4])    # integer parameter forwarded to tsfit -- TODO confirm meaning
        kind = sys.argv[5]      # fit kind, forwarded verbatim
    except:
        # Any missing or non-integer argument falls through to the usage message.
        print >>sys.stderr, 'Usage: {0} <ddir> <meas> <cfgfile> <w> <kind>'.format(os.path.basename(sys.argv[0]))
        sys.exit(0)
    print meas, w, kind
    tstudy.tsfit(ddir, meas, cfgfile, w, kind)
|
19,819 | cd71c87e6c950b866f06b31cfe60424266700013 | import logging
from modifying_nouns_m import m_funcs
from modifying_nouns_f import f_funcs
from modifying_nouns_n import n_funcs
# Dispatch table: grammatical gender -> {inflection-class id -> paradigm
# function}. The per-gender tables come from the modifying_nouns_* modules.
funcs = {
    'm': m_funcs,
    'f': f_funcs,
    'n': n_funcs
}
def q_0(animacy, **b):
    """Indeclinable paradigm ('0'): all six singular and all six plural
    case forms are simply the bare stem b['основа']. *animacy* is accepted
    for signature compatibility with the other paradigm functions."""
    stem = b['основа']
    return [[stem] * 6, [stem] * 6]
# Register the indeclinable paradigm under inflection id '0' for all genders.
funcs['m']['0'] = q_0
funcs['f']['0'] = q_0
funcs['n']['0'] = q_0
class FailCollector(object):
    """Counts how many times each failure key has been reported."""

    def __init__(self):
        # failure key -> number of occurrences
        self.fails = {}

    def append(self, fail):
        """Record one more occurrence of *fail*."""
        self.fails[fail] = self.fails.get(fail, 0) + 1

    def get(self, descending = True):
        """Return (key, count) pairs; *descending* is accepted but unused."""
        return self.fails.items()

    def getSorted(self, descending = True):
        """Return (key, count) pairs ordered by occurrence count."""
        return sorted(self.fails.items(), key = lambda pair: pair[1], reverse = descending)

failCollector = FailCollector()
def safeRun(word, f, *args, **kwargs):
    """Call f(*args, **kwargs); on KeyError (a missing stem/ending entry)
    log the failure for *word* and return None instead of raising."""
    try:
        result = f(*args, **kwargs)
    except KeyError:
        logging.error("not enough arguments to generate forms for {}: args={}, kwargs={}".format(word, args, kwargs))
        return None
    return result
def getWordForms(word, gender, animacy, inflection, **b):
    """Look up the paradigm function for (gender, inflection) and generate
    the form table for *word*.

    Inflection-id modifiers:
      * trailing '-' (e.g. '1-')   -> singular-only: the plural row is blanked
      * wrapped in () (e.g. '(1)') -> plural-only: the singular row is blanked

    Returns the [singular, plural] form table produced by the paradigm
    function, or None on failure (the failure is counted in failCollector
    and logged).
    """
    if gender in funcs:
        if inflection in funcs[gender]:
            # whoo-hoo!
            return safeRun(word, funcs[gender][inflection], animacy, **b)
        elif inflection and inflection.endswith('-') and inflection[:-1] in funcs[gender]:
            # Singular-only word: generate normally, then drop the plural row.
            inf = safeRun(word, funcs[gender][inflection[:-1]], animacy, **b)
            if inf:
                inf[1] = [None, None, None, None, None, None]
            return inf
        elif inflection and inflection.startswith('(') and inflection.endswith(')') and inflection[1:-1] in funcs[gender]:
            # Plural-only word: generate normally, then drop the singular row.
            inf = safeRun(word, funcs[gender][inflection[1:-1]], animacy, **b)
            if inf:
                inf[0] = [None, None, None, None, None, None]
            return inf
        else:
            failCollector.append((gender, animacy, inflection))
            logging.error("no function for {}: [{},{},{}]".format(word, gender, animacy, inflection))
            return None
    else:
        failCollector.append((gender, animacy, inflection))
        logging.error("bad gender for {}: [{},{},{}]".format(word, gender, animacy, inflection))
        return None
|
19,820 | 3fcca367ded90d76b2f170b731f24c35c612f8d8 | import os
import argparse
from tqdm import tqdm
from loguru import logger
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.backends.cudnn as cudnn
from torch.utils.data import IterableDataset, Dataset, DataLoader
import dgl.function as fn
import sklearn.linear_model as lm
import sklearn.metrics as skm
from dataloader import build_cora_dataset, NodesSet, NodesGraphCollactor
from model import SAGENet
class CrossEntropyLoss(nn.Module):
    """Unsupervised GraphSAGE link-prediction loss.

    Scores each edge as the dot product of its endpoint embeddings and
    applies binary cross-entropy with logits: edges of the positive graph
    are labelled 1, edges of the sampled negative graph 0.
    """

    def forward(self, block_outputs, pos_graph, neg_graph):
        # Dot-product score for every positive edge (local_scope keeps the
        # temporary 'h'/'score' fields off the shared graph).
        with pos_graph.local_scope():
            pos_graph.ndata['h'] = block_outputs
            pos_graph.apply_edges(fn.u_dot_v('h', 'h', 'score'))
            pos_score = pos_graph.edata['score']
        # Dot-product score for every sampled negative edge.
        with neg_graph.local_scope():
            neg_graph.ndata['h'] = block_outputs
            neg_graph.apply_edges(fn.u_dot_v('h', 'h', 'score'))
            neg_score = neg_graph.edata['score']

        score = torch.cat([pos_score, neg_score])
        label = torch.cat([torch.ones_like(pos_score), torch.zeros_like(neg_score)]).long()
        loss = F.binary_cross_entropy_with_logits(score, label.float())
        return loss
def load_subtensor(nfeat, seeds, device='cpu'):
    """Gather the feature rows of *nfeat* indexed by *seeds* and move the
    resulting tensor to *device*."""
    # logger.info(len(seeds))
    row_ids = list(seeds)
    gathered = nfeat[row_ids]
    # batch_labels = labels[input_nodes].to(device)
    return gathered.to(device)
def compute_acc_unsupervised(emb, graph):
    """
    Compute the accuracy of prediction given the labels.

    Fits a logistic-regression probe on the (frozen) node embeddings *emb*
    using the graph's train split, then reports micro-F1 on the train,
    validation and test splits. Returns
    (f1_micro_train, f1_micro_eval, f1_micro_test).
    """
    train_mask = graph.ndata['train_mask']
    test_mask = graph.ndata['test_mask']
    val_mask = graph.ndata['val_mask']
    # Boolean masks -> flat numpy arrays of node ids.
    train_nids = torch.LongTensor(np.nonzero(train_mask)).squeeze().cpu().numpy()
    val_nids = torch.LongTensor(np.nonzero(val_mask)).squeeze().cpu().numpy()
    test_nids = torch.LongTensor(np.nonzero(test_mask)).squeeze().cpu().numpy()
    emb = emb.cpu().detach().numpy()
    labels = graph.ndata['label'].cpu().numpy()
    train_labels = labels[train_nids]
    val_labels = labels[val_nids]
    test_labels = labels[test_nids]

    # Standardise the embeddings before fitting the linear probe.
    emb = (emb - emb.mean(0, keepdims=True)) / emb.std(0, keepdims=True)
    lr = lm.LogisticRegression(multi_class='multinomial', max_iter=1000)
    lr.fit(emb[train_nids], train_labels)

    # Predict for every node once, then score each split.
    pred = lr.predict(emb)
    f1_micro_train = skm.f1_score(train_labels, pred[train_nids], average='micro')
    f1_micro_eval = skm.f1_score(val_labels, pred[val_nids], average='micro')
    f1_micro_test = skm.f1_score(test_labels, pred[test_nids], average='micro')
    return f1_micro_train, f1_micro_eval, f1_micro_test
def train(args, graph):
    """Train SAGENet with the unsupervised link-prediction objective, then
    probe the embeddings with logistic regression after every epoch.

    NOTE(review): model.cuda() is called unconditionally, so despite the
    device fallback above this function requires a CUDA device -- confirm.
    """
    # os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    cudnn.benchmark = True
    graph.to(device)
    features = graph.ndata['feat']
    in_feats = features.shape[1]
    n_classes = 7  # Cora has 7 classes

    # Mini-batch loader: the collator samples neighbourhood blocks plus
    # positive/negative edge graphs for each batch of seed nodes.
    collator = NodesGraphCollactor(graph, neighbors_every_layer=args.neighbors_every_layer)
    batch_sampler = NodesSet(graph)
    data_loader = DataLoader(
        batch_sampler,
        batch_size=512,
        shuffle=True,
        num_workers=6,
        collate_fn=collator
    )
    # should aggregate while testing.
    test_collator = NodesGraphCollactor(graph, neighbors_every_layer=[10000])
    test_data_loader = DataLoader(
        batch_sampler,
        batch_size=10000,
        shuffle=False,
        num_workers=6,
        collate_fn=test_collator
    )

    # Define model and optimizer
    model = SAGENet(in_feats, args.num_hidden, n_classes,
                    args.num_layers, F.relu, args.dropout)
    model.cuda()
    # loss_fcn = nn.CrossEntropyLoss()
    loss_fcn = CrossEntropyLoss()
    optimizer = optim.AdamW(model.parameters(), lr=args.lr, betas=(0.9, 0.999), eps=1e-08,
                            weight_decay=0.1, amsgrad=False)
    top_acc, top_f1 = 0, 0
    for epoch in range(args.num_epochs):
        acc_cnt = 0
        for step, (pos_graph, neg_graph, blocks, all_seeds) in enumerate(data_loader):
            # pos_nodes, neg_nodes_batch = collator.sample_pos_neg_nodes(batch)
            # logger.info(len(batch), len(all_seeds))
            # logger.info(len(pos_nodes), len(neg_nodes_batch), torch.tensor(neg_nodes_batch).shape)
            feats = load_subtensor(features, all_seeds, device=device)
            # pos_feats = load_subtensor(features, pos_nodes, device=device)
            # neg_feats = load_subtensor(features, neg_nodes_batch, device=device)
            # logger.info(heads_feats.shape, pos_feats.shape, neg_feats.shape)
            blocks = [b.to(device) for b in blocks]
            pos_graph = pos_graph.to(device)
            neg_graph = neg_graph.to(device)
            bacth_pred = model(blocks, feats)
            loss = loss_fcn(bacth_pred, pos_graph, neg_graph)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
            # batch_acc_cnt = (torch.argmax(bacth_pred, dim=1) == batch_labels.long()).float().sum()
            # acc_cnt += int(batch_acc_cnt)
        logger.info(f"Train Epoch:{epoch}, Loss:{loss}")
        # evaluation: one full-neighbourhood pass, then a linear probe.
        for step, (pos_graph, neg_graph, blocks, all_seeds) in enumerate(test_data_loader):
            feats = load_subtensor(features, all_seeds, device=device)
            blocks = [b.to(device) for b in blocks]
            bacth_pred = model(blocks, feats)
            f1_micro_train, f1_micro_eval, f1_micro_test = compute_acc_unsupervised(bacth_pred, graph)
            if top_f1 < f1_micro_test:
                top_f1 = f1_micro_test
            logger.info(
                f" train f1:{f1_micro_train}, Val micro F1: {f1_micro_eval}, Test micro F1:{f1_micro_test}, TOP micro F1:{top_f1}")
if __name__ == "__main__":
    # Hyper-parameters for unsupervised GraphSAGE training on Cora.
    parser = argparse.ArgumentParser(description="parameter set")
    parser.add_argument('--num_epochs', type=int, default=64)
    parser.add_argument('--num_hidden', type=int, default=256)
    parser.add_argument('--dropout', type=float, default=0.0)
    parser.add_argument('--lr', type=float, default=0.001)
    parser.add_argument('--num_layers', type=int, default=2)
    # TODO: multiple negative nodes
    # NOTE(review): argparse type=list splits a command-line string into
    # characters; the default works, but overriding from the CLI is likely
    # broken -- confirm.
    parser.add_argument('--neighbors_every_layer', type=list, default=[10], help="or [10, 5]")
    parser.add_argument("--gpu", type=str, default='0',
                        help="gpu or cpu")
    args = parser.parse_args()
    graph = build_cora_dataset(add_symmetric_edges=True, add_self_loop=True)
    train(args, graph)
|
19,821 | ce5d98ebe073ad62038446818cef1c733c65e53f | """
Evolution of ByteBeat Algorithms
Charles Martin
November 2016
"""
from __future__ import print_function
from deap import base, creator, gp, tools, algorithms
import operator
import numpy as np
from scipy import signal
import random
import pywt
import matplotlib.pyplot as plt
import subprocess
# sox: interpret the raw stream as 8 kHz mono unsigned 8-bit samples;
# lame: encode the wav as best-quality VBR mp3.
SOX_COMMAND = "sox -r 8000 -c 1 -t u8"
LAME_COMMAND = "lame -V 0"
## MIR
# simple peak detection
## borrowed from bpm_detection.py
## https://github.com/scaperot/the-BPM-detector-python
def peak_detect(data):
    """Indices of the element(s) of largest magnitude in *data*.

    Tries +max(|data|) first; if that value never occurs, the extreme must
    be negative, so fall back to -max(|data|). Returns the np.where tuple.
    """
    largest = np.amax(abs(data))
    peak_ndx = np.where(data == largest)
    if len(peak_ndx[0]) == 0:  # the max magnitude only occurs with a minus sign
        peak_ndx = np.where(data == -largest)
    return peak_ndx
## borrowed from bpm_detection.py
## https://github.com/scaperot/the-BPM-detector-python
def bpm_detector(data,fs):
    """Estimate the tempo of *data* (sampled at *fs* Hz) in BPM.

    Wavelet-based approach: repeatedly DWT the signal, lowpass-filter and
    rectify the detail coefficients at each level, sum the decimated
    envelopes, then autocorrelate and pick the strongest lag in the
    40-220 BPM window. Returns (bpm, autocorrelation), or (0.0, 0.0) when
    no usable signal is found.

    NOTE(review): written for Python 2 -- `len(cD)/max_decimation+1`, the
    list-slice bounds and `len(correl) / 2` all rely on integer division
    and would produce floats under Python 3. Confirm the target
    interpreter before reuse.
    """
    cA = []
    cD = []
    correl = []
    cD_sum = []
    levels = 4
    max_decimation = 2**(levels-1);
    # Lag window corresponding to 220 BPM (min) .. 40 BPM (max).
    min_ndx = 60./ 220 * (fs/max_decimation)
    max_ndx = 60./ 40 * (fs/max_decimation)

    for loop in range(0,levels):
        cD = []
        # 1) DWT
        if loop == 0:
            [cA,cD] = pywt.dwt(data,'db4');
            cD_minlen = len(cD)/max_decimation+1;
            cD_sum = np.zeros(cD_minlen);
        else:
            [cA,cD] = pywt.dwt(cA,'db4');
        # 2) Filter
        cD = signal.lfilter([0.01],[1 -0.99],cD);
        # 4) Subtractargs.filename out the mean.
        # 5) Decimate for reconstruction later.
        cD = abs(cD[::(2**(levels-loop-1))]);
        cD = cD - np.mean(cD);
        # 6) Recombine the signal before ACF
        #    essentially, each level I concatenate
        #    the detail coefs (i.e. the HPF values)
        #    to the beginning of the array
        cD_sum = cD[0:cD_minlen] + cD_sum;
    # All-zero approximation coefficients mean there was no signal at all.
    if [b for b in cA if b != 0.0] == []:
        return 0.0,0.0 ## no data
    # adding in the approximate data as well...
    cA = signal.lfilter([0.01],[1 -0.99],cA);
    cA = abs(cA);
    cA = cA - np.mean(cA);
    cD_sum = cA[0:cD_minlen] + cD_sum;
    # ACF
    correl = np.correlate(cD_sum,cD_sum,'full')
    midpoint = len(correl) / 2
    correl_midpoint_tmp = correl[midpoint:]
    # Strongest autocorrelation lag inside the plausible tempo window.
    peak_ndx = peak_detect(correl_midpoint_tmp[min_ndx:max_ndx]);
    if len(peak_ndx) > 1:
        return 0.0,0.0 ## no data
    peak_ndx_adjusted = peak_ndx[0]+min_ndx;
    bpm = 60./ peak_ndx_adjusted * (fs/max_decimation)
    return bpm,correl
def playback_expr(e):
    """
    Stream the bytebeat *e* to stdout forever, one u8 sample per char,
    starting at t = 1.
    """
    t = 1
    while True:
        sample = int(e(t)) % 256
        print(chr(sample), end="")
        t += 1
def playback_expr_count(e):
    """
    Print the first 50000 samples of bytebeat *e* to stdout as u8 chars.

    e is evaluated at t = 1..50000; each output char is the low byte of
    the result.
    """
    for t in range(50000):
        print(chr(int(e(t + 1)) % 256), end="")
    # (Removed a dead `t += 1` at the end of the loop body: the `for`
    # statement rebinds t on every iteration, so the increment had no
    # effect.)
def playback_char(e,t):
    """
    Evaluate bytebeat *e* at step t+1, reduced to a u8 sample (0..255).
    """
    sample = int(e(t + 1))
    return sample % 256
#@profile
def gen_beat_output(e):
    """
    First 70000 samples of bytebeat *e*, as a list of ints in 0..255.
    """
    samples = []
    for step in range(70000):
        samples.append(playback_char(e, step))
    return samples
"""
Setup the Evolutionary Programming system
"""
def beat_division(a,b):
    """
    Integer division protected that returns 0 for n/0.
    """
    return 0 if b == 0 else a // b
# GP primitive set for bytebeat expressions over one input, t (the time step).
# Arithmetic/bitwise binary operators plus protected integer division.
pset = gp.PrimitiveSet("MAIN", 1)
pset.addPrimitive(operator.add, 2)
pset.addPrimitive(operator.mod, 2)
pset.addPrimitive(operator.mul, 2)
pset.addPrimitive(operator.rshift,2)
#pset.addPrimitive(operator.lshift,2)
pset.addPrimitive(operator.or_,2)
pset.addPrimitive(operator.and_,2)
pset.addPrimitive(operator.xor,2)
pset.addPrimitive(operator.sub,2)
pset.addPrimitive(beat_division,2)
# Small prime terminals give evolution useful constants to combine.
pset.addTerminal(1)
pset.addTerminal(2)
pset.addTerminal(3)
pset.addTerminal(5)
pset.addTerminal(7)
pset.addTerminal(11)
pset.addTerminal(13)
pset.renameArguments(ARG0='t')
#@profile
def eval_beat(individual):
    """
    Evaluation and fitness function used for evolution.

    Compiles the GP tree, renders 70000 samples, and scores the beat as
    (closeness of detected tempo to 120 BPM) * (normalised sample spread).
    Returns a 1-tuple, as DEAP's single-objective fitness expects.
    """
    # compile the individual
    routine = gp.compile(individual, pset)
    # generate some test output
    try:
        test_output = gen_beat_output(routine)
    except Exception:
        # Fixed: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; only genuine evaluation errors should score 0.
        return 0.0,
    ## do some stats on the beat
    sd = np.std(np.array(test_output))
    bpm, correl = bpm_detector(test_output, 24000)
    bpm_score = 1 - abs((bpm / 120.0) - 1)   # 1.0 at exactly 120 BPM
    sd_score = sd / 128.0                    # spread relative to full u8 range
    del test_output
    # return the score
    return float(bpm_score * sd_score),
def output_beat_to_file(file_name, e):
    """
    Output the bytebeat to a file.
    Converts to wav with sox

    Renders 200000 samples of the compiled individual *e* into
    <file_name>.raw, then shells out to sox (raw -> wav) and lame
    (wav -> mp3).
    """
    print("Writing to file:", file_name)
    routine = gp.compile(e,pset)
    # NOTE(review): text-mode write of chr() samples is Python-2 style; under
    # Python 3 this should be binary mode writing bytes. Confirm interpreter.
    with open(file_name+".raw",'w') as f:
        for t in range(200000):
            f.write(chr(int(routine(t+1))%256))
    # Now convert to wav
    # NOTE(review): shell=True with file_name interpolated into the command
    # string -- callers only pass internal names ("halloffame-N"), but never
    # feed untrusted input here.
    subprocess.call(SOX_COMMAND + " " + file_name + ".raw" + " " + file_name + ".wav", shell=True)
    subprocess.call(LAME_COMMAND + " " + file_name + ".wav", shell=True)
def output_beat_to_std_out(e):
    """Print the first 50000 samples of the compiled individual *e* to stdout."""
    routine = gp.compile(e, pset)
    for step in range(50000):
        sample = int(routine(step + 1)) % 256
        print(chr(sample), end="")
"""
Visualisation
"""
def bitlist(n):
    """
    Return the 8 bits of byte value *n*, most significant bit first.
    """
    return [(n >> shift) & 1 for shift in range(7, -1, -1)]
def print_image(indiv,name):
    """
    Prints out a typical bytebeat image from an individual with a given filename.

    Renders the first 24000 samples as an 8-row bit raster (MSB at the top)
    and saves it to <name>.png.
    """
    routine = gp.compile(indiv,pset)
    output = gen_beat_output(routine)
    # Fixed for Python 3: map() returns an iterator there, which cannot be
    # sliced -- materialise it with list() before taking the prefix.
    bits = np.array(list(map(bitlist, output))[0:24000]).transpose()
    plt.style.use('classic')
    plt.imshow(bits,interpolation='nearest',aspect='auto',cmap=plt.get_cmap('Greys'))
    plt.savefig(name+".png",dpi=150)
"""
Setup the GP evolution
"""
# Single-objective maximisation over GP expression trees.
creator.create("FitnessMax", base.Fitness, weights=(1.0,))
creator.create("Individual", gp.PrimitiveTree, fitness=creator.FitnessMax)

toolbox = base.Toolbox()
# Attribute generator: full trees of depth 0-2 from the bytebeat primitives.
toolbox.register("expr_init", gp.genFull, pset=pset, min_=0, max_=2)
# Structure initializers
toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr_init)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)

# toolbox setup: evaluation, tournament selection, one-point crossover,
# and subtree mutation drawn from the same generator as initialisation.
toolbox.register("evaluate", eval_beat)
toolbox.register("select", tools.selTournament, tournsize=4)
toolbox.register("mate", gp.cxOnePoint)
toolbox.register("expr_mut", gp.genFull, min_=0, max_=2)
toolbox.register("mutate", gp.mutUniform, expr=toolbox.expr_mut, pset=pset)
def print_pop(p):
    """Write every individual of population container *p* to a numbered audio file."""
    for index, indiv in enumerate(p.items):
        # Fixed: the original called the undefined name 'output_beat_to_filed'
        # (typo), which raised NameError whenever this function ran.
        output_beat_to_file("individual" + str(index), indiv)  # output to files.
    # convert to wav?
def main():
    """Run the evolutionary search and save the hall-of-fame beats."""
    #random.seed(1024)
    #random.seed(318)
    print("Setting up Evolution of bytebeats!")
    # Small population; the hall of fame keeps the 3 best individuals ever seen.
    pop = toolbox.population(n=20)
    hof = tools.HallOfFame(3)
    stats = tools.Statistics(lambda ind: ind.fitness.values)
    stats.register("avg", np.mean)
    stats.register("std", np.std)
    stats.register("min", np.min)
    stats.register("max", np.max)
    print("Starting EA Simple")
    # crossover prob 0.5, mutation prob 0.2, 100 generations
    algorithms.eaSimple(pop, toolbox, 0.5, 0.2, 100, stats, halloffame=hof)
    print("Finished Evolution, now saving hall of fame.")
    for index, indiv in enumerate(hof.items):
        title = "halloffame-" + str(index)
        output_beat_to_file(title,indiv) # output to files!
        print_image(indiv,title)
        #output_beat_to_std_out(indiv) # output to standard output!
    #print("Done saving hall of fame.")
    return pop, hof, stats

if __name__ == "__main__":
    main()
|
19,822 | fd2b5d776ddb4b661afefb19fdd1abcf85bd9fc5 | import string
class Solution:
    """LeetCode 171: convert an Excel column title (e.g. 'AB') to its number."""

    def titleToNumber(self, s: str) -> int:
        """Treat the title as a base-26 numeral with digits A=1 .. Z=26."""
        value_of = {ch: i + 1 for i, ch in enumerate(string.ascii_uppercase)}
        total = 0
        # Horner-style left-to-right accumulation.
        for ch in s:
            total = total * 26 + value_of[ch]
        return total
|
19,823 | 96c90d30096df1a80c747a69b78f966e5da122f7 | """
Copyright 2018 IBM Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import ast
from ..parser.stanListener import stanListener
import astor
import astpretty
import torch
import ipdb
# When imported as part of the package (dotted __name__), pull in the IR node
# definitions; running this module standalone is unsupported.
if __name__ is not None and "." in __name__:
    from .ir import *
    from .ir2python import *
else:
    assert False
def gatherChildrenIRList(ctx):
    """Concatenate the `ir` lists of every child of *ctx* that carries one."""
    if ctx.children is None:
        return []
    collected = []
    for child in ctx.children:
        child_ir = getattr(child, 'ir', None)
        if child_ir is not None:
            collected += child_ir
    return collected
def gatherChildrenIR(ctx):
    """List the `ir` value of every child of *ctx* that has a non-None one."""
    if ctx.children is None:
        return []
    return [child.ir for child in ctx.children
            if getattr(child, 'ir', None) is not None]
def is_active(f):
    """True when *f* is a rule accessor and calling it yields a match."""
    if f is None:
        return False
    return f() is not None
def idxFromExprList(exprList):
    """A single index expression stays bare; several become one Tuple node."""
    if len(exprList) != 1:
        return Tuple(
            exprs = exprList)
    return exprList[0]
class StanToIR(stanListener):
def __init__(self):
self.networks = None
self._to_model = []
def exitVariableDecl(self, ctx):
vid = ctx.IDENTIFIER().getText()
dims = ctx.arrayDims().ir if ctx.arrayDims() is not None else None
type_ = ctx.type_().ir
dims = None
if ctx.arrayDims() is not None and type_.dim is not None:
# Avi: to check
dims = Tuple(exprs = [ctx.arrayDims().ir, type_.dim])
elif ctx.arrayDims() is not None:
dims = ctx.arrayDims().ir
elif type_.dim is not None:
dims = type_.dim
init = ctx.expression().ir if is_active(ctx.expression) else None
ctx.ir = VariableDecl(
id = vid,
dim = dims,
type_ = type_,
init = init)
def exitType_(self, ctx):
ptype = ctx.primitiveType()
if ctx.primitiveType() is not None:
type_ = ctx.primitiveType().getText()
elif ctx.vectorType() is not None:
# TODO: differentiate row_vector
# type_ = ctx.vectorType().getText()
type_ = 'vector'
elif ctx.matrixType() is not None:
#type_ = ctx.matrixType().getText()
type_ = 'matrix'
else:
assert False, f"unknown type: {ptype.getText()}"
constraints = ctx.typeConstraints().ir if ctx.typeConstraints() else None
is_array = ctx.isArray is not None
dims = ctx.arrayDims().ir if ctx.arrayDims() is not None else None
ctx.ir = Type_(type_ = type_, constraints = constraints, is_array = is_array, dim = dims)
def exitTypeConstraints(self, ctx):
constraints_list = ctx.typeConstraintList()
if constraints_list:
ctx.ir = [x.ir for x in constraints_list.typeConstraint()]
def exitTypeConstraint(self, ctx):
id_ = ctx.IDENTIFIER()
if id_.getText() == 'lower':
sort = 'lower'
elif id_.getText() == 'upper':
sort = 'upper'
else:
assert False, f'unknown constraint: {id_.getText()}'
constant = ctx.atom().ir
constraint = Constraint(sort = sort, value = constant)
ctx.ir = constraint
def exitInferredArrayShape(self, ctx):
ctx.ir = AnonymousShapeProperty()
def exitArrayDim(self, ctx):
if is_active(ctx.expression):
ctx.ir = ctx.expression().ir
elif is_active(ctx.inferredArrayShape):
ctx.ir = ctx.inferredArrayShape().ir
def exitInferredTensorShape(self, ctx):
ctx.ir = AnonymousShapeProperty()
def exitArrayDimCommaList(self, ctx):
ctx.ir = gatherChildrenIR(ctx)
def exitArrayDims(self, ctx):
cl = ctx.arrayDimCommaList()
elements = cl.ir
if len(elements) == 1:
ctx.ir = elements[0]
else:
ctx.ir = Tuple(exprs = elements)
def exitParameterDecl(self, ctx):
if is_active(ctx.variableDecl):
ctx.ir = ctx.variableDecl().ir
else: # Could be more defensive
pass
def exitParameterDeclsOpt(self, ctx):
ctx.ir = gatherChildrenIR(ctx)
def exitVariableDeclsOpt(self, ctx):
ctx.ir = gatherChildrenIR(ctx)
# Vector, matrix and array expressions (section 4.2)
def exitConstant(self, ctx):
if ctx.IntegerLiteral() is not None:
f = int
elif ctx.RealLiteral() is not None:
f = float
else:
assert False, "Unknonwn literal"
ctx.ir = Constant(value = f(ctx.getText()))
def exitVariable(self, ctx):
ctx.ir = Variable(id = ctx.getText())
def exitIndexExpression(self, ctx):
if is_active(ctx.expressionCommaListOpt):
ctx.ir = ctx.expressionCommaListOpt().ir
else:
assert False, "Unknown index expression:{}.".format(ctx.getText())
def exitAtom(self, ctx):
if is_active(ctx.constant):
ctx.ir = ctx.constant().ir
elif is_active(ctx.variable):
ctx.ir = ctx.variable().ir
elif is_active(ctx.expression):
ctx.ir = ctx.expression().ir
elif is_active(ctx.atom) and is_active(ctx.indexExpression):
name = ctx.atom().ir
index = ctx.indexExpression().ir
ctx.ir = Subscript(id = name, index = index)
elif is_active(ctx.netLValue):
ctx.ir = ctx.netLValue().ir
elif is_active(ctx.variableProperty):
ctx.ir = ctx.variableProperty().ir
else:
assert False, "Not yet implemented atom: {}".format(ctx.getText())
def exitExpression(self, ctx):
if is_active(ctx.atom):
ctx.ir = ctx.atom().ir
return
if is_active(ctx.callExpr):
ctx.ir = ctx.callExpr().ir
return
if ctx.TRANSPOSE_OP() is not None:
assert False, "Not yet implemented"
elif ctx.e1 is not None and ctx.e2 is not None:
self._exitBinaryExpression(ctx)
elif ctx.e1 is not None:
if is_active(ctx.PLUS_OP):
op = UPlus()
elif is_active(ctx.MINUS_OP):
op = UMinus()
elif is_active(ctx.NOT_OP):
op = UNot()
else:
assert False, f'Unknown operator: {ctx.getText()}'
ctx.ir = UnaryOperator(
value = ctx.e1.ir,
op = op)
else:
text = ctx.getText()
assert False, "Not yet implemented: {}".format(text)
def _exitBinaryExpression(self, ctx):
left = ctx.e1.ir
right = ctx.e2.ir
if is_active(ctx.LEFT_DIV_OP):
assert False, "Not yet implemented"
mapping = {
ctx.PLUS_OP : Plus,
ctx.MINUS_OP : Minus,
ctx.POW_OP : Pow,
ctx.OR_OP : Or,
ctx.AND_OP : And,
ctx.GT_OP : GT,
ctx.LT_OP : LT,
ctx.GE_OP : GE,
ctx.LE_OP : LE,
ctx.EQ_OP : EQ,
ctx.DOT_DIV_OP : DotDiv,
ctx.DIV_OP : Div,
ctx.DOT_MULT_OP : DotMult,
ctx.MULT_OP : Mult}
op = None
for src in mapping:
if is_active(src):
op = mapping[src]()
break
if op is not None:
ctx.ir = BinaryOperator(left = left,
right = right,
op = op)
elif ctx.e3 is not None:
false = ctx.e3.ir
ctx.ir = ConditionalStmt(test = left,
true = right,
false = false)
else:
text = ctx.getText()
assert False, "Not yet implemented: {}".format(text)
def exitExpressionCommaList(self, ctx):
## TODO: check wheter we want to build a list of statements
## or a List node
ctx.ir = gatherChildrenIR(ctx)
def exitExpressionCommaListOpt(self, ctx):
ir = gatherChildrenIRList(ctx)
if len(ir) == 1:
ctx.ir = ir[0]
else:
ctx.ir = Tuple(exprs = ir)
# Statements (section 5)
# Assignment (section 5.1)
def exitLvalue(self, ctx):
id = Variable(ctx.IDENTIFIER().getText())
if ctx.expressionCommaList() is not None:
idx = idxFromExprList(ctx.expressionCommaList().ir)
ctx.ir = Subscript(id = id, index = idx)
else:
ctx.ir = id
def exitAssignStmt(self, ctx):
lvalue = ctx.lvalue().ir
expr = ctx.expression().ir
if ctx.op is not None:
op = None
if ctx.PLUS_EQ() is not None:
op = Plus()
if ctx.MINUS_EQ() is not None:
op = Minus()
if ctx.MULT_EQ() is not None:
op = Mult()
if ctx.DOT_MULT_EQ() is not None:
op = DotMult()
if ctx.DIV_EQ() is not None:
op = Div()
if ctx.DOT_DIV_EQ() is not None:
op = DotDiv()
if op:
expr = BinaryOperator(left = lvalue, op = op, right = expr)
ctx.ir = AssignStmt(
target = lvalue,
value = expr)
# Sampling (section 5.3)
def exitLvalueSampling(self, ctx):
if is_active(ctx.lvalue):
ctx.ir = ctx.lvalue().ir
elif is_active(ctx.expression):
ctx.ir = ctx.expression().ir
elif is_active(ctx.netLValue):
ctx.ir = ctx.netLValue().ir
else:
assert False
def exitNetParam(self, ctx):
ids = [ctx.IDENTIFIER().getText()]
if ctx.netParam():
ir = ctx.netParam()[0].ir
ids.extend(ir)
ctx.ir = ids
def exitNetworksBlock(self, ctx):
ops = ctx.netVariableDeclsOpt()
decls = [x.ir for x in ops.netVariableDecl()]
nets = NetworksBlock(decls = decls)
self.networks = nets
ctx.ir = nets
def exitNetClass(self, ctx):
ctx.ir = ctx.getText()
exitNetName = exitNetClass
def exitNetVariableDecl(self, ctx):
netCls = ctx.netClass().ir
name = ctx.netName().ir
parameters = []
ctx.ir = NetDeclaration(name = name, cls = netCls, \
params = parameters)
def exitNetParamDecl(self, ctx):
netName = ctx.netName().getText()
if self.networks is not None:
nets = [x for x in self.networks.decls if x.name == netName]
if len(nets) == 1:
nets[0].params.append(ctx.netParam().ir)
elif len(nets) > 1:
raise AlreadyDeclaredException(netName)
else:
raise UndeclaredNetworkException(netName)
else:
raise UndeclaredNetworkException(netName)
def exitNetLValue(self, ctx):
name = ctx.netName().getText()
ids = ctx.netParam().ir
ctx.ir = NetVariable(name = name, ids = ids)
def exitVariableProperty(self, ctx):
property = ctx.IDENTIFIER().getText()
if is_active(ctx.netLValue):
var = ctx.netLValue().ir
cls = NetVariableProperty
elif is_active(ctx.variable):
var = ctx.variable().ir
cls = VariableProperty
else:
assert False, "Not yet implemented."
ctx.ir = cls(var = var, prop= property)
def exitSamplingStmt(self, ctx):
lvalue = ctx.lvalueSampling().ir
if ctx.PLUS_EQ() is not None:
assert False, 'Not yet implemented'
elif ctx.truncation() is not None:
assert False, 'Not yet implemented'
else:
id = ctx.IDENTIFIER()[0].getText()
exprList = ctx.expressionCommaList().ir
ctx.ir = SamplingStmt(target = lvalue,
id = id,
args = exprList)
# For loops (section 5.4)
    def exitForStmt(self, ctx):
        """Build a ForStmt IR node from ``for (id in from[:to]) body``."""
        id = ctx.IDENTIFIER().getText()
        # An empty loop body produces a statement node without an .ir attribute.
        body = ctx.statement().ir if hasattr(ctx.statement(), 'ir') else None
        atom = ctx.atom()
        from_ = atom[0].ir
        # Single-atom form iterates over a container: no explicit upper bound.
        to_ = atom[1].ir if len(atom) > 1 else None
        ctx.ir = ForStmt(id = id,
                         from_ = from_,
                         to_ = to_,
                         body = body)
# Conditional statements (section 5.5)
def exitConditionalStmt(self, ctx):
test = ctx.expression().ir
false = ctx.s2.ir if ctx.s2 is not None else None
ctx.ir = ConditionalStmt(
test = test,
true = ctx.s1.ir,
false = false,
)
# While loops (section 5.6)
def exitWhileStmt(self, ctx):
expr = ctx.expression().ir
stmt = ctx.statement().ir
ctx.ir = WhileStmt(
test = expr,
body = stmt
)
# Blocks (section 5.7)
def exitBlockStmt(self, ctx):
body = gatherChildrenIRList(ctx)
ctx.ir = BlockStmt(body)
# Functions calls (sections 5.9 and 5.10)
def exitCallExpr(self, ctx):
id = ctx.IDENTIFIER().getText()
args = ctx.expressionOrStringCommaList().ir
ctx.ir = CallStmt(id = id, args = args)
def exitCallStmt(self, ctx):
ctx.ir = ctx.callExpr().ir
def exitExpressionOrString(self, ctx):
if ctx.expression() is not None:
ctx.ir = ctx.expression().ir
else:
ctx.ir = Str(value=ctx.getText())
def exitExpressionOrStringCommaList(self, ctx):
ir = gatherChildrenIR(ctx)
elements = ir if ir is not None else []
ctx.ir = List(elements = elements)
# statements
    def exitStatement(self, ctx):
        """Propagate the IR of whichever statement alternative matched.

        NOTE(review): the alternatives appear to be mutually exclusive in the
        grammar, so at most one accessor below is non-None — confirm; if two
        ever matched, the last check would silently win.
        """
        if ctx.assignStmt() is not None:
            ctx.ir = ctx.assignStmt().ir
        if ctx.samplingStmt() is not None:
            ctx.ir = ctx.samplingStmt().ir
        if ctx.incrementLogProbStmt() is not None:
            ctx.ir = ctx.incrementLogProbStmt().ir
        if ctx.forStmt() is not None:
            ctx.ir = ctx.forStmt().ir
        if ctx.conditionalStmt() is not None:
            ctx.ir = ctx.conditionalStmt().ir
        if ctx.whileStmt() is not None:
            ctx.ir = ctx.whileStmt().ir
        if ctx.blockStmt() is not None:
            ctx.ir = ctx.blockStmt().ir
        if ctx.callStmt() is not None:
            ctx.ir = ctx.callStmt().ir
        if ctx.BREAK() is not None:
            ctx.ir = BreakStmt()
        if ctx.CONTINUE() is not None:
            ctx.ir = ContinueStmt()
def exitIncrementLogProbStmt(self, ctx):
ctx.ir = SamplingFactor(target=ctx.expression().ir)
def exitStatementsOpt(self, ctx):
ctx.ir = gatherChildrenIR(ctx)
# Program blocks (section 6)
def exitDataBlock(self, ctx):
body = gatherChildrenIRList(ctx)
for ir in body:
if ir.is_variable_decl():
ir.set_data()
ctx.ir = Data(body = body)
def exitTransformedDataBlock(self, ctx):
body = gatherChildrenIRList(ctx)
for ir in body:
if ir.is_variable_decl():
ir.set_transformed_data()
ctx.ir = TransformedData(body = body)
def code_block(self, ctx, cls):
body = gatherChildrenIRList(ctx)
ctx.ir = cls(body = body)
def exitParametersBlock(self, ctx):
self.code_block(ctx, Parameters)
for ir in ctx.ir.body:
if ir.is_variable_decl():
ir.set_parameters()
def exitTransformedParametersBlock(self, ctx):
body = gatherChildrenIRList(ctx)
for ir in body:
if ir.is_variable_decl():
ir.set_transformed_parameters()
self._to_model = body
ctx.ir = TransformedParameters(body = body)
def exitGuideBlock(self, ctx):
self.code_block(ctx, Guide)
def exitGuideParametersBlock(self, ctx):
self.code_block(ctx, GuideParameters)
def exitPriorBlock(self, ctx):
# TODO: unify gatherChildrenIRList: check for StatemetnsOpt
body = gatherChildrenIR(ctx)
ctx.ir = Prior(body = body)
def exitModelBlock(self, ctx):
body = gatherChildrenIRList(ctx)
ctx.ir = Model(body= self._to_model + body)
def exitGeneratedQuantitiesBlock(self, ctx):
body = gatherChildrenIRList(ctx)
for ir in body:
if ir.is_variable_decl():
ir.set_generated_quatities()
ctx.ir = GeneratedQuantities(body= body)
    def exitProgram(self, ctx):
        """Assemble the top-level Program node from every child block's IR."""
        body = []
        for child in ctx.children:
            # Terminal tokens and empty blocks carry no IR; keep real blocks only.
            if hasattr(child, 'ir') and child.ir is not None:
                body.append(child.ir)
        ctx.ir = Program(body = body)
|
19,824 | 82c44de7183f973f0de6d2281675b0160e5cda41 | from __future__ import division, absolute_import, print_function
import numpy as np
import tensorflow as tf
# BUG FIX: "Float32" is not a valid numpy dtype string (dtype names are
# lower-case); np.array(..., dtype="Float32") raises TypeError.
data = np.array([[1, 2], [3, 4]], dtype="float32")
# TF1-style graph: a placeholder plus a random tensor, summed element-wise.
inputs = tf.placeholder(tf.float32, (2, 2), name="input")
x = tf.random_uniform([2, 2])
output = inputs + x
with tf.Session() as sess:
    print(sess.run(output, feed_dict={inputs: data}))
    # Serialize the graph for later inspection/conversion.
    tf.train.write_graph(tf.get_default_graph(), "my-model", "SimpleRandomUniform.pb", as_text=False)
|
19,825 | 0a47960c132b0a66e1ba390b1149da65214c749b | __title__ = 'django-imagekit'
__author__ = 'Justin Driscoll, Bryan Veloso, Greg Newman, Chris Drackett, Matthew Tretter, Eric Eldredge'
__version__ = (1, 1, 0, 'final', 0)
__license__ = 'BSD'
|
19,826 | 0360369c46ef146d52bd377c56e245cf632fd7b2 | #!/usr/bin/python
import MySQLdb
import getpass
import sys
import telnetlib
import re
import socket
USER = "****"
PASSWD = "****"
PATH = "config_files"
def checkTelnetOpen(host, port):
    # Return 1 if TCP `port` on `host` accepts connections, else 0.
    # Diagnostics go to stderr.  NOTE(review): the socket is never closed;
    # consider sock.close() after connect_ex.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    result = sock.connect_ex((host, port))
    if result == 0:
        # print "Port is open"
        print >>sys.stderr, "host telnet port is open ", host
        return 1
    else:
        print >>sys.stderr, "fatal error host telnet disable ", host
        return 0
def telnetGetConfig(host, user, passwd):
try:
#tn = telnetlib.Telnet(host)
print "**"
tn = telnetlib.Telnet(host, timeout=10)
tn.set_debuglevel(1)
try:
tn.read_until("login:")
tn.write(user + "\n")
tn.read_until("Password:")
tn.write(PASSWD + "\n")
except EOFError:
print "Authentication to "+ self.telnet_host+" failed.\n"
return None
tn.write("config show\n")
tn.read_until(b">")
tn.write("exit\n")
return tn.read_all()
except socket.timeout:
pass
return None
# Open database connection
db = MySQLdb.connect("****t","****","****","*****" )
# prepare a cursor object using cursor() method
cursor = db.cursor()
sql = "SELECT * FROM radioStatus \
WHERE cpe_status = 'online' \
AND (product='ePMP 1000' OR product='ePMP Force 180') ORDER BY ip "
try:
# Execute the SQL command
cursor.execute(sql)
# Fetch all the rows in a list of lists.
results = cursor.fetchall()
for row in results:
mac = row[0]
ip = row[1]
product = row[2]
product_type = row[3]
cpe_status = row[4]
# Now print fetched result
print "mac=%s,ip=%s,product=%s,type=%s, status=%s" % \
(mac, ip, product, product_type, cpe_status )
filename = "%s/%s.txt" % (PATH, ip)
fn = open(filename,"w")
if (checkTelnetOpen(ip, 23)):
resultado = telnetGetConfig(ip, USER, PASSWD)
print "llegara aca"
fn.write( resultado )
fn.close()
except:
pass
#print "Error: unable to fecth data"
# disconnect from server
db.close()
|
19,827 | befc00db8a7cc9626fe03c9d7c47e0b3d040f99e | import hashlib
import os
from typing import List
import psycopg2.extras
from psycopg2.extensions import connection, cursor
from arctic_tern.filename import parse_file_name, MigrationFile
def migrate(dir: str, conn: connection, schema: str = None):
    """Apply pending SQL migration scripts from *dir* to the database.

    Migrations already recorded in arctic_tern_migrations are skipped; new
    ones are executed and recorded, and everything is committed at the end.
    Assumes the recorded migrations and the on-disk files appear in the same
    order (the two sequences are walked in lockstep).
    """
    _prepare_meta_table(conn, schema)
    pm = _fetch_previous_migrations(_get_schema_cursor(conn, schema))
    pmi = iter(pm)
    try:
        # `cm` walks the already-applied migrations one at a time.
        cm = next(pmi)
    except StopIteration:
        cm = None
    for sql_file in _get_sql_files(dir):
        if sql_file.is_equal(cm):
            print('Skipping {}'.format(sql_file.path))
            try:
                cm = next(pmi)
            except StopIteration:
                cm = None
        else:
            curs = _get_schema_cursor(conn, schema)
            _execute_file(sql_file, curs)
            curs.close()
    # One commit covers every migration applied in this run.
    conn.commit()
def _execute_file(migration_file: MigrationFile, curs: cursor):
    """Execute one migration script and record it in arctic_tern_migrations."""
    with open(migration_file.path) as stream:
        curs.execute(stream.read())
    t = """INSERT INTO arctic_tern_migrations VALUES (%s, %s, %s, now())"""
    curs.execute(t, [migration_file.stamp, migration_file.name, migration_file.hash_])
def _get_sql_files(dir: str) -> List[MigrationFile]:
    """Scan *dir* and return a MigrationFile for each migration-named file.

    Files that parse_file_name rejects (falsy return) are ignored.  Each
    returned entry carries its absolute path and SHA3-224 content hash.
    """
    abs_dir = os.path.abspath(dir)
    file_list = []
    for fn in os.listdir(dir):
        file_info = parse_file_name(fn)
        if file_info:
            full_path = os.path.join(abs_dir, fn)
            file_info.path = full_path
            file_info.hash_ = _hash(full_path)
            file_list.append(file_info)
    return file_list
def _hash(file: str) -> str:
sha3 = hashlib.sha3_224()
with open(file, "rb") as stream:
for chunk in iter(lambda: stream.read(65536), b""):
sha3.update(chunk)
return sha3.hexdigest()
def _prepare_meta_table(conn: connection, schema: str):
    """Create the bookkeeping table if missing (in *schema*, default public).

    NOTE(review): the schema name is interpolated with str.format rather than
    parameterized; only trusted schema names must ever reach this function.
    """
    create = """CREATE TABLE IF NOT EXISTS {}.arctic_tern_migrations
    (
      stamp bigint NOT NULL PRIMARY KEY,
      file_name varchar,
      sha3 char(56),
      migrate_time timestamptz
    );"""
    c2 = create.format(schema or 'public')
    with conn.cursor() as curs:  # type: cursor
        curs.execute(c2)
def _fetch_previous_migrations(curs: cursor):
    """Return a MigrationFile for every row already recorded as applied."""
    curs.execute("""SELECT * FROM arctic_tern_migrations""")
    # Only the first three columns matter; migrate_time is ignored.
    return [MigrationFile(row[0], row[1], row[2]) for row in curs]
def _execute_with_schema(conn: connection, schema: str, *args, **kwargs):
with conn.cursor() as curs: # type: cursor
if schema:
curs.execute('SET search_path TO %s', [schema])
curs.execute(*args, **kwargs)
def _get_schema_cursor(conn: connection, schema: str = None) -> cursor:
    """Open a cursor, pointing search_path at *schema* when one is given."""
    new_cursor = conn.cursor()
    if schema:
        new_cursor.execute('SET search_path TO %s', [schema])
    return new_cursor
if __name__ == "__main__":
    # BUG FIX: migrate() takes (dir, conn, schema); it was being called with
    # raw psycopg2 connection kwargs, which raised TypeError immediately.
    conn = psycopg2.connect(dbname='mig', user='postgres', password='root')
    migrate('../tests/scripts', conn)
    migrate('../tests/scripts', conn, schema='tern')
    # print(_get_sql_files('../tests/scripts'))
|
19,828 | 17ef9a2c66e988abf72637495db3c1132bef9319 | from django.urls import path
from .views import project_assessment_list, project_assessment, project_assessment_result
from .views import registrant_assessment_list, registrant_assessment, registrant_assessment_result
from .views import team_assessment, team_assessment_complete
from .views import team_assessment_results_calculate, team_assessments_result
app_name = 'assessments'
urlpatterns = [
path('project/list/', project_assessment_list, name='project_assessment_list'),
path('project/idea/<int:idea_id>/score/', project_assessment, name='project_assessment'),
path('project/idea/<int:idea_id>/results/', project_assessment_result, name='project_assessment_result'),
path('registrant/list/', registrant_assessment_list, name='registrant_assessment_list'),
path('registrant/<int:registrant_id>/score/', registrant_assessment, name='registrant_assessment'),
path('registrant/<int:registrant_id>/results/', registrant_assessment_result, name='registrant_assessment_result'),
path('team/<int:team_id>/score/', team_assessment, name='team_assessment'),
path('team/<int:team_id>/score/complete/', team_assessment_complete, name='team_assessment_complete'),
path('team/result/calculate/', team_assessment_results_calculate, name='team_assessment_results_calculate'),
path('team/result/', team_assessments_result, name='team_assessments_result'),
]
|
19,829 | 8ed109d2a3573a7ec8afd9d555d48ab6ecebeec3 | import random
import time
from matplotlib import pyplot as plt
class Env:
    """Transition model for a 7x10 windy gridworld with king's moves.

    Actions 0-7 map to N, W, S, E, NE, NW, SW, SE.  Wind pushes the agent
    upward by the amount listed for the column it is leaving, and the
    resulting position is clamped to the grid.
    """

    # action -> (row delta, col delta); row 0 is the top of the grid.
    _MOVES = {0: (-1, 0), 1: (0, -1), 2: (1, 0), 3: (0, 1),
              4: (-1, 1), 5: (-1, -1), 6: (1, -1), 7: (1, 1)}

    def __init__(self):
        self.wind = [0, 0, 0, 1, 1, 1, 2, 2, 1, 0]

    def action_result(self, initial_location, action_taken):
        """Return [row, col] after wind, the chosen move, and clamping."""
        row, col = initial_location
        row -= self.wind[col]
        d_row, d_col = self._MOVES.get(action_taken, (0, 0))
        row = min(6, max(0, row + d_row))
        col = min(9, max(0, col + d_col))
        return [row, col]
class State:
    """Per-cell action values: one Q estimate for each of the 8 king moves."""

    def __init__(self, row, col):
        self.values = [0 for _ in range(8)]
        self.coordinates = [row, col]
class Agent:
    def __init__(self):
        """Build the 7x10 grid of States and place the agent at start (3, 0)."""
        self.model = Env()
        self.row = 3
        self.col = 0
        self.episode_complete = False
        self.states = []
        the_row = []
        for i in range(7):
            for j in range(10):
                the_row.append(State(i, j))
            self.states.append(the_row)
            the_row = []
        # Both ends of the current transition start out co-located.
        self.current_state = self.states[self.row][self.col]
        self.next_state = self.states[self.row][self.col]
    def episode(self, step_size, gamma, display_actions):
        """Run one episode from (3, 0) to the goal (3, 7) with eps-greedy control.

        Action values are updated with an expected-SARSA style backup over the
        eight king's-move actions (reward -1 per step).  When display_actions
        is True, the executed moves are written to the module-level `file`
        handle as compass letters (N/S/E/W).
        """
        action_taken = 0
        while not self.episode_complete:
            if self.col == 7 and self.row == 3:
                # Goal reached: flag completion and reset to the start state.
                self.episode_complete = True
                self.row = 3
                self.col = 0
                self.current_state = self.states[self.row][self.col]
                self.next_state = self.states[self.row][self.col]
            else:
                epsilon = 0.1
                if random.random() <= epsilon:
                    action_taken = random.randint(0, 7)
                else:
                    action_taken = self.current_state.values.index(max(self.current_state.values))
                if display_actions:
                    # print(self.current_state.coordinates,self.next_state.coordinates)
                    # Log the previous transition as compass letters.
                    x_change = int(self.next_state.coordinates[1] - self.current_state.coordinates[1])
                    if x_change < 0:
                        file.write("W ")
                    elif x_change > 0:
                        file.write("E ")
                    y_change = int(self.next_state.coordinates[0] - self.current_state.coordinates[0])
                    if y_change < 0:
                        for change in range(-y_change):
                            file.write("N ")
                    elif y_change > 0:
                        for change in range(y_change):
                            file.write("S ")
                final_loc = self.model.action_result([self.row, self.col], action_taken)
                self.row = final_loc[0]
                self.col = final_loc[1]
                self.next_state = self.states[self.row][self.col]
                # Expected eps-greedy value of the current and next states.
                q1 = epsilon * sum(self.current_state.values)/8 + (1 - epsilon) * max(self.current_state.values)
                q2 = gamma * (epsilon * sum(self.next_state.values)/8 + (1 - epsilon) * max(self.next_state.values))
                self.current_state.values[action_taken] += step_size * (-1 + q2 - q1)
                self.current_state = self.states[self.row][self.col]
        # Re-arm the flag so the next call runs a fresh episode.
        self.episode_complete = False
# Train for 150 episodes, then log one final episode's moves to a text file
# and plot episodes completed against (scaled) elapsed wall-clock time.
file = open(r"King's Windy Solution.txt", "w")
agent = Agent()
step_size = 0.5
start_time = time.time()
times = []
for i in range(150):
    # NOTE(review): 500000 appears to convert wall-clock seconds into
    # plot-friendly "time steps" — confirm the intended unit.
    times.append(500000 * (time.time() - start_time))
    agent.episode(0.5, 1, False)
agent.episode(0.5, 1, True)
episodes = list(range(0, 150, 1))
plt.xlabel("Time Steps")
plt.ylabel("Episodes")
plt.plot(times, episodes)
plt.savefig("King's Windy World.png", dpi=300, bbox_inches='tight')
|
19,830 | 33a7a24f75a046eb40f8d0edb14bed355961c34b | x = 10
if x == 0:
print ("x is zero")
else:
print ("x is ",x)
print ("All done.") |
19,831 | 5595422615dddbf1f80847001cf414048ea9948f | import importlib #画图的包
import sys
from scipy.misc import imread
importlib.reload(sys)#我不知道这是你干嘛的
from wordcloud import WordCloud,STOPWORDS#要安装wordcloud库
import jieba.analyse # 导入结巴分词
import numpy as np # numpy
from PIL import Image # 图片处理库PIL
import matplotlib.pyplot as plt
def handle(filename, stopword):
    """Read a text file, extract top keywords with jieba, render a word cloud."""
    with open(filename, 'rb') as f:  # opened in binary mode
        data = f.read()
        print(data)
    wordlist = jieba.analyse.extract_tags(data, topK=300)  # top 300 terms by TF-IDF weight
    wordStr = " ".join(wordlist)
    print(wordStr)
    hand = np.array(Image.open('wang_white.png'))  # mask image: words fill its shape
    # Word-cloud rendering parameters.
    my_cloudword = WordCloud(
        # wordcloud configuration
        width=1024,  # canvas width
        height=768,  # canvas height
        background_color = 'white',  # background colour
        mask = hand,  # background/mask image
        max_words = 300,  # maximum number of words displayed
        stopwords = stopword,  # stop words
        max_font_size = 80,  # largest font size
        font_path='simsun.ttc',  # CJK font; without it Chinese glyphs render as boxes
        random_state=5,  # how many random colour schemes to draw from
    )
    my_cloudword.generate(wordStr)  # build the image
    my_cloudword.to_file('Leehom.png')  # save it
    plt.imshow(my_cloudword)  # display the word cloud
    plt.axis('off')  # hide the x/y axes
    plt.show()  # show the figure
if __name__ == '__main__':
handle('yiran2.txt', STOPWORDS) #你觉得困困难的部分应该是词的部分,随便一个文本的TXT就行了。你可以简单的复制一段话到txt文件就可以了。
|
19,832 | 250c5d44b708eee0d91f9e7bfd5111a612c65203 | # Generated by Django 2.2.4 on 2019-08-10 11:48
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('distribution', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='course',
name='levelname',
field=models.CharField(blank=True, max_length=512, null=True),
),
]
|
19,833 | ed3491d7ebad1728890073901a763b7f6e4b89d5 | <<<<<<< HEAD
# Given a string, return a "rotated left 2" version where the first 2 chars
# are moved to the end. The string length will be at least 2.
def left2(str):
return str[2:] + str[:2]
=======
# Given a String, a "a rotated left 2" verison where the first 2 chars
# are moved to the end. The string length will be at least 2.
def left2(strn):
return strn[2:] + strn [:2]
>>>>>>> master
|
19,834 | d4a36d705dabb77053a68e761a426557b48fd09a | # --------------------------------------------------------
# Fast/er R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
def xyxy_to_xywh(xyxy):
    """Convert [x1 y1 x2 y2] box format to [x1 y1 w h] format."""
    if isinstance(xyxy, (list, tuple)):
        # A single box supplied as four coordinates.
        assert len(xyxy) == 4
        x1, y1, x2, y2 = xyxy
        return (x1, y1, x2 - x1 + 1, y2 - y1 + 1)
    elif isinstance(xyxy, np.ndarray):
        # A batch of boxes as rows of a 2D array.
        return np.hstack((xyxy[:, 0:2], xyxy[:, 2:4] - xyxy[:, 0:2] + 1))
    else:
        raise TypeError("Argument xyxy must be a list, tuple, or numpy array.")
def expand_boxes(boxes, scale):
    """Expand an array of boxes by a given scale about their centers.

    Accepts 4-column [x1 y1 x2 y2] boxes (corners recomputed around the
    center) or 5-column [cx cy w h a]-style boxes (width/height scaled).
    """
    box_dim = boxes.shape[1]
    if box_dim == 4:
        centers = (boxes[:, 0:2] + boxes[:, 2:4]) * 0.5
        half_sizes = (boxes[:, 2:4] - boxes[:, 0:2]) * 0.5 * scale
        boxes_exp = np.zeros(boxes.shape)
        boxes_exp[:, 0:2] = centers - half_sizes
        boxes_exp[:, 2:4] = centers + half_sizes
    elif box_dim == 5:
        boxes_exp = boxes.copy()
        boxes_exp[:, 2:4] *= scale
    else:
        raise Exception("Unsupported box dimension: {}".format(box_dim))
    return boxes_exp
|
19,835 | 3ae08f09ece9a3dbb0d10686931509d342bc20ce | import math
dataInput = [3,225,1,225,6,6,1100,1,238,225,104,0,1101,91,67,225,1102,67,36,225,1102,21,90,225,2,13,48,224,101,-819,224,224,4,224,1002,223,8,223,101,7,224,224,1,223,224,223,1101,62,9,225,1,139,22,224,101,-166,224,224,4,224,1002,223,8,223,101,3,224,224,1,223,224,223,102,41,195,224,101,-2870,224,224,4,224,1002,223,8,223,101,1,224,224,1,224,223,223,1101,46,60,224,101,-106,224,224,4,224,1002,223,8,223,1001,224,2,224,1,224,223,223,1001,191,32,224,101,-87,224,224,4,224,102,8,223,223,1001,224,1,224,1,223,224,223,1101,76,90,225,1101,15,58,225,1102,45,42,224,101,-1890,224,224,4,224,1002,223,8,223,1001,224,5,224,1,224,223,223,101,62,143,224,101,-77,224,224,4,224,1002,223,8,223,1001,224,4,224,1,224,223,223,1101,55,54,225,1102,70,58,225,1002,17,80,224,101,-5360,224,224,4,224,102,8,223,223,1001,224,3,224,1,223,224,223,4,223,99,0,0,0,677,0,0,0,0,0,0,0,0,0,0,0,1105,0,99999,1105,227,247,1105,1,99999,1005,227,99999,1005,0,256,1105,1,99999,1106,227,99999,1106,0,265,1105,1,99999,1006,0,99999,1006,227,274,1105,1,99999,1105,1,280,1105,1,99999,1,225,225,225,1101,294,0,0,105,1,0,1105,1,99999,1106,0,300,1105,1,99999,1,225,225,225,1101,314,0,0,106,0,0,1105,1,99999,1008,677,677,224,102,2,223,223,1005,224,329,1001,223,1,223,1108,677,226,224,1002,223,2,223,1006,224,344,101,1,223,223,107,677,226,224,1002,223,2,223,1006,224,359,101,1,223,223,108,677,677,224,1002,223,2,223,1006,224,374,1001,223,1,223,108,226,677,224,1002,223,2,223,1006,224,389,101,1,223,223,7,226,677,224,102,2,223,223,1006,224,404,1001,223,1,223,1108,677,677,224,1002,223,2,223,1005,224,419,101,1,223,223,1008,226,677,224,102,2,223,223,1006,224,434,101,1,223,223,107,226,226,224,102,2,223,223,1005,224,449,1001,223,1,223,1007,677,677,224,1002,223,2,223,1006,224,464,1001,223,1,223,1007,226,226,224,1002,223,2,223,1005,224,479,101,1,223,223,1008,226,226,224,102,2,223,223,1006,224,494,1001,223,1,223,8,226,226,224,102,2,223,223,1006,224,509,101,1,223,223,1107,677,677,224,102,2,223,223,1005,224,524,1001,223,1,223,1108,226,677,224,1002,223,
2,223,1006,224,539,101,1,223,223,1107,677,226,224,1002,223,2,223,1006,224,554,101,1,223,223,1007,677,226,224,1002,223,2,223,1005,224,569,101,1,223,223,7,677,226,224,1002,223,2,223,1006,224,584,101,1,223,223,107,677,677,224,1002,223,2,223,1005,224,599,1001,223,1,223,8,226,677,224,1002,223,2,223,1005,224,614,101,1,223,223,7,677,677,224,1002,223,2,223,1006,224,629,1001,223,1,223,1107,226,677,224,1002,223,2,223,1006,224,644,101,1,223,223,108,226,226,224,102,2,223,223,1005,224,659,1001,223,1,223,8,677,226,224,1002,223,2,223,1005,224,674,101,1,223,223,4,223,99,226]
sample = [3,0,4,0,99]
sample2 = [1002,4,3,4,33]
test1 = [3,9,8,9,10,9,4,9,99,-1,8]
test2 = [3,9,7,9,10,9,4,9,99,-1,8]
test3 = [3,3,1108,-1,8,3,4,3,99]
test4 = [3,3,1107,-1,8,3,4,3,99]
test5 = [3,12,6,12,15,1,13,14,13,4,13,99,-1,0,1,9]
test6 = [3,3,1105,-1,9,1101,0,0,12,4,12,99,1]
test7 = [3,21,1008,21,8,20,1005,20,22,107,8,21,20,1006,20,31,1106,0,36,98,0,0,1002,21,125,20,4,20,1105,1,46,104,999,1105,1,46,1101,1000,1,20,4,20,1105,1,46,98,99]
def getValue(data, index, modes, offset):
    """Resolve the intcode parameter at instruction *index* + *offset*.

    *modes* is the decimal mode block of the opcode: digit (offset-1) selects
    immediate mode (1, the parameter is the value) or position mode
    (0, the parameter is an address).
    """
    mode = (modes // 10 ** (offset - 1)) % 10
    address = index + offset if mode == 1 else data[index + offset]
    return data[address]
def opcode1(data, i, modes):
#print(i)
valA = getValue(data, i, modes, 1)
valB = getValue(data, i, modes, 2)
target = data[i + 3]
data[target] = valA + valB
#print(str(i) + ") modes " + str(modes) + ", instruction " + str(data[i]) + " , valA: " + str(valA) + ", valB " + str(valB) + ", target: " + str(target))
return 4
def opcode2(data, i, modes):
valA = getValue(data, i, modes, 1)
valB = getValue(data, i, modes, 2)
target = data[i + 3]
data[target] = valA * valB
#print(str(i) + ") modes " + str(modes) + ", instruction " + str(data[i]) + " , valA: " + str(valA) + ", valB " + str(valB) + ", target: " + str(target))
return 4
def opcode3(data, i, modes):
target = data[i + 1]
inp = int(input("Input:"))
#print("storing value " + str(inp) + " to location " + str(target))
data[target] = inp
return 2
def opcode4(data, i, modes):
valA = getValue(data, i, modes, 1)
print(str(valA))
return 2
def opcode5(data, i, modes):
valA = getValue(data, i, modes, 1)
valB = getValue(data, i, modes, 2)
#print(str(i) + ") modes " + str(modes) + ", instruction " + str(data[i]) + " , valA: " + str(valA) + ", valB " + str(valB))
if valA != 0:
return valB
return i + 3
def opcode6(data, i, modes):
valA = getValue(data, i, modes, 1)
valB = getValue(data, i, modes, 2)
if valA == 0:
return valB
return i + 3
def opcode7(data, i, modes):
    """less-than: write 1 to the target if param1 < param2, else 0; width 4."""
    target = data[i + 3]
    first = getValue(data, i, modes, 1)
    second = getValue(data, i, modes, 2)
    data[target] = 1 if first < second else 0
    return 4
def opcode8(data, i, modes):
    """equals: write 1 to the target if param1 == param2, else 0; width 4."""
    target = data[i + 3]
    first = getValue(data, i, modes, 1)
    second = getValue(data, i, modes, 2)
    data[target] = 1 if first == second else 0
    return 4
def taskA(data):
    """Run the intcode program *data* in place and return data[0].

    Dispatches on the low two digits of each instruction; the remaining
    digits are the parameter-mode block passed to the opcode handlers.
    Arithmetic/IO/compare opcodes return an instruction width to advance by;
    jump opcodes (5, 6) return the next instruction pointer directly.
    """
    i = 0
    while i < len(data):
        opcode = data[i]
        #print(str(i) + ") " + str(opcode))
        #print(data)
        modes = math.floor(opcode/100)
        opcode = opcode % 100
        if opcode == 99:
            break
        elif opcode == 1:
            i += opcode1(data, i, modes)
        elif opcode == 2:
            i += opcode2(data, i, modes)
        elif opcode == 3:
            i += opcode3(data, i, modes)
        elif opcode == 4:
            i += opcode4(data, i, modes)
        elif opcode == 5:
            i = opcode5(data, i, modes)
        elif opcode == 6:
            i = opcode6(data, i, modes)
        elif opcode == 7:
            i += opcode7(data, i, modes)
        elif opcode == 8:
            i += opcode8(data, i, modes)
        else:
            print("UNKNOWN OPCODE " + str(opcode) + " at position " + str(i))
            exit(1)
    return data[0]
def main():
taskA(dataInput)
#taskA(sample2)
main() |
19,836 | 7a3acb912f59fdb96b8c33f32a0c6687c9835212 | # Create your views here.
from django.shortcuts import render, redirect, get_object_or_404
from django.http import HttpResponse, HttpResponseRedirect
from django.core.mail import EmailMessage
from itertools import chain
from time import perf_counter
from django.template import loader
from django.urls import reverse
from django.views import generic
from django.utils import timezone
from django.conf import settings
from django.urls import reverse_lazy
from django.db.models import Q
from .forms import ContactForm
from django.core.mail import EmailMessage
MAPBOX_TOKEN = settings.MAPBOX_TOKEN
SM_LINKS = settings.SOCIAL_MEDIA_LINKS
def get_client_ip(request):
    """Best-effort client IP: last hop of X-Forwarded-For, else REMOTE_ADDR."""
    forwarded = request.META.get('HTTP_X_FORWARDED_FOR')
    if forwarded:
        return forwarded.split(',')[-1].strip()
    return request.META.get('REMOTE_ADDR')
def view_error(request, exception=None):
# make a redirect to homepage
return redirect('/')
def landing(request):
# if request.method == 'GET':
# try:
# query = request.GET.get('query')
# if query:
# redirect_url = reverse_lazy('home:search')
# extra_params = '?query_org=%s' % query
# full_redirect_url = '%s%s' % (redirect_url, extra_params)
# return HttpResponseRedirect( full_redirect_url )
# except Exception as e: print(e)
context = {'sms':SM_LINKS,
}
return render(request, 'home/landing.html',context)
def about(request):
context = {
}
return render(request, 'home/about.html',context)
def websites(request):
context = {
}
return render(request, 'home/websites.html',context)
def resume(request):
context = {
}
return render(request, 'home/resume.html',context)
def contact(request):
    """Show the contact form; on valid POST, email the message to the owner."""
    # BUG FIX: BadHeaderError was referenced in the except clause below but
    # never imported anywhere in the module (NameError when it fired).
    from django.core.mail import BadHeaderError

    if request.method == 'GET':
        form = ContactForm()
    else:
        form = ContactForm(request.POST)
        if form.is_valid():
            mail_subject = form.cleaned_data['subject']
            from_email = form.cleaned_data['from_email']
            message = form.cleaned_data['message']
            message += " from " + from_email
            try:
                email = EmailMessage(
                    mail_subject, message, to=['ben.truong.0324@gmail.com']
                )
                email.send()
            except BadHeaderError:
                return HttpResponse('Invalid header found.')
            return redirect('home:contactSuccess')
    # BUG FIX: pass the bound form instance (was the ContactForm class), so
    # validation errors are rendered after an invalid POST.
    return render(request, "home/contact.html"
                  , {'form': form}
                  )
def contactSuccess(request):
return render(request, 'home/contactSuccess.html') |
19,837 | 539abdcdb12a9b277854365fbfdbf9a740c6bfd7 | #!/usr/bin/python
#Filename:using_sys.py
import sys
#print 'sys argument nums:',count(sys.argv)
print 'The command line arguments are:'
for i in sys.argv:
print i
print '\n\nThe PYTHONPATH is',sys.path,'\n'
|
19,838 | 5f5dc6019cfd482065f224f5cd7669a0aab20ae1 |
import sys
# sys.path.append('./mod/')
# sys.path.append('.\\mod\\')
sys.path.insert(0, './mod')
import fileHandler
import Logger as audit
import random
# import logging
# FORMAT = '[%(levelname)s]%(asctime)s: %(message)s'
# logging.basicConfig(filename="./logs/samplelogs.log", level=logging.INFO, format=FORMAT)
# print(sys.path)
audit.setup_logging("./logs/")
audit.logging.debug(sys.path)
print('_' * 50)
print('')
print("Testing Custom Module Design")
#print('\n' * 2)
print('_' * 50)
fileHandler.ShowFile('README.md')
#strFileContent = fileHandler.LoadFile()
# print(strFileContent)
#fileHandler.CreateFile("File1.txt","My sample text\nand some more text")
#fileHandler.AppendFile("File1.txt","\nThis is new text appended to the file\nand some more text")
#MySearchResults = fileHandler.SearchFile("LICENSE","public")
# print(MySearchResults)
MySearchResults = fileHandler.SearchFile("File1.txt", "and")
# print(MySearchResults)
audit.logging.info(MySearchResults)
# fileHandler.DeleteFile("File1.txt")
fileHandler.LoadCSV("test.csv")
audit.logging.error("whoops")
audit.logging.warning("dont dare")
audit.logging.critical("Oh Christ!")
try:
x = 10
y = 0
Res = x / y
# except ZeroDivisionError:
except Exception:
audit.logging.exception("This is used in try catch scenarios!")
|
19,839 | 89bc8c1702624be83e8f1d09764ad13c711345d7 | class PackageManager:
def __init__(self, name=None, version=None):
self.name = name
self.version = version
def get_information(self):
print('name :', self.name)
print('version :', self.version)
pm = PackageManager('pip', 2.2)
pm.get_information()
print()
print(pm.name) |
19,840 | 64c627470e9c5c3958d2a4da37faa66ffb52bbce | #!/usr/bin/env python
class typeEnum:
scope, identifier = range(5, 15, 5)
class ObjDesc:
def __init__( self, type, name):
self.type = type
self.name = name
self.scope = None
self.items = None
def __repr__( self):
if self.type == typeEnum.scope:
if self.items:
return 'Scope: "%s" Items: %s ' % (self.name, len(self.items))
else:
return 'Scope: "%s" Empty' % self.name
def __iter__( self):
return ObjDescIterator( self)
class ObjDescIterator:
def __init__( self, obj):
self.obj = obj
def next( self):
if self.obj == None:
raise StopIteration
else:
t = self.obj
self.obj = self.obj.scope
return t
def newScope( name_):
return ObjDesc( type=typeEnum.scope, name=name_)
def newIdentifier( name_):
return ObjDesc( type=typeEnum.identifier, name=name_)
def buildTree( t):
t.items = [ newIdentifier('a'), newIdentifier('b'), newIdentifier('c')]
t.scope = newScope('locals')
t.scope.items = [ newIdentifier('d'), newIdentifier('e')]
def buildList( t):
t[0].items = [ newIdentifier('a'), newIdentifier('b'), newIdentifier('c')]
t.append( newScope('locals'))
t[1].items = [ newIdentifier('d'), newIdentifier('e')]
def scan( head):
print 'Head:'
for t in head:
print t
from itertools import ifilter
def findItem( head, name):
if list( ifilter( lambda x: x.name==name, head)):
print 'Found it'
tree = newScope('root')
buildTree( tree)
scan( tree)
listOfLists = [ newScope( 'root')]
buildList( listOfLists)
scan( listOfLists)
findItem( listOfLists, 'locals')
|
19,841 | e06c9d886645ce1366a723049105beec65393de6 | """
dev2 api schema
'dev2.baidu.com' api schema # noqa: E501
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import baiduads
from baiduads.fmdatareport.model.get_phone_recordings_request import GetPhoneRecordingsRequest
class TestGetPhoneRecordingsRequest(unittest.TestCase):
    """Unit-test stubs for GetPhoneRecordingsRequest."""

    def setUp(self):
        """No per-test fixtures are needed yet."""
        pass

    def tearDown(self):
        """Nothing to clean up."""
        pass

    def testGetPhoneRecordingsRequest(self):
        """Placeholder test for GetPhoneRecordingsRequest."""
        # FIXME: construct object with mandatory attributes with example values
        # model = GetPhoneRecordingsRequest() # noqa: E501
        pass
if __name__ == '__main__':
unittest.main()
|
19,842 | f6481127ceb9c3cdc22c0c767a3713379300833c | #Python 2/3 compatibility
from __future__ import print_function,division,absolute_import
from builtins import input,range
from six import iteritems
from ikdb import *
from ikdb import functionfactory
from klampt import *
import pkg_resources
if pkg_resources.get_distribution('klampt').version >= '0.7':
NEW_KLAMPT = True
from klampt.model import ik
from klampt.io import loader
from klampt.vis.glrobotprogram import *
from klampt.vis.glcommon import *
from klampt import PointPoser,TransformPoser
from klampt.model import collide
from klampt.math import se3
#patch to Klamp't 0.6.X
    class GLWidgetProgram(GLPluginProgram):
        """Compatibility shim: re-creates the Klampt 0.6.x GLWidgetProgram
        API on top of the 0.7+ GLPluginProgram/GLWidgetPlugin classes."""
        def __init__(self,world,name):
            GLPluginProgram.__init__(self,name)
            # All widget handling is delegated to a single GLWidgetPlugin.
            self.widgetPlugin = GLWidgetPlugin()
            self.setPlugin(self.widgetPlugin)
            # Expose the widget master like the 0.6.x class did.
            self.widgetMaster = self.widgetPlugin.klamptwidgetmaster
            self.world = world
        def display(self):
            # Draw plugin content first, then the world geometry on top.
            GLPluginProgram.display(self)
            self.world.drawGL()
else:
NEW_KLAMPT = False
from klampt import ik,loader
from klampt.glrobotprogram import *
from klampt import PointPoser,TransformPoser
from klampt import robotcollide as collide
from klampt import se3
import sys
import traceback
import numpy as np
#preload
from sklearn.neighbors import NearestNeighbors,BallTree
class IKDBVisualTester(GLWidgetProgram):
    """Interactive GL program for testing a ManagedIKDatabase.

    Right-clicking a robot link adds a point IK constraint (posed with a
    draggable widget); keyboard commands solve, save, and visualize the
    database.  Two worlds are kept: *visWorld* for display and
    *planningWorld* for collision/IK computations.
    """
    def __init__(self,visWorld,planningWorld,name="IK Database visual tester"):
        """Set up the IK database, collision checker, and an empty IKProblem
        with a collision-free feasibility test and joint-range cost."""
        GLWidgetProgram.__init__(self,visWorld,name)
        self.planningWorld = planningWorld
        self.collider = collide.WorldCollider(visWorld)
        self.ikdb = ManagedIKDatabase(planningWorld.robot(0))
        # Parallel lists: the poser widgets and, per widget, the index of
        # the IK objective it controls in self.ikProblem.objectives.
        self.ikWidgets = []
        self.ikIndices = []
        self.ikProblem = IKProblem()
        self.ikProblem.setFeasibilityTest('collisionFree',None)
        qmin,qmax = planningWorld.robot(0).getJointLimits()
        self.ikProblem.setCostFunction('jointRangeCost_dynamic',[qmin,qmax])
        self.drawDb = False        # draw database end-positions?
        self.continuous = False    # re-solve while dragging?
        self.reSolve = False       # a solve is pending for next display()
        self.currentConfig = self.world.robot(0).getConfig()
    def mousefunc(self,button,state,x,y):
        """On right-click (button 2) over a robot link, add a point IK
        constraint at the clicked point with a draggable PointPoser."""
        #Put your mouse handler here
        #the current example prints out the list of objects clicked whenever
        #you right click
        GLWidgetProgram.mousefunc(self,button,state,x,y)
        self.reSolve = False
        dragging = False
        if NEW_KLAMPT:
            dragging = self.widgetPlugin.klamptwidgetdragging
        else:
            dragging = self.draggingWidget
        if not dragging and button == 2 and state==0:
            #down
            clicked = self.click_world(x,y)
            if clicked is not None and isinstance(clicked[0],RobotModelLink):
                #make a new widget
                link, wpt = clicked
                # Local point on the link corresponding to the clicked world point.
                lpt = se3.apply(se3.inv(link.getTransform()),wpt)
                self.ikIndices.append(len(self.ikWidgets))
                self.ikWidgets.append(PointPoser())
                self.ikWidgets[-1].set(wpt)
                self.widgetMaster.add(self.ikWidgets[-1])
                self.ikProblem.addObjective(ik.objective(link,local=lpt,world=wpt))
        # NOTE(review): the base-class mousefunc was already called above;
        # this second call looks redundant -- confirm intent.
        GLWidgetProgram.mousefunc(self,button,state,x,y)
        self.refresh()
        return
    #the dx,dy arguments are needed to be cross-compatible between 0.6.x and 0.7
    def motionfunc(self,x,y,dx=0,dy=0):
        """While a widget is being dragged, push its new pose into the
        corresponding IK objective; optionally flag a continuous re-solve."""
        dragging = False
        if NEW_KLAMPT:
            retval = GLWidgetProgram.motionfunc(self,x,y,dx,dy)
            dragging = self.widgetPlugin.klamptwidgetdragging
        else:
            retval = GLWidgetProgram.motionfunc(self,x,y)
            dragging = self.draggingWidget
        if dragging:
            #update all the IK objectives
            for i in range(len(self.ikWidgets)):
                index = self.ikIndices[i]
                if isinstance(self.ikWidgets[i],PointPoser):
                    wptnew = self.ikWidgets[i].get()
                    obj = self.ikProblem.objectives[index]
                    link = obj.link()
                    lpt,wptold = obj.getPosition()
                    obj.setFixedPoint(link,lpt,wptnew)
                    #don't solve now, wait for refresh to process
                    if self.continuous and wptnew != wptold:
                        self.reSolve = True
                elif isinstance(self.ikWidgets[i],TransformPoser):
                    Rnew,tnew = self.ikWidgets[i].get()
                    obj = self.ikProblem.objectives[index]
                    link = obj.link()
                    Rold,told = obj.getTransform()
                    obj.setFixedTransform(link,Rnew,tnew)
                    #don't solve now, wait for refresh to process
                    if self.continuous and (Rnew,tnew) != (Rold,told):
                        self.reSolve = True
        return retval
    def keyboardfunc(self,c,x,y):
        """Dispatch the single-key commands listed under 'h'."""
        if c=='h':
            print ('HELP:')
            print ('[right-click]: add a new IK constraint')
            print ('[space]: tests the current configuration')
            print ('d: deletes IK constraint')
            print ('t: adds a new rotation-fixed IK constraint')
            print ('f: flushes the current database to disk')
            print ('s: saves the current database to disk')
            print ('b: performs one background step')
            print ('B: starts / stops the background thread')
            print ('v: toggles display of the database')
            print ('c: toggles continuous re-solving of IK constraint its as being moved')
            print ('o: toggles soft / hard IK constraints')
        elif c==' ':
            # Solve the current problem starting from the displayed config.
            self.planningWorld.robot(0).setConfig(self.currentConfig)
            soln = self.ikdb.solve(self.ikProblem)
            if soln:
                print ("Solved")
                self.currentConfig = soln
                self.refresh()
            else:
                print ("Failure")
        elif c=='d':
            # Delete the highlighted widget and its objective, then renumber.
            for i,w in enumerate(self.ikWidgets):
                if w.hasHighlight():
                    print ("Deleting IK widget")
                    #delete it
                    index = self.ikIndices[i]
                    self.widgetMaster.remove(w)
                    del self.ikWidgets[i]
                    del self.ikIndices[i]
                    del self.ikProblem.objectives[index]
                    for j in range(len(self.ikIndices)):
                        self.ikIndices[j] = j
                    self.refresh()
                    break
        elif c=='t':
            # Add a full-transform (position + rotation) constraint.
            clicked = self.click_world(x,y)
            if clicked is not None and isinstance(clicked[0],RobotModelLink):
                #make a new widget
                link, wpt = clicked
                Tlink = link.getTransform()
                self.ikIndices.append(len(self.ikWidgets))
                self.ikWidgets.append(TransformPoser())
                self.ikWidgets[-1].set(*Tlink)
                self.widgetMaster.add(self.ikWidgets[-1])
                self.ikProblem.addObjective(ik.objective(link,R=Tlink[0],t=Tlink[1]))
                self.refresh()
        elif c=='f':
            self.ikdb.flush()
        elif c=='s':
            self.ikdb.save()
        elif c=='b':
            self.ikdb.backgroundStep()
            self.refresh()
        elif c=='B':
            # Toggle the background learning thread.
            if hasattr(self.ikdb,'thread'):
                self.ikdb.stopBackgroundLoop()
            else:
                self.ikdb.startBackgroundLoop(0)
        elif c=='v':
            self.drawDb = not self.drawDb
        elif c=='c':
            self.continuous = not self.continuous
        elif c == 'o':
            self.ikProblem.setSoftObjectives(not self.ikProblem.softObjectives)
    def display(self):
        """Re-solve if requested, then draw the robot, the IK goal lines,
        and (optionally) the stored database problems as colored points."""
        if self.reSolve:
            self.planningWorld.robot(0).setConfig(self.currentConfig)
            soln = self.ikdb.solve(self.ikProblem)
            if soln:
                self.currentConfig = soln
            self.reSolve = False
        self.world.robot(0).setConfig(self.currentConfig)
        GLWidgetProgram.display(self)
        glDisable(GL_LIGHTING)
        #draw IK goals
        for obj in self.ikProblem.objectives:
            linkindex = obj.link()
            link = self.world.robot(0).link(linkindex)
            lp,wpdes = obj.getPosition()
            # Green line from the link's current point to the desired point.
            wp = se3.apply(link.getTransform(),lp)
            glLineWidth(4.0)
            glDisable(GL_LIGHTING)
            glColor3f(0,1,0)
            glBegin(GL_LINES)
            glVertex3f(*wp)
            glVertex3f(*wpdes)
            glEnd()
            glLineWidth(1)
        #draw end positions of solved problems
        if self.drawDb:
            glPointSize(3.0)
            glBegin(GL_POINTS)
            # Blue = solved, red = failed, yellow = queued (backburner).
            for k,db in iteritems(self.ikdb.databases):
                for i in range(db.numProblems()):
                    try:
                        p = db.getProblem(i)
                    except Exception as e:
                        traceback.print_exc()
                        exit(0)
                    if db.solutions[i] is None:
                        glColor3f(1,0,0)
                    else:
                        glColor3f(0,0,1)
                    for obj in p.objectives:
                        lp,wpdes = obj.getPosition()
                        glVertex3f(*wpdes)
            glColor3f(1,1,0)
            for pjson,soln in self.ikdb.backburner:
                p = IKProblem()
                p.fromJson(pjson)
                for obj in p.objectives:
                    lp,wpdes = obj.getPosition()
                    glVertex3f(*wpdes)
            glEnd()
        return
    def click_world(self,x,y):
        """Helper: returns (obj,pt) where obj is the closest world object
        clicked, and pt is the associated clicked point (in world coordinates).
        If no point is clicked, returns None."""
        #get the viewport ray
        if NEW_KLAMPT:
            (s,d) = self.view.click_ray(x,y)
        else:
            (s,d) = self.click_ray(x,y)
        #run the collision tests
        collided = []
        for g in self.collider.geomList:
            (hit,pt) = g[1].rayCast(s,d)
            if hit:
                # Distance along the ray; used to pick the nearest hit.
                dist = vectorops.dot(vectorops.sub(pt,s),d)
                collided.append((dist,g[0]))
        if len(collided)==0:
            return None
        dist,obj = min(collided,key=lambda x:x[0])
        return obj,vectorops.madd(s,d,dist)
def main():
    """Entry point: load a robot or world file and run the IK-DB visual tester.

    Usage: ikdbtest2.py [ROBOT OR WORLD FILE]

    The planning copy of the world gets finite joint limits (infinite limits
    are clamped to +/- pi) so the database sampler has a bounded domain.
    """
    print ("ikdbtest2.py: This example visually shows the learning process")
    print ("USAGE: ikdbtest2.py [ROBOT OR WORLD FILE]")
    print ("Press h for help.")
    import sys
    import os
    # Bug fix: math.pi is used below but `math` was never explicitly
    # imported; it only worked if a wildcard import happened to export it.
    import math
    fn = os.path.expanduser("~/Klampt-examples/data/robots/tx90ball.rob")
    if len(sys.argv) > 1:
        fn = sys.argv[1]
    world = WorldModel()
    world.readFile(fn)
    planningWorld = world.copy()
    #for free base robots
    qmin,qmax = world.robot(0).getJointLimits()
    for i,(a,b) in enumerate(zip(qmin,qmax)):
        if not np.isfinite(a):
            print ("Setting finite bound on joint",i)
            qmin[i] = -math.pi
        if not np.isfinite(b):
            print ("Setting finite bound on joint",i)
            qmax[i] = math.pi
    planningWorld.robot(0).setJointLimits(qmin,qmax)
    # Register the cost/feasibility functions the IK database refers to by name.
    functionfactory.registerDefaultFunctions()
    functionfactory.registerCollisionFunction(planningWorld)
    functionfactory.registerJointRangeCostFunction(planningWorld.robot(0))
    tester = IKDBVisualTester(world,planningWorld)
    tester.run()
if __name__ == "__main__":
main()
|
19,843 | e351989547c97343ce9c235918f0fefafa8d13a4 | import requests
from bs4 import BeautifulSoup as bs
import pandas as pd
def scrapeESPN(url, lists):
    """Scrape an ESPN draft-table page and append the five columns
    (pick, team, player, school, position) into the five lists in *lists*.

    Exits the program via SystemExit on any request failure.
    Returns *lists* (which is also mutated in place).
    """
    # List to be used for API calls
    datalist = ['draftTable__headline draftTable__headline--pick',
                'draftTable__headline draftTable__headline--team',
                'draftTable__headline draftTable__headline--player',
                'draftTable__headline draftTable__headline--school',
                'draftTable__headline draftTable__headline--pos']
    # empty objects, will be asigned BeautifulSoup results objects
    # NOTE(review): these string placeholders are immediately overwritten
    # below (colList[n] = soup.find_all(...)); they exist only to size colList.
    pick = ''
    team = ''
    player = ''
    school = ''
    position = ''
    #List of variables to loop through to call
    colList = [pick,
               team,
               player,
               school,
               position]
    try:
        r = requests.get(url)
        r.raise_for_status() #http errors raise HTTPError exceptions
        soup = bs(r.content,features="lxml")
        for n, row in enumerate(datalist):
            colList[n] = soup.find_all("span", attrs={"class": row})
            for elem in colList[n]:
                # NOTE(review): extending with a bs4 Tag iterates its
                # children (presumably NavigableStrings) -- confirm this
                # yields the intended plain text.
                lists[n].extend(elem)
    except requests.exceptions.RequestException as e:
        raise SystemExit(e)
    return lists
|
19,844 | fabf3eb8cd79d2a7e535cb410a9d826c7256a002 | from pstats import Stats
class CPythonParser(object):
    """Parses the textual statistics table printed by cProfile/pstats."""

    @staticmethod
    def parse(dump):
        """Return one dict per table row found after the last 'Ordered by'
        marker in *dump*.

        Rows whose location is a '{...}' builtin get an empty filename/line
        and the whole brace expression as the function name.
        """
        parsed = []
        table_start = dump.rfind("Ordered by")
        # Skip the 'Ordered by' line, the blank line, and the column header;
        # drop the three trailing lines after the table.
        for raw in dump[table_start:].split("\n")[3:-3]:
            fields = " ".join(raw.split()).split(" ")
            location = fields[5]
            is_python_frame = not (location.startswith("{") and fields[-1].endswith("}"))
            if is_python_frame:
                # location looks like 'path.py:lineno(funcname)'
                loc_parts = location.split(":")
                filename = loc_parts[0]
                line_no = int(loc_parts[1].split("(")[0])
                func = loc_parts[1].split("(")[1][:-1]
            else:
                filename = ""
                line_no = ""
                func = " ".join(fields[5:])
            parsed.append({
                "ncalls": int(fields[0].split("/")[0]),
                "tottime": float(fields[1]),
                "tottime_percall": float(fields[2]),
                "cumtime": float(fields[3]),
                "cumtime_percall": float(fields[4]),
                "filename": filename,
                "line": line_no,
                "function": func,
            })
        return parsed
|
19,845 | 3b6dda7cdce4b8a3d5ca8b04e7a145069f5390da | # 5. How Tuple is beneficial as compare to the list?
# The differences between a tuple and a list are:
# Tuples are immutable -- i.e. we cannot change the data,
# which is why far fewer built-in functions are applicable to a tuple.
# A tuple should be used when we know the data will not change, e.g. the months of the year.
# Lists are mutable -- i.e. they can be changed and have many built-in functions.
# THE MAIN DIFFERENCE IS THAT TUPLES ARE FASTER THAN LISTS, SO THEY SHOULD BE USED WHEREVER APPROPRIATE. |
19,846 | a9e02c776f8fb96f258d2fc861d5c07813ae50e5 | from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
import sys
import requests
import os
import time
def getACMBibTeX():
    """Fetch the BibTeX entry for an ACM DL citation page.

    Takes the citation URL as the first command-line argument, resolves it
    with hard-coded session cookies, then drives a real browser (Safari by
    default) to click the 'BibTeX' popup and its 'download' link; finally
    prints the downloaded file via g().

    NOTE(review): Python 2 script; the cookies/AK token are session-specific
    and presumably expired -- confirm the page still loads without them.
    """
    cookies = {
        '__atssc': 'google%3B4',
        'AK': 'expires%3D1491882135%7Eaccess%3D%2F10%2E1145%2F1350000%2F1348248%2F%2A%7Emd5%3D7588bec54ebd697d657408e93ac9febb',
        'cffp_mm': '4',
        'CFID': '749514123',
        'CFTOKEN': '79742520',
        'IP_CLIENT': '9941550',
        'SITE_CLIENT': '5598578',
        '__atuvc': '23%7C14%2C5%7C15',
        '__atuvs': '58ef12e243097221003',
        '_ga': 'GA1.2.1946224287.1491122454',
    }
    headers = {
        'Accept-Encoding': 'gzip, deflate, sdch',
        'Accept-Language': 'en-US,en;q=0.8',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.133 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Cache-Control': 'max-age=0',
        'Connection': 'keep-alive',
    }
    URL = sys.argv[1]
    # URL = 'http://dl.acm.org/citation.cfm?id=339421'
    # Resolve redirects first; the browser is pointed at the final URL.
    r = requests.get(URL, headers=headers, cookies=cookies)
    ########### FOR SAFARI USERS ###########
    driver = webdriver.Safari()
    ########## UNCOMMENT TO RUN FIREFOX (Version 50+) IF YOU DON'T HAVE SAFARI ##########
    # profile = webdriver.FirefoxProfile()
    # profile.set_preference("browser.helperApps.neverAsk.saveToDisk", 'text/plain')
    # profile.set_preference("browser.download.folderList", 1)
    # # profile.set_preference("browser.download.dir", os.getcwd())
    # driver = webdriver.Firefox(profile)
    driver.get(r.url)
    driver.execute_script("window.scrollBy(300, 0)")
    time.sleep(1)
    bibtex_link = driver.find_element_by_link_text('BibTeX')
    bibtex_link.click()
    time.sleep(1)
    # Poll until the popup body has actual BibTeX content (>12 bytes).
    bibtex_text = driver.find_element_by_id('theformats_body')
    while len(bibtex_text.text.encode('utf-8')) < 12:
        bibtex_text = driver.find_element_by_id('theformats_body')
        time.sleep(1)
    driver.find_element_by_link_text('download').click()
    time.sleep(1)
    g()
    driver.quit()
def g():
    """Print the contents of the most recently modified file in ~/Downloads.

    Assumes the file the browser just downloaded is the newest one there
    (Python 2 print statement).
    """
    path = os.path.expanduser('~/Downloads')
    mylist = os.listdir(path)
    mylist = [os.path.join(path, file) for file in mylist]
    # Newest file by modification time = the just-downloaded BibTeX.
    abstractfile = max(mylist, key=os.path.getmtime)
    print open(abstractfile).read()
getACMBibTeX()
|
19,847 | 7b6800e934c4db5b03bc5d209b145b3dc4cec249 | from queue import PriorityQueue
from collections import namedtuple
Pos=namedtuple('Pos','x y')
def swim_till_end(grid):
    """Return the minimum "water level" needed to travel from the top-left
    to the bottom-right cell of the square *grid* (LeetCode 778 style).

    The cost of a path is the maximum cell value along it; a Dijkstra-like
    search over a min-priority queue minimizes that maximum.  Nodes are
    popped in non-decreasing cost order, so the first popped node adjacent
    to the goal yields the optimum.

    Bug fix: a 1x1 grid (start == end) previously fell through the loop
    and returned None; it now returns grid[0][0].
    """
    n = len(grid)
    if n == 1:
        return grid[0][0]
    end = (n - 1, n - 1)
    frontier = PriorityQueue()
    visited = {(0, 0)}
    frontier.put((grid[0][0], (0, 0)))
    while frontier.qsize():
        cost_so_far, (x, y) = frontier.get()
        for dx, dy in ((0, 1), (1, 0), (0, -1), (-1, 0)):
            nxt = (x + dx, y + dy)
            if nxt == end:
                return max(grid[nxt[0]][nxt[1]], cost_so_far)
            if 0 <= nxt[0] < n and 0 <= nxt[1] < n and nxt not in visited:
                visited.add(nxt)
                frontier.put((max(grid[nxt[0]][nxt[1]], cost_so_far), nxt))
grid= [[0,1,2,3,4],[24,23,22,21,5],[12,13,14,15,16],[11,17,18,17,20],[10,9,39,27,6]]
print(swim_till_end(grid)) |
19,848 | dc4377b42d16bc34bf40503933919c7fb307c2a6 | from dateparser.calendars.jalali import JalaliCalendar
from Monument import Monument, Dataset
import importer_utils as utils
import importer as importer
class IrFa(Monument):
    """Wikidata importer for Iranian (fa) national heritage monuments."""
    def update_labels(self):
        """Use the wiki-markup-stripped monument name as the Persian label."""
        name = utils.remove_markup(self.name)
        self.add_label("fa", name)
    def update_descriptions(self):
        """Add a fixed English description."""
        desc = "Iranian national heritage site"
        self.add_description("en", desc)
    def set_adm_location(self):
        """Match the monument's ISO code against the provinces mapping file."""
        self.set_from_dict_match(
            lookup_dict=self.data_files["provinces"],
            dict_label="iso",
            value_label="ISO",
            prop="located_adm"
        )
    def set_location(self):
        """Match the monument's city name against the cities mapping file."""
        self.set_from_dict_match(
            lookup_dict=self.data_files["cities"],
            dict_label="itemLabel",
            value_label="city",
            prop="location"
        )
    def set_heritage(self):
        """Set the heritage status, using mapping file."""
        if self.has_non_empty_attribute("registration_date"):
            # Registration dates are in the Jalali (Persian) calendar.
            try:
                iso_date = JalaliCalendar(self.registration_date).get_date()
            except TypeError:
                print("dateparser.JalaliCalendar could not handle: {}".format(
                    self.registration_date))
                iso_date = None
            if iso_date:
                date_dict = utils.datetime_to_dict(
                    iso_date.get('date_obj'), "%Y%m%d")
                # Attach the registration date as a start_time qualifier.
                qualifier = {"start_time": utils.package_time(date_dict)}
                heritage = self.mapping["heritage"]["item"]
                self.add_statement("heritage_status", heritage, qualifier)
            else:
                # Unparseable date: log it for the problem report instead.
                self.add_to_report(
                    "registration_date", self.registration_date, "start_time")
        else:
            super().set_heritage()
    def set_directions(self):
        """Store the free-text address as a Persian monolingual 'directions'."""
        if self.has_non_empty_attribute("address"):
            monolingual = utils.package_monolingual(self.address, 'fa')
            self.add_statement("directions", monolingual)
    def set_heritage_id(self):
        """Use the dataset row id as the Iranian heritage identifier."""
        self.add_statement("heritage_iran", self.id)
    def __init__(self, db_row_dict, mapping, data_files, existing, repository):
        """Build all statements for one monument row, then try to match it
        against an existing Wikidata item."""
        Monument.__init__(self, db_row_dict, mapping,
                          data_files, existing, repository)
        self.set_monuments_all_id("id")
        self.set_changed()
        self.set_wlm_source()
        self.set_heritage_id()
        self.set_heritage()
        self.set_country()
        self.set_coords()
        self.set_adm_location()
        self.set_location()
        self.set_directions()
        self.set_is()
        self.set_image()
        self.set_commonscat()
        self.update_labels()
        self.update_descriptions()
        self.set_wd_item(self.find_matching_wikidata(mapping))
if __name__ == "__main__":
    """Command line entry point for importer."""
    args = importer.handle_args()
    dataset = Dataset("ir", "fa", IrFa)
    # Lookup tables produced by the SPARQL queries linked below.
    dataset.data_files = {
        "provinces": "iran_provinces.json",  # http://tinyurl.com/yd9xed2s
        "cities": "iran_cities.json"  # http://tinyurl.com/ybslxkm9
    }
    importer.main(args, dataset)
|
19,849 | 5bd234d032a1cef724c7d19f94ecdca75497c3b5 | #!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os.path
import shlex
import subprocess
import numpy as np
import pytest
import torch
from gym import spaces
from habitat import read_write
from habitat_baselines.config.default import get_config
from habitat_baselines.rl.ddppo.policy import PointNavResNetPolicy
ACTION_SPACE = spaces.Discrete(4)
OBSERVATION_SPACES = {
"depth_model": spaces.Dict(
{
"depth": spaces.Box(
low=0,
high=1,
shape=(256, 256, 1),
dtype=np.float32,
),
"pointgoal_with_gps_compass": spaces.Box(
low=np.finfo(np.float32).min,
high=np.finfo(np.float32).max,
shape=(2,),
dtype=np.float32,
),
}
),
"rgb_model": spaces.Dict(
{
"rgb": spaces.Box(
low=0,
high=255,
shape=(256, 256, 3),
dtype=np.uint8,
),
"pointgoal_with_gps_compass": spaces.Box(
low=np.finfo(np.float32).min,
high=np.finfo(np.float32).max,
shape=(2,),
dtype=np.float32,
),
}
),
"blind_model": spaces.Dict(
{
"pointgoal_with_gps_compass": spaces.Box(
low=np.finfo(np.float32).min,
high=np.finfo(np.float32).max,
shape=(2,),
dtype=np.float32,
),
}
),
}
MODELS_DEST_DIR = "data/ddppo-models"
MODELS_BASE_URL = "https://dl.fbaipublicfiles.com/habitat/data/baselines/v1/ddppo/ddppo-models"
MODELS_TO_TEST = {
"gibson-2plus-resnet50.pth": {
"backbone": "resnet50",
"observation_space": OBSERVATION_SPACES["depth_model"],
"action_space": ACTION_SPACE,
},
"gibson-2plus-mp3d-train-val-test-se-resneXt50-rgb.pth": {
"backbone": "se_resneXt50",
"observation_space": OBSERVATION_SPACES["rgb_model"],
"action_space": ACTION_SPACE,
},
"gibson-0plus-mp3d-train-val-test-blind.pth": {
"backbone": None,
"observation_space": OBSERVATION_SPACES["blind_model"],
"action_space": ACTION_SPACE,
},
}
def _get_model_url(model_name):
    """Build the public download URL for *model_name*."""
    return "{}/{}".format(MODELS_BASE_URL, model_name)
def _get_model_path(model_name):
    """Build the local destination path for *model_name*."""
    return "{}/{}".format(MODELS_DEST_DIR, model_name)
@pytest.fixture(scope="module", autouse=True)
def download_data():
    """Module-scoped autouse fixture: download each checkpoint in
    MODELS_TO_TEST (via wget, resumable) unless it is already on disk."""
    for model_name in MODELS_TO_TEST:
        model_url = _get_model_url(model_name)
        model_path = _get_model_path(model_name)
        if not os.path.exists(model_path):
            print(f"Downloading {model_name}.")
            # --continue lets an interrupted download resume.
            download_command = (
                "wget --continue " + model_url + " -P " + MODELS_DEST_DIR
            )
            subprocess.check_call(shlex.split(download_command))
            assert os.path.exists(
                model_path
            ), "Download failed, no package found."
@pytest.mark.parametrize(
    "pretrained_weights_path,backbone,observation_space,action_space",
    [
        (
            _get_model_path(model_name),
            params["backbone"],
            params["observation_space"],
            params["action_space"],
        )
        for model_name, params in MODELS_TO_TEST.items()
    ],
)
def test_pretrained_models(
    pretrained_weights_path, backbone, observation_space, action_space
):
    """For each pretrained checkpoint: build the matching policy from config
    and verify that the checkpoint's state dict loads into it strictly."""
    config = get_config(
        "test/config/habitat_baselines/ddppo_pointnav_test.yaml"
    )
    with read_write(config):
        ddppo_config = config.habitat_baselines.rl.ddppo
        ddppo_config.pretrained = True
        ddppo_config.pretrained_weights = pretrained_weights_path
        # None backbone means the blind model; keep the config default then.
        if backbone is not None:
            ddppo_config.backbone = backbone
    policy = PointNavResNetPolicy.from_config(
        config=config,
        observation_space=observation_space,
        action_space=action_space,
    )
    pretrained_state = torch.load(pretrained_weights_path, map_location="cpu")
    # Checkpoint keys are prefixed with 'actor_critic.'; strip it so they
    # line up with the policy's own parameter names.
    prefix = "actor_critic."
    policy.load_state_dict(
        {  # type: ignore
            k[len(prefix) :]: v
            for k, v in pretrained_state["state_dict"].items()
        }
    )
|
19,850 | 11e95071b390c9229a0cff53e511e3c4cea97cef | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-19 11:29
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration adjusting default dates and the task M2M.

    NOTE(review): the datetime.date/datetime defaults below were frozen at
    makemigrations time (2017-07-19) -- a common symptom of using
    date.today()/datetime.now() *called* as a model field default instead of
    passing the callable.
    """
    dependencies = [
        ('PGE', '0010_auto_20170717_2202'),
    ]
    operations = [
        migrations.AlterField(
            model_name='project',
            name='start_date',
            field=models.DateField(default=datetime.date(2017, 7, 19)),
        ),
        migrations.AlterField(
            model_name='session',
            name='start_date_time',
            field=models.DateTimeField(default=datetime.datetime(2017, 7, 19, 16, 59, 24, 643439)),
        ),
        migrations.AlterField(
            model_name='task',
            name='employees',
            field=models.ManyToManyField(to='PGE.Employee'),
        ),
        migrations.AlterField(
            model_name='task',
            name='start_date',
            field=models.DateField(default=datetime.date(2017, 7, 19)),
        ),
    ]
|
19,851 | ec8b6e058140383a45e2808546cbc502d495582f | import numpy as np
import itertools as iter
#---Customize to specific use case.---#
def GetNextState(State):
    """Return a successor of *State* reached by applying one uniformly
    random legal action.

    Bug fix: the original body called ``self.GetActions`` / ``self.ApplyAction``
    inside a module-level function, which raised NameError at runtime; the
    sibling module-level functions are called directly instead.
    """
    A = GetActions(State)
    # Pick a uniformly random row (action) of the legal-action matrix.
    (m, n) = A.shape
    i = np.random.randint(0, m)
    Action = A[i, :]
    NextState = ApplyAction(State, Action)
    return NextState
def GetActions(State):
    """Return the matrix of legal actions for *State*, expanded onto the
    full state layout, or None when no state is given.

    Bug fixes: the original guard ``any(State) == None`` could never be true
    (``any`` returns a bool) and ran only after ``State`` had already been
    used in ``np.sum``; the None check now happens first.  The original
    ``A = np.delete(A, 0, 0)`` was dead code (A is unused afterwards; the
    returned matrix is AToS) and has been removed.
    """
    if State is None:
        return None
    # Get the possible actions given a state: one 0/1 bit per unit of state.
    N = (int)(np.sum(State))
    # Get matrix of possible actions.
    A = np.array(list(iter.product([0, 1], repeat = N)))
    # Expand each compact action onto the full state layout...
    AToS = MapActionsToState(State, A)
    # ...then drop the combinations the game forbids (the result keeps a
    # leading all-zero "no action" row, added by RemoveIllegalActions).
    AToS = RemoveIllegalActions(AToS)
    return AToS
def MapActionsToState(State, A):
    """Expand each compact action row of *A* (one bit per non-empty slot of
    *State*) into a row aligned with the full State layout.

    Slots where State is 0 always stay 0; the action bits are consumed
    left-to-right over the non-empty slots.
    """
    num_actions, _ = A.shape
    expanded = np.zeros((num_actions, len(State)))
    for row in range(num_actions):
        bit = 0
        for col, slot in enumerate(State):
            if slot == 0:
                continue
            if A[row, bit] == 1:
                expanded[row, col] = 1
            bit += 1
    return expanded
def RemoveIllegalActions(Actions):
    """Filter out the hard-coded illegal action rows from *Actions*.

    The result always begins with an all-zero 'no action' row; legal rows
    keep their original order after it.
    """
    illegal = np.array([[1., 1., 1., 1.],
                        [1., 1., 0., 0.],
                        [0., 0., 1., 1.],
                        [1., 1., 0., 1.],
                        [1., 0., 1., 1.],
                        [0., 1., 1., 1.],
                        [1., 1., 1., 0.]])
    kept = np.array([[0., 0., 0., 0.]])
    for row in Actions:
        # A row is illegal iff it matches one of the forbidden patterns.
        if not any(all(row == bad) for bad in illegal):
            kept = np.vstack([kept, row])
    return kept
def ApplyAction(State1, Action):
    """Return the state obtained by subtracting *Action* from *State1*,
    clamping every slot at zero.

    Bug fix: the original clamped with ``for s in State2: if s < 0: s = 0``,
    which only rebinds the loop variable and never modifies the array, so
    negative slots leaked through.  np.maximum performs the intended clamp.
    """
    return np.maximum(State1 - Action, 0)
def IsTerminal(State):
    """A state is terminal once every slot has been emptied."""
    total = np.sum(State)
    return True if total == 0 else False
|
19,852 | c477eb26d7284d563a25c0e6c2cff35f7e0cea27 | import geojson
from geojson import Feature, FeatureCollection
from geopy.geocoders import Nominatim
from shapely.geometry import shape
from cosmos.types import BoundingBox
def compute_geojson(data_type, elements):
    """Build a GeoJSON FeatureCollection from OSM *elements*.

    *data_type* supplies a 'features' callable (extracts coordinates per
    element) and a 'geometry' name that must be an attribute of the geojson
    module (e.g. 'Point', 'LineString'); each element's tags become the
    feature properties.  Raises AttributeError for unknown geometry names.
    """
    geojson_features = ()
    features = data_type['features'](elements)
    tags = (element.get('tags') for element in elements)
    try:
        # Resolve the geometry constructor by name on the geojson module.
        geometry = getattr(geojson, data_type['geometry'])
        geojson_features = (Feature(geometry=geometry(feature), properties=tag)
                            for feature, tag in zip(features, tags))
    except AttributeError:
        raise AttributeError('Invalid geometry type in data_type.')
    return FeatureCollection(list(geojson_features))
def extract_elements_from_osm(data, element_type):
    """Lazily yield the OSM elements of *data* whose 'type' field equals
    *element_type* (e.g. 'node', 'way', 'relation')."""
    elements = data['elements']  # KeyError raised eagerly, as before
    return (el for el in elements if el['type'] == element_type)
def process_osm_output(data, data_type, format):
    """Convert raw Overpass output into either GeoJSON or (features, tags).

    *data* may be a single response dict or a list of them; matching
    elements from all responses are collected.  If *format* is 'geojson' a
    FeatureCollection is returned, otherwise a pair of lazy generators.

    NOTE: the parameter name *format* shadows the builtin; kept for
    interface compatibility.
    """
    features = ()
    tags = ()
    elements = []
    if isinstance(data, list):
        # Multiple Overpass responses: merge their matching elements.
        for d in data:
            elements.extend(
                list(extract_elements_from_osm(d, data_type['element'])))
    else:
        elements = list(extract_elements_from_osm(data, data_type['element']))
    if format == 'geojson':
        return compute_geojson(data_type, elements)
    else:
        features = data_type['features'](elements)
        tags = (element.get('tags') for element in elements)
        return features, tags
def coords_for(name):
    """Geocode a place *name* with Nominatim and return its bounding box(es).

    Returns a single BoundingBox for Polygon and Point results, or a
    generator of BoundingBoxes for MultiPolygon results.  Raises
    AttributeError when no bounding box is available.

    NOTE(review): Nominatim() without a user_agent is deprecated in newer
    geopy versions -- confirm the pinned geopy release accepts it.
    """
    geocoder = Nominatim()
    location = geocoder.geocode(name, geometry='geojson')
    try:
        geometry = shape(location.raw['geojson'])
        # Coordinates have to be flipped in order to work in overpass
        if geometry.geom_type == 'Polygon':
            west, south, east, north = geometry.bounds
            return BoundingBox(south, west, north, east)
        elif geometry.geom_type == 'MultiPolygon':
            # One box per polygon; bounds come back (W, S, E, N), so swap
            # each coordinate pair into (S, W, N, E).
            bboxs = (BoundingBox(*(g.bounds[0:2][::-1] + g.bounds[2:][::-1]))
                     for g in geometry)
            return bboxs
        elif geometry.geom_type == 'Point':
            south, north, west, east = (float(coordinate)
                                        for coordinate in
                                        location.raw['boundingbox'])
            return BoundingBox(south, west, north, east)
    except (KeyError, AttributeError):
        raise AttributeError(
            'No bounding box available for this location name.')
|
19,853 | bdb8b52a055852c645531758e2379bec39e098d1 | # !/usr/bin/env python
# -*- coding: utf-8 -*-
import collections
import os
class Node(object):
    """A suffix-tree node; leaves render as double circles in Graphviz."""

    def __init__(self, nid, leaf=True):
        self.id = nid
        self.suffix_link = None  # wired up later during construction
        self.leaf = leaf

    def repr(self):
        """Short textual tag for debugging."""
        return "Node(%d)" % self.id

    def explain(self):
        """Return the Graphviz (dot) lines describing this node."""
        shape = "doublecircle" if self.leaf else "circle"
        text = "Node[shape = %s] %d;\n" % (shape, self.id)
        if self.suffix_link is not None:
            text += "%d->%d[style = dotted];\n" % (self.id, self.suffix_link.id)
        return text
class Edge(object):
    """A labelled suffix-tree edge: the substring seq[start_char:end_char]
    connecting start_node to end_node."""

    def __init__(self, start_char, end_char, start_node, end_node):
        self.start_char = start_char
        self.end_char = end_char
        self.start_node = start_node
        self.end_node = end_node

    def update_start(self, start_char, start_node):
        """Re-anchor the origin of the edge (used while splitting)."""
        self.start_char = start_char
        self.start_node = start_node

    def update_end(self, end_char, end_node):
        """Re-anchor the destination of the edge (used while splitting)."""
        self.end_char = end_char
        self.end_node = end_node

    def length(self):
        """Number of characters on the edge label."""
        return self.end_char - self.start_char

    def explain(self, seq):
        """Graphviz line labelling the edge with its substring of *seq*."""
        label = seq[self.start_char:self.end_char]
        return "%d->%d[label = \"%s\"]" % (self.start_node.id, self.end_node.id, label)

    def repr(self):
        """Compact debug form: character span plus endpoint node ids."""
        return "Edge(%d, %d, %d, %d)" % (self.start_char, self.end_char, self.start_node.id, self.end_node.id)
class ActivePoint(object):
    """Ukkonen-style active point: the (node, edge, length) triple that
    tracks where the next suffix extension happens."""

    def __init__(self, active_node, active_edge, active_length):
        self.node = active_node
        self.edge = active_edge
        self.length = active_length

    def repr(self):
        """Debug form built from the components' own repr() helpers."""
        return "ActivePoint(%s, %s, %d)" % (self.node.repr(), self.edge.repr(), self.length)
class SuffixTree(object):
    """Suffix tree built with a hand-rolled variant of Ukkonen's algorithm
    (Python 2 code; see the print statement in insert_suffixes).

    NOTE(review): the rule/reminder bookkeeping deviates from the textbook
    formulation -- treat correctness claims with care.
    """
    def __init__(self, seq):
        """Prepare an empty tree for *seq* (callers append a '$' sentinel)."""
        self.seq = seq
        self.elements = set(seq)
        self.size = len(self.seq)
        self.root = Node(0)
        self.node_list = [self.root]
        # NOTE(review): defaultdict() with no factory behaves like a plain
        # dict -- presumably a dict was intended.  Keys are (node id, char).
        self.edge_dict = collections.defaultdict()
        self.active_point = ActivePoint(self.root, None, 0)
        self.parent_node = None
        self.suffix_link = None
        self.reminder = 1 # the number of suffixes we had to actively insert at the end of each step
        self.rule = 0 # the default rule
    def build_suffixTree(self):
        """Insert every suffix phase-by-phase, one character at a time."""
        for i in range(len(self.seq)):
            self.rule = 0
            self.insert_suffixes(i)
    def insert_suffixes(self, start):
        """Extend the tree with the character at index *start*, recursing to
        flush any remaining pending suffixes (tracked by self.reminder)."""
        self.parent_node = self.active_point.node
        if self.active_point.edge == None:
            # No active edge: try to walk down an existing edge for this char.
            if (self.active_point.node.id, self.seq[start]) in self.edge_dict:
                active_edge = self.edge_dict[(self.active_point.node.id, self.seq[start])]
                self.active_point.edge = active_edge
                self.active_point.length += 1
                self.reminder += 1
                if self.active_point.length == self.active_point.edge.length():
                    # Consumed the whole edge label: hop to its end node.
                    self.active_point.node = self.active_point.edge.end_node
                    self.active_point.edge = None
                    self.active_point.length = 0
                    self.parent_node = self.active_point.node
                return
        else:
            # If 'ab' is in the tree, every suffix of it must be in the tree.
            if self.seq[self.active_point.edge.start_char + self.active_point.length] == self.seq[start]:
                self.active_point.length += 1
                self.reminder += 1
                if self.active_point.length == self.active_point.edge.length():
                    self.active_point.node = self.active_point.edge.end_node
                    self.active_point.edge = None
                    self.active_point.length = 0
                    self.parent_node = self.active_point.node
                return
            else:
                # Mismatch mid-edge: split the active edge at the active length.
                # print "Splitting"
                new_node = Node(len(self.node_list), False)
                new_edge = Edge(self.active_point.edge.start_char + self.active_point.length, self.active_point.edge.end_char, new_node, self.active_point.edge.end_node)
                # print new_edge.explain(self.seq)
                self.node_list.append(new_node)
                self.edge_dict[(new_node.id, self.seq[self.active_point.edge.start_char + self.active_point.length])] = new_edge
                self.active_point.edge.update_end(self.active_point.edge.start_char + self.active_point.length, new_node)
                # print self.active_point.edge.explain(self.seq)
                self.parent_node = new_node
                # Apply Rule 1 when active_point.node is root
                if self.active_point.node == self.root:
                    self.rule = 1
                    self.active_point.length -= 1
                    self.reminder -= 1
                else: # Apply Rule 2 when active_point.node is not root
                    print self.active_point.node.id
                    if self.active_point.node.suffix_link != None:
                        self.active_point.node = self.active_point.node.suffix_link
                    else:
                        self.active_point.node = self.root
                    self.reminder -= 1
                    self.rule = 2
                # Link the previously split node to this one (suffix links).
                if self.suffix_link != None:
                    self.suffix_link.suffix_link = new_node
                    # print "suffix_link %d --> %d" % (self.suffix_link.id, new_node.id)
                self.suffix_link = new_node
        # Create the new leaf edge for the current character.
        new_node = Node(len(self.node_list))
        new_edge = Edge(start, self.size, self.parent_node, new_node)
        self.node_list.append(new_node)
        self.edge_dict[(self.parent_node.id, self.seq[start])] = new_edge
        # print new_edge.explain(self.seq)
        if self.rule != 0:
            # More pending suffixes: pick the next active edge per the rule
            # applied above and recurse.
            if self.reminder > 1:
                if self.rule == 1:
                    self.active_point.edge = self.edge_dict[(self.active_point.node.id, self.seq[start - self.reminder + 1])]
                    self.insert_suffixes(start)
                else:
                    self.active_point.edge = self.edge_dict[(self.active_point.node.id, self.seq[self.active_point.edge.start_char])]
                    self.insert_suffixes(start)
            else:
                self.rule = 0
                self.active_point.node= self.root
                self.active_point.edge = None
                self.insert_suffixes(start)
        self.suffix_link = None # reset suffix_link
    def draw_graph(self, fn):
        """Write the tree as Graphviz dot to *fn* and shell out to `dot`
        to render fn.png (requires Graphviz installed)."""
        fp = open(fn, 'wb')
        fp.write("digraph G {\n")
        fp.write("rankdir=LR;\n")
        fp.write("size=\"8,5\"\n")
        for node in sorted(self.node_list):
            fp.write(node.explain())
        for edge in sorted(self.edge_dict.values()):
            fp.write(edge.explain(self.seq))
            fp.write('\n')
        fp.write('}')
        fp.close()
        # NOTE(review): sorting arbitrary objects and writing str to a 'wb'
        # file only work under Python 2.
        cmd = "dot -Tpng %s > %s.png" % (fn, fn)
        os.system(cmd)
if __name__ == "__main__":
    # Demo: build and render the tree for a sample string; the trailing '$'
    # is the unique sentinel that makes every suffix end at a leaf.
    suffixtree = SuffixTree("abcabxabcdabc$")
    #suffixtree = SuffixTree("banana$")
    #suffixtree = SuffixTree("abcd$")
    suffixtree.build_suffixTree()
    suffixtree.draw_graph("naive_suffix_tree")
|
19,854 | 14a762e35bbac1b5c1c922a83d724b6ea77e0e99 | import logging
import traceback
from django.conf import settings
from sparrow_cloud.dingtalk.sender import send_message
from sparrow_cloud.middleware.base.base_middleware import MiddlewareMixin
logger = logging.getLogger(__name__)
MESSAGE_LINE = """
##### <font color=\"info\"> 服务名称: {service_name}</font> #####
> 进程异常message:<font color=\"warning\">{exception_info}</font>
"""
class ExceptionMiddleware(MiddlewareMixin):
    """Django middleware: when DEBUG is off, forward unhandled view
    exceptions to a WeChat/DingTalk robot as a markdown message."""
    def process_exception(self, request, exception):
        """Notify the robot about *exception*; never raises (notification
        failures are only logged).  Returns None so Django's normal
        exception handling continues."""
        debug = settings.DEBUG
        code = getattr(settings, "CLOUD_ERROR_NOTIFICATION_ROBOT", "cloud_error_notification_robot")
        # NOTE(review): if SERVICE_CONF is absent, getattr(...) returns None
        # and .get raises AttributeError here -- confirm the setting is
        # always defined in deployments.
        service_name = getattr(settings, "SERVICE_CONF", None).get("NAME", None)
        if debug is True:
            pass
        else:
            # Last ~800 chars of the traceback keep the message within limits.
            exception_info = traceback.format_exc()[-800:-1]
            try:
                msg = MESSAGE_LINE.format(service_name=service_name, exception_info=exception_info)
                logger.info("sparrow_cloud log, service process_exception info : {}".format(msg))
                send_message(msg=msg, code_list=[code], channel="wechat", message_type="markdown")
            except Exception as ex:
                logger.error("sparrow_cloud 发送服务异常信息通知失败,原因: {}".format(ex)) |
19,855 | 10b216cc9634d97c0676ba6410c29c68ad96318c | import datetime
import inspect
import numpy.testing as npt
import os.path
import pandas as pd
import pandas.util.testing as pdt
import sys
from tabulate import tabulate
import unittest
# #find parent directory and import model
# parentddir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))
# sys.path.append(parentddir)
from ..screenip_exe import Screenip
test = {}
class TestScreenip(unittest.TestCase):
"""
Unit tests for screenip.
"""
print("screenip unittests conducted at " + str(datetime.datetime.today()))
def setUp(self):
"""
Setup routine for screenip unittest.
:return:
"""
pass
# screenip2 = screenip_model.screenip(0, pd_obj_inputs, pd_obj_exp_out)
# setup the test as needed
# e.g. pandas to open screenip qaqc csv
# Read qaqc csv and create pandas DataFrames for inputs and expected outputs
def tearDown(self):
"""
Teardown routine for screenip unittest.
:return:
"""
pass
# teardown called after each test
# e.g. maybe write test results to some text file
    def create_screenip_object(self):
        """Build and return a Screenip model instance backed by empty input
        and output DataFrames, for use by the unit tests below."""
        # create empty pandas dataframes to create empty object for testing
        df_empty = pd.DataFrame()
        # create an empty screenip object
        screenip_empty = Screenip(df_empty, df_empty)
        return screenip_empty
def test_screenip_unit_fw_bird(self):
"""
unittest for function screenip.fw_bird:
:return:
"""
expected_results = pd.Series([0.0162, 0.0162, 0.0162], dtype='float')
result = pd.Series([], dtype='float')
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
try:
# for i in range(0,3):
# result[i] = screenip_empty.fw_bird()
screenip_empty.no_of_runs = len(expected_results)
screenip_empty.fw_bird()
result = screenip_empty.out_fw_bird
npt.assert_allclose(result, expected_results, rtol=1e-4, atol=0, err_msg='', verbose=True )
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_fw_mamm(self):
"""
unittest for function screenip.fw_mamm:
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series([0.172, 0.172, 0.172], dtype='float')
result = pd.Series([], dtype='float')
try:
screenip_empty.no_of_runs = len(expected_results)
result = screenip_empty.fw_mamm()
npt.assert_allclose(result, expected_results, rtol=1e-4, atol=0, err_msg='', verbose=True )
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_dose_bird(self):
"""
unittest for function screenip.dose_bird:
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series([1000000., 4805.50175, 849727.21122], dtype='float')
result = pd.Series([], dtype='float')
try:
#(self.out_fw_bird * self.solubility)/(self.bodyweight_assessed_bird / 1000.)
screenip_empty.out_fw_bird = pd.Series([10., 0.329, 1.8349], dtype='float')
screenip_empty.solubility = pd.Series([100., 34.9823, 453.83], dtype='float')
screenip_empty.bodyweight_assessed_bird = pd.Series([1.0, 2.395, 0.98], dtype='float')
result = screenip_empty.dose_bird()
npt.assert_allclose(result, expected_results, rtol=1e-4, atol=0, err_msg='', verbose=True )
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_dose_mamm(self):
"""
unittest for function screenip.dose_mamm:
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series([8000000., 48205.7595, 3808036.37889], dtype='float')
result = pd.Series([], dtype='float')
try:
#(self.out_fw_mamm * self.solubility)/(self.bodyweight_assessed_mammal / 1000)
screenip_empty.out_fw_mamm = pd.Series([20., 12.843, 6.998], dtype='float')
screenip_empty.solubility = pd.Series([400., 34.9823, 453.83], dtype='float')
screenip_empty.bodyweight_assessed_mammal = pd.Series([1., 9.32, 0.834], dtype='float')
result = screenip_empty.dose_mamm()
npt.assert_allclose(result, expected_results, rtol=1e-4, atol=0, err_msg='', verbose=True )
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_at_bird(self):
"""
unittest for function screenip.at_bird:
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series([1000., 687.9231, 109.3361], dtype='float')
result = pd.Series([], dtype='float')
try:
#(self.ld50_avian_water) * ((self.bodyweight_assessed_bird / self.bodyweight_tested_bird)**(self.mineau_scaling_factor - 1.))
screenip_empty.ld50_avian_water = pd.Series([2000., 938.34, 345.83], dtype='float')
screenip_empty.bodyweight_assessed_bird = pd.Series([100., 39.49, 183.54], dtype='float')
screenip_empty.ld50_bodyweight_tested_bird = pd.Series([200., 73.473, 395.485], dtype='float')
screenip_empty.mineau_scaling_factor = pd.Series([2., 1.5, 2.5], dtype='float')
result = screenip_empty.at_bird()
npt.assert_allclose(result, expected_results, rtol=1e-4, atol=0, err_msg='', verbose=True )
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_at_mamm(self):
"""
unittest for function screenip.at_mamm:
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series([11.89207, 214.0572, 412.6864], dtype='float')
result = pd.Series([], dtype='float')
try:
#(self.ld50_mammal_water) * ((self.bodyweight_tested_mammal / self.bodyweight_assessed_mammal)**0.25)
screenip_empty.ld50_mammal_water = pd.Series([10., 250., 500.], dtype='float')
screenip_empty.ld50_bodyweight_tested_mammal = pd.Series([200., 39.49, 183.54], dtype='float')
screenip_empty.bodyweight_assessed_mammal = pd.Series([100., 73.473, 395.485], dtype='float')
result = screenip_empty.at_mamm()
npt.assert_allclose(result, expected_results, rtol=1e-4, atol=0, err_msg='', verbose=True )
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_fi_bird(self):
"""
unittest for function screenip.fi_bird:
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series([0.012999, 0.026578, 0.020412], dtype='float')
result = pd.Series([], dtype='float')
try:
#0.0582 * ((bw_grams / 1000.)**0.651)
bw_grams = pd.Series([100., 300., 200.], dtype='float')
result = screenip_empty.fi_bird(bw_grams)
npt.assert_allclose(result, expected_results, rtol=1e-4, atol=0, err_msg='', verbose=True )
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_act(self):
"""
unittest for function screenip.test_act:
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series([10.5737, 124.8032, 416.4873], dtype='float')
result = pd.Series([], dtype='float')
try:
#(self.noael_mammal_water) * ((self.bodyweight_tested_mammal / self.bodyweight_assessed_mammal)**0.25)
screenip_empty.noael_mammal_water = pd.Series([10., 120., 400.], dtype='float')
screenip_empty.noael_bodyweight_tested_mammal = pd.Series([500., 385.45, 673.854], dtype='float')
screenip_empty.bodyweight_assessed_mammal = pd.Series([400., 329.45, 573.322], dtype='float')
result = screenip_empty.act()
npt.assert_allclose(result, expected_results, rtol=1e-4, atol=0, err_msg='', verbose=True )
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_det(self):
"""
unittest for function screenip.det
return:
"""
#
# '''
# Dose Equiv. Toxicity:
#
# The FI value (kg-diet) is multiplied by the reported NOAEC (mg/kg-diet) and then divided by
# the test animal's body weight to derive the dose-equivalent chronic toxicity value (mg/kg-bw):
#
# Dose Equiv. Toxicity = (NOAEC * FI) / BW
#
# NOTE: The user enters the lowest available NOAEC for the mallard duck, for the bobwhite quail,
# and for any other test species. The model calculates the dose equivalent toxicity values for
# all of the modeled values (Cells F20-24 and results worksheet) and then selects the lowest dose
# equivalent toxicity value to represent the chronic toxicity of the chemical to birds.
# '''
# try:
# # result =
# # self.assertEquals(result, )
# pass
# finally:
# pass
# return
#
#
# def test_det_duck(self):
# """
# unittest for function screenip.det_duck:
# :return:
# """
# try:
# # det_duck = (self.noaec_duck * self.fi_bird(1580.)) / (1580. / 1000.)
# screenip_empty.noaec_duck = pd.Series([1.], dtype='int')
# screenip_empty.fi_bird = pd.Series([1.], dtype='int')
# result = screenip_empty.det_duck()
# npt.assert_array_almost_equal(result, 1000., 4, '', True)
# finally:
# pass
# return
#
# def test_det_quail(self):
# """
# unittest for function screenip.det_quail:
# :return:
# """
# try:
# # det_quail = (self.noaec_quail * self.fi_bird(178.)) / (178. / 1000.)
# screenip_empty.noaec_quail = pd.Series([1.], dtype='int')
# screenip_empty.fi_bird = pd.Series([1.], dtype='int')
# result = screenip_empty.det_quail()
# npt.assert_array_almost_equal(result, 1000., 4, '', True)
# finally:
# pass
# return
#
# def test_det_other_1(self):
# """
# unittest for function screenip.det_other_1:
# :return:
# """
# try:
# #det_other_1 = (self.noaec_bird_other_1 * self.fi_bird(self.bodyweight_bird_other_1)) / (self.bodyweight_bird_other_1 / 1000.)
# #det_other_2 = (self.noaec_bird_other_2 * self.fi_bird(self.bodyweight_bird_other_1)) / (self.bodyweight_bird_other_1 / 1000.)
# screenip_empty.noaec_bird_other_1 = pd.Series([400.]) # mg/kg-diet
# screenip_empty.bodyweight_bird_other_1 = pd.Series([100]) # grams
# result = screenip_empty.det_other_1()
# npt.assert_array_almost_equal(result, 4666, 4)
# finally:
# pass
# return
#
# The following tests are configured such that:
# 1. four values are provided for each needed input
# 2. the four input values generate four values of out_det_* per bird type
# 3. the inputs per bird type are set so that calculations of out_det_* will result in
# each bird type having one minimum among the bird types;
# thus all four calculations result in one minimum per bird type
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series([4.2174, 4.96125, 7.97237, 10.664648], dtype='float')
result = pd.Series([], dtype='float')
try:
screenip_empty.bodyweight_bobwhite_quail = 178.
screenip_empty.bodyweight_mallard_duck = 1580.
screenip_empty.noaec_quail = pd.Series([100., 300., 75., 150.], dtype='float')
screenip_empty.noaec_duck = pd.Series([400., 100., 200., 350.], dtype='float')
screenip_empty.noaec_bird_other_1 = pd.Series([50., 200., 300., 250.], dtype='float')
screenip_empty.noaec_bird_other_2 = pd.Series([350., 400., 250., 100.], dtype='float')
screenip_empty.noaec_bodyweight_bird_other_1 = pd.Series([345.34, 453.54, 649.29, 294.56], dtype='float')
screenip_empty.noaec_bodyweight_bird_other_2 = pd.Series([123.84, 85.743, 127.884, 176.34], dtype='float')
screenip_empty.no_of_runs = len(expected_results)
result = screenip_empty.det()
npt.assert_allclose(result, expected_results, rtol=1e-4, atol=0, err_msg='', verbose=True )
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_acute_bird(self):
"""
unittest for function screenip.acute_bird:
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series([10., 5.22093, 0.479639], dtype='float')
result = pd.Series([], dtype='float')
try:
# self.out_acute_bird = self.out_dose_bird / self.out_at_bird
screenip_empty.out_dose_bird = pd.Series([100., 121.23, 43.994], dtype='float')
screenip_empty.out_at_bird = pd.Series([10., 23.22, 91.723], dtype='float')
result = screenip_empty.acute_bird()
npt.assert_allclose(result, expected_results, rtol=1e-4, atol=0, err_msg='', verbose=True )
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_acuconb(self):
"""
unittest for function screenip.acuconb:
Message stating whether or not a risk is present
:return:
"""
# if self.out_acuconb == -1:
# if self.out_acute_bird == None:
# raise ValueError\
# ('acute_bird variable equals None and therefor this function cannot be run.')
# if self.out_acute_bird < 0.1:
# self.out_acuconb = ('Drinking water exposure alone is NOT a potential concern for birds')
# else:
# self.out_acuconb = ('Exposure through drinking water alone is a potential concern for birds')
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series(["Exposure through drinking water alone is a potential concern "
"for birds", "Drinking water exposure alone is NOT a potential "
"concern for birds", "Exposure through drinking water alone is a "
"potential concern for birds"], dtype='object')
result = pd.Series([], dtype='object')
try:
screenip_empty.out_acute_bird = pd.Series([0.2, 0.09, 0.1], dtype='float')
result = screenip_empty.acuconb()
pdt.assert_series_equal(result, expected_results, True)
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_acute_mamm(self):
"""
unittest for function screenip.acute_mamm:
:return:
"""
# self.out_acute_mamm = self.out_dose_mamm / self.out_at_mamm
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series([10., 14.68657, 2.124852], dtype='float')
result = pd.Series([], dtype='float')
try:
screenip_empty.out_dose_mamm = pd.Series([100., 34.44, 159.349], dtype='float')
screenip_empty.out_at_mamm = pd.Series([10., 2.345, 74.993], dtype='float')
result = screenip_empty.acute_mamm()
npt.assert_allclose(result, expected_results, rtol=1e-4, atol=0, err_msg='', verbose=True )
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_acuconm(self):
"""
unittest for function screenip.acuconm:
Message stating whether or not a risk is present
:return:
"""
# if self.out_acuconm == -1:
# if self.out_acute_mamm == None:
# raise ValueError\
# ('acute_mamm variable equals None and therefor this function cannot be run.')
# if self.out_acute_mamm < 0.1:
# self.out_acuconm = ('Drinking water exposure alone is NOT a potential concern for mammals')
# else:
# self.out_acuconm = ('Exposure through drinking water alone is a potential concern for mammals')
# return self.out_acuconm
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series(["Drinking water exposure alone is NOT a potential concern "
"for mammals", "Exposure through drinking water alone is a "
"potential concern for mammals", "Drinking water exposure "
"alone is NOT a potential concern for mammals"], dtype='object')
result = pd.Series([], dtype='object')
try:
screenip_empty.out_acute_mamm = pd.Series([0.09, 0.2, 0.002], dtype='float')
result = screenip_empty.acuconm()
pdt.assert_series_equal(result, expected_results, True)
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_chron_bird(self):
"""
unittest for function screenip.chron_bird:
:return:
"""
#self.out_chron_bird = self.out_dose_bird / self.out_det
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series([0.5, 0.10891, 2.39857], dtype='float')
result = pd.Series([], dtype='float')
try:
screenip_empty.out_dose_bird = pd.Series([5., 1.32, 19.191], dtype='float')
screenip_empty.out_det = pd.Series([10., 12.12, 8.001], dtype='float')
result = screenip_empty.chron_bird()
npt.assert_allclose(result, expected_results, rtol=1e-4, atol=0, err_msg='', verbose=True )
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_chronconb(self):
"""
unittest for function screenip.chronconb:
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series(["Drinking water exposure alone is NOT "
"a potential concern for birds", "Exposure through "
"drinking water alone is a potential concern for "
"birds", "Drinking water exposure alone is NOT a "
"potential concern for birds"], dtype='object')
result = pd.Series([], dtype='object')
try:
screenip_empty.out_chron_bird = pd.Series([0.12, 3., 0.97], dtype='float')
result = screenip_empty.chronconb()
pdt.assert_series_equal(result, expected_results, True)
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_chron_mamm(self):
"""
unittest for function screenip.chron_mamm:
:return:
"""
# self.out_chron_mamm = self.out_dose_mamm / self.out_act
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series([2.0, 14.1333, 244.7245], dtype='float')
result = pd.Series([], dtype='float')
try:
screenip_empty.out_dose_mamm = pd.Series([8., 34.344, 23.983], dtype='float')
screenip_empty.out_act = pd.Series([4., 2.43, 0.098], dtype='float')
result = screenip_empty.chron_mamm()
npt.assert_allclose(result, expected_results, rtol=1e-4, atol=0, err_msg='', verbose=True )
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_screenip_unit_chronconm(self):
"""
unittest for function screenip.chronconm:
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
screenip_empty = self.create_screenip_object()
expected_results = pd.Series(["Drinking water exposure alone is NOT a potential "
"concern for mammals", "Exposure through drinking water alone "
"is a potential concern for mammals", "Drinking water exposure "
"alone is NOT a potential concern for mammals"], dtype='object')
result = pd.Series([], dtype='object')
try:
screenip_empty.out_chron_mamm = pd.Series([0.5, 1.0, 0.09], dtype='float')
result = screenip_empty.chronconm()
pdt.assert_series_equal(result, expected_results, True)
finally:
tab = [result, expected_results]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
# unittest will
# 1) call the setup method,
# 2) then call every method starting with "test",
# 3) then the teardown method
if __name__ == '__main__':
unittest.main()
#pass
|
19,856 | c05b2982f433a12a07a72502ab6bbcb5a1d8c26a | #!/usr/bin/env python
import serial
import time
import subprocess
# Set Variables
count = 0
delay = 2
DEBUG = 0
maxcount = 2
#ser = serial.Serial('/dev/ttyACM0', 9600, timeout=5)
ser = serial.Serial('/dev/ttyUSB0', 9600, timeout=5)
#ser = serial.Serial('/dev/ttyUSB1', 9600, timeout=5)
#ser = serial.Serial('/dev/ttyAMA0', 9600, timeout=5)
#while True:
while (count < maxcount):
ser.flushInput()
print "Count= {0}".format(count)
#ok read all Analog
for i in range(6):
if DEBUG == 1:
print "\ti={0}".format(i)
ser.write(chr(48+i))
line = ser.readline()
# print "\t{0}".format(line),
print "\tA{0}=".format(i),
print "{0}".format(line),
if i == 0:
cht = open("/home/robin/ReadVoltage0", "wb")
cht.write(line);
cht.close()
if i == 1:
cht = open("/home/robin/ReadVoltage1", "wb")
cht.write(line);
cht.close()
if i == 2:
cht = open("/home/robin/ReadVoltage2", "wb")
cht.write(line);
cht.close()
if i == 3:
cht = open("/home/robin/ReadVoltage3", "wb")
cht.write(line);
cht.close()
if i == 4:
cht = open("/home/robin/ReadVoltage4", "wb")
cht.write(line);
cht.close()
if i == 5:
cht = open("/home/robin/ReadVoltage5", "wb")
cht.write(line);
cht.close()
# if float(line) < 12.00 and float(line) > 11.51:
# print "\t\tVoltage is low"
# elif float(line) < 11.50 and float(line) > 11.01:
# print "\t\tVoltage is CRITICAL"
# elif float(line) < 11.00:
# subprocess.call(["sudo", "shutdown", "-k"])
# subprocess.call(["sudo", "shutdown", "-k", "now"])
count = count +1
time.sleep(delay)
|
19,857 | a9350a1ea1341363d8abd0be94d57d96778147c7 | #training file
import json
import numpy as np
import torch
import torch.nn as nn
from torch.utils.data import Dataset, DataLoader
from model import NeuralNet
from ex import tokenize, stem, bow
# Load the intents corpus; each intent carries a tag plus example "patterns".
with open('CB.json','r') as f:
    intents = json.load(f)

all_words= []   # raw token vocabulary (pre-stemming)
tags = []       # intent class names
xy= []          # (tokenized pattern, tag) training pairs
for intent in intents['intents']:
    tag=intent['tag']
    tags.append(tag)
    for pattern in intent['patterns']:
        w=tokenize(pattern)
        all_words.extend(w)
        xy.append((w,tag))

# Stem and de-duplicate the vocabulary, dropping bare punctuation tokens.
word =['?','!']
all_words = [stem(w) for w in all_words if w not in word]
all_words = sorted(set(all_words))
tags=sorted(set(tags))

# Vectorise: bag-of-words feature rows (x) and integer class labels (y),
# where each label is the tag's index in the sorted tag list.
x=[]
y=[]
for (pattern_sentence, tag) in xy:
    bag=bow(pattern_sentence, all_words)
    x.append(bag)
    label = tags.index(tag)
    y.append(label)

x=np.array(x)
y=np.array(y)
class ChatDataset(Dataset):
    """Torch Dataset over the module-level bag-of-words matrix ``x`` and labels ``y``."""

    def __init__(self):
        self.x_data = x
        self.y_data = y
        self.n_samples = len(self.x_data)

    def __getitem__(self, index):
        # DataLoader expects a (features, label) tuple per sample.
        features = self.x_data[index]
        label = self.y_data[index]
        return features, label

    def __len__(self):
        return self.n_samples
#Hyper parameter
batch_size = 8
hidden_size = 8
output_size = len(tags) #number of diff classes or texts
input_size = len(x[0]) #number of length of each bow and bow has the same length as all words array
print(input_size, len(all_words))
print(output_size, tags)
learn_rate = 0.001
num_epochs =1000
dataset = ChatDataset()
train_loader = DataLoader(dataset = dataset, batch_size = batch_size, shuffle = True, num_workers=0)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') #if we have GPU support
model = NeuralNet(input_size, hidden_size, output_size)
#loss and optimizer
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=learn_rate) #optimize model
#training loop
for epoch in range(num_epochs):
for (words, labels) in train_loader:
words = words.to(device) #pushing to the device
labels = labels.long().to(device)
#forward
outputs = model(words) #words as input
loss = criterion(outputs, labels)
#backward and optimizer step
optimizer.zero_grad()#empty gradients first
loss.backward() #back propogation calc
optimizer.step()
if (epoch + 1) % 100 == 0:
print(f'epoch {epoch+1}/{num_epochs}, loss={loss.item():.4f}')
print(f'final loss, loss={loss.item():.4f}')
#save and load model and implementation
data = {
"model_state": model.state_dict(),
"input_size": input_size,
"output_size": output_size,
"hidden_size": hidden_size,
"all_words": all_words,
"tags":tags
}
FILE="data.pth"
torch.save(data,FILE)
print(f'training complete. file saved to {FILE} ') |
19,858 | 42c2abad1acddbee12a5cd12c26c9b4d8dc1cc99 | #/usr/bin/env python
"""
This file was generated automatically.
"""
from matplotlib import pyplot
pyplot.loglog([8, 12, 16, 24, 32, 48, 64], [0.008569002151489258, 0.03431415557861328, 0.13283991813659668, 0.9201431274414062, 3.735200881958008, 27.873284816741943, 1083.2681589126587], marker='x', label='He chains (conventional MP2) s=nan')
pyplot.loglog([8, 12, 16, 24, 32, 48], [0.6403460502624512, 1.8115911483764648, 4.302359104156494, 15.49633002281189, 41.71060109138489, 184.52525806427002], marker='o', label='He chains (LMP2) s=3.6')
pyplot.loglog([8, 12, 16, 24, 32, 48, 64], [0.005057096481323242, 0.019192934036254883, 0.07035684585571289, 0.48464083671569824, 1.9332208633422852, 14.653261184692383, 447.15065002441406], marker='x', label='H chains (conventional MP2) s=nan')
pyplot.loglog([8, 12, 16, 24], [4.286357879638672, 17.896310806274414, 48.510149002075195, 197.94836497306824], marker='o', label='H chains (LMP2) s=3.5')
pyplot.xlabel('Model size')
pyplot.ylabel('Run time (s)')
pyplot.grid()
pyplot.legend()
pyplot.show()
|
19,859 | c58bf9ebe12bcc3f749758303d932f7790cee21b | from django.shortcuts import render , redirect
from django.http import HttpResponse
from signup.models import Signup
import smtplib as sm ,validate_email as v,string,random
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from django.contrib.auth.hashers import check_password,make_password
from .models import Recoverdata
def login(request):
    """Authenticate a user and start a session.

    POST: look up the username, verify the password hash, seed the session
    (uid, cart items, login flag) and honour a stored post-login redirect.
    GET: show the login form, or go home if a session already exists.
    """
    if request.method == "POST":
        error_log = list()
        username = request.POST['username']
        password = request.POST['password']
        try:
            userinfo = Signup.objects.get(username = username)
            username = userinfo.username
        except Exception as e:
            # Unknown username — deliberately the same message as a wrong
            # password so the form does not leak which accounts exist.
            error_log.append(" Wrong credentials")
            return render(request , "login/login.html" , context = {"error_log" : error_log})
        else:
            if check_password(password , userinfo.password):
                # add username to the session
                request.session["uid"] = userinfo.uid
                request.session['items'] = list()
                request.session["loginstatus"] = True
                try:
                    # A "redirect" key means the user was bounced here from a
                    # protected page; send them back there after login.
                    red_req = request.session["redirect"]
                    del request.session["redirect"]
                except Exception as e:
                    # No stored redirect:
                    # set the sessions expiry date
                    request.session.set_expiry(0)
                    # redirect user to home page
                    return redirect("/")
                else:
                    return redirect("{}".format(red_req))
            else:
                error_log.append("Wrong credentials")
                return render(request , "login/login.html" , context = {"error_log" : error_log})
    else:
        try:
            # Probe for an existing session; KeyError means not logged in.
            request.session["username"]
        except Exception as e:
            return render(request , "login/login.html")
        else:
            return redirect("/")
def forgotcredetials(request):
    """Start a password-recovery flow.

    POST with a registered email: generate a 10-character recovery code,
    email it via Gmail SMTP, and persist it in Recoverdata keyed to the
    user's uid.  GET (or any validation failure) re-renders the form.

    NOTE(review/security): the SMTP account and its password are hardcoded
    below — move them to settings/environment variables and rotate the
    leaked credential.
    """
    bug_hunter = []
    if request.method == "POST":
        try:
            lostuser = request.POST['email']
        except Exception as e:
            bug_hunter.append("Invalid input")
            return render(request , "login/forgot.html" , context = {"error":bug_hunter})
        else:
            try:
                userdata = Signup.objects.get(email = lostuser)
            except Signup.DoesNotExist as e:
                bug_hunter.append("Email not registered with us")
                return render(request , "login/forgot.html" , context = {"error":bug_hunter})
            else:
                if v.validate_email(lostuser):
                    # Build a digit-heavy alphabet, then sample 10 characters
                    # as the recovery code.  (random, not secrets — see note.)
                    # NOTE(review): use the secrets module for security tokens.
                    hashcode = string.digits + string.ascii_letters + string.digits + string.digits
                    hashcode = "".join([random.choice(hashcode) for value in range(10)])
                    sender = "anornymous99@gmail.com"
                    receiver = lostuser
                    # message = """From: %s
                    # To: %s,
                    # Content-Type:text/html,
                    # Mime-version:1.0,
                    # Content-disposition: text,
                    # Subject:Vibes reset password is: %s,
                    # """%("devcodesv1@gmail.com",receiver , hashcode)
                    message = "Your recovery code is <strong>%s </strong><a href='devcodes.herokuapp.com/login/updatereset/'> reset link</a>"%hashcode
                    mess = MIMEMultipart("alternatives")
                    mess["From"] = "devcodesv1@gmail.com"
                    mess["To"] = receiver
                    mess["Subject"] = "Devcodes recovery code."
                    message = MIMEText( message, "html")
                    mess.attach(message)
                    try:
                        # STARTTLS upgrade, then authenticated send.
                        obj=sm.SMTP('smtp.gmail.com', 587)
                        obj.starttls()
                        obj.login("devcodesv1@gmail.com","admin@devcodesv1.1")
                        obj.sendmail(sender,receiver,mess.as_string())
                        obj.close()
                    except Exception as error:
                        print("Error: {}".format(error))
                        bug_hunter.append("Connection could not be established")
                        # bug_hunter.append(error)
                        return render(request , "login/forgot.html" , context = {"error":bug_hunter})
                    else:
                        # Only record the code once the email actually went out.
                        Recoverdata.objects.create(uid_id = Signup.objects.get(email = receiver).uid , secret_code = hashcode)
                        print("Message sent successfully to {}".format(receiver))
                        print("Exiting the mail client program")
                        return render(request , "login/thanks.html")
                else:
                    return render(request , "login/forgot.html" , context = {"error":bug_hunter})
    else:
        return render(request , "login/forgot.html" , context = {"error":bug_hunter})
def newcr(request):
    """Reset a user's password from a recovery code.

    GET renders the reset form.  POST validates that the two password fields
    match and the recovery code exists, then stores the hashed password and
    consumes the recovery record.
    """
    bug_hunter = list()
    if request.method == "POST":
        raw_password = request.POST["password"]
        confirmnewpass = request.POST["confirmpassword"]
        secretcode = request.POST["code"]
        # Bug fix: the confirmation field was read but never compared in the
        # original, so mismatched passwords were silently accepted.
        if raw_password != confirmnewpass:
            bug_hunter.append("Passwords do not match.")
            return render(request, "login/newcreds.html", context={"error": bug_hunter})
        try:
            recpassword = Recoverdata.objects.get(secret_code=secretcode)
            dataobj = Signup.objects.filter(uid=recpassword.uid_id)
        except (Recoverdata.DoesNotExist, Signup.DoesNotExist):
            bug_hunter.append("Incorrect information provided.")
            # Bug fix: the original fell off the end of the function here and
            # returned None, which Django turns into a 500; re-render instead.
            return render(request, "login/newcreds.html", context={"error": bug_hunter})
        else:
            # Hash only after validation succeeds.
            dataobj.update(password=make_password(raw_password))
            # One-shot code: delete the recovery record once used.
            recpassword.delete()
            return redirect("/login/")
    else:
        return render(request, "login/newcreds.html", context={"error": bug_hunter})
def logout(request):
    """Terminate the current session and send the user to the home page."""
    session = request.session
    # Drop all per-user state, rotate the session, then mark it logged out.
    session.clear()
    session.flush()
    session["loginstatus"] = False
    return redirect("/")
|
19,860 | b38ec5fc1b619d31ffb3aa4aa7757dcde26d272d | __all__ = ['Produto']
|
19,861 | 68cd9a5deb3182fe87098b05c91f958a52694ee1 | """12. Дані про температуру повітря за декаду грудня зберігаються в масиві.
Визначити, скільки раз температура була вище середньої за цю декаду."""
A = [15,18,7,4,1,-6,-10,-17,-12,-25]
# Создание масива
sum = 0
kol=0
for n in range(len(A)):
sum+=A[n]
# вычисление суммы данных масива
sum = sum/10
# Получение среднего
for n in range(len(A)):
if A[n]>sum:
kol+=1
# Подсчёт по фильтру данных
print(kol) |
19,862 | d5e05f477a510b55d7386005d42696fe1c354cf3 | from typing import Any, Dict, List, Union
from ontogen.base import OwlEntity, BUILTIN_DATA_TYPES, DATATYPE_MAP
ENTITIES: Dict[str, OwlEntity] = {'rdfs:label': None}
def get_equivalent_datatype(entity_name: str) -> Union[type, str]:
    """Map an ontology datatype name to its Python type via DATATYPE_MAP,
    echoing the name back unchanged when no mapping exists."""
    return DATATYPE_MAP.get(entity_name, entity_name)
def check_restrictions(prefix: str, str_types: List[str], value: Any) -> bool:
    """Return True when *value* is of a builtin type, or when any
    ``prefix:type`` name built from *str_types* is a known entity."""
    # Builtin Python types always satisfy the restriction.
    if type(value) in BUILTIN_DATA_TYPES:
        return True
    qualified_names = {f"{prefix}:{str_type}" for str_type in str_types}
    return bool(qualified_names & ENTITIES.keys())
|
19,863 | d11ea9e4f018541c5a8162e7b98d5257b3c3a721 | from .Workflow import *
from .Module import *
from .WfModule import *
from .ModuleVersion import *
|
19,864 | a81d971d4cda314a794030fd51e690779ebd0c51 | # Generated by Django 3.1 on 2020-08-13 03:07
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
    """Auto-generated schema migration for the ``davematthews`` app.

    NOTE(review): every ``uuid`` field default below is a *fixed* UUID
    literal (evaluated once when makemigrations ran), not the ``uuid.uuid4``
    callable — rows created via these defaults would share one UUID.  This
    mirrors the model definitions; the fix belongs in models.py, after which
    a fresh migration should be generated rather than editing this one.
    """

    dependencies = [
        ('davematthews', '0010_auto_20200811_2013'),
    ]

    operations = [
        migrations.AlterField(
            model_name='frqassignment',
            name='uuid',
            field=models.UUIDField(default=uuid.UUID('85a72442-927f-4e46-a42b-841e1c3b7c9d')),
        ),
        migrations.AlterField(
            model_name='frqsubmission',
            name='frq',
            field=models.ForeignKey(blank=True, default=13, on_delete=django.db.models.deletion.CASCADE, to='davematthews.frqassignment'),
        ),
        migrations.AlterField(
            model_name='frqsubmission',
            name='uuid',
            field=models.UUIDField(default=uuid.UUID('238365f3-2687-437f-a1d3-674d447ada83')),
        ),
        migrations.AlterField(
            model_name='student',
            name='uuid',
            field=models.UUIDField(default=uuid.UUID('8fb33a89-88e9-49b8-b2fd-fecad4466548')),
        ),
        migrations.AlterField(
            model_name='submission',
            name='uuid',
            field=models.UUIDField(default=uuid.UUID('8f3b5021-1124-48fe-96d8-c284eb16c976')),
        ),
        migrations.AlterField(
            model_name='submission',
            name='ws',
            field=models.ForeignKey(blank=True, default=10, on_delete=django.db.models.deletion.CASCADE, to='davematthews.wsassignment'),
        ),
        migrations.AlterField(
            model_name='wsassignment',
            name='uuid',
            field=models.UUIDField(default=uuid.UUID('15dd627b-553d-40b1-968e-db40f66fb19c')),
        ),
    ]
|
19,865 | d78f564f79425785276452b278dbbb70c05732c5 | fname=input("Enter file name")
try:
fhand=open(fname)
except:
print("file can't open",fname)
counts=dict()
words=list()
for line in fhand:
if line.startswith('From'):
words=line.split()
# print(words)
if(len(words)>2):
word=words[5].split(':')
counts[word[0]]=counts.get(word[0],0)+1
print(sorted([(value,key) for key,value in counts.items()],reverse=True))
|
19,866 | cdcb31455d6c089dc671aff9a3d8916478744a12 | """
Capstone Project. Code to run on the EV3 robot (NOT on a laptop).
Author: Your professors (for the framework)
and Weizhou Liu.
Winter term, 2018-2019.
"""
import rosebot
import mqtt_remote_method_calls as com
import time
import shared_gui_delegate_on_robot as sgd
def main():
    """
    This code, which must run on the EV3 ROBOT:
    1. Makes the EV3 robot to various things.
    2. Communicates via MQTT with the GUI code that runs on the LAPTOP.
    """
    real_thing()
    # Manual test entry points, enabled one at a time during development:
    #run_test_go_stright_for_seconds()
    #run_test_go_stright_for_inches_using_time()
    #run_test_go_stright_for_inches_using_encoder()
    #run_test_1()
    #tone_and_pick_up(400,1000,30)
    #run()
def real_thing():
    """Connect the robot to the laptop GUI via MQTT and serve requests until told to stop."""
    robot = rosebot.RoseBot()
    delegate = sgd.Receiver(robot)
    mqtt_client = com.MqttClient(delegate)
    mqtt_client.connect_to_pc()
    stop_requested = False
    while not stop_requested:
        time.sleep(0.01)  # poll the delegate's stop flag 100x/second
        stop_requested = delegate.is_time_to_stop
def run_test_go_stright_for_seconds():
    """Hardware check: drive straight at 50% speed for 3 seconds."""
    bot = rosebot.RoseBot()
    bot.drive_system.go_straight_for_seconds(3, 50)
def run_test_go_stright_for_inches_using_time():
    """Hardware check: drive straight 3 inches at 50% speed, timed approach."""
    bot = rosebot.RoseBot()
    bot.drive_system.go_straight_for_inches_using_time(3, 50)
def run_test_go_stright_for_inches_using_encoder():
    """Hardware check: drive straight 40 inches at 50% speed, using wheel encoders."""
    bot = rosebot.RoseBot()
    bot.drive_system.go_straight_for_inches_using_encoder(40, 50)
def tone_and_pick_up(frequence,duration,delta):
    # Spin until the IR sensor sees an object, then approach it while beeping.
    # The beep pitch is nudged by `delta` Hz depending on whether the robot
    # appears to be closing on the object; within 2 inches it stops, drives a
    # final 4 inches, and raises the arm to pick the object up.
    #   frequence: starting tone frequency (Hz)
    #   duration:  length of each beep (units per the tone_maker API -- TODO confirm)
    #   delta:     per-iteration frequency adjustment (Hz)
    robot=rosebot.RoseBot()
    robot.arm_and_claw.calibrate_arm()
    robot.drive_system.spin_clockwise_until_sees_object(40,25)
    robot.drive_system.go(50,50)
    # Baseline distance reading; compared against fresh readings each loop.
    distance=robot.sensor_system.ir_proximity_sensor.get_distance_in_inches()
    while True:
        robot.sound_system.tone_maker.play_tone(frequence,duration)
        time.sleep(0.3)
        # NOTE(review): each `if` below re-reads the sensor, so the two
        # comparisons may see slightly different distances.
        # Not getting closer: lower the pitch by delta ...
        if distance <= robot.sensor_system.ir_proximity_sensor.get_distance_in_inches():
            frequence=frequence - delta
        # ... not getting farther: raise the pitch by delta.
        if distance >= robot.sensor_system.ir_proximity_sensor.get_distance_in_inches():
            frequence = frequence + delta
        # Arrived (within 2 inches): stop, nudge forward, lift the arm, done.
        if robot.sensor_system.ir_proximity_sensor.get_distance_in_inches() <= 2:
            robot.drive_system.stop()
            robot.drive_system.go_straight_for_inches_using_encoder(4, 25)
            robot.arm_and_claw.move_arm_to_position(3500)
            break
        distance = robot.sensor_system.ir_proximity_sensor.get_distance_in_inches()
def run():
    """Hardware check: spin counterclockwise until an object is detected."""
    bot = rosebot.RoseBot()
    bot.drive_system.spin_counterclockwise_until_sees_object(50, 3000)
# -----------------------------------------------------------------------------
# Calls main to start the ball rolling.
# (Runs at import time, as is conventional for these EV3 robot scripts.)
# -----------------------------------------------------------------------------
main()
19,867 | 455fa7c30846c941cfb3de2d0f54a82ea6bd3f35 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import model_utils.fields
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated Django migration: creates the BraintreeResult model.

    BraintreeResult pairs an optional BraintreeError with an optional
    BraintreeTransaction via nullable one-to-one links, plus the
    created/modified timestamp fields from django-model-utils.
    Do not hand-edit the operations below.
    """

    dependencies = [
        ('ddny_braintree', '0010_auto_20150814_2242'),
    ]

    operations = [
        migrations.CreateModel(
            name='BraintreeResult',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('created', model_utils.fields.AutoCreatedField(editable=False, verbose_name='created', default=django.utils.timezone.now)),
                ('modified', model_utils.fields.AutoLastModifiedField(editable=False, verbose_name='modified', default=django.utils.timezone.now)),
                ('error', models.OneToOneField(to='ddny_braintree.BraintreeError', null=True, on_delete=models.CASCADE)),
                ('transaction', models.OneToOneField(to='ddny_braintree.BraintreeTransaction', null=True, on_delete=models.CASCADE)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
|
19,868 | 42419f48da8b59e9f2ac694ed495f9cf89071b08 | from rest_framework import serializers
from website.models import Post, LANGUAGE_CHOICES, STYLE_CHOICES
from django.contrib.auth.models import User #step4
from .models import Event
class SnippetSerializer(serializers.HyperlinkedModelSerializer):  # selects which Post fields the API exposes
    """Hyperlinked serializer for Post ("snippet") objects.

    `author` is read-only and sourced from the owning user's username;
    `highlighted` links to the 'post-highlighted' HTML view.
    """
    author = serializers.ReadOnlyField(source='owner.username')
    highlighted = serializers.HyperlinkedIdentityField(view_name='post-highlighted', format='html')
    class Meta:
        model=Post
        fields = ('url', 'id', 'highlighted', 'author',
                  'title', 'text', 'linenos', 'language', 'style')
class UserSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer for Django auth Users, with links to their posts."""
    # NOTE(review): view_name='post-detail' implies `snippets` resolves to Post
    # objects; the reverse accessor on User must be named 'snippets' -- confirm
    # against the Post.owner ForeignKey's related_name in models.py.
    snippets = serializers.HyperlinkedRelatedField(many=True, view_name='post-detail', read_only=True)
    class Meta:
        model = User
        fields = ('url', 'id', 'username', 'snippets')
# ADDED: serializer for Event objects.
class SessionSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked serializer exposing every field of the Event model."""
    # NOTE(review): assumes Event has an `owner` relation with a `username`
    # attribute -- confirm in models.py.
    author = serializers.ReadOnlyField(source='owner.username')
    class Meta:
        model= Event
        fields = '__all__'
|
19,869 | 143eeac3b7fd04fa3a1ae6537738f122da1b9575 | # -*- coding: utf-8 -*-
# @Time : 2019/6/25 19:35
# @Author : wangmengmeng
import unittest
import json
import warnings
import time
from common.template_2_x import Template
from common.logger import Logger
from common.calculate_ccr import Ccr
class TestCcr(unittest.TestCase):
    """Integration tests for ccr (creatinine clearance) handling.

    test_opt_* cover the outpatient flow, test_ipt_* the inpatient flow.
    When no valid scr lab value is available the service should report the
    preset "90.0(预设值)"; otherwise ccr is computed from sex, age, weight
    and scr (unit mg/dl or umol/L) and reported as "...(计算值)".
    NOTE(review): the expected strings (including the Chinese suffixes,
    meaning "preset value"/"computed value") are produced by the service
    under test and must stay byte-identical here.
    """
    log = Logger("TestCcr")
    def setUp(self):
        # Suppress noisy ResourceWarnings from the HTTP helper objects.
        warnings.simplefilter("ignore", ResourceWarning)
        self.log.get_log().debug("开始执行用例TestCcr...")
    def tearDown(self):
        self.log.get_log().debug("结束执行用例TestCcr...")
    def test_opt_01(self):
        # Neither ccr nor scr supplied -> ccr falls back to the preset 90.
        tem = Template()
        # tem.send_data('opt_ccr', '不传ccr和scr', **tem.change_data)
        engineid = tem.get_opt_engineid('opt_ccr', '不传ccr和scr', 1)
        res = tem.get_opt_recipeInfo(engineid, 0)
        outpatient = res['data']['outpatient']
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        self.assertEqual(outpatient['ccr'], "90.0(预设值)")
        ids = [engineid]
        tem.audit_multi(1, *ids)
        res = tem.get_opt_recipeInfo(engineid, 1)
        self.assertEqual(res['data']['outpatient']['ccr'], "90.0(预设值)")
    def test_opt_two(self):
        # 25-year-old male, unit mg/dl, scr = 9.00 mg/dL.
        tem = Template()
        cal_ccr = Ccr(tem.get_ymd(0, 0), '1994-03-05')
        # tem.send_data('opt_ccr', '传ccr_1.txt', **tem.change_data)
        engineid = tem.get_opt_engineid('opt_ccr', '传ccr_1.txt', 1)
        res = tem.get_opt_recipeInfo(engineid, 0)
        outpatient = res['data']['outpatient']
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        expect = cal_ccr.ccr_calculate(sex='男', unit='mg/dl', age=cal_ccr.y, weight=60, scr=9)
        print(expect)
        self.assertEqual(outpatient['ccr'], "10.6481(计算值)")
        ids = [engineid]
        tem.audit_multi(1, *ids)
        res = tem.get_opt_recipeInfo(engineid, 1)
        self.assertEqual(res['data']['outpatient']['ccr'], "10.6481(计算值)")
    def test_opt_03(self):
        # 25-year-old female, unit mg/dl, scr = 9.00 mg/dL.
        tem = Template()
        cal_ccr = Ccr(tem.get_ymd(0, 0), '1994-03-05')
        # tem.send_data('opt_ccr', '传ccr_1.txt', **tem.change_data)
        engineid = tem.get_opt_engineid('opt_ccr', '3', 1)
        res = tem.get_opt_recipeInfo(engineid, 0)
        outpatient = res['data']['outpatient']
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        expect = cal_ccr.ccr_calculate(sex='女', unit='mg/dl', age=cal_ccr.y, weight=60, scr=9)
        print(expect)
        self.assertEqual(outpatient['ccr'], "9.0509(计算值)")
        ids = [engineid]
        tem.audit_multi(1, *ids)
        res = tem.get_opt_recipeInfo(engineid, 1)
        self.assertEqual(res['data']['outpatient']['ccr'], "9.0509(计算值)")
    def test_opt_04(self):
        # 25-year-old male, unit umol/l, scr = 9.00 umol/l.
        tem = Template()
        cal_ccr = Ccr(tem.get_ymd(0, 0), '1994-03-05')
        # tem.send_data('opt_ccr', '传ccr_1.txt', **tem.change_data)
        engineid = tem.get_opt_engineid('opt_ccr', '4', 1)
        res = tem.get_opt_recipeInfo(engineid, 0)
        outpatient = res['data']['outpatient']
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        expect = cal_ccr.ccr_calculate(sex='男', unit='umol/L', age=cal_ccr.y, weight=60, scr=9)
        print(expect)
        self.assertEqual(outpatient['ccr'], "937.2453(计算值)")
        ids = [engineid]
        tem.audit_multi(1, *ids)
        res = tem.get_opt_recipeInfo(engineid, 1)
        self.assertEqual(res['data']['outpatient']['ccr'], "937.2453(计算值)")
    def test_opt_05(self):
        # 25-year-old female, unit umol/l, scr = 9.00 umol/l.
        tem = Template()
        cal_ccr = Ccr(tem.get_ymd(0, 0), '1994-03-05')
        # tem.send_data('opt_ccr', '传ccr_1.txt', **tem.change_data)
        engineid = tem.get_opt_engineid('opt_ccr', '5', 1)
        res = tem.get_opt_recipeInfo(engineid, 0)
        outpatient = res['data']['outpatient']
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        expect = cal_ccr.ccr_calculate(sex='女', unit='umol/L', age=cal_ccr.y, weight=60, scr=9)
        print(expect)
        self.assertEqual(outpatient['ccr'], "796.6585(计算值)")
        ids = [engineid]
        tem.audit_multi(1, *ids)
        res = tem.get_opt_recipeInfo(engineid, 1)
        self.assertEqual(res['data']['outpatient']['ccr'], "796.6585(计算值)")
    def test_opt_06(self):
        # 19-year-old male, unit umol/l, scr = 9.00 umol/l -- test currently failing.
        tem = Template()
        cal_ccr = Ccr(tem.get_ymd(0, 0), '2000-03-05')
        # tem.send_data('opt_ccr', '传ccr_1.txt', **tem.change_data)
        engineid = tem.get_opt_engineid('opt_ccr', '6', 1)
        res = tem.get_opt_recipeInfo(engineid, 0)
        outpatient = res['data']['outpatient']
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        expect = cal_ccr.ccr_default_weight(sex='男', unit='umol/L', age=cal_ccr.y, scr=9)
        print(expect)
        self.assertEqual(outpatient['ccr'], "986.1451(计算值)")
        ids = [engineid]
        tem.audit_multi(1, *ids)
        res = tem.get_opt_recipeInfo(engineid, 1)
        self.assertEqual(res['data']['outpatient']['ccr'], "986.1451(计算值)")
    def test_opt_07(self):
        # 16-year-old female, unit umol/l, scr = 9.00 umol/l.
        tem = Template()
        cal_ccr = Ccr(tem.get_ymd(0, 0), '2003-03-05')
        # tem.send_data('opt_ccr', '传ccr_1.txt', **tem.change_data)
        engineid = tem.get_opt_engineid('opt_ccr', '8', 1)
        res = tem.get_opt_recipeInfo(engineid, 0)
        outpatient = res['data']['outpatient']
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        expect = cal_ccr.ccr_default_weight(sex='女', unit='umol/L', age=cal_ccr.y, scr=9)
        print(expect)
        self.assertEqual(outpatient['ccr'], "715.4086(计算值)")
        ids = [engineid]
        tem.audit_multi(1, *ids)
        res = tem.get_opt_recipeInfo(engineid, 1)
        self.assertEqual(res['data']['outpatient']['ccr'], "715.4086(计算值)")
    def test_opt_09(self):
        # 16-year-old male, unit umol/l, scr = 9.00 umol/l -- test currently failing.
        tem = Template()
        cal_ccr = Ccr(tem.get_ymd(0, 0), '2003-03-05')
        # tem.send_data('opt_ccr', '传ccr_1.txt', **tem.change_data)
        engineid = tem.get_opt_engineid('opt_ccr', '9', 1)
        res = tem.get_opt_recipeInfo(engineid, 0)
        outpatient = res['data']['outpatient']
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        expect = cal_ccr.ccr_default_weight(sex='男', unit='umol/L', age=cal_ccr.y, scr=9)
        print(expect)
        self.assertEqual(outpatient['ccr'], "956.6965(计算值)")
        ids = [engineid]
        tem.audit_multi(1, *ids)
        res = tem.get_opt_recipeInfo(engineid, 1)
        self.assertEqual(res['data']['outpatient']['ccr'], "956.6965(计算值)")
    def test_opt_13(self):
        # 19-year-old female, unit umol/l, scr = 9.00 umol/l.
        tem = Template()
        cal_ccr = Ccr(tem.get_ymd(0, 0), '2000-03-05')
        # tem.send_data('opt_ccr', '传ccr_1.txt', **tem.change_data)
        engineid = tem.get_opt_engineid('opt_ccr', '7', 1)
        res = tem.get_opt_recipeInfo(engineid, 0)
        outpatient = res['data']['outpatient']
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        expect = cal_ccr.ccr_default_weight(sex='女', unit='umol/L', age=cal_ccr.y, scr=9)
        print(expect)
        self.assertEqual(outpatient['ccr'], "698.5194(计算值)")
        ids = [engineid]
        tem.audit_multi(1, *ids)
        res = tem.get_opt_recipeInfo(engineid, 1)
        self.assertEqual(res['data']['outpatient']['ccr'], "698.5194(计算值)")
    def test_opt_14(self):
        # 19-year-old female, unit umol/l, scr = 9.00 umol/l; ccr given directly.
        tem = Template()
        cal_ccr = Ccr(tem.get_ymd(0, 0), '2000-03-05')
        # tem.send_data('opt_ccr', '传ccr_1.txt', **tem.change_data)
        engineid = tem.get_opt_engineid('opt_ccr', '10', 1)
        res = tem.get_opt_recipeInfo(engineid, 0)
        outpatient = res['data']['outpatient']
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        # expect = cal_ccr.ccr_default_weight(sex='女', unit='umol/L', age=cal_ccr.y, scr=9)
        # print(expect)
        self.assertEqual(outpatient['ccr'], "9.0")
        ids = [engineid]
        tem.audit_multi(1, *ids)
        res = tem.get_opt_recipeInfo(engineid, 1)
        self.assertEqual(res['data']['outpatient']['ccr'], "9.0")
    def test_opt_16(self):
        # The lab value in both tasks is 9.0.
        tem = Template()
        tem.send_data('opt_ccr', 'a2', **tem.change_data)
        tem.send_data('opt_ccr', 'a3', **tem.change_data)
    def test_opt_11(self):
        # scr is outside the lab-result validity window.
        tem = Template()
        tem.send_data('opt_ccr', '1', **tem.change_data)
    def test_opt_12(self):
        # scr is inside the lab-result validity window.
        tem = Template()
        tem.send_data('opt_ccr', '2', **tem.change_data)
    def test_ipt_01(self):
        # Neither ccr nor scr supplied -> preset 90 -- test passing.
        tem = Template()
        # tem.send_data('ipt_ccr', '1', **tem.change_data)
        engineid = tem.get_ipt_engineid('ipt_ccr', '1', 1)
        print(engineid)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        patient = res['data']
        self.assertEqual(patient['ccr'], "90.0(预设值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        patient = res['data']
        self.assertEqual(patient['ccr'], "90.0(预设值)")
    def test_ipt_02(self):
        # A ccr supplied outside the validity window is ignored -> preset used.
        tem = Template()
        # tem.send_data('ipt_ccr', '2', **tem.change_data)
        engineid = tem.get_ipt_engineid('ipt_ccr', '2', 1)
        print(engineid)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        patient = res['data']
        self.assertEqual(patient['ccr'], "90.0(预设值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        patient = res['data']
        self.assertEqual(patient['ccr'], "90.0(预设值)")
    def test_ipt_06(self):
        # A ccr supplied inside the validity window is used as-is -- test passing.
        tem = Template()
        # tem.send_data('ipt_ccr', '5', **tem.change_data)
        engineid = tem.get_ipt_engineid('ipt_ccr', '5', 1)
        print(engineid)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        patient = res['data']
        self.assertEqual(patient['ccr'], "3.0")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        patient = res['data']
        self.assertEqual(patient['ccr'], "3.0")
    def test_ipt_03(self):
        # Two tasks for the same patient: ccr must refresh between them -- test passing.
        tem = Template()
        engineid1 = tem.get_ipt_engineid('ipt_ccr', 'a1', 1)  # task 1: no height/weight sent -> presets used
        res = tem.get_ipt_patient(engineid1, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        patient = res['data']
        self.assertEqual(patient['ccr'], "90.0(预设值)")
        tem.send_data('ipt_ccr', 'a2', **tem.change_data)  # send patient + vital signs
        tem.send_data('ipt_ccr', 'a3', **tem.change_data)  # send vital signs again
        engineid2 = tem.get_ipt_engineid('ipt_ccr', 'a4', 2)  # task 2 has scr; a3's height/weight used, a3's weight feeds the ccr calculation
        res = tem.get_ipt_patient(engineid2, 0)
        patient = res['data']
        self.assertEqual(patient['ccr'], "38.0139(计算值)")
        ids = [engineid1, engineid2]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid1, 1)
        patient = res['data']
        self.assertEqual(patient['ccr'], "90.0(预设值)")
        res = tem.get_ipt_patient(engineid2, 1)
        patient = res['data']
        self.assertEqual(patient['ccr'], "38.0139(计算值)")
    def test_ipt_04(self):
        tem = Template()
        # scr supplied but sex code is 0 (unknown) -> preset used -- test passing.
        engineid = tem.get_ipt_engineid('ipt_ccr', '3', 1)
        print(engineid)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        patient = res['data']
        self.assertEqual(patient['ccr'], "90.0(预设值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        patient = res['data']
        self.assertEqual(patient['ccr'], "90.0(预设值)")
    def test_ipt_05(self):
        tem = Template()
        # scr supplied but sex code is 9 (not stated) -> preset used -- test passing.
        engineid = tem.get_ipt_engineid('ipt_ccr', '4', 1)
        print(engineid)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        patient = res['data']
        self.assertEqual(patient['ccr'], "90.0(预设值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        patient = res['data']
        self.assertEqual(patient['ccr'], "90.0(预设值)")
    def test_ipt_07(self):
        # Same XML: height/weight in both encounter info and vitals -> vitals win.
        # Different XMLs: the most recent of the two sources wins.
        tem = Template()
        tem.send_data('ipt_ccr', 'b1', **tem.change_data)
        engineid = tem.get_ipt_engineid('ipt_ccr', 'b2', 2)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        patient = res['data']
        self.assertEqual(patient['ccr'], "38.0139(计算值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        patient = res['data']
        self.assertEqual(patient['ccr'], "38.0139(计算值)")
    def test_ipt_08(self):
        # Lab results sent incrementally.
        tem = Template()
        tem.send_data('ipt_ccr', 'c1', **tem.change_data)
        engineid = tem.get_ipt_engineid('ipt_ccr', 'c2', 1)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        patient = res['data']
        self.assertEqual(patient['ccr'], "38.0139(计算值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        patient = res['data']
        self.assertEqual(patient['ccr'], "38.0139(计算值)")
    def test_ipt_09(self):
        # After vital-signs data is voided, weight comes only from the encounter info.
        tem = Template()
        tem.send_data('ipt_ccr', 'd1', **tem.change_data)
        engineid = tem.get_ipt_engineid('ipt_ccr', 'c2', 1)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        patient = res['data']
        self.assertEqual(patient['ccr'], "38.0139(计算值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        patient = res['data']
        self.assertEqual(patient['ccr'], "38.0139(计算值)")
    def test_ipt_10(self):
        # 16-year-old male, unit mg/dl, scr = 1.00 mg/dL.
        tem = Template()
        # tem.send_data('ipt_ccr', '6', **tem.change_data)
        cal_ccr = Ccr(tem.get_ymd(-1, 0), '2003-03-05')
        c1 = cal_ccr.ccr_default_weight(sex='男', unit='mg/dl', age=cal_ccr.y, scr=1)
        print(c1)
        engineid = tem.get_ipt_engineid('ipt_ccr', '6', 1)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        self.assertEqual(res['data']['ccr'], "97.8222(计算值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        self.assertEqual(res['data']['ccr'], "97.8222(计算值)")
    def test_ipt_11(self):
        # 16-year-old female, unit mg/dl, scr = 1.00 mg/dL.
        tem = Template()
        cal_ccr = Ccr(tem.get_ymd(-1, 0), '2003-03-05')
        c1 = cal_ccr.ccr_default_weight(sex='女', unit='mg/dl', age=cal_ccr.y, scr=1)
        print(c1)
        engineid = tem.get_ipt_engineid('ipt_ccr', '7', 1)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        self.assertEqual(res['data']['ccr'], "73.1505(计算值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        self.assertEqual(res['data']['ccr'], "73.1505(计算值)")
    def test_ipt_12(self):
        # 19-year-old male, unit mg/dl, scr = 1.00 mg/dL.
        tem = Template()
        cal_ccr = Ccr(tem.get_ymd(-1, 0), '2000-03-05')
        c1 = cal_ccr.ccr_default_weight(sex='男', unit='mg/dl', age=cal_ccr.y, scr=1)
        print(c1)
        engineid = tem.get_ipt_engineid('ipt_ccr', '8', 1)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        self.assertEqual(res['data']['ccr'], "100.8333(计算值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        self.assertEqual(res['data']['ccr'], "100.8333(计算值)")
    def test_ipt_13(self):
        # 19-year-old female, unit mg/dl, scr = 1.00 mg/dL.
        tem = Template()
        cal_ccr = Ccr(tem.get_ymd(-1, 0), '2000-03-05')
        c1 = cal_ccr.ccr_default_weight(sex='女', unit='mg/dl', age=cal_ccr.y, scr=1)
        print(c1)
        engineid = tem.get_ipt_engineid('ipt_ccr', '9', 1)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        self.assertEqual(res['data']['ccr'], "71.4236(计算值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        self.assertEqual(res['data']['ccr'], "71.4236(计算值)")
    def test_ipt_14(self):
        # 3-year-old female, unit mg/dl, scr = 1.00 mg/dL; under 4 years -> preset returned.
        tem = Template()
        engineid = tem.get_ipt_engineid('ipt_ccr', '10', 1)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        self.assertEqual(res['data']['ccr'], "90.0(预设值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        self.assertEqual(res['data']['ccr'], "90.0(预设值)")
    def test_ipt_15(self):
        # 3-year-old male, unit mg/dl, scr = 1.00 mg/dL; under 4 years -> preset returned.
        tem = Template()
        engineid = tem.get_ipt_engineid('ipt_ccr', '11', 1)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        self.assertEqual(res['data']['ccr'], "90.0(预设值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        self.assertEqual(res['data']['ccr'], "90.0(预设值)")
    def test_ipt_16(self):
        # 16-year-old male, scr = 1.00 umol/L.
        tem = Template()
        # tem.send_data('ipt_ccr', '6', **tem.change_data)
        cal_ccr = Ccr(tem.get_ymd(-1, 0), '2003-03-05')
        c1 = cal_ccr.ccr_default_weight(sex='男', unit='umol/L', age=cal_ccr.y, scr=1)
        print(c1)
        engineid = tem.get_ipt_engineid('ipt_ccr', '12', 1)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        self.assertEqual(res['data']['ccr'], "8610.2689(计算值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        self.assertEqual(res['data']['ccr'], "8610.2689(计算值)")
    def test_ipt_17(self):
        # 16-year-old female, unit umol/L, scr = 1.00 umol/L.
        tem = Template()
        cal_ccr = Ccr(tem.get_ymd(-1, 0), '2003-03-05')
        c1 = cal_ccr.ccr_default_weight(sex='女', unit='umol/L', age=cal_ccr.y, scr=1)
        print(c1)
        engineid = tem.get_ipt_engineid('ipt_ccr', '13', 1)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        self.assertEqual(res['data']['ccr'], "6438.6773(计算值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        self.assertEqual(res['data']['ccr'], "6438.6773(计算值)")
    def test_ipt_18(self):
        tem = Template()
        engineid = tem.get_ipt_engineid('ipt_ccr', '14', 1)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        self.assertEqual(res['data']['ccr'], "90.0(预设值)")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        self.assertEqual(res['data']['ccr'], "90.0(预设值)")
    def test_ipt_19(self):
        tem = Template()
        tem.send_data('ipt_ccr', '15', **tem.change_data)
        tem.send_data('ipt_ccr', '16', **tem.change_data)
        # engineid = tem.get_ipt_engineid('ipt_ccr', '16', 1)
        # res = tem.get_ipt_patient(engineid, 0)
        # print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        # self.assertEqual(res['data']['ccr'], "90.0(预设值)")
        # ids = [engineid]
        # tem.audit_multi(3, *ids)
        # res = tem.get_ipt_patient(engineid, 1)
        # self.assertEqual(res['data']['ccr'], "90.0(预设值)")
    def test_ipt_20(self):
        # One XML carrying two drug groups: if their usage periods overlap,
        # both tasks should pick up the ccr.
        tem = Template()
        # tem.send_data('ipt_ccr', '17', **tem.change_data)
        engineid = tem.get_ipt_engineid('ipt_ccr', '17', 1)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        self.assertEqual(res['data']['ccr'], "3.0")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        self.assertEqual(res['data']['ccr'], "3.0")
    def test_ipt_21(self):
        # Lab from f2; two drugs in one group take effect at f3 and d. The first
        # time slice has ccr 90 (preset), the second has 3 -> ccr should be 3.
        tem = Template()
        tem.send_data('ipt_ccr', '18', **tem.change_data)
        engineid = tem.get_ipt_engineid('ipt_ccr', '19', 1)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        self.assertEqual(res['data']['ccr'], "3.0")
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        self.assertEqual(res['data']['ccr'], "3.0")
    def test_ipt_22(self):
        # Two lab reports before the order takes effect: the newest wins, and
        # when both ccr and serum creatinine are present, ccr takes priority.
        tem = Template()
        cal_ccr = Ccr(tem.get_ymd(0, 0), '1994-03-05')
        c1 = cal_ccr.ccr_calculate(sex='女', unit='mg/dL', age=cal_ccr.y,weight=60, scr=1)
        print(c1)
        tem.send_data('ipt_ccr', 'e1', **tem.change_data)
        tem.send_data('ipt_ccr', 'e2', **tem.change_data)
        engineid = tem.get_ipt_engineid('ipt_ccr', 'e3', 1)
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        # try:
        self.assertEqual(res['data']['ccr'], "4.0")
        # except AssertionError as e:
        # print(e)
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        self.assertEqual(res['data']['ccr'], "4.0")
    def test_ipt_23(self):
        tem = Template()
        # After a pending order is deleted as a merged order, the original task
        # re-runs the engine and the ccr shows correctly on the audited page.
        tem.send_data('ipt_delete', 'a1', **tem.change_data)  # send the herbal-medicine order
        tem.send_data('ipt_delete', 'a2', **tem.change_data)  # send the drug order
        tem.send_delete_1('ipt_delete', 'a3', **tem.change_data)  # delete the herbal-medicine order
        # Fetch the drug order's engine id from the pending-audit page.
        param = {
            "patientId": tem.change_data['{{ts}}']
        }
        url = tem.conf.get('auditcenter', 'address') + tem.conf.get('api', '查询待审住院任务列表')
        res = tem.post_json(url, param)
        engineid = res['data']['engineInfos'][0]['id']
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        # try:
        self.assertEqual(res['data']['ccr'], "90.0(预设值)")
        # except AssertionError as e:
        # print(e)
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        self.assertEqual(res['data']['ccr'], "90.0(预设值)")
    def test_ipt_24(self):
        tem = Template()
        # After a pending order is deleted as a merged order, the original task
        # re-runs the engine and the ccr shows correctly on the audited page.
        tem.send_data('ipt_delete', 'a2', **tem.change_data)  # send the drug order
        tem.send_data('ipt_delete', 'a1', **tem.change_data)  # send the herbal-medicine order
        tem.send_delete_1('ipt_delete', 'a4', **tem.change_data)  # delete the drug order
        # Fetch the drug order's engine id from the pending-audit page.
        param = {
            "patientId": tem.change_data['{{ts}}']
        }
        url = tem.conf.get('auditcenter', 'address') + tem.conf.get('api', '查询待审住院任务列表')
        res = tem.post_json(url, param)
        engineid = res['data']['engineInfos'][0]['id']
        res = tem.get_ipt_patient(engineid, 0)
        print(json.dumps(res, indent=2, sort_keys=False, ensure_ascii=False))
        # try:
        self.assertEqual(res['data']['ccr'], "90.0(预设值)")
        # except AssertionError as e:
        # print(e)
        ids = [engineid]
        tem.audit_multi(3, *ids)
        res = tem.get_ipt_patient(engineid, 1)
        self.assertEqual(res['data']['ccr'], "90.0(预设值)")
if __name__ == '__main__':
    # Run every test in this module; uncomment the suite lines below to run a
    # hand-picked subset instead.
    # suite = unittest.TestSuite()  # create a test suite
    unittest.main()
    # suite.addTest(TestCcr("test_opt_one"))
    # suite.addTest(TestCcr("test_opt_two"))
    # suite.addTest(TestCcr("test_opt_three"))
    # runner = unittest.TextTestRunner()
    # runner.run(suite)
|
19,870 | 1a080d9bf71409101b2041f4e9018ae387bf812e | roman = {
'I': 1,
'V': 5,
'X': 10,
'L': 50,
'C': 100,
'D': 500,
'M': 1000
}
class Solution:
    """Convert a Roman numeral string to an integer (LeetCode 13)."""

    # Symbol values kept on the class so the converter no longer depends on the
    # module-level mutable `roman` dict (which remains available for other
    # callers).
    _VALUES = {'I': 1, 'V': 5, 'X': 10, 'L': 50, 'C': 100, 'D': 500, 'M': 1000}

    def romanToInt(self, s):
        """
        :type s: str
        :rtype: int

        Scan left to right: a symbol that is smaller than its right neighbour
        is subtractive (the I in IV), otherwise it is additive.  This replaces
        the original's obscure `total -= last + last` back-correction.
        Assumes `s` is a well-formed Roman numeral; returns 0 for "".
        """
        values = Solution._VALUES
        total = 0
        for i, ch in enumerate(s):
            value = values[ch]
            if i + 1 < len(s) and value < values[s[i + 1]]:
                total -= value
            else:
                total += value
        return total
19,871 | 35a462626c0b6c9cf21b3a0094933ac85f4c2ca9 | '''
@Copyright (c) tkianai All Rights Reserved.
@Author : tkianai
@Github : https://github.com/tkianai
@Date : 2020-04-21 20:10:38
@FilePath : /RetinaFace.detectron2/retinaface/data/__init__.py
@Description :
'''
|
19,872 | 9326b66f660aec31a11eb77cb5f576d158abf064 |
# selenium
from selenium.webdriver import ActionChains
from selenium import webdriver
from selenium.webdriver.common.keys import Keys as keyboard
from selenium.webdriver.support.ui import Select
from selenium.webdriver import ActionChains
#Python - inbuilt
import time
# user-module
from Data import TestData1 as td
import EnvConfig as env
from ObjectRepo import Locators as loc
driver = webdriver.Chrome(env.DriverPATH)
driver.maximize_window()
driver.implicitly_wait(10)
driver.get("https://www.toolsqa.com/selenium-webdriver/mouse-hover-action/")
time.sleep(2)
mouseHOver_Tut_link = driver.find_element_by_xpath(loc.MOuseHover_Tutorials_Link)
ac = ActionChains(driver)
ac.move_to_element(mouseHOver_Tut_link)
ac.perform()
about=driver.find_element_by_xpath(loc.About)
ac.move_to_element(about).perform()
face_book=driver.find_element_by_xpath(loc.facebook)
face_book.click()
# bodyOFToolsQA_com = driver.find_element_by_xpath("//body[contains(@class,'post-template')]")
# bodyOFToolsQA_com.send_keys(keyboard.PAGE_DOWN)
# bodyOFToolsQA_com.send_keys(keyboard.PAGE_DOWN)
#
# keyboard_AC = ActionChains(driver)
# keyboard_AC.key_down(keyboard.CONTROL).send_keys('a').key_up(keyboard.CONTROL).perform()
# # keyboard_AC.
|
19,873 | 57f55c47c917c7ca15de3556f27379ed90a58268 | import os
import sys
import numpy as np
import zslib, geotifflib, datelib, masklib, floatlib
import matplotlib.pyplot as plt
import matplotlib
# Get arguments
args = sys.argv
glacier = args[1][:] # Options: Kanger, Helheim
##########################
# Get surface elevations #
##########################
if glacier == 'Kanger':
xmin = 468000.
xmax = 498000.
ymin = -2299000.
ymax = -2264000.
elif glacier == 'Helheim':
xmin = 283000.
xmax = 313000.
ymin = -2587000.
ymax = -2552000.
# DEMs
xdem,ydem,zdem,timedem,errordem = zslib.dem_grid(glacier,xmin,xmax,ymin,ymax,years='all',verticaldatum='geoid',return_error=True)
# Mask
xmask,ymask,zmask=masklib.load_grid(glacier,np.min(xdem),np.max(xdem),np.min(ydem),np.max(ydem),32)
######################
# Get Landsat images #
######################
# Image for plotting
if glacier == "Helheim":
imagetime = datelib.date_to_fracyear(2014,7,4)
ximage,yimage,image = geotifflib.readrgb(os.path.join(os.getenv("DATA_HOME"),"Imagery/Landsat/Helheim/TIF/20140704140535_LC82330132014185LGN00.tif"))
elif glacier == "Kanger":
imagetime = datelib.date_to_fracyear(2014,7,6)
ximage,yimage,image = geotifflib.readrgb(os.path.join(os.getenv("DATA_HOME"),"Imagery/Landsat/Kanger/TIF/20140706135251_LC82310122014187LGN00.tif"))
###############################
# Choose DEMs for subtraction #
###############################
if glacier == 'Helheim':
time1 = [2013,2,9]
ind1 = np.argmin(abs(datelib.date_to_fracyear(time1[0],time1[1],time1[2])-timedem))
time2 = [2013,10,31]
ind2 = np.argmin(abs(datelib.date_to_fracyear(time2[0],time2[1],time2[2])-timedem))
time3 = [2012,3,16]
ind3 = np.argmin(abs(datelib.date_to_fracyear(time3[0],time3[1],time3[2])-timedem))
time4 = [2012,12,5]
ind4 = np.argmin(abs(datelib.date_to_fracyear(time4[0],time4[1],time4[2])-timedem))
elif glacier == 'Kanger':
time1 = [2012,5,22]
time2 = [2012,12,17]
time3 = [2012,5,22]
time4 = [2012,12,17]
ind1 = np.argmin(abs(datelib.date_to_fracyear(time1[0],time1[1],time1[2])-timedem))
ind2 = np.argmin(abs(datelib.date_to_fracyear(time2[0],time2[1],time2[2])-timedem))
ind3 = np.argmin(abs(datelib.date_to_fracyear(time3[0],time3[1],time3[2])-timedem))
ind4 = np.argmin(abs(datelib.date_to_fracyear(time4[0],time4[1],time4[2])-timedem))
########################################################
# Find out if flotation condition changed between DEMs #
########################################################
xf,yf,zabovefloat = floatlib.extent(xdem,ydem,zdem[:,:,[ind1,ind2]],timedem[[ind1,ind2]],glacier,rho_i=917.0,rho_sw=1020.0,bedsource='cresis',verticaldatum='geoid')
cutoff = 0.
floatcond = np.zeros(len(xf))
floatcond[:] = float('nan')
for i in range(0,len(xf)):
if np.min(zabovefloat[i,:]) > cutoff:
floatcond[i] = 1 #grounded
elif np.max(zabovefloat[i,:]) < -1*cutoff:
floatcond[i] = -1 #floating
elif (np.max(zabovefloat[i,:]) > -1*cutoff) or (np.min(zabovefloat[i,:]) < cutoff):
floatcond[i] = 0 #changing basal conditions
fig = plt.figure(figsize=(3.75,2.4))
matplotlib.rc('font',family='Arial')
gs = matplotlib.gridspec.GridSpec(1,2)
cx = matplotlib.cm.get_cmap('RdBu_r',8)
plt.subplot(gs[0])
matplotlib.rcParams['contour.negative_linestyle'] = 'solid'
ax1 = plt.gca()
plt.imshow(image[:,:,0],extent=[np.min(ximage),np.max(ximage),np.min(yimage),np.max(yimage)],cmap='Greys_r',origin='lower',clim=[0,0.6])
norms = matplotlib.colors.BoundaryNorm(np.arange(-20,20,5),cx.N)
p=plt.imshow(np.ma.masked_array(zdem[:,:,ind2]-zdem[:,:,ind1],zmask),extent=[np.min(xdem),np.max(xdem),np.min(ydem),np.max(ydem)],origin='lower',clim=[-20,20],cmap='RdBu_r')
#plt.contour(np.ma.masked_array(zdem[:,:,ind2]-zdem[:,:,ind1],zmask),0,colors='k',extent=[np.min(xdem),np.max(xdem),np.min(ydem),np.max(ydem)],origin='lower',fontsize=8)
ax1.axes.set_xlim([xmin,xmax])
ax1.axes.set_ylim([ymin,ymax])
ax1.set_xticks([])
ax1.set_yticks([])
xmin1,xmax1 = plt.xlim()
ymin1,ymax1 = plt.ylim()
path = matplotlib.path.Path([[0.46*(xmax1-xmin1)+xmin,0.98*(ymax1-ymin1)+ymin1],
[0.98*(xmax1-xmin1)+xmin1,0.98*(ymax1-ymin1)+ymin1],
[0.98*(xmax1-xmin1)+xmin1,0.64*(ymax1-ymin1)+ymin1],
[0.46*(xmax1-xmin1)+xmin1,0.64*(ymax1-ymin1)+ymin1],
[0.46*(xmax1-xmin1)+xmin1,0.98*(ymax1-ymin1)+ymin1]])
patch = matplotlib.patches.PathPatch(path,edgecolor='k',facecolor='w',lw=1,zorder=3)
ax1.add_patch(patch)
cbaxes = fig.add_axes([0.3, 0.84, 0.16, 0.03])
cb = plt.colorbar(p,cax=cbaxes,orientation='horizontal',ticks=[-20,0,20])
cb.set_label('Difference \n (m)',size=10,fontname='arial')
minorticks = p.norm(np.arange(-20, 25, 10))
cb.ax.xaxis.set_ticks(minorticks, minor=True)
cb.ax.tick_params(labelsize=9)
cb.ax.tick_params(which='both',length=5)
ax1.text(0.05*(xmax1-xmin1)+xmin1,0.9*(ymax1-ymin1)+ymin1,'a',weight='bold',fontsize=10)
plt.subplot(gs[1])
matplotlib.rcParams['contour.negative_linestyle'] = 'solid'
ax1 = plt.gca()
plt.imshow(image[:,:,0],extent=[np.min(ximage),np.max(ximage),np.min(yimage),np.max(yimage)],cmap='Greys_r',origin='lower',clim=[0,0.6])
plt.plot(0,0,'r.',label='Grounded',markersize=5)
plt.plot(0,0,'b.',label='Floating',markersize=5)
plt.plot(0,0,'k.',label='Changing',markersize=5)
ind = np.where(floatcond == 1)
plt.plot(xf[ind],yf[ind],'r.',lw=0,markersize=2)
ind = np.where(floatcond == -1)
plt.plot(xf[ind],yf[ind],'b.',lw=0,markersize=2)
ind = np.where(floatcond == 0)
plt.plot(xf[ind],yf[ind],'k.',lw=0,markersize=2)
ax1.axes.set_xlim([xmin,xmax])
ax1.axes.set_ylim([ymin,ymax])
ax1.set_xticks([])
ax1.set_yticks([])
xmin1,xmax1 = plt.xlim()
ymin1,ymax1 = plt.ylim()
path = matplotlib.path.Path([[0.46*(xmax1-xmin1)+xmin,0.98*(ymax1-ymin1)+ymin1],
[0.98*(xmax1-xmin1)+xmin1,0.98*(ymax1-ymin1)+ymin1],
[0.98*(xmax1-xmin1)+xmin1,0.64*(ymax1-ymin1)+ymin1],
[0.46*(xmax1-xmin1)+xmin1,0.64*(ymax1-ymin1)+ymin1],
[0.46*(xmax1-xmin1)+xmin1,0.98*(ymax1-ymin1)+ymin1]])
patch = matplotlib.patches.PathPatch(path,edgecolor='k',facecolor='w',lw=1,zorder=3)
ax1.add_patch(patch)
plt.legend(loc=1,numpoints=1,frameon=False,labelspacing=0.07,handletextpad=0.5,handlelength=0.2,fontsize=10,borderpad=0)
ax1.plot([xmin1+0.56*(xmax1-xmin1),xmin+0.56*(xmax1-xmin1)+5e3],[ymin1+0.71*(ymax1-ymin1),ymin1+0.71*(ymax1-ymin1)],'k',linewidth=1.5,zorder=5)
ax1.plot([xmin1+0.56*(xmax1-xmin1),xmin1+0.56*(xmax1-xmin1)],[ymin1+0.71*(ymax1-ymin1),ymin1+0.69*(ymax1-ymin1)],'k',linewidth=1.5,zorder=5)
ax1.plot([xmin1+0.56*(xmax1-xmin1)+5e3,xmin1+0.56*(xmax1-xmin1)+5e3],[ymin1+0.71*(ymax1-ymin1),ymin1+0.69*(ymax1-ymin1)],'k',linewidth=1.5,zorder=5)
ax1.text(xmin1+0.60*(xmax1-xmin1)+5e3,ymin1+0.68*(ymax1-ymin1),'5 km',fontsize=10,fontname='arial')
ax1.text(0.05*(xmax1-xmin1)+xmin1,0.9*(ymax1-ymin1)+ymin1,'b',weight='bold',fontsize=10)
plt.tight_layout()
plt.subplots_adjust(hspace=0.03,wspace=0.03)
plt.savefig(os.path.join(os.getenv("HOME"),"Bigtmp/"+glacier+"_seasonal.pdf"),FORMAT='PDF',dpi=400)
plt.close()
|
19,874 | 48067d5b055f3ff83b81dac0dc6e2497b91c6f79 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function, division
from io import open
import os, sys
import glob
import string
import unicodedata
import random
import time
import math
import re
import csv
import pandas as pd
import numpy as np
from sklearn.metrics import f1_score, precision_score, accuracy_score
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import torch
import torch.nn as nn
from torch.nn import functional as F
from torch.autograd import Variable
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
#----------------------------------------------------------------
def cudify(x, cuda):
    """Move *x* to the GPU when *cuda* is truthy; otherwise return it unchanged."""
    if cuda:
        return x.cuda()
    return x
def init_tensor(t, *size):
    """Create a float tensor of shape *size*, initialized per scheme code *t*.

    t == 0: zeros, t == 1: standard normal, t == 2: Xavier normal,
    t == 3: Xavier uniform.  Unknown codes fall through to zeros.
    """
    if t == 1:
        return torch.randn(*size).float()
    out = torch.zeros(*size).float()
    if t == 2:
        torch.nn.init.xavier_normal_(out)
    elif t == 3:
        torch.nn.init.xavier_uniform_(out)
    return out
class CharacterEmbedding(nn.Module):
    """Embed integer-encoded character sequences and pack them for an RNN.

    Vocabulary index 0 is reserved as the padding index.
    """
    def __init__(self, embedding_dim, vocab, batch_first=True, cuda=True):
        super(CharacterEmbedding, self).__init__()
        self.embedding_dim = embedding_dim
        self.vocab = vocab
        self.batch_first = batch_first
        self._cuda = cuda
        # One embedding row per vocabulary entry; row 0 is padding.
        self.embed = cudify(nn.Embedding(len(self.vocab), embedding_dim, padding_idx=0), cuda)
        # NOTE(review): self.cos is never used inside this class — confirm
        # whether anything external relies on it before removing.
        self.cos = nn.CosineSimilarity(dim=2)
    def forward(self, X, L):
        """Embed the padded index batch X and pack it using lengths L."""
        # utils.rnn lets you give (B,L,D) tensors where B is the batch size, L is the maxlength,
        # if you use batch_first=True. Otherwise, give (L,B,D) tensors
        if not self.batch_first:
            X = X.transpose(0, 1) # (B,L,D) -> (L,B,D)
        # embed your sequences
        X = self.embed(X)
        # pack them up nicely
        X = pack_padded_sequence(X, L, batch_first=self.batch_first)
        return X
    def unpackToSequence(self, packed_output):
        """Pad a packed RNN output back out and map it through self.unembed."""
        output, _ = pad_packed_sequence(packed_output, batch_first=self.batch_first)
        # NOTE(review): self.unembed is not defined in this class, so this
        # call raises AttributeError unless a subclass provides it.
        words = self.unembed(output)
        return words
#----------------------------------------------------------------
def masked_softmax(x, mask, dim=1, epsilon=1e-14):
    """Softmax over *dim* that gives (near-)zero weight where *mask* is 0.

    The max is subtracted before exponentiation for numerical stability, and
    *epsilon* keeps the denominator non-zero for fully-masked rows.
    """
    shifted = x - x.max(dim=dim, keepdim=True)[0]
    weights = torch.exp(shifted) * mask.float()
    totals = weights.sum(dim, keepdim=True) + epsilon
    return weights / totals
class Attention(nn.Module):
    """Attention pooling over RNN outputs.

    Each timestep is scored against a learned query vector; the module
    returns the attention-weighted sum over the time dimension.
    """
    def __init__(self, p, cuda=True):
        super(Attention, self).__init__()
        self._cuda = cuda
        # The first entry of p.out greater than 1 doubles as the attention
        # size (values <= 1 select the other pooling modes in BaseRNN).
        self.attn_size = next(x for x in p.out if x > 1)
        self.linear = cudify(nn.Linear(p.hidden_size * p.directions, self.attn_size, bias=False), cuda)
        # Learned query vector used to score each timestep.
        self.q = nn.Parameter(cudify(self.new_parameter(self.attn_size, 1), cuda))
    def is_cuda(self):
        # True when this module's parameters live on the GPU.
        return next(self.parameters()).is_cuda
    def new_parameter(self, *size):
        # Xavier-normal-initialized float tensor of the given shape.
        out = torch.FloatTensor(*size)
        torch.nn.init.xavier_normal_(out)
        return out
    def forward(self, X, L):
        """Pool X of shape (batch, time, features) into (batch, features)."""
        batch_size = X.shape[0]
        input_size = X.shape[1]
        hidden_size = X.shape[2]
        K = self.linear(X)
        K = F.relu(K)
        # Raw per-timestep scores from the query vector.
        beta = torch.matmul(K, self.q).squeeze(dim=2)
        # beta = F.relu(beta)
        beta = torch.tanh(beta)
        # Padded timesteps are all-zero vectors; mask them out of the softmax.
        mask = torch.sum(abs(X), dim=2)>0
        alpha = masked_softmax(beta, mask, dim=1)
        # Attention-weighted sum over the time dimension.
        output = torch.sum(X * alpha.view(batch_size, input_size, 1), dim=1)
        return output
#----------------------------------------------------------------
class BaseRNN(nn.Module):
    """Wrapper around an RNN class (GRU/LSTM) with configurable pooling.

    ``p`` is a hyper-parameter object; ``p.out`` lists pooling-mode codes
    whose results are concatenated: -1 last output, 0 mean, 1 max,
    >1 attention (the value doubles as the attention size).
    """
    def __init__(self, rnn, p, cuda=True):
        super(BaseRNN, self).__init__()
        self.p = p
        self.input_size = p.embedding_dim
        self.hidden_size = p.hidden_size
        self.nb_layers = p.nb_layers
        self.dropout = p.dropout
        self.directions = p.directions
        self.act = p.act
        self.out = p.out
        self.hinit = p.hinit
        # One hidden_size*directions slice per pooling mode, concatenated.
        self.output_size = self.hidden_size * self.directions * len(self.out)
        self._cuda = cuda
        self.rnn = rnn(self.input_size, self.hidden_size,
            num_layers=self.nb_layers,
            batch_first=True,
            dropout= self.dropout if self.nb_layers>1 else 0,
            bidirectional= self.directions>1
        )
        self.rnn = cudify(self.rnn, cuda)
        # Only build the attention module when some pooling mode requests it.
        if any([x > 1 for x in self.out]):
            self.attn = Attention(p, cuda)
    def is_cuda(self):
        return next(self.parameters()).is_cuda
    def init_tensor(self, batch_size):
        # Fresh hidden-state tensor per the configured init scheme (p.hinit).
        return cudify(init_tensor(self.hinit, self.nb_layers * self.directions, batch_size, self.hidden_size), self.is_cuda())
    def init_hidden(self, batch_size):
        return self.init_tensor(batch_size)
    # https://github.com/ranihorev/fastai/commit/5a67283a2594c789bfa321fb259f4dff473f5d49
    def last_output(self, X, L):
        # Last valid forward output per sequence; when bidirectional, also the
        # first timestep of the backward half of the features.
        idx = torch.arange(L.size(0))
        fw = X[idx, L-1, :self.hidden_size]
        if self.directions > 1:
            bw = X[idx, 0, self.hidden_size:]
            return torch.cat([fw, bw], 1)
        return fw
    def mean_pool(self, X, L):
        # Sum over time divided by the true (unpadded) sequence length.
        return torch.div(torch.sum(X, dim=1).permute(1, 0), L.float()).permute(1, 0)
    def max_pool(self, X, L):
        return torch.max(X, dim=1)[0]
    def select_pool(self, X, L, i):
        # Dispatch a single pooling-mode code to its implementation.
        if i == -1:
            return self.last_output(X, L)
        elif i == 0:
            return self.mean_pool(X, L)
        elif i == 1:
            return self.max_pool(X, L)
        else:
            return self.attn(X, L)
    def pool(self, X, L):
        # Concatenate every configured pooling mode along the feature axis.
        M = [self.select_pool(X, L, i) for i in self.out]
        return torch.cat(M, 1)
    def forward(self, X, L):
        """Run packed input X through the RNN and pool to (batch, output_size)."""
        H = self.init_hidden(L.size(0))
        X, H = self.rnn(X, H)
        X, _ = torch.nn.utils.rnn.pad_packed_sequence(X, batch_first=True)
        # return X, H
        X = self.pool(X, L)
        return X
class MyGRU(BaseRNN):
    """BaseRNN specialized to a GRU backbone."""
    def __init__(self, p, cuda=True):
        super(MyGRU, self).__init__(nn.GRU, p, cuda)
class MyLSTM(BaseRNN):
    """BaseRNN specialized to an LSTM backbone (hidden state is (h, c))."""
    def __init__(self, p, cuda=True):
        super(MyLSTM, self).__init__(nn.LSTM, p, cuda)
    def init_hidden(self, batch_size):
        # LSTMs need both a hidden state and a cell state.
        return (self.init_tensor(batch_size),
                self.init_tensor(batch_size)
                )
class MyModel(nn.Module):
    """End-to-end classifier: character embedding -> RNN with pooling ->
    optional dense layer -> linear head -> log-softmax over two classes."""
    def __init__(self, p, cuda=True):
        super(MyModel, self).__init__()
        self.p = p
        self._cuda = cuda
        self.dropout = p.dropout
        self.act = p.act
        self.hidden_size = p.hidden_size
        # Binary classification head.
        self.output_size = 2
        self.directions = p.directions
        self.embed = CharacterEmbedding(p.embedding_dim, p.vocab, cuda=cuda)
        # p.rnn is a class (e.g. MyGRU or MyLSTM) instantiated here.
        self.rnn = p.rnn(p, cuda=cuda)
        self.drop_layer = nn.Dropout(p=self.dropout)
        self.logsoftmax = nn.LogSoftmax(dim=1)
        self.rnn_output_size = self.rnn.output_size
        # Dense layers
        in_size = self.rnn_output_size
        self.lin_a = None
        ## add internal dense layers...?
        if p.dense_size > 0:
            out_size = p.dense_size
            self.lin_a = cudify(nn.Linear(in_size, out_size), cuda)
            in_size = out_size
        # final dense layer...
        self.lin_z = cudify(nn.Linear(in_size, self.output_size), cuda)
    def is_cuda(self):
        return next(self.parameters()).is_cuda
    def drop(self, X):
        # Apply dropout only when a non-zero rate is configured.
        return self.drop_layer(X) if self.dropout>0 else X
    def forward(self, X, L):
        """X: padded index batch; L: sequence lengths. Returns log-probs."""
        # embedding layer
        X = self.embed(X, L)
        # # RNN layer
        X = self.rnn(X, L)
        X = self.drop(X)
        # extra linear layers
        if self.lin_a is not None:
            X = self.lin_a(X)
            X = F.relu(X)
            # X = torch.tanh(X)
            X = self.drop(X)
        # final linear --> output layer
        X = self.lin_z(X)
        # final activation
        X = self.act(X)
        X = self.logsoftmax(X)
        return X
|
19,875 | ab39ea5b9a7e41844d1de7f049f00f6879009716 | # -*- coding:utf-8 -*-
"""
class Ktools
用于 kerberos 操作 及 管理
requires libraries:
kerberos :
checkPassword
changePassword
kadmin :
get_principal
list_principal
del_principal
add_principal
principal:
expire
change_password
"""
import kerberos
import kadmin
import re
from time import time
from datetime import datetime
" timeout sec"
EXPIRE = 86400*24*90
class Ktools(object):
    """Facade over the kerberos/kadmin libraries for one realm ("daemon")."""
    def __init__(self,daemon):
        # daemon: the Kerberos realm name, e.g. "KISOPS.COM".
        self.daemon = daemon
    def authUser(self,user,password):
        # Verify credentials against the realm via kerberos.checkPassword.
        return kerberos.checkPassword(user,password,"",self.daemon)
    def changePassword(self,user,old,new):
        # Change a principal's password given the current one.
        return kerberos.changePassword(user,old,new)
    def kadmin(self,user,password):
        " init kadmin object by admin user and password "
        # Only '<name>/admin' principals may administer the realm.
        if not re.search("/admin$",user):
            raise kadmin.KAdminError , "User %s is not AdminUser."%user
        return Kadmin(user,self.daemon,password)
class Kadmin(object):
    """Realm administration over an authenticated kadmin connection."""
    def __init__(self,user,daemon,password):
        self.daemon = daemon
        # Fully-qualified admin principal, e.g. "root/admin@REALM".
        adminUser = "%s@%s"%(user,self.daemon)
        self.admin = kadmin.init_with_password(adminUser,password)
    def changePassword(self,user,password):
        # 'user' is a Kuser wrapper object, not a principal name.
        return user.changePassword(password)
    def getUser(self,userName):
        return Kuser(self.admin,userName)
    def listUsers(self,):
        return list( self.yieldUsers() )
    def yieldUsers(self,):
        # Yield principal names with the '@REALM' suffix stripped.
        for u in self.admin.list_principals():
            u = re.findall(r"(.*?)@%s$"%self.daemon,u)[0]
            yield u
    def delUser(self,userName):
        return self.admin.del_principal(userName)
    def addUser(self,userName,password):
        # Ordinary users must not contain '/', which marks an instance part.
        if re.search("/",userName):
            raise kadmin.KAdminError,"Failed userName format by '/'."
        return self.admin.create_principal(userName,password)
    def addSuperUser(self,userName,password):
        # Admin principals must be of the form '<name>/admin'.
        if not re.search("/admin$",userName):
            raise kadmin.KAdminError,"UserName format need '%s/admin'."
        return self.admin.create_principal(userName,password)
    def addAppUser(self,userName,host,password):
        # Not implemented yet.
        pass
class Kuser(object):
    """
    Wrapper around a kadmin principal.

    extends method
    : expire
    """
    def __init__(self,admin,userName):
        self.admin = admin
        self.user = admin.get_principal(userName)
    def changePassword(self,password):
        return self.user.change_password(password)
    def lock(self,):
        # An empty expire date disables the principal immediately.
        return self.user.expire("")
    def unlock(self,):
        " add some days by now()"
        # Re-enable by pushing the expiry EXPIRE seconds past the present.
        expireDatetime = datetime.fromtimestamp(time()+EXPIRE).__str__()
        return self.user.expire(expireDatetime)
    def __getattr__(self,key):
        " extends kadmin principal object method "
        # Delegate anything not defined here to the underlying principal.
        return getattr(self.user,key)
if __name__ == "__main__":
    " debug testing "
    # NOTE(review): manual smoke test with hard-coded plaintext credentials —
    # never run this against a production realm.
    tools = Ktools("KISOPS.COM")
    retval = tools.authUser("root/admin","11qq```")
    print " auth root/admin by password result : %s "% retval
    retval = tools.changePassword("root/admin","11qq```","11qq```")
    admin = tools.kadmin("root/admin","11qq```")
    print admin.listUsers()
    print admin.delUser("liuyc/admin")
    print admin.addSuperUser("liuyc/admin","321321")
    u = admin.getUser("liuyc/admin")
    print u.changePassword("321321")
    print u.lock()
    print u.unlock()
    print u.expire("2013-01-01")
|
19,876 | 407222dc7601049368e8597cb1e640c36ade881f | import re
from itertools import chain
from .entities import Reaction
from .logger import log
from .utils import get_user_repr, url_regex
async def add_reaction(reactor, cmd, message):
    """Handle '/add_reaction phrase1,phrase2;URL-or-text'.

    Parses the trigger phrases (and an optional reaction payload after ';'),
    refuses phrases that already have a reaction, stores the new reaction
    and sets a confirmation response on *reactor*.
    """
    # '/add_reaction phrase1,phrase2;URL'
    text = message['text']
    raw_text = re.sub(r"/%s" % cmd, '', text).strip()
    splitted_text = raw_text.split(';', maxsplit=1)
    if len(splitted_text) > 1:
        patterns, reaction_content = splitted_text
        reaction_content = reaction_content.strip()
    else:
        # image may be passed as file
        # BUG FIX: splitted_text is a list; the original assigned the whole
        # list here, so patterns.split(',') below raised AttributeError.
        patterns = splitted_text[0]
        reaction_content = None
    patterns = patterns.split(',')
    # Drop whitespace-only phrases so they cannot become empty patterns.
    patterns = [p.strip().lower() for p in patterns if p.strip()]
    reaction = {
        'id': None,
        'patterns': patterns,
        'created_by': message['from'],
        'created_at': message['date'],
        'image_id': '',
        'image_url': '',
        'text': ''
    }
    # A payload that looks like a URL becomes an image, otherwise plain text.
    if reaction_content and url_regex.match(reaction_content):
        log.debug('URL: %s', reaction_content)
        reaction['image_url'] = reaction_content
    elif reaction_content:
        reaction['text'] = reaction_content
    # Reject the command when any phrase already has a stored reaction.
    reactions = [entity async for entity in Reaction.find_by_pattern(patterns)]
    if reactions:
        found_patterns = map(lambda r: r['patterns'], reactions)
        reactor.response = {
            'text': "There are some reactions already exist: '%s'" % ", ".join(chain.from_iterable(found_patterns))
        }
        return
    await Reaction.create(reaction)
    reactor.response = {
        'text': "Saved reaction for `{}` by {}".format(", ".join(patterns), get_user_repr(message['from']))
    }
def start(reactor, cmd, message):
    """Reply to /start with a fixed greeting in the sender's private chat."""
    sender_id = message['from']['id']
    reactor.response = {'chat_id': sender_id, 'text': "Чо-чо попячса"}
def help(reactor, cmd, message):
    """Reply to /help with the markdown-formatted command reference."""
    sections = [
        "*Commands are:*",
        "`/add_reaction` - add some reaction from bot (text or image) for phrases",
        "Format: /add_reaction phrase1,[phrase2],...[phraseN];URL",
        "`/start` - dummy command",
        "`/help` - This help text",
    ]
    reactor.response = {
        'chat_id': message['from']['id'],
        'parse_mode': "markdown",
        'text': "\n\n".join(sections),
    }
|
19,877 | 9f6fd4acbca37c9e15dc78ca6e3de9e069faf2bf | from typing import Optional
from databases import Database
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
from fastapi import Request
from src.core.config.app_settings import AppSettings
app_config = AppSettings()
# Optional async database handle; populated elsewhere (e.g. at app startup).
database: Optional[Database] = None
# Synchronous SQLAlchemy plumbing built from the configured Postgres DSN.
engine = create_engine(app_config.db.pg_dsn)
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
db_session = scoped_session(SessionLocal)
# Declarative base class for ORM models.
Base = declarative_base()
# This function is needed for dependency injection: it returns the
# request-scoped database handle stashed on request.state by middleware.
# (Return annotation corrected: the value returned is request.state.db,
# presumably a Database, not the Request itself — confirm against callers.)
def get_postgresql(request: Request) -> Database:
    return request.state.db
|
19,878 | 8b9be28ebfdc491aa20ad90b06f8328768174d94 | from django.urls import path
from .views import (
QuizListView,
quiz_view,
quiz_data_view,
save_quiz_view,
register
)
from .forms import LoginForm, ChangePasswordForm, MyPasswordResetForm, MySetPasswordForm
from django.conf import settings
from django.conf.urls.static import static
from . import views
from django.contrib.auth import views as auth_views
# Namespace used for URL reversing, e.g. 'quizes:main-view'.
app_name = 'quizes'
urlpatterns = [
    # Quiz listing and per-quiz pages (<pk> is the quiz primary key).
    path('', QuizListView.as_view(), name='main-view'),
    path('<pk>/', quiz_view, name='quiz-view'),
    path('<pk>/save/', save_quiz_view, name='save-view'),
    path('<pk>/data/', quiz_data_view, name='quiz-data-view'),
    # Authentication: login with the custom form, plus register/logout.
    path('accounts/login/', auth_views.LoginView.as_view(template_name="quizes/login.html",
                                                         authentication_form=LoginForm), name="login"),
    path('accounts/register/', register, name="register"),
    path('accounts/logout/',
         auth_views.LogoutView.as_view(next_page='quizes:main-view'), name="logout"),
]
|
19,879 | b83a931d80ca4393e9b7aaff2c4ca05de938a550 | from math import sin, sqrt, radians
n, r = map(int, input().split())
o = 360 / (n * 2)
a = o / 2
c = 180 - o - a
A = (r * sin(radians(a))) / sin(radians(c))
O = (r * sin(radians(o))) / sin(radians(c))
area = 0.5 * A * r * sin(radians(o))
print(2 * n * area)
|
19,880 | 2406e61d7d9cb467b5cc90b42cb075e70415a072 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @File : 1029.py
# @Author: Negen
# @Date : 2018/10/29/029
# @Desc :
from random import randint
from collections import Counter
import re
"""
统计一百个0-9之间的随机整数各自出现的次数,返回字典
结果:{6: 18, 3: 7, 1: 7, 9: 10, 2: 9, 8: 13, 7: 9, 0: 10, 5: 8, 4: 9}
"""
def countNum():
    """Draw 100 random integers in [0, 9] and count each value.

    Returns a dict mapping value -> occurrence count, with keys in
    ascending order.
    """
    # Counter over a generator replaces the manual append loop.
    counts = Counter(randint(0, 9) for _ in range(100))
    return dict(sorted(counts.items()))
print("随机数出现次数字典:", countNum())
"""
处理字符串
a = 'aAsmr3idd4bgs7Dlsf9eAF'
"""
def handleStr(sourceStr):
    # Demonstrates three manipulations of *sourceStr*: (a) extract its
    # digits, (b) count character frequencies, (c) de-duplicate letters.
    print("输入的字符串为:", sourceStr)
    # (a) Extract the digits into a new string.
    # NOTE(review): '[\d+]' is a character class matching a digit OR a literal
    # '+', not "one or more digits" — presumably '\d' alone was intended.
    numStr = "".join(re.findall('[\d+]', sourceStr))
    print("提取的数字字符串为:",numStr)
    """
    #b.统计字符串中的字母出现的次数,输出字典
    """
    # (b) Extract the non-digit characters and lower-case them.
    notNumList = [ch.lower() for ch in re.findall('[^\d+]', sourceStr)]
    # Count each character's occurrences, sorted by character ascending.
    result = sorted(Counter(notNumList).items(), key=lambda k:k[0], reverse=False)
    countNumDic = {k:v for k,v in result}
    print("各字符出现的次数:", countNumDic)
    """
    #c.字符去重
    """
    # (c) De-duplicate letters (digits are kept as-is): blank out every
    # later occurrence of a letter, then drop the blanks.
    sourceStrList = [ch.lower() for ch in list(sourceStr)]
    tempStr = "".join(sourceStrList)
    # Replace repeated letters after their first occurrence with spaces.
    for index,ch in enumerate(sourceStrList):
        if ch.isdigit():
            pass
        else:
            if ch in tempStr[index + 1:]:
                tempStr = tempStr[:index + 1] + tempStr[index + 1:].replace(ch, ' ')
    tempStr = tempStr.replace(' ', '')
    print("去重后的字符串:", tempStr)
a = 'aAsmr3idd4bgs7Dlsf9eAF'
handleStr(a) |
19,881 | c889baf85c68e1c7746be82317acecea4e93e055 | from codecs import open
from os.path import realpath
import arrow
import glob2
from .xunitparser import parse
def get_files(pattern):
    """
    Return the de-duplicated real paths of all files matching *pattern*.

    Supports glob2 patterns such as ``*.xml``, ``**/*.xml``,
    ``result*.xml`` or ``result/smoke*.xml``
    (https://pypi.python.org/pypi/glob2).
    """
    matched = {realpath(hit) for hit in glob2.glob(pattern)}
    return list(matched)
def open_xml(file):
    """Open *file* for reading as UTF-8 text and return the stream."""
    stream = open(file, encoding='utf-8')
    return stream
def read_file(file):
    """Return the full text of *file* (UTF-8), or an error message when it
    cannot be decoded."""
    try:
        with open(file, encoding='utf-8') as f:
            # BUG FIX: the original returned the bound method ``f.read``
            # (uncalled), handing callers a method object whose underlying
            # file was already closed instead of the file's contents.
            return f.read()
    except UnicodeDecodeError as e:
        return 'Failed to open file: {}: {}'.format(file, str(e))
def get_results(xml_files):
    """return a list of test results and info dict for multiple xml files"""
    results = []
    # end_time is "now"; start_time is back-computed from total duration below.
    info = {'files': [], 'duration': 0, 'end_time': arrow.utcnow(), 'passed': True}
    for xml in xml_files:
        info['files'].append({'name': xml, 'content': read_file(xml)})
        suite, result = parse(xml)
        results.extend(getattr(result, 'tests'))
        # Any test that neither passed nor was skipped marks the run failed.
        if len(result.tests) != len(result.passed) + len(result.skipped):
            info['passed'] = False
    # sum the time from testcase
    for test in results:
        info['duration'] += test.time.total_seconds()
    info['start_time'] = info['end_time'].shift(seconds=-info['duration'])
    # Render both timestamps as strings via arrow's format().
    info['start_time'] = info['start_time'].format()
    info['end_time'] = info['end_time'].format()
    return results, info
|
19,882 | 062941090ce0902a3f17fbfbae30d8a9f7c40c59 | # function declaration
def GreetMe(name):
    """Print a morning greeting for *name* and return the greeting string."""
    # BUG FIX: the original concatenated without a space, printing e.g.
    # "Good MorningBob".  The greeting is also returned so callers can test
    # or reuse it (previously the function returned None implicitly).
    greeting = "Good Morning " + name
    print(greeting)
    return greeting
def AddIntegers(a, b):
    """Return the sum of *a* and *b*."""
    # Dead commented-out print removed; behavior is unchanged.
    return a + b
# function call
GreetMe("Himanthi Siriwardane")
# AddIntegers(2, 3)
print(AddIntegers(2,3))
|
19,883 | 88af33cf7880e5e3af638eb6c0b7ded54049ed88 | #Exercise 47: Birth Date to Astrological Sign
# Exercise 47: Birth Date to Astrological Sign
day = int(input('Input birthday:'))
# Normalize case/whitespace so "March" works too (the original required
# exact lowercase and crashed with NameError on anything unrecognized).
month = input('Input month of birth (march, july):').strip().lower()
# Each month maps to (cutoff day, sign before cutoff, sign from cutoff on);
# table-driven lookup replaces the long if/elif chain.
SIGN_TABLE = {
    'december':  (22, 'sagittarius', 'capricorn'),
    'january':   (20, 'capricorn',   'aquarius'),
    'february':  (19, 'aquarius',    'pisces'),
    'march':     (21, 'pisces',      'aries'),
    'april':     (20, 'aries',       'taurus'),
    'may':       (21, 'taurus',      'gemini'),
    'june':      (21, 'gemini',      'cancer'),
    'july':      (23, 'cancer',      'leo'),
    'august':    (23, 'leo',         'virgo'),
    'september': (23, 'virgo',       'libra'),
    'october':   (23, 'libra',       'scorpio'),
    'november':  (22, 'scorpio',     'sagittarius'),
}
if month in SIGN_TABLE:
    cutoff, before_cutoff, from_cutoff = SIGN_TABLE[month]
    astrosign = before_cutoff if day < cutoff else from_cutoff
    print('Your astrological sign is :', astrosign)
else:
    # The original raised NameError here (astrosign never assigned).
    print('Unknown month:', month)
|
19,884 | d2202d1613d243c4ea515fbf822d43f337f12e22 | #!/usr/local/bin/python2.7
import argparse
import requests
import pprint
import json
import ConfigParser
import re
import os.path
import uuid
import sys
def requestCheck(args, response):
    # Pretty-print an HTTP response when verbose or on a non-2xx status,
    # and terminate the script with exit code 1 on any non-2xx status.
    # (Python 2 print-statement syntax.)
    pp = pprint.PrettyPrinter(indent=4)
    # Show the response whenever verbose was requested or the call failed.
    if args.verbose or response.status_code < 200 or response.status_code > 299:
        print response
        try:
            pp.pprint( response.json() );
        except:
            # Body was not parseable JSON: only acceptable for 204 No Content.
            if ( response.status_code != 204 ):
                print " Error, Response body is empty and HTTP code is %s " % response.status_code
                raise
        else:
            # NOTE(review): this branch runs when .json() pretty-printed
            # successfully, yet the message claims there is no body content —
            # the message text looks wrong; confirm intent.
            print "Expected Response: HTTP %s, no response body content." % response.status_code
    # Fail the script on any non-success status.
    if response.status_code < 200 or response.status_code > 299:
        sys.exit(1)
|
19,885 | cd046328053b253932bb0171ee6d0481b08f253a | #http://www.codeskulptor.org/#user35_dRz3TzmJtMhxNLz.py
"""
Planner for Yahtzee
Simplifications: only allow discard and roll, only score against upper level
"""
# Used to increase the timeout, if necessary
import codeskulptor
codeskulptor.set_timeout(20)
def gen_all_sequences(outcomes, length):
    """
    Iterative function that enumerates the set of all sequences of
    outcomes of given length.
    """
    sequences = set([()])
    for _ in range(length):
        extended = set()
        for prefix in sequences:
            for outcome in outcomes:
                extended.add(prefix + (outcome,))
        sequences = extended
    return sequences
def score(hand):
    """
    Compute the maximal score for a Yahtzee hand according to the
    upper section of the Yahtzee score card.

    hand: full yahtzee hand
    Returns an integer score (0 for an empty hand)
    """
    # For each face value present, the upper-section score is
    # value * (number of dice showing it); take the best one.
    per_face_totals = [face * hand.count(face) for face in set(hand)]
    return max([0] + per_face_totals)
def expected_value(held_dice, num_die_sides, num_free_dice):
    """
    Compute the expected value of the held_dice given that there
    are num_free_dice to be rolled, each with num_die_sides.

    held_dice: dice that you will hold
    num_die_sides: number of sides on each die
    num_free_dice: number of dice to be rolled
    Returns a floating point expected value
    """
    # Average the hand score over every possible roll of the free dice.
    rolls = gen_all_sequences(range(1, num_die_sides + 1), num_free_dice)
    totals = [score(list(roll) + list(held_dice)) for roll in rolls]
    return float(sum(totals)) / float(len(totals))
def gen_all_holds(hand):
    """
    Generate all possible choices of dice from hand to hold.

    hand: full yahtzee hand
    Returns a set of tuples, where each tuple is dice to hold
    """
    # Every bitmask over the hand's positions selects one subsequence;
    # the set de-duplicates holds that repeat die values.
    holds = set()
    for mask in range(2 ** len(hand)):
        subset = tuple(die for pos, die in enumerate(hand) if mask & (1 << pos))
        holds.add(subset)
    return holds
def strategy(hand, num_die_sides):
    """
    Compute the hold that maximizes the expected value when the
    discarded dice are rolled.

    hand: full yahtzee hand
    num_die_sides: number of sides on each die
    Returns a tuple where the first element is the expected score and
    the second element is a tuple of the dice to hold
    """
    best_value = None
    best_hold = None
    # Evaluate every possible hold; keep the first one with the best value.
    for hold in gen_all_holds(hand):
        value = expected_value(hold, num_die_sides, len(hand) - len(hold))
        if best_value is None or value > best_value:
            best_value = value
            best_hold = hold
    return (best_value, best_hold)
def run_example():
    """
    Compute the dice to hold and expected score for an example hand
    """
    # NOTE: Python 2 print statement — this file targets CodeSkulptor.
    num_die_sides = 6
    hand = (1, 1, 1, 5, 6)
    hand_score, hold = strategy(hand, num_die_sides)
    print "Best strategy for hand", hand, "is to hold", hold, "with expected score", hand_score
#run_example()
#import poc_holds_testsuite
#poc_holds_testsuite.run_suite(gen_all_holds)
#import user35_oGFuhcPNLh_0 as score_testsuite
#score_testsuite.run_suite(score)
#expected value
# import user35_uLOFnLQSJV29rFh_5 as expected_value_testsuite
# expected_value_testsuite.run_suite(expected_value)
# import poc_holds_testsuite
# poc_holds_testsuite.run_suite(gen_all_holds)
#Here's some tests for strategy function:
# import user35_mGREPnDxbs_0 as strategy_testsuite
# strategy_testsuite.run_suite(strategy)
|
19,886 | 74e1190a5b84f0731c92ea7e4d12f4b185c9e942 | /home/eliasb/anaconda3/lib/python3.7/_weakrefset.py |
19,887 | bb6d2c4f0dadfbd1a4f8f918556d8d36bc1df9d5 | # import dependencies
from bs4 import BeautifulSoup as bs
from splinter import Browser
from splinter.exceptions import ElementDoesNotExist
import time
import pandas as pd
def init_browser():
    """Launch a visible (non-headless) Chrome splinter browser using the
    chromedriver executable in the working directory."""
    driver_config = {"executable_path": "chromedriver.exe"}
    return Browser("chrome", headless=False, **driver_config)
def scrape():
    """Scrape several Mars sites and return one dict of results.

    Opens a Chrome browser via splinter, visits five sources (NASA news,
    JPL featured image, @marswxreport Twitter, space-facts.com, USGS
    hemisphere pages) and quits the browser before returning.
    """
    browser = init_browser()
    # visit NASA's Mars Exploration Program news site
    url_news = 'https://mars.nasa.gov/news'
    browser.visit(url_news)
    time.sleep(2)
    # scrape page
    html_news = browser.html
    soup_news = bs(html_news, 'html.parser')
    time.sleep(2)
    # find latest news title and paragraph text
    news_title = soup_news.find_all('div', class_='content_title')[0].text
    news_p = soup_news.find_all('div', class_='article_teaser_body')[0].text
    # visit JPL Featured Space Image page
    url_image = 'https://www.jpl.nasa.gov/spaceimages/?search=&category=Mars'
    browser.visit(url_image)
    time.sleep(2)
    # navigate to full image version of featured image
    browser.click_link_by_id('full_image')
    time.sleep(3)
    # browser.click_link_by_partial_href('details')
    browser.click_link_by_partial_text('more info')
    time.sleep(2)
    # scrape page
    html_image = browser.html
    soup_image = bs(html_image, 'html.parser')
    # find featured image source url
    partial_image_url = soup_image.find_all('div', class_='download_tiff')[1].find('a')['href']
    featured_image_url = 'https:' + partial_image_url
    # visit Mars Weather twitter page
    url_weather = 'https://twitter.com/marswxreport'
    browser.visit(url_weather)
    time.sleep(2)
    # scrape page
    html_weather = browser.html
    soup_weather = bs(html_weather, 'html.parser')
    # find latest weather report
    tweets = soup_weather.find_all('p', class_='tweet-text')
    tweets_list = []
    # keep only tweets starting with 'InSight', stripping embedded pic links
    for tweet in tweets:
        if tweet.text.split()[0] == 'InSight':
            if tweet.find('a'):
                pic_url = tweet.find('a').text
                full_string = tweet.text
                tweet_string = full_string.replace(pic_url, '')
                tweets_list.append(tweet_string)
            else:
                tweets_list.append(tweet.text)
    try:
        # reformat the newest matching tweet into one readable line
        mars_weather = tweets_list[0].replace('\n', ', ')
        mars_weather = mars_weather.replace('InSight s', 'S')
        mars_weather = mars_weather.replace(') low', '), low')
        mars_weather = mars_weather.replace(') high', '), high')
    except:
        # NOTE(review): this bare except leaves mars_weather undefined when
        # no tweet matched, so the dict construction below would raise.
        pass
    # url for Mars Facts page
    url_facts = 'https://space-facts.com/mars/'
    # create dataframe containing facts
    tables = pd.read_html(url_facts)
    mars_facts = tables[0]
    mars_facts.columns = ['Description','Value']
    mars_facts = mars_facts.set_index('Description')
    mars_facts_html = mars_facts.to_html()
    # visit USGS Astrogeology page
    url_hemisphere = 'https://astrogeology.usgs.gov/search/results?q=hemisphere+enhanced&k1=target&v1=Mars'
    browser.visit(url_hemisphere)
    time.sleep(2)
    # scrape page
    html_hemisphere = browser.html
    soup_hemisphere = bs(html_hemisphere, 'html.parser')
    # find image titles and urls
    hemispheres = soup_hemisphere.find_all('div', class_='description')
    hemisphere_image_urls = []
    # NOTE(review): bare no-op expression, likely left over from a notebook.
    hemispheres
    # visit each hemisphere's detail page to collect its full-size image URL
    for hemisphere in hemispheres:
        hemisphere_dict = {}
        title = hemisphere.find('a').text
        print(title)
        url = 'https://astrogeology.usgs.gov' + hemisphere.find('a')['href']
        browser.visit(url)
        time.sleep(2)
        html = browser.html
        soup = bs(html, 'html.parser')
        full_image = soup.find('img',class_='wide-image')['src']
        image_url = 'https://astrogeology.usgs.gov' + full_image
        print(f'{image_url}\n')
        hemisphere_dict['title'] = title
        hemisphere_dict['image_url'] = image_url
        hemisphere_image_urls.append(hemisphere_dict)
        browser.back()
        time.sleep(2)
    # store scraped data in a dictionary
    scraped_data = {
        'latest_news': [news_title,news_p],
        'featured_image_url': featured_image_url,
        'mars_weather': mars_weather,
        'mars_facts': mars_facts_html,
        'hemisphere_image_urls': hemisphere_image_urls
    }
    # quit browser after scraping
    browser.quit()
    # return results
    return scraped_data
|
19,888 | 36d1682e13c54a5f2ca9b1b09881559f17ff6e4e | import tensorflow as tf
import datetime
# ======================================================================================
# Simple autograph
# ======================================================================================
x = tf.Variable(1.0, dtype=tf.float32)
# Autographed function with a concrete input signature: one float32 scalar
# (so it only accepts floats, not ints).
@tf.function(input_signature=[tf.TensorSpec(shape=[], dtype=tf.float32)])
def add_print(a):
    # Accumulate a into the module-level variable x, print and return it.
    x.assign_add(a)
    tf.print(x)
    return x
# This works only for floats
add_print(tf.constant(3.0))
# ======================================================================================
# tf.Module helps creating autographs
# ======================================================================================
class DemoModule(tf.Module):
    """tf.Module demo: one trainable float variable plus an autographed
    accumulate-and-print method (used below for saving/loading models)."""
    def __init__(self, init_value=tf.constant(0.0), name=None):
        super(DemoModule, self).__init__(name=name)
        with self.name_scope:
            self.x = tf.Variable(init_value, dtype=tf.float32, trainable=True)
    @tf.function(input_signature=[tf.TensorSpec(shape=[], dtype=tf.float32)])
    def addprint(self, a):
        """Add scalar a to self.x, print the new value and return it."""
        self.x.assign_add(a)
        tf.print(self.x)
        return self.x
# Exercise the module and inspect the variable/submodule bookkeeping it provides.
demo = DemoModule(init_value=tf.constant(1.0))
result = demo.addprint(tf.constant(5.0))
print(demo.variables)
print(demo.trainable_variables)
print(demo.submodules)
# A tf.Module object can be saved as a model
tf.saved_model.save(demo, "model/1", signatures={"serving_default": demo.addprint})
# Load a model
demo2 = tf.saved_model.load("model/1")
demo2.addprint(tf.constant(5.0))
# Run: `!saved_model_cli show --dir model/1 --all` to show info on saved model file
# ======================================================================================
# Write graph to tensorboard
# ======================================================================================
stamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
logdir = f"log/{stamp}"
writer = tf.summary.create_file_writer(logdir)
# To show the graph in tensorboard, start tracing it
tf.summary.trace_on(graph=True, profiler=True)
# Run the graph
demo = DemoModule(init_value=tf.constant(0.0))
result = demo.addprint(tf.constant(4.0))
# Write graph info to log
with writer.as_default():
    tf.summary.trace_export(name="demomodule", step=0, profiler_outdir=logdir)
# ======================================================================================
# alternative way to add to tf.Module
# ======================================================================================
# Attributes (variables and tf.functions) can be attached to a bare tf.Module.
mymodule = tf.Module()
mymodule.x = tf.Variable(0.0)
@tf.function(input_signature=[tf.TensorSpec(shape=[], dtype=tf.float32)])
def addprint(a):
    mymodule.x.assign_add(a)
    tf.print(mymodule.x)
    return mymodule.x
# Add function to module
mymodule.addprint = addprint
print(mymodule.variables)
tf.saved_model.save(
    mymodule, "model/mymodule", signatures={"serving_default": mymodule.addprint}
)
mymodule2 = tf.saved_model.load("model/mymodule")
mymodule2.addprint(tf.constant(5.0))
# ======================================================================================
# tf.Module, tf.keras.Model and tf.keras.layers.Layer
# ======================================================================================
# A keras model is nothing more than a tf.Module object.
# All the following are subclasses of tf.Module
print(issubclass(tf.keras.Model, tf.Module))
print(issubclass(tf.keras.layers.Layer, tf.Module))
print(issubclass(tf.keras.Model, tf.keras.layers.Layer))
tf.keras.backend.clear_session()
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Dense(3, input_shape=(10,)))
model.add(tf.keras.layers.Dense(2))
model.add(tf.keras.layers.Dense(1))
# Print info on model/tf.Module
print(model.summary())
print(model.variables)
# Freeze variables in layer 0
model.layers[0].trainable = False
print(model.trainable_variables)
print(model.submodules)
print(model.layers)
print(model.name)
print(model.name_scope()) |
19,889 | f308fd33b5e963011705414982f92c0ec794ae22 | """
This module should contain project specific feature engineering functionality.
You should avoid engineering features in a notebook as it is not transferable later if you want to automate the
process. Add functions here to create your features, such functions should include those to generate specific features
along with any more generic functions.
Consider moving generic functions into the shared statoilds package.
"""
import pandas as pd
import multiprocessing as mp
import random
import string
import os
def my_feature_xxx(df: pd.DataFrame):
    """Template feature builder: currently a no-op that hands *df* straight back.

    Replace the placeholder below with the logic that derives the Xxx feature;
    additional arguments (e.g. a column name) can be added later as needed.
    A couple of test cases would be nice too :)

    Args:
        df: Dataframe upon which to operate.

    Returns:
        The dataframe with the Xxx feature appended (unchanged for now).
    """
    # TODO: implement the actual feature computation here.
    return df
def run_pipeline(directory):
    """
    Build the DICOM-to-MIF file mapping for every site under *directory*.

    Walks the fixed layout ``<directory>/<site>/DICOM-raw/<session>/scans/<scan>/``
    and, for each scan directory found, derives the matching output path
    ``<directory>/<site>/MIF-raw/<site>_<session>_<scan>.mif``.

    NOTE(review): the output paths index ``split('/')`` at fixed positions
    (5, 7, 9), so this only works when *directory* itself is exactly five
    path components deep -- confirm against the deployment layout.

    Returns:
        A pair ``(site_sub_files, output_mif)`` of dicts keyed by a running
        integer: DICOM scan input directories and their target .mif paths.
    """
    # io = IO(path)
    # df = io.load_cleaned_file(download_always=False)
    # df = add_choke_events(df)
    # Add calls to features.Xxx here
    #directory = main_directory
    site=os.listdir(directory)
    site_dicom={}
    site_dicom_sub={}
    site_sub_files={}
    # i: site counter, j: session counter, k: scan counter (dict keys)
    i,k,j=0,0,0
    for filename in site:
        site_dicom[i]=directory+'/'+filename+'/DICOM-raw'
        temporary_path=os.listdir(site_dicom[i])
        for another_file in temporary_path:
            site_dicom_sub[j]=site_dicom[i]+'/'+another_file+'/scans'
            temporary_path_1 = os.listdir(site_dicom_sub[j])
            for another_file_1 in temporary_path_1:
                site_sub_files[k]=site_dicom_sub[j]+'/'+another_file_1+'/'
                k=k+1
            j = j + 1
        i=i+1
    splitted={}
    output_mif={}
    for i in range (len(site_sub_files)):
        # splitted[i][5] = site name, [7] = session dir, [9] = scan dir
        splitted[i]=site_sub_files[i].split('/')
        output_mif[i]=directory+'/'+splitted[i][5]+'/MIF-raw/'+splitted[i][5]+'_'+splitted[i][7]+'_'+splitted[i][9]+'.mif'
    # save (or return) dataframe here?
    return site_sub_files,output_mif
def convert_dcm2mif(site_sub_files,output_mif):
    """Convert every DICOM scan directory to MRtrix .mif via ``mrconvert``.

    Args:
        site_sub_files: dict (int -> DICOM scan dir) as built by run_pipeline.
        output_mif: dict (int -> target .mif path) aligned on the same keys.

    NOTE(review): paths are interpolated straight into a shell command; a path
    containing spaces or shell metacharacters would break (or be unsafe).
    Consider ``subprocess.run([...])`` with a list argument instead.
    """
    for i in range(len(site_sub_files)):
        # mrconvert <dicom scan dir> <output .mif>
        bashCommand = ("/home/visionlab/mrtrix3/bin/mrconvert " + site_sub_files[i]+" "+ output_mif[i])
        os.system(bashCommand)
    return
def Remove(duplicate):
    """Return a new list with duplicates removed, keeping first-seen order.

    Args:
        duplicate: iterable of hashable items (used on file-path strings here).

    Returns:
        list: the unique items in their original order.
    """
    # dict preserves insertion order (Python 3.7+), giving O(n) dedup instead
    # of the original quadratic list-membership scan. Items must be hashable.
    return list(dict.fromkeys(duplicate))
def concatenate(output_mif,directory,flag):
    """Group the per-run .mif files of each session and (optionally) merge them.

    Scans ``<directory>/<site>/MIF-raw`` for .mif files, groups files whose
    names share the same second ``_``-separated token (the session id), and
    derives one ``<site>_<session>_DTI.mif`` destination per group under
    ``<site>/DTI-conc``. When *flag* is True, each group is concatenated along
    the volume axis with MRtrix ``mrcat``.

    NOTE(review): *output_mif* is accepted but never used. The mrcat command
    indexes group members [0..3], so every session is assumed to have exactly
    four runs -- fewer raises IndexError, more are silently dropped. Confirm.

    Returns:
        list of the concatenated destination paths (one per session group).
    """
    site=os.listdir(directory)
    site_mif = {}
    site_mif_files = {}
    site_mif_name={}
    site_DTI={}
    i, k, j = 0, 0, 0
    for filename in site:
        site_mif[i] = directory + '/' + filename + '/MIF-raw'
        site_DTI[i] = directory + '/' + filename + '/DTI-conc'
        temporary_path = os.listdir(site_mif[i])
        for another_file in temporary_path:
            site_mif_files[k] = site_mif[i] + '/' + another_file
            site_mif_name[k] = another_file
            k = k + 1
        i = i + 1
    #site_mif_files[0].split('/')[-1].split('_')[1]
    site_mif_name_lst=list(site_mif_name.values())
    indices={}
    indices_1=list()
    # For each file, collect the indices of all files sharing its session token.
    for k in range (len(site_mif_name)):
        indices[k] = [i for i, s in enumerate(site_mif_name_lst) if site_mif_files[k].split('/')[-1].split('_')[1] in s]
        temp=[i for i, s in enumerate(site_mif_name_lst) if site_mif_files[k].split('/')[-1].split('_')[1] in s]
        indices_1.append(temp)
    #[value for value in indices if value != indices[0]]
    single_index={}
    j=0
    # Deduplicate the per-file groups: keep each distinct index group once.
    while True:
        single_index[j]=indices_1[0]
        indices_1 = [x for x in indices_1 if x != indices_1[0]]
        j=j+1
        if len(indices_1)==0:
            break
    #destination_con={}
    list_dest_conc=list()
    for item in single_index:
        single_index[item]  # NOTE(review): no-op statement, has no effect
        s="/"
        # first 6 path components = .../<site>; name = <site>_<session>_DTI.mif
        seq=site_mif_files[single_index[item][0]].split('/')
        temp_0=s.join(seq[0:6])+'/DTI-conc/'
        temp = site_mif_name[single_index[item][0]]
        temp_1=temp.split('_')[0]+'_'+temp.split('_')[1]+"_DTI.mif"
        destination_conc=temp_0+temp_1
        list_dest_conc.append(destination_conc)
        if flag == True:
            # Concatenate the 4 runs of this session along axis 3 (volumes).
            bashCommand =("mrcat -axis 3 -force " + site_mif_files[single_index[item][0]] +" "+ site_mif_files[single_index[item][1]]+" "+site_mif_files[single_index[item][2]]+" "+ site_mif_files[single_index[item][3]]+" "+ destination_conc)
            os.system(bashCommand)
    return list_dest_conc
def denoising(list_dest_conc, flag_denoise):
    """For each concatenated DTI file, derive the DTI-denoised output path and,
    when *flag_denoise* is True, run MRtrix ``dwidenoise`` on it.

    Returns:
        list of destination paths, one per input, in input order.
    """
    destinations = []
    for source in list_dest_conc:
        # <first 6 path components>/DTI-denoised/<stem>_denoised.mif
        parts = source.split('/')
        target_dir = '/'.join(parts[0:6]) + '/DTI-denoised/'
        stem = parts[-1].split('.')[0]
        target = target_dir + stem + '_denoised.mif'
        destinations.append(target)
        if flag_denoise == True:
            os.system("dwidenoise " + source + ' ' + target)
    return destinations
def gibbs_ringing(list_dest_denoise, flag_gibbs):
    """Map each denoised DTI file to its DTI-gibbs output path and, when
    *flag_gibbs* is True, run MRtrix ``mrdegibbs`` to remove Gibbs ringing.

    Returns:
        list of destination paths, one per input, in input order.
    """
    def _target(path):
        # <first 6 path components>/DTI-gibbs/<stem>_gibbs.mif
        pieces = path.split('/')
        return '/'.join(pieces[0:6]) + '/DTI-gibbs/' + pieces[-1].split('.')[0] + '_gibbs.mif'
    list_dest_gibbs = [_target(p) for p in list_dest_denoise]
    if flag_gibbs == True:
        for src, dst in zip(list_dest_denoise, list_dest_gibbs):
            os.system("mrdegibbs " + src + ' ' + dst)
    return list_dest_gibbs
def preproc(list_dest_gibbs,flag_preproc):
    """Map each gibbs-corrected file to its DTI-preproc output path and, when
    *flag_preproc* is True, run MRtrix ``dwipreproc`` (eddy/motion correction,
    phase-encoding taken from the image header, eddy's linear slm model).

    Returns:
        list of destination paths, one per input, in input order.
    """
    list_dest_preproc=list()
    for i in range (len(list_dest_gibbs)):
        s = "/"
        # <first 6 path components>/DTI-preproc/<stem>_preproc.mif
        seq = list_dest_gibbs[i].split('/')
        temp = s.join(seq[0:6])+'/DTI-preproc/'
        temp_1=list_dest_gibbs[i].split('/')[-1].split('.')[0]+'_preproc.mif'
        destination_preproc = temp + temp_1
        list_dest_preproc.append(destination_preproc)
        if flag_preproc == True:
            bashCommand =("dwipreproc -rpe_header -eddy_options \" --slm=linear \" "+list_dest_gibbs[i]+' ' +destination_preproc)
            os.system(bashCommand)
    return list_dest_preproc
def mask(list_dest_preproc,flag_mask):
    """Brain-mask each preprocessed DTI file: when *flag_mask* is True, compute
    a binary mask with ``dwi2mask`` and apply it with ``mrcalc -mult``.

    Two files are produced per input under DTI-masked: ``*_mask.mif`` (the
    binary mask) and ``*_masked.mif`` (the masked image).

    Returns:
        list of the *masked image* destination paths (not the masks).
    """
    list_dest_masked=list()
    for i in range (len(list_dest_preproc)):
        s = "/"
        seq = list_dest_preproc[i].split('/')
        temp = s.join(seq[0:6])+'/DTI-masked/'
        temp_1=list_dest_preproc[i].split('/')[-1].split('.')[0]+'_mask.mif'
        destination_masked = temp + temp_1
        temp_2 = list_dest_preproc[i].split('/')[-1].split('.')[0] + '_masked.mif'
        destination_masked_2 = temp + temp_2
        list_dest_masked.append(destination_masked_2)
        if flag_mask == True:
            # Estimate the binary brain mask from the DWI data.
            bashCommand =("dwi2mask " +list_dest_preproc[i]+' '+destination_masked)
            os.system(bashCommand)
            # Multiply image by mask to zero out non-brain voxels.
            bashCommand = ("mrcalc " + list_dest_preproc[i] +" "+destination_masked +" -mult "+destination_masked_2 )
            os.system(bashCommand)
    return list_dest_masked
def bias_correction(list_dest_masked,flag_bias):
    """Map each masked DTI file to its DTI-bias output path and, when
    *flag_bias* is True, run MRtrix ``dwibiascorrect`` with the ANTs (N4)
    bias-field algorithm.

    Returns:
        list of destination paths, one per input, in input order.
    """
    list_dest_bias=list()
    for i in range (len(list_dest_masked)):
        s = "/"
        # <first 6 path components>/DTI-bias/<stem>_bias.mif
        seq = list_dest_masked[i].split('/')
        temp = s.join(seq[0:6])+'/DTI-bias/'
        temp_1=list_dest_masked[i].split('/')[-1].split('.')[0]+'_bias.mif'
        destination_masked = temp + temp_1
        list_dest_bias.append(destination_masked)
        if flag_bias == True:
            bashCommand =("dwibiascorrect -ants " +list_dest_masked[i]+' '+destination_masked)
            os.system(bashCommand)
            # Progress marker: one "ok" per successfully dispatched command.
            print("ok")
    return list_dest_bias
def remove_negative(list_dest_bias, flag_bias):
    """Derive a DTI-bias output path for each input and, when *flag_bias* is
    True, run ``dwibiascorrect -ants`` on it.

    Bug fixed: the original body was a verbatim copy of :func:`bias_correction`
    that referenced the undefined name ``list_dest_masked`` (its parameter is
    ``list_dest_bias``), so any call raised NameError. The body now uses its
    own parameter.

    NOTE(review): despite its name this still duplicates bias correction; the
    intended negative-value clipping (see ``negative_remove`` in this file)
    remains unimplemented -- confirm the intended behaviour.

    Returns:
        list of destination paths, one per input, in input order.
    """
    list_dest_out = list()
    for i in range(len(list_dest_bias)):
        # <first 6 path components>/DTI-bias/<stem>_bias.mif
        seq = list_dest_bias[i].split('/')
        temp = "/".join(seq[0:6]) + '/DTI-bias/'
        temp_1 = list_dest_bias[i].split('/')[-1].split('.')[0] + '_bias.mif'
        destination = temp + temp_1
        list_dest_out.append(destination)
        if flag_bias == True:
            bashCommand = ("dwibiascorrect -ants " + list_dest_bias[i] + ' ' + destination)
            os.system(bashCommand)
            print("ok")
    return list_dest_out
def negative_remove():
    """Placeholder: remove/clip negative voxel values before registration.

    The body is a scratchpad of two unfinished approaches (a nibabel-based
    clip/normalise, and mrconvert/population_template shell commands kept in a
    dead string literal). Currently this function does nothing and returns None.
    """
    #img = nibabel.load('/home/visionlab/Desktop/work_dir/registration_folder/TRO_1yRz315_DTI_denoised_gibbs_preproc_masked_nifty.nii.gz')
    #data = img.get_data()
    #prova=(data-data.min())/(data.max()-data.min())
    #data = data.clip(min=0) #del the min value
    #clipped_img = nibabel.Nifti1Image(prova, img.affine, img.header)
    #nibabel.save(clipped_img,'/home/visionlab/Desktop/work_dir/registration_folder/TRO_1yRz315_DTI_denoised_gibbs_preproc_masked_nifty_modified_0.nii.gz')
    '''
    import nipype.interfaces.mrtrix as mrt
    mrconvert = mrt.MRConvert()
    mrconvert.inputs.in_file = '/home/visionlab/Desktop/work_dir/registration_folder/TRO_1yRz315_DTI_denoised_gibbs_preproc_masked_bias.mif'
    mrconvert.inputs.out_filename = '/home/visionlab/Desktop/work_dir/registration_folder/TRO_1yRz315_DTI_denoised_gibbs_preproc_masked_bias_0.nii'
    mrconvert.run()
    mrconvert '/home/visionlab/Desktop/work_dir/registration_folder/TRO_1yRz315_DTI_denoised_gibbs_preproc_masked_nifty_modified_non_negative.nii.gz' '/home/visionlab/Desktop/work_dir/registration_folder/TRO_1yRz315_DTI_denoised_gibbs_preproc_masked_nifty_modified__non_negative.mif' -grad '/home/visionlab/Desktop/work_dir/registration_folder/TRO_1yRz315_DTI_denoised_gibbs_preproc_masked_0_0_0.txt' -force
    mrconvert '/home/visionlab/Desktop/work_dir/registration_folder/TRO_1yRz315_DTI_denoised_gibbs_preproc_masked.mif' '/home/visionlab/Desktop/work_dir/registration_folder/TRO_1yRz315_DTI_denoised_gibbs_preproc_masked_prova.nii.gz' -export_grad_mrtrix '/home/visionlab/Desktop/work_dir/registration_folder/TRO_1yRz315_DTI_denoised_gibbs_preproc_masked_0_0_0.txt'
    /home/visionlab/mrtrix3-registration_multi_contrast/bin/population_template sh_b0000 sh_b0000.mif + sh_b1000 sh_b1000.mif -mask_dir mask -warp_dir warp -voxel_size 2
    '''
    return
|
19,890 | 5d67133c0292f7c6728aa9648e7817c1d3b7866c | import YuzuUpdater.Contants as const
import json
import os
class ConfigHandler:
    """Creates and reads the updater's JSON configuration file under *rootPath*.

    Bug fixed: ``openConfigFile`` previously opened ``const.CONFIG_FILENAME``
    relative to the current working directory, while ``createConfigFileJson``
    checked for its existence under ``rootPath`` -- so the file checked and the
    file opened could be two different files. All access now goes through a
    single rootPath-qualified path. File handles are also closed via ``with``.
    """
    def __init__(self, rootPath) -> None:
        self.rootPath = rootPath

    def _configFilePath(self):
        # Single source of truth for the config file location.
        return os.path.join(self.rootPath, const.CONFIG_FILENAME)

    def createConfigFileJson(self):
        """Ensure the config file exists (seeding a default) and return it open."""
        if not os.path.exists(self._configFilePath()):
            self.writeOnConfigFile('{\n\t"version": "default"\n}')
        return self.openConfigFile()

    def openConfigFile(self, mode="r+"):
        """Open the config file in *mode* (default: read/write, no truncation)."""
        return open(self._configFilePath(), mode)

    def writeOnConfigFile(self, json):
        """Overwrite the config file with *json*, a pre-serialized JSON string.

        (Parameter name kept for backward compatibility; it shadows the json
        module, which is not used here.)
        """
        # w+ truncates and creates the file if it does not exist yet.
        with self.openConfigFile("w+") as file:
            file.write(json)

    def readJsonConfigFile(self):
        """Parse and return the config file contents as a dict."""
        with self.createConfigFileJson() as file:
            return json.load(file)
|
19,891 | 42af0f3ef8e69e24421215daef50a8d50ca65039 | '''
@ [Level 2] K번째 수 - Fail
@ Prob. https://programmers.co.kr/learn/courses/30/lessons/42748
Ref.
@ Algo: Sorting
@ Start day: 19. 09. 06
@ End day:
'''
def solution(citations):
    """Return the H-Index: the largest h such that h papers have >= h citations.

    Fixes two defects in the original:
    - the scan assumed *citations* was already sorted ascending but never
      sorted it, giving wrong answers for unsorted input;
    - ``None`` was returned implicitly when no index qualified (e.g. all
      zeros); the H-Index in that case is 0.

    Args:
        citations: list of non-negative per-paper citation counts.

    Returns:
        int: the H-Index of the list (0 for an empty list).
    """
    citations = sorted(citations)
    n = len(citations)
    for i in range(n):
        # citations[i:] are the n-i most-cited papers, each with >= citations[i].
        if citations[i] >= n - i:
            return n - i
    return 0
print(solution([3, 0, 6, 1, 5]))
|
19,892 | 0a786ce1381073015cbd2b0071e0a385c2f2d0df | from efficientnet_pytorch import EfficientNet
from models.BasicModule import BasicModule
# Pre-built backbone instances, one per EfficientNet variant (b1..b7).
# NOTE(review): nothing in this file uses these module-level instances and
# they are constructed eagerly at import time (b0 is also missing from the
# set) -- confirm they are imported elsewhere before removing.
efficientnet_b1 = EfficientNet.from_name("efficientnet-b1")
efficientnet_b2 = EfficientNet.from_name("efficientnet-b2")
efficientnet_b3 = EfficientNet.from_name("efficientnet-b3")
efficientnet_b4 = EfficientNet.from_name("efficientnet-b4")
efficientnet_b5 = EfficientNet.from_name("efficientnet-b5")
efficientnet_b6 = EfficientNet.from_name("efficientnet-b6")
efficientnet_b7 = EfficientNet.from_name("efficientnet-b7")
class Efficientnet_b0(BasicModule):
    """Two-class classifier over single-channel (grayscale) input images.

    NOTE(review): despite the *_b0* name, this wraps the "efficientnet-b7"
    architecture -- confirm whether "efficientnet-b0" was intended.
    """
    def __init__(self):
        super(Efficientnet_b0, self).__init__()
        # in_channels=1: grayscale input; num_classes=2: binary output head.
        self.efficientnet_b0 = EfficientNet.from_name("efficientnet-b7", in_channels=1, num_classes=2)
    def forward(self, x):
        """Run the wrapped EfficientNet; returns class logits of shape (N, 2)."""
        # NOTE(review): the trailing " |" below is dataset-join residue, not code.
        return self.efficientnet_b0(x) |
19,893 | 7353bfaa4771e44d394cc77d16eff71d3de7a7d3 | # 2651 자동차경주
fuel = int(input())
|
19,894 | e8315a69a9e28bf41056ab999de075ef38c78600 | import zmq
import sys
sys.path.insert(0, './newssubscriber')
import languageprocessor.language_processor as lp
# Subscribe to the local news publisher and feed every message into the
# language processor for tagging and persistence.
port = "5556"
context = zmq.Context()
socket = context.socket(zmq.SUB)
socket.connect("tcp://localhost:%s" % port)
# An empty filter subscribes to every topic the publisher emits.
topic_filter = ""
socket.setsockopt_unicode(zmq.SUBSCRIBE, topic_filter)
l_p = lp.LanguageProcessor()
while True:
    news_response = socket.recv_string()
    # Strip the literal 6-character sequence "\u200c" (zero-width non-joiner
    # escape) from the payload. NOTE(review): this removes the escape *text*,
    # not the actual U+200C character -- confirm which the publisher sends.
    news_response = news_response.replace('\\u200c', '')
    l_p.process_tag_save(news_response)
|
19,895 | ae145cf7c788d9726d25aa75136b2154c6ba66cb | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri May 4 17:38:57 2018
@author: xies
"""
import numpy as np
import pandas as pd
import os
from Bio import SeqIO,AlignIO,Seq,SeqRecord
PSIPRED_DIR = '/data/cycd_targets/cycd_target_uniprot_wider_individuals'
# Python 2 script: collect every PSIPRED .ss2 prediction in PSIPRED_DIR,
# replace each companion FASTA record's residues with the predicted secondary
# structure string, and write all records to one combined FASTA file.
seqs = []
for filename in os.listdir(PSIPRED_DIR):
    if filename.endswith('.ss2'):
        print 'Working on ', filename
        fastaname,ext = os.path.splitext(filename)
        filename = os.path.join(PSIPRED_DIR,filename)
        #Load PSIPRED VFORMAT in a sane way to extract only relevant info
        df = pd.read_csv(filename,header=0, delim_whitespace=True,skiprows=0,
                     names=['position','AA','prediction'],usecols=[0,1,2], index_col=0)
        # Join the per-residue predictions into one sequence string.
        s = Seq.Seq(''.join(df.prediction))
        entry = os.path.splitext(fastaname)[0]
        # Read the FASTA with the same basename and swap in the SS string.
        seq = SeqIO.read( os.path.splitext(filename)[0] ,'fasta')
        seq.seq = s
        seqs.append(seq)
# Load the same GAPPED sequence file
#    seq = SeqIO.read(os.path.join(GAPPED_DIR,fastaname),'fasta')
#    seq_ss = copy.deepcopy(seq) # deepcopy to get a hard copy
# Find parts of sequence that's not the gapped character
#    s = seq.seq.tostring()
#    s_array = np.array(list(s))
#    I = s_array != '-'
#    s_array[I] = df.prediction
#    seq_ss.seq = Seq.Seq(''.join(s_array))
#    seq_ss.id = os.path.splitext(fastaname)[0]
#    seqs.append(seq_ss)
#
SeqIO.write(seqs,os.path.join( PSIPRED_DIR,'psipred.fasta' ),'fasta')
|
19,896 | c4e25de231b06dc3af2eb469fbe2f71369742d27 | # Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib import constants as lib_constants
from oslo_log import log as logging
from neutron.agent.l3 import dvr_local_router
from neutron.agent.l3 import dvr_snat_ns
from neutron.agent.l3 import router_info as router
from neutron.agent.linux import ip_lib
from neutron.agent.linux import iptables_manager
from neutron.common import utils as common_utils
LOG = logging.getLogger(__name__)
class DvrEdgeRouter(dvr_local_router.DvrLocalRouter):
    """DVR router that additionally hosts the centralized SNAT function.

    Extends DvrLocalRouter with a dedicated SNAT namespace (and its own
    iptables manager) that is only populated on the one agent whose host
    matches the router's ``gw_port_host``; on every other host the SNAT
    namespace is treated as stale and torn down.
    """
    def __init__(self, host, *args, **kwargs):
        super(DvrEdgeRouter, self).__init__(host, *args, **kwargs)
        self.snat_namespace = dvr_snat_ns.SnatNamespace(
            self.router_id, self.agent_conf, self.driver, self.use_ipv6)
        # Created lazily in _create_dvr_gateway; None until this host is
        # confirmed to be the SNAT host for this router.
        self.snat_iptables_manager = None
    def get_gw_ns_name(self):
        """Gateway traffic for this router lives in the SNAT namespace."""
        return self.snat_namespace.name
    def external_gateway_added(self, ex_gw_port, interface_name):
        super(DvrEdgeRouter, self).external_gateway_added(
            ex_gw_port, interface_name)
        if self._is_this_snat_host():
            self._create_dvr_gateway(ex_gw_port, interface_name)
            # NOTE: When a router is created without a gateway the routes get
            # added to the router namespace, but if we wanted to populate
            # the same routes to the snat namespace after the gateway port
            # is added, we need to call routes_updated here.
            self.routes_updated([], self.router['routes'])
        elif self.snat_namespace.exists():
            # This is the case where the snat was moved manually or
            # rescheduled to a different agent when the agent was dead.
            LOG.debug("SNAT was moved or rescheduled to a different host "
                      "and does not match with the current host. This is "
                      "a stale namespace %s and will be cleared from the "
                      "current dvr_snat host.", self.snat_namespace.name)
            self.external_gateway_removed(ex_gw_port, interface_name)
    def _list_centralized_floating_ip_cidrs(self):
        # Compute a list of addresses this gw is supposed to have.
        # This avoids unnecessarily removing those addresses and
        # causing a momentarily network outage.
        floating_ips = self.get_floating_ips()
        return [common_utils.ip_to_cidr(ip['floating_ip_address'])
                for ip in floating_ips
                if ip.get(lib_constants.DVR_SNAT_BOUND)]
    def external_gateway_updated(self, ex_gw_port, interface_name):
        if not self._is_this_snat_host():
            # no centralized SNAT gateway for this node/agent
            LOG.debug("not hosting snat for router: %s", self.router['id'])
            if self.snat_namespace.exists():
                LOG.debug("SNAT was rescheduled to host %s. Clearing snat "
                          "namespace.", self.router.get('gw_port_host'))
                self.driver.unplug(interface_name,
                                   namespace=self.snat_namespace.name,
                                   prefix=router.EXTERNAL_DEV_PREFIX)
                self.snat_namespace.delete()
            return
        if not self.snat_namespace.exists():
            # SNAT might be rescheduled to this agent; need to process like
            # newly created gateway
            return self.external_gateway_added(ex_gw_port, interface_name)
        else:
            # Keep the already-configured centralized FIP addresses on the
            # device while re-plumbing the gateway.
            preserve_ips = self._list_centralized_floating_ip_cidrs()
            self._external_gateway_added(ex_gw_port,
                                         interface_name,
                                         self.snat_namespace.name,
                                         preserve_ips)
    def _external_gateway_removed(self, ex_gw_port, interface_name):
        super(DvrEdgeRouter, self).external_gateway_removed(ex_gw_port,
                                                            interface_name)
        if not self._is_this_snat_host() and not self.snat_namespace.exists():
            # no centralized SNAT gateway for this node/agent
            LOG.debug("not hosting snat for router: %s", self.router['id'])
            return
        self.driver.unplug(interface_name,
                           namespace=self.snat_namespace.name,
                           prefix=router.EXTERNAL_DEV_PREFIX)
    def external_gateway_removed(self, ex_gw_port, interface_name):
        """Unplug the gateway device, then drop the SNAT namespace entirely."""
        self._external_gateway_removed(ex_gw_port, interface_name)
        if self.snat_namespace.exists():
            self.snat_namespace.delete()
    def internal_network_added(self, port):
        super(DvrEdgeRouter, self).internal_network_added(port)
        # TODO(gsagie) some of this checks are already implemented
        # in the base class, think how to avoid re-doing them
        if not self._is_this_snat_host():
            return
        sn_port = self.get_snat_port_for_internal_port(port)
        if not sn_port:
            return
        # Plug the matching SNAT-side port into the SNAT namespace.
        ns_name = dvr_snat_ns.SnatNamespace.get_snat_ns_name(self.router['id'])
        interface_name = self._get_snat_int_device_name(sn_port['id'])
        self._internal_network_added(
            ns_name,
            sn_port['network_id'],
            sn_port['id'],
            sn_port['fixed_ips'],
            sn_port['mac_address'],
            interface_name,
            lib_constants.SNAT_INT_DEV_PREFIX,
            mtu=sn_port.get('mtu'))
    # NOTE: method-name typo ("interfce") kept for compatibility with callers.
    def _set_snat_interfce_mtu(self, port):
        """Propagate *port*'s MTU to its SNAT-namespace device on the SNAT host."""
        if not self._is_this_snat_host():
            return
        sn_port = self.get_snat_port_for_internal_port(port)
        if not sn_port:
            return
        ns_name = dvr_snat_ns.SnatNamespace.get_snat_ns_name(self.router['id'])
        interface_name = self._get_snat_int_device_name(sn_port['id'])
        self.driver.set_mtu(interface_name, port['mtu'], namespace=ns_name,
                            prefix=lib_constants.SNAT_INT_DEV_PREFIX)
    def internal_network_updated(self, port):
        super(DvrEdgeRouter, self).internal_network_updated(port)
        if port:
            self._set_snat_interfce_mtu(port)
    def _dvr_internal_network_removed(self, port):
        super(DvrEdgeRouter, self)._dvr_internal_network_removed(port)
        if not self.ex_gw_port:
            return
        sn_port = self.get_snat_port_for_internal_port(port, self.snat_ports)
        if not sn_port:
            return
        if not self._is_this_snat_host():
            return
        # Unplug the internal-network device from the SNAT namespace.
        snat_interface = self._get_snat_int_device_name(sn_port['id'])
        ns_name = self.snat_namespace.name
        prefix = lib_constants.SNAT_INT_DEV_PREFIX
        if ip_lib.device_exists(snat_interface, namespace=ns_name):
            self.driver.unplug(snat_interface, namespace=ns_name,
                               prefix=prefix)
    def _plug_snat_port(self, port):
        """Plug one SNAT-side internal port into the SNAT namespace."""
        interface_name = self._get_snat_int_device_name(port['id'])
        self._internal_network_added(
            self.snat_namespace.name, port['network_id'],
            port['id'], port['fixed_ips'],
            port['mac_address'], interface_name,
            lib_constants.SNAT_INT_DEV_PREFIX,
            mtu=port.get('mtu'))
    def initialize(self, process_monitor):
        self._create_snat_namespace()
        super(DvrEdgeRouter, self).initialize(process_monitor)
    def _create_dvr_gateway(self, ex_gw_port, gw_interface_name):
        # connect snat_ports to br_int from SNAT namespace
        # NOTE(ataraday): Check if snat namespace exists, create if not -
        # workaround for race between router_added and router_updated
        # notifications https://launchpad.net/bugs/1881995
        if not self.snat_namespace.exists():
            self._create_snat_namespace()
        for port in self.get_snat_interfaces():
            self._plug_snat_port(port)
        self._external_gateway_added(ex_gw_port, gw_interface_name,
                                     self.snat_namespace.name,
                                     preserve_ips=[])
        self.snat_iptables_manager = iptables_manager.IptablesManager(
            namespace=self.snat_namespace.name,
            use_ipv6=self.use_ipv6)
        self._initialize_address_scope_iptables(self.snat_iptables_manager)
    def _create_snat_namespace(self):
        """Create SNAT namespace."""
        # TODO(mlavalle): in the near future, this method should contain the
        # code in the L3 agent that creates a gateway for a dvr. The first step
        # is to move the creation of the snat namespace here
        if self._is_this_snat_host():
            self.snat_namespace.create()
    def _get_snat_int_device_name(self, port_id):
        """Build the (length-capped) SNAT-internal device name for *port_id*."""
        long_name = lib_constants.SNAT_INT_DEV_PREFIX + port_id
        return long_name[:self.driver.DEV_NAME_LEN]
    def _is_this_snat_host(self):
        """True when this agent's host is the router's scheduled SNAT host."""
        host = self.router.get('gw_port_host')
        if not host:
            LOG.debug("gw_port_host missing from router: %s",
                      self.router['id'])
        return host == self.host
    def _handle_router_snat_rules(self, ex_gw_port, interface_name):
        super(DvrEdgeRouter, self)._handle_router_snat_rules(
            ex_gw_port, interface_name)
        if not self._is_this_snat_host():
            return
        if not self.get_ex_gw_port():
            return
        if not self.snat_iptables_manager:
            LOG.debug("DVR router: no snat rules to be handled")
            return
        with self.snat_iptables_manager.defer_apply():
            self._empty_snat_chains(self.snat_iptables_manager)
            # NOTE: float-snat should be added for the
            # centralized floating-ips supported by the
            # snat namespace.
            self.snat_iptables_manager.ipv4['nat'].add_rule(
                'snat', '-j $float-snat')
            self._add_snat_rules(ex_gw_port, self.snat_iptables_manager,
                                 interface_name)
    def _should_update_snat_routing_table(self):
        """True when static routes must also be mirrored into the SNAT namespace."""
        if self.get_ex_gw_port() and self._is_this_snat_host():
            # NOTE: For now let us apply the static routes both in SNAT
            # namespace and Router Namespace, to reduce the complexity.
            if self.snat_namespace.exists():
                return True
            else:
                LOG.error("The SNAT namespace %s does not exist for "
                          "the router.", self.snat_namespace.name)
        return False
    def update_routing_table(self, operation, route):
        if self._should_update_snat_routing_table():
            ns_name = self.snat_namespace.name
            self._update_routing_table(operation, route, ns_name)
        super(DvrEdgeRouter, self).update_routing_table(operation, route)
    def update_routing_table_ecmp(self, route_list):
        if self._should_update_snat_routing_table():
            ns_name = self.snat_namespace.name
            self._update_routing_table_ecmp(route_list, ns_name)
        super(DvrEdgeRouter, self).update_routing_table_ecmp(route_list)
    def delete(self):
        super(DvrEdgeRouter, self).delete()
        if self.snat_namespace.exists():
            self.snat_namespace.delete()
    def process_address_scope(self):
        super(DvrEdgeRouter, self).process_address_scope()
        if not self._is_this_snat_host():
            return
        if not self.snat_iptables_manager:
            LOG.debug("DVR router: no snat rules to be handled")
            return
        # Prepare address scope iptables rule for dvr snat interfaces
        internal_ports = self.get_snat_interfaces()
        ports_scopemark = self._get_port_devicename_scopemark(
            internal_ports, self._get_snat_int_device_name)
        # Prepare address scope iptables rule for external port
        external_port = self.get_ex_gw_port()
        if external_port:
            external_port_scopemark = self._get_port_devicename_scopemark(
                [external_port], self.get_external_device_name)
            for ip_version in (lib_constants.IP_VERSION_4,
                               lib_constants.IP_VERSION_6):
                ports_scopemark[ip_version].update(
                    external_port_scopemark[ip_version])
        with self.snat_iptables_manager.defer_apply():
            self._add_address_scope_mark(
                self.snat_iptables_manager, ports_scopemark)
    def _delete_stale_external_devices(self, interface_name):
        """Remove external devices in the SNAT namespace other than *interface_name*."""
        if not self.snat_namespace.exists():
            return
        ns_ip = ip_lib.IPWrapper(namespace=self.snat_namespace.name)
        for d in ns_ip.get_devices():
            if (d.name.startswith(router.EXTERNAL_DEV_PREFIX) and
                    d.name != interface_name):
                LOG.debug('Deleting stale external router device: %s', d.name)
                self.driver.unplug(
                    d.name,
                    namespace=self.snat_namespace.name,
                    prefix=router.EXTERNAL_DEV_PREFIX)
    def get_snat_external_device_interface_name(self, ex_gw_port):
        """Build the (length-capped) external device name for the gateway port."""
        long_name = router.EXTERNAL_DEV_PREFIX + ex_gw_port['id']
        return long_name[:self.driver.DEV_NAME_LEN]
    def get_centralized_fip_cidr_set(self):
        """Returns the fip_cidr set for centralized floatingips."""
        ex_gw_port = self.get_ex_gw_port()
        # Don't look for centralized FIP cidrs if gw_port not exists or
        # this is not snat host
        if (not ex_gw_port or not self._is_this_snat_host() or
                not self.snat_namespace.exists()):
            return set()
        interface_name = self.get_snat_external_device_interface_name(
            ex_gw_port)
        return set([addr['cidr'] for addr in ip_lib.get_devices_with_ip(
            self.snat_namespace.name,
            name=interface_name)])
    def get_router_cidrs(self, device):
        """Over-ride the get_router_cidrs function to return the list.

        This function is overridden to provide the complete list of
        floating_ip cidrs that the router hosts.
        This includes the centralized floatingip cidr list and the
        regular floatingip cidr list that are bound to fip namespace.
        """
        fip_cidrs = super(DvrEdgeRouter, self).get_router_cidrs(device)
        centralized_cidrs = self.get_centralized_fip_cidr_set()
        return fip_cidrs | centralized_cidrs
    def remove_centralized_floatingip(self, fip_cidr):
        """Function to handle the centralized Floatingip remove."""
        if not self.get_ex_gw_port():
            return
        if not self._is_this_snat_host():
            return
        interface_name = self.get_snat_external_device_interface_name(
            self.get_ex_gw_port())
        device = ip_lib.IPDevice(
            interface_name, namespace=self.snat_namespace.name)
        # Drop the address and any conntrack entries tracking it.
        device.delete_addr_and_conntrack_state(fip_cidr)
        self.process_floating_ip_nat_rules_for_centralized_floatingip()
    def add_centralized_floatingip(self, fip, fip_cidr):
        """Function to handle the centralized Floatingip addition."""
        if not self.get_ex_gw_port():
            return
        if not self._is_this_snat_host():
            return
        interface_name = self.get_snat_external_device_interface_name(
            self.get_ex_gw_port())
        try:
            ip_lib.add_ip_address(fip_cidr, interface_name,
                                  namespace=self.snat_namespace.name)
        except ip_lib.IpAddressAlreadyExists:
            # Idempotent: the address is already configured.
            pass
        except RuntimeError:
            LOG.warning("Unable to configure IP address for centralized "
                        "floating IP: %s", fip['id'])
            return lib_constants.FLOATINGIP_STATUS_ERROR
        self.process_floating_ip_nat_rules_for_centralized_floatingip()
        # Send a GARP message on the external interface for the
        # centralized floatingip configured.
        ip_lib.send_ip_addr_adv_notif(self.snat_namespace.name,
                                      interface_name,
                                      fip['floating_ip_address'])
        return lib_constants.FLOATINGIP_STATUS_ACTIVE
    def _centralized_floating_forward_rules(self, floating_ip, fixed_ip):
        """Build the DNAT/SNAT rule tuples mapping *floating_ip* <-> *fixed_ip*."""
        to_source = '-s %s/32 -j SNAT --to-source %s' % (fixed_ip, floating_ip)
        if self.snat_iptables_manager.random_fully:
            to_source += ' --random-fully'
        return [('PREROUTING', '-d %s/32 -j DNAT --to-destination %s' %
                 (floating_ip, fixed_ip)),
                ('OUTPUT', '-d %s/32 -j DNAT --to-destination %s' %
                 (floating_ip, fixed_ip)),
                ('float-snat', to_source)]
    def _set_floating_ip_nat_rules_for_centralized_floatingip(self, fip):
        """Install NAT rules for *fip* when it is SNAT-bound (centralized)."""
        if fip.get(lib_constants.DVR_SNAT_BOUND):
            fixed = fip['fixed_ip_address']
            fip_ip = fip['floating_ip_address']
            for chain, rule in self._centralized_floating_forward_rules(
                    fip_ip, fixed):
                self.snat_iptables_manager.ipv4['nat'].add_rule(
                    chain, rule, tag='floating_ip')
    def process_floating_ip_nat_rules_for_centralized_floatingip(self):
        """Rebuild all centralized-FIP NAT rules from scratch and apply them."""
        self.snat_iptables_manager.ipv4['nat'].clear_rules_by_tag(
            'floating_ip')
        floating_ips = self.get_floating_ips()
        for fip in floating_ips:
            self._set_floating_ip_nat_rules_for_centralized_floatingip(fip)
        self.snat_iptables_manager.apply()
    def process_floating_ip_nat_rules(self):
        if self._is_this_snat_host():
            if not self.snat_iptables_manager:
                LOG.debug("DVR router: no snat rules to be handled")
            else:
                self.process_floating_ip_nat_rules_for_centralized_floatingip()
        # Cover mixed dvr_snat and compute node, aka a dvr_snat node has both
        # centralized and distributed floating IPs.
        super(DvrEdgeRouter, self).process_floating_ip_nat_rules()
|
19,897 | fe652fa47c883b3f550ff4f5dc87a3c2ddfd298e | from numlib import get_next_prime, is_prime
from utilities import memoize
def compute_prime_sum(n, min_prime):
    # Count the ways to write n as a sum of primes, each >= min_prime, with
    # parts taken in non-decreasing order so every partition is counted once.
    # NOTE(review): the n == 2 base case ignores min_prime, and the imported
    # `memoize` decorator is never applied despite heavy re-computation --
    # verify both against the intended use.
    if n <= 1:
        return 0
    elif n == 2:
        return 1
    else:
        prime = min_prime
        count = 0
        # Recurse on the smallest part; stop once prime*2 > n because a
        # partition with two or more parts needs two parts of at least `prime`.
        while (prime*2 <= n):
            count += compute_prime_sum(n - prime, prime)
            prime = get_next_prime(prime)
        # n itself, when prime, counts as the single-part "sum".
        if is_prime(n):
            count += 1
        return count
def main():
    # Project Euler 77-style search (Python 2): find the first n that can be
    # written as a sum of primes in more than 5000 ways, starting from 50.
    n = 50
    m = compute_prime_sum(n, 2)
    while (m <= 5000):
        n += 1
        m = compute_prime_sum(n, 2)
    print n, m
main() |
19,898 | 332c58b6e5ed595c2ec7f496bf1b618fefe3fe55 | from grundy import Grundy
from ao_star import AOStar
# Build a Grundy's-game instance with a starting heap of 6 and solve it by
# running AO* search over the game's AND/OR tree.
game = Grundy(6)
strategy_engine = AOStar(game)
strategy_engine.search()
# strategy.print_tree()
|
19,899 | 499695509457670700fdb2266e3c018fe982a66e | from django.conf.urls import url
from show_update_list.views import *
app_name="show_update_list"
# URL table for the show_update_list app.  Order matters: Django stops at the
# first matching regex, and the empty-pattern Index route at the bottom is a
# catch-all that must stay last.  Route names are part of the reverse()
# contract with templates/callers, so do not rename them in isolation.
urlpatterns=[
    url(r'^Z2_mini_G4/',Z2_mini_G4.as_view(),name='Z2_mini_G4'),
    url(r'^ISV_sales_tool/',ISV_sales_tool.as_view(),name='ISV_sales_tool'),
    url(r'^monitor/',Monitor.as_view(),name='monitor'),
    url(r'^amo/',AMO.as_view(),name='amo'),
    url(r'^pao_inner/',Pao_inner.as_view(),name='pao_inner'),
    url(r'^pao_out/',Pao_out.as_view(),name='pao_out'),
    url(r'^custom_deploy/',Custom_Install.as_view(),name='custom_deploy'),
    url(r'^customadded_service/',Added_Service.as_view(),name='customadded_service'),
    # Authentication and generic editing.
    url(r'^register/',Register.as_view(),name='register'),
    url(r'^login/',Login.as_view(),name='login'),
    url(r'^logout/',Logout.as_view(),name='logout'),
    url(r'^edit/',Edit.as_view(),name='edit'),
    url(r'^z_save/',Z_Save.as_view(),name='z_save'),
    # Per-category CRUD endpoints (edit/save/del triples).
    url(r'^monitor_edit/',Monitor_Edit.as_view(),name='monitor_edit'),
    url(r'^monitor_save/',Monitor_Save.as_view(),name='monitor_save'),
    url(r'^monitor_del/',Monitor_Del.as_view(),name='monitor_del'),
    url(r'^amo_edit/',Amo_Edit.as_view(),name='amo_edit'),
    url(r'^amo_save/',Amo_Save.as_view(),name='amo_save'),
    url(r'^amo_del/',AMO_Del.as_view(),name='amo_del'),
    url(r'^pao_inner_edit/',Pao_inner_Edit.as_view(),name='pao_inner_edit'),
    url(r'^pao_inner_save/',Pao_inner_Save.as_view(),name='pao_inner_save'),
    url(r'^pao_inner_del/',Pao_Inner_Del.as_view(),name='pao_inner_del'),
    url(r'^cds_work_edit/',CDS_Work_Edit.as_view(),name='cds_work_edit'),
    url(r'^cds_work_save/',CDS_Work_Save.as_view(),name='cds_work_save'),
    url(r'^cds_work_del/',CDS_Work_Del.as_view(),name='cds_work_del'),
    # NOTE(review): 'deit' looks like a typo for 'edit', but both the path and
    # the route name use it -- fix only together with all reverse() callers.
    url(r'^cds_install_deit/',CDS_Install_Edit.as_view(),name='cds_install_deit'),
    url(r'^cds_install_save/',CDS_Install_Save.as_view(),name='cds_install_save'),
    url(r'^cds_install_del/',CDS_Install_Del.as_view(),name='cds_install_del'),
    url(r'^pps_edit/',PPS_Edit.as_view(),name='pps_edit'),
    url(r'^pps_save/',PPS_Save.as_view(),name='pps_save'),
    url(r'^pps_del/',PPS_Del.as_view(),name='pps_del'),
    url(r'^add_sof_save/',Additional_software_Save.as_view(),name='add_sof_save'),
    url(r'^add_sof_edit/',Additional_software_Edit.as_view(),name='add_sof_edit'),
    # NOTE(review): the delete route below reuses the Edit view -- probable
    # copy/paste bug; confirm whether an Additional_software_Del view exists.
    url(r'^add_sof_del/',Additional_software_Edit.as_view(),name='add_sof_del'),
    # Admin-only management pages.
    url(r'^admin_auth/',Admin.as_view(),name='admin_auth'),
    url(r'^add_z_category/',Add_Z_category.as_view(),name='add_z_category'),
    url(r'^add_other_category/',Add_Other_category.as_view(),name='add_other_category'),
    url(r'^del_z_category/',Del_Z_category.as_view(),name='del_z_category'),
    url(r'^del_other_category/',Del_other_category.as_view(),name='del_other_category'),
    url(r'^admin_z_product/',Admin_Z_Product.as_view(),name='admin_z_product'),
    url(r'^admin_z_product_del/',Admin_Z_Product_del.as_view(),name='admin_z_product_del'),
    # NOTE(review): path says 'other' but the route name says 'monitor' --
    # verify which one reverse() callers rely on before unifying.
    url(r'^admin_other_category/',Admin_Other_Category.as_view(),name="admin_monitor_category"),
    url(r'^admin_minitor_add/',Admin_Monitor.as_view(),name='admin_minitor_add'),
    url(r'^admin_amo_add/',Admin_AMO.as_view(),name='admin_amo_add'),
    url(r'^admin_pao_add/',Admin_Pao.as_view(),name='admin_pao_add'),
    url(r'^admin_custom_made_install_add/',Admin_custom_Made_install.as_view(),name='admin_custom_made_install_add'),
    url(r'^admin_work_service_add/',Admin_Work_Service.as_view(),name='admin_work_service_add'),
    url(r'^admin_additional_software_add/',Admin_Additional_software.as_view(),name='admin_additional_software_add'),
    url(r'^admin_PPS_CarePack_List_Price_add/',Admin_PPS_CarePack_List_Price.as_view(),name='admin_PPS_CarePack_List_Price_add'),
    # User rules / labels / discounts.
    url(r'^save_user_rules/',saveUserRules.as_view(),name='save_user_rules'),
    url(r'^new_user_rules/',saveUserRules2.as_view(),name='new_user_rules'),
    url(r'^user_label_show/',showUserLabel.as_view(),name='user_label_show'),
    url(r'^del_user_label/',DelUserLabel.as_view(),name='del_user_label'),
    url(r'^user_rules_show/',showUserRules.as_view(),name='user_rules_show'),
    url(r'^users/',Users.as_view(),name='users'),
    # NOTE(review): 'disconut' is a typo for 'discount', but it appears in both
    # paths and route names -- fix only together with templates and callers.
    url(r'^disconut/',ShowDiscount.as_view(),name='disconut'),
    url(r'^add_disconut/',AddDiscount.as_view(),name='add_disconut'),
    url(r'^user_disconut/',User_disccount.as_view(),name='user_disconut'),
    url(r'^user_edit/',Vip_edit.as_view(),name='user_edit'),
    url(r'^test/',Test.as_view(),name='test'),
    url(r'^test2/',Test2.as_view(),name='test2'),
    url(r'^show_category/',ShowCategory.as_view(),name='show_category'),
    # Assemble / monitor / AMO product helpers.
    url(r'^show_assemble_step/',ShowAssembleStep.as_view(),name='show_assemble_step'),
    url(r'^add_assemble_step/',AddAssembleStep.as_view(),name='add_assemble_step'),
    url(r'^del_assemble_step/',DelAssembleStep.as_view(),name='del_assemble_step'),
    url(r'^show_assemble_pro/',ShowAssemblePdoduct.as_view(),name='show_assemble_pro'),
    url(r'^add_limit/',AddLimit.as_view(),name='add_limit'),
    url(r'^del_limit/',DelLimit.as_view(),name='del_limit'),
    url(r'^show_monitor_pro/',ShowMonitorPro.as_view(),name='show_monitor_pro'),
    url(r'^add_monitor_type/',AddMonitorType.as_view(),name='add_monitor_type'),
    url(r'^del_monitor_type/',DelMonitorType.as_view(),name='del_monitor_type'),
    # NOTE(review): the next three routes all point at DelLimit -- they look
    # like unfinished placeholders; confirm the intended views.
    url(r'^monitor_pro/',DelLimit.as_view(),name='monitor_pro'),
    url(r'^amo_step/',DelLimit.as_view(),name='amo_step'),
    url(r'^amo_pro/',DelLimit.as_view(),name='amo_pro'),
    url(r'^show_amo_pro/',ShowAmoPro.as_view(),name='show_amo_pro'),
    url(r'^add_amo_step/',AddAmoStep.as_view(),name='add_amo_step'),
    url(r'^del_amo_step/',DelAmoStep.as_view(),name='del_amo_step'),
    # Reports / export.
    url(r'^pdf/',PDF.as_view(),name='pdf'),
    url(r'^index_save_user/',IndexSaveRules.as_view(),name='index_save_user'),
    url(r'^index_export_excel/',IndexExportExcel.as_view(),name='index_export_excel'),
    # Catch-all: matches every remaining path, so it must stay last.
    url(r'',Index.as_view(),name='index'),
]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.