text stringlengths 38 1.54M |
|---|
import unittest
from bricklane_platform.models.payments import Payment, CardPayment, BankPayment
from ..fixture import get_path
from bricklane_platform.services.payment_processor import PaymentProcessor
def create_stub_payment(mock_is_successful):
    """Build a CardPayment whose is_successful() is stubbed to a fixed result."""
    stub = CardPayment()
    stub.is_successful = lambda: mock_is_successful
    return stub
class TestPaymentProcessor(unittest.TestCase):
    """Tests for PaymentProcessor: CSV loading, verification, handler lookup."""

    def setUp(self):
        self.payment_processor = PaymentProcessor()

    def test_get_payments(self):
        fixture = get_path("card_payments_mixed.csv")
        payments = self.payment_processor.get_payments(fixture, "card")
        self.assertEqual(len(payments), 3)
        # Payments must come back in file order.
        for payment, expected_id in zip(payments, [30, 45, 10]):
            self.assertEqual(payment.card.card_id, expected_id)

    def test_get_payments_empty(self):
        fixture = get_path("card_payments_empty.csv")
        payments = self.payment_processor.get_payments(fixture, "card")
        self.assertEqual(len(payments), 0)

    def test_verify_payments(self):
        """Only payments whose is_successful() is True survive verification."""
        payment1 = create_stub_payment(mock_is_successful=True)
        payment2 = create_stub_payment(mock_is_successful=False)
        payment3 = create_stub_payment(mock_is_successful=True)
        result = self.payment_processor.verify_payments([payment1, payment2, payment3])
        self.assertEqual(result, [payment1, payment3])

    def test_get_available_card_handler(self):
        """Getting a payment handler that is implemented ("card")."""
        handler_class = self.payment_processor.payment_handler.create_instance("card")
        self.assertEqual(handler_class, CardPayment)

    def test_get_available_bank_handler(self):
        """Getting a payment handler that is implemented ("bank")."""
        handler_class = self.payment_processor.payment_handler.create_instance("bank")
        self.assertEqual(handler_class, BankPayment)
|
import bs4, requests, pyperclip

url = pyperclip.paste()
productData = []


def geteMAGData(productUrl):
    """Scrape product name and price from an eMAG product page.

    Appends {'name': ..., 'price': ...} to the module-level productData list.
    On HTTP failure or a selector/parse miss it prints a message and appends
    nothing.
    """
    res = requests.get(productUrl)
    try:
        res.raise_for_status()
    except requests.exceptions.HTTPError:
        print('Service Unavailable from URL: ' + productUrl + '\n\n')
        return  # bug fix: previously fell through and parsed the error page

    soup = bs4.BeautifulSoup(res.text, 'html.parser')
    try:
        productName = soup.select('head > title')[0].text.strip().replace(' - eMAG.ro', '', 1)
        pr = soup.select('''#page-skin > div.container > div > div:nth-of-type(2) >
div.col-sm-5.col-md-7.col-lg-7 > div > div > div.col-sm-12.col-md-6.col-lg-5 >
form > div.product-highlight.product-page-pricing > p.product-new-price''')
        # Price text looks like "1.234 56 Lei": drop " Lei" and the thousands
        # dot; the last two digits are the decimals (hence the /100).
        strPrice = pr[0].text.strip().replace(' Lei', '').replace('.', '', 1)
        price = str(float(strPrice) / 100) + ' Lei'
    except (IndexError, ValueError):
        # bug fix: the original try wrapped only the float() conversion, so the
        # IndexError from pr[0] (selector miss) escaped uncaught, and float()
        # raises ValueError, not the IndexError it was catching.
        print('Something went wrong')
        return
    productData.append({'name': productName, 'price': price})


geteMAGData(url)
for item in productData:
    print('Name of product: ' + item['name'])
    print('Price of product: ' + item['price'])
|
# Amy Doan ID:1895125
# When inputting a month it must be spelled correctly to avoid KeyError.

# Month name -> month number (string) for building M/D/YYYY output.
month_list = {"January": "1", "February": "2", "March": "3", "April": "4",
              "May": "5", "June": "6", "July": "7", "August": "8",
              "September": "9", "October": "10", "November": "11",
              "December": "12"}

with open("inputDates.txt", "r") as file:  # one candidate date per line
    date_list = file.read().splitlines()

# bug fix: the original opened parsedDates.txt once per matching line via
# print(..., file=open(..., "a")), leaking a file handle each time; open the
# output once and let the context manager close both files.
with open("parsedDates.txt", "a") as out:
    for dates in date_list:
        # Convert only lines like "March 12, 1998": comma present, no period.
        if dates.find(',') != -1 and dates.find('.') == -1:
            parts = dates.split()                  # ["March", "12,", "1998"]
            parts[1] = parts[1].replace(',', '/')  # "12," -> "12/"
            print(month_list[parts[0]] + '/' + parts[1] + parts[2], file=out)
from django.conf.urls import url
from . import views
# URL routes for the analysis/stats app.
# NOTE(review): several patterns below (r'mark2cure/stats/', r'task/stats/',
# r'ner/stats/', r're/stats/', r're/list', the r'ner/...' task routes) lack the
# ^...$ anchors used by the other patterns, so they can match as substrings of
# longer paths — confirm this is intended.
urlpatterns = [
# Analysis App
url(r'^network/(?P<group_pk>\d+)/$',
views.group_network, name='group-network'),
# Longitudinal user F in Group
url(r'^analysis/group/(?P<group_pk>\d+)/user/(?P<user_pk>\d+)/$',
views.analysis_group_user, name='analysis-group-user'),
# Same view/name without a user_pk (defaults handled by the view).
url(r'^analysis/group/(?P<group_pk>\d+)/user/$',
views.analysis_group_user, name='analysis-group-user'),
# Longitudinal Group F Avg
url(r'^analysis/group/(?P<group_pk>\d+)/$',
views.analysis_group, name='analysis-group'),
# Sitewide
url(r'mark2cure/stats/',
views.mark2cure_stats, name='mark2cure-stats-api'),
# Tasks
url(r'task/stats/',
views.user_task_stats, name='task-stats-api'),
url(r'ner/stats/',
views.ner_stats, name='ner-stats-api'),
url(r're/stats/',
views.re_stats, name='re-stats-api'),
# - [NER] (TODO) Move into Task
url(r'ner/(?P<document_pk>\d+)/$',
views.ner_document, name='ner_document'),
url(r'ner/quest/(?P<quest_pk>\d+)/$',
views.ner_quest_read, name='ner-quest-read-api'),
# - [RE]
# - [Dashboard] Named Entity Recognition (NER)
url(r'^ner/list/(?P<group_pk>\d+)/contributors/$',
views.ner_list_item_contributors, name='ner-quest-contributors-api'),
url(r'^ner/list/(?P<group_pk>\d+)/quests/$',
views.ner_list_item_quests, name='ner-quest-api'),
url(r'^ner/list/(?P<group_pk>\d+)/$',
views.ner_list_item, name='ner-group-api'),
url(r'^ner/list/$',
views.ner_list, name='ner-list-api'),
# - [Dashboard] Relationship Extraction (RE)
url(r're/list',
views.re_list, name='re-list-api'),
# - [Dashboard] User Scoreboard
url(r'^leaderboard/users/(?P<day_window>\d+)/$',
views.leaderboard_users, name='leaderboard-users'),
url(r'^leaderboard/teams/(?P<day_window>\d+)/$',
views.leaderboard_teams, name='leaderboard-teams'),
# - [Training]
url(r'^training/$',
views.training, name='training'),
url(r'^training/(?P<task_type>\w+)/$',
views.training_details, name='training-details'),
]
|
import timeit
from timeit import default_timer as timer
from torchvision.utils import save_image
import torch
metrics = []  # metric.value() after every batch, accumulated across all epochs


class Test:
    """Runs evaluation epochs of `model` over `data_loader` without gradients."""

    def __init__(self, model, data_loader, criterion, metric, device):
        self.model = model
        self.data_loader = data_loader
        self.criterion = criterion
        self.metric = metric
        self.device = device

    def run_epoch(self, iteration_loss=False):
        """Evaluate one full pass; return (mean batch loss, final metric value)."""
        self.model.eval()
        epoch_loss = 0.0
        self.metric.reset()
        for step, batch_data in enumerate(self.data_loader):
            # Get the inputs and labels, timing the fetch.
            start1 = timer()
            inputs = batch_data[0]  # .to(self.device)
            end1 = timer()
            labels = batch_data[1]  # .to(self.device)
            with torch.no_grad():
                # Forward propagation, timed separately from the fetch.
                start2 = timer()
                outputs = self.model(inputs)
                end2 = timer()
                print("each batch takes:")
                # Sum of the data-fetch and forward durations.
                print(end1 + end2 - start1 - start2)
                # Loss computation
                loss = self.criterion(outputs, labels)
                # Keep track of loss for the current epoch.
                epoch_loss += loss.item()
                # Keep track of the evaluation metric.
                self.metric.add(outputs.detach(), labels.detach())
                metrics.append(self.metric.value())
            if iteration_loss:
                print("[Step: %d] Iteration loss: %.4f" % (step, loss.item()))
        return epoch_loss / len(self.data_loader), self.metric.value()
import os
import sys
import string
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from keras.models import Model
from keras.layers import Dense, Embedding, Input, LSTM
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.optimizers import Adam, SGD
'''
download the word vectors : http://nlp.stanford.edu/data/glove.6b.zip
'''
# Some configuration
MAX_SEQUENCE_LENGTH = 100
MAX_VOCAB_SIZE = 3000
EMBEDDING_DIM = 50
VALIDATION_SPLIT = 0.2
BATCH_SIZE = 128
EPOCHS = 100
LATENT_DIM = 25

# LOAD IN DATA: one poem line per sample; input is "<sos> line", target is
# "line <eos>" (teacher forcing for next-word prediction).
input_texts = []
target_texts = []
for line in open("/poetry.txt"):
    line = line.rstrip()
    if not line:
        continue
    input_texts.append('<sos> ' + line)
    target_texts.append(line + ' <eos>')
all_lines = input_texts + target_texts

# Convert the sentences (strings) into integer sequences.
tokenizer = Tokenizer(num_words=MAX_VOCAB_SIZE, filters='')
tokenizer.fit_on_texts(all_lines)
input_sequences = tokenizer.texts_to_sequences(input_texts)
target_sequences = tokenizer.texts_to_sequences(target_texts)

# Find the max sequence length actually present in the data.
max_sequence_length_from_data = max(len(s) for s in input_sequences)
print("max sequence length: ", max_sequence_length_from_data)

# word -> integer mapping
word2idx = tokenizer.word_index
print('Found %s unique tokens.' % len(word2idx))
assert('<sos>' in word2idx)
assert('<eos>' in word2idx)

# Pad sequences so that we get an N x T matrix.
max_sequence_length = min(max_sequence_length_from_data, MAX_SEQUENCE_LENGTH)
input_sequences = pad_sequences(input_sequences, maxlen=max_sequence_length, padding='post')
target_sequences = pad_sequences(target_sequences, maxlen=max_sequence_length, padding='post')

# Load pre-trained word vectors: a space-separated text file in the format
#   word vec[0] vec[1] vec[2] ...
print('Loading word vectors...')
word2vec = {}
with open(os.path.join('/glove.6B.%sd.txt' % EMBEDDING_DIM)) as f:
    for line in f:
        values = line.split()
        word = values[0]
        vec = np.asarray(values[1:], dtype='float32')
        word2vec[word] = vec
print('Found %s word vectors.' % len(word2vec))  # bug fix: message read "Foundc"

# Prepare the embedding matrix; rows for words outside the vocab stay zero.
print("Filling pre-trained embeddings...")
num_words = min(MAX_VOCAB_SIZE, len(word2idx) + 1)
embedding_matrix = np.zeros((num_words, EMBEDDING_DIM))
for word, i in word2idx.items():
    if i < MAX_VOCAB_SIZE:
        embedding_vector = word2vec.get(word)
        if embedding_vector is not None:
            # words not found in the embedding index will be all zeros
            embedding_matrix[i] = embedding_vector

# One-hot the targets (can't use sparse cross-entropy here).
one_hot_targets = np.zeros((len(input_sequences), max_sequence_length, num_words))
for i, target_sequence in enumerate(target_sequences):
    for t, word in enumerate(target_sequence):
        if word > 0:  # 0 is padding
            one_hot_targets[i, t, word] = 1

# Load the pre-trained embeddings into an Embedding layer; trainable=False
# keeps them fixed during training.
embdeding_layer = Embedding(num_words, EMBEDDING_DIM,
                            weights=[embedding_matrix],
                            trainable=False)

print("Building Model....")
# A language model with a single LSTM layer and explicit initial states.
input_ = Input(shape=(max_sequence_length,))
initial_h = Input(shape=(LATENT_DIM,))
initial_c = Input(shape=(LATENT_DIM,))
x = embdeding_layer(input_)
lstm = LSTM(LATENT_DIM, return_state=True, return_sequences=True)
x, _, _ = lstm(x, initial_state=[initial_h, initial_c])  # don't need the states here
dense = Dense(num_words, activation='softmax')
output_ = dense(x)

model = Model([input_, initial_h, initial_c], output_)
model.compile(loss='categorical_crossentropy',
              optimizer=Adam(lr=0.01),
              metrics=['accuracy'])
print(model.summary())

print("training model...")
z = np.zeros((len(input_sequences), LATENT_DIM))  # zero initial LSTM states
r = model.fit([input_sequences, z, z], one_hot_targets,
              batch_size=BATCH_SIZE, epochs=EPOCHS,
              validation_split=VALIDATION_SPLIT)

# Sampling model: feed one word at a time and get the states back out.
input2 = Input(shape=(1,))
x = embdeding_layer(input2)
x, h, c = lstm(x, initial_state=[initial_h, initial_c])  # now we need the states
output2 = dense(x)
sampling_model = Model([input2, initial_h, initial_c], [output2, h, c])

# Reverse word2idx to recover words during prediction.
idx2word = {v: k for k, v in word2idx.items()}
def sample_line():
    """Sample one line of poetry from the trained model, word by word."""
    # Initial input: the <sos> token with zeroed LSTM states.
    np_input = np.array([[word2idx['<sos>']]])
    h = np.zeros((1, LATENT_DIM))
    c = np.zeros((1, LATENT_DIM))
    eos = word2idx['<eos>']  # so we know when to quit
    output_sentence = []
    for _ in range(max_sequence_length):
        o, h, c = sampling_model.predict([np_input, h, c])
        probs = o[0, 0]
        if np.argmax(probs) == 0:
            # Padding index should never dominate; zero it out if it does.
            print("wtf")
            probs[0] = 0
        probs /= probs.sum()
        idx = np.random.choice(len(probs), p=probs)
        if idx == eos:
            break
        # Accumulate the output line.
        output_sentence.append(idx2word.get(idx, '<WTF %d>' % idx))
        # The sampled word becomes the next input to the model.
        np_input[0, 0] = idx
    return ' '.join(output_sentence)
# Generate 4-line poems until the user declines.
while True:
    for _ in range(4):
        print(sample_line())
    ans = input("-----generate another? [y/n]------")
    if ans and ans[0].lower() == 'n':
        break
"""
******************************************************************************
* Purpose: Program To Implement DQueue to check Palindrome Checker.
*
* @author: Manjunath Mugali
* @version: 3.7
* @since: 23-01-2019
*
*******************************************************************************
"""
import re
from builtins import ValueError
from Utility_DataStructure.TestDQueue import TestDQueue
l1 = TestDQueue()


class DQueue:
    """Reads a string and uses a double-ended queue to check for a palindrome.

    NOTE(review): the whole check runs in the class *body*, i.e. at
    class-definition time — kept to preserve the original design.
    """
    try:
        print("Enter the String :")
        str2 = input()  # read string from user
        # bug fix: strip() returns a new string; the original discarded it.
        str2 = str2.strip()
        while str2.isdigit() or len(str2) <= 0:
            print("Please Provide Input as String")
            str2 = input()
        str1 = re.sub('[^A-Za-z]', '', str2)  # keep letters only
        for i in range(len(str1)):
            l1.enQueueRear(str1[i])  # push characters one by one at the rear
        print()
        print()
        print("Removing Elements From Rear:")
        str2 = ""
        for i in range(len(str1)):
            str2 = str2 + l1.removeFromFront()
        # NOTE(review): enqueue-at-rear then remove-from-front reproduces the
        # original order, so this compares the string with itself; a palindrome
        # check likely needs removal from the rear — confirm the TestDQueue API.
        if str2.upper() == str1.upper():
            print("String is palindrome")
        else:
            print("Not Palindrome")
    except ValueError:
        print("----------------------Oops Something Went Wrong----------------------")
|
N = int(input())  # x ranges from 1 to N
k = 300           # maximum number of probes
hi = N            # current upper bound (exclusive-ish) of the search
lo = 0            # current lower bound

for i in range(0, k):
    if i == 0:
        # First probe: guess 1 and classify the opponent by the reply.
        y = 1
        print(y)
        character = input()
        if character == 'L':
            s = 'evenlie'  # assumed to lie on even-numbered turns — TODO confirm
        else:
            s = 'oddlie'
    elif s == 'oddlie':
        # Binary search, trusting replies only on even turns.
        y = int((hi + lo) / 2)
        print(y)
        character = input()
        if character == 'E':
            break
        elif character == 'G' and i % 2 == 0:
            lo = y
        elif character == 'L' and i % 2 == 0:
            hi = y
    elif s == 'evenlie':
        # Binary search, trusting replies only on odd turns.
        y = int((hi + lo) / 2)
        print(y)
        character = input()
        if character == 'E':
            break
        elif character == 'G' and i % 2 != 0:
            lo = y
        elif character == 'L' and i % 2 != 0:
            hi = y
|
import FWCore.ParameterSet.Config as cms
from DQMServices.Core.DQMEDAnalyzer import DQMEDAnalyzer
# DQM monitor for the Level-1 CSC Trigger Primitive Generator (TPG).
l1tcsctpg = DQMEDAnalyzer('L1TCSCTPG',
# Keep histograms in the DQMStore instead of a standalone ROOT file.
disableROOToutput = cms.untracked.bool(True),
# Correlated LCT digis from the CSC unpacker.
csctpgSource = cms.InputTag("muonCSCDigis","MuonCSCCorrelatedLCTDigi"),
verbose = cms.untracked.bool(False),
DQMStore = cms.untracked.bool(True)
)
|
import tweepy
# SECURITY(review): all credentials below are hard-coded in source; move them
# to environment variables or a secrets store and rotate these keys.
def _build_api(consumer_key, consumer_secret, access_token, access_token_secret):
    """Build an authenticated tweepy.API from one credential set.

    Returns None on any failure (callers treat None as "no API available").
    """
    try:
        auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
        auth.secure = True
        auth.set_access_token(access_token, access_token_secret)
        return tweepy.API(auth)
    except Exception:
        # bug fix: the original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt.
        return None


def authenticate_user():
    return _build_api("hhYq78kZ6VkAp4Q4pXzuKCOkA",
                      "A468W3FnFd9WcL2PXYeRO0iLWnu90761HkKHijXRXqmgtR1bpk",
                      "374363728-BCe1rusHWiVPBHDCQ5RoketbfaNePuHXTeJja7W6",
                      "cMyNaQwVzxtXKqSG0jjlI1H6avEoMbvZ36pB7Zr5dPEN0")


def authenticate_test_app():
    return _build_api("RY4A89PLnuPKx8hIQRsGaQC0y",
                      "iRO1tw96GuiCnb92WFg7Tmo60HgUgY7xHPLxsfMK6e4OpcqfPN",
                      "374363728-Vvr26iYvkW0HqLeDi4NMAe6ZmLwb8XCehaAEi7aH",
                      "Q5qbv5S4ugYO7aiDI8D8IOnpIwU96r6aBZJdfLaxwgOI6")


def authenticate_api_athlete():
    return _build_api("prpZDRZO2wMFw4l2Dy4tOoqWH",
                      "O6P84x182pjZbIx4ENqFmCeeFUXu3x6u0enG3ItMNY8AYPkIEn",
                      "374363728-Vb3rn6HaVn0jlgHzTmiuU1HxxouDbp6JQtWFnm1V",
                      "ZgloLWGdOfXADOYao2zLfJ2QxrAjiDzTygeBpoT2bdAPU")


def authenticate_api_frank():
    return _build_api("IzxpLmPU7bcH6mkoEPhrDgCRz",
                      "vqo6m3JU1TKRhSGwDHg8IxuvOZ4IGiiXdFxn8kgwuRTb5ijogZ",
                      "100311379-VJOuaxitBtDCq2WK11HXo3ROQSzDME7Ut1vL3AoI",
                      "Ga3VCILMJkBd898Ewd3excvzicDPpepiRKvX0z9YMLM1q")


def autheticate_api_frank_1():  # (sic) name kept for existing callers
    return _build_api("nM0kCPyrB06NA1EGH8kaQ",
                      "hUaKXcXaVnnYQMpG46MkQaWEZW8uKCy8kU5piBY8R0",
                      "100311379-N9n7YrjaCYgdIBAe8d1jv90NkbcLL6TF9HQOZUJ5",
                      "DiUsftqZiEXMZoNDSrxLzKdm9oCEcy1rSwMrXkHIQK7Nb")


def authenticate_test_stream():
    """Return an OAuthHandler (not an API) for streaming; None on failure."""
    try:
        auth = tweepy.OAuthHandler("wwk0TZueG6EcXn5Qj72ZaeiaD",
                                   "AVRBLJu1al3XeQU7FC3yuAYNYiR3kSY8zkpxjiU3wQem75fdim")
        # auth.secure was deliberately left unset in the original.
        auth.set_access_token("374363728-jTdIw2mWdJLXyD5oF3hkVnMXzyT4zlVckF1p5uTU",
                              "iSmgkZFAiYrebXTu2JOoUYFnCtosrUCw8i6IcW19Awe7m")
        return auth
    except Exception:
        return None
|
def apply(L, f):
    """
    Applies function given by f to each element in L
    Parameters
    ----------
    L : list containing the operands
    f : the function
    Returns
    -------
    result: resulting list
    """
    return [f(element) for element in L]
L = [1, -2, -5, 6.2]
# bug fix: the snippet mixed Python 2 print statements with a Python 3
# print() call below; made consistent so the file runs under Python 3.
print(apply(L, abs))  # [1, 2, 5, 6.2] — abs is applied on elements passed in L
print(apply(L, int))  # [1, -2, -5, 6]


def sqr(n):
    """Return n squared."""
    return n ** 2


for i in map(sqr, [1, -2, -5, 6.2]):
    print(i)
#!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as plt
# Make sure that caffe is on the python path:
caffe_root = '/work/personal/caffe/' # this file is expected to be in {caffe_root}/examples
import sys
sys.path.insert(0, caffe_root + 'python')
import caffe
def vis_square(data):
    """Take an array of shape (n, height, width) or (n, height, width, 3)
    and visualize each (height, width) thing in a grid of size approx.
    sqrt(n) by sqrt(n). Saves the tiled image to 'visualize-filters.png'.
    """
    # Normalize data into [0, 1] for display; bug fix: a constant input
    # previously produced a 0/0 NaN array.
    span = data.max() - data.min()
    data = (data - data.min()) / span if span else np.zeros_like(data, dtype=float)
    # Force the number of filters to be square.
    n = int(np.ceil(np.sqrt(data.shape[0])))
    padding = (((0, n ** 2 - data.shape[0]),
                (0, 1), (0, 1))                 # add some space between filters
               + ((0, 0),) * (data.ndim - 3))   # don't pad the last dimension (if there is one)
    data = np.pad(data, padding, mode='constant', constant_values=1)  # pad with ones (white)
    # Tile the filters into one big image.
    data = data.reshape((n, n) + data.shape[1:]).transpose((0, 2, 1, 3) + tuple(range(4, data.ndim + 1)))
    data = data.reshape((n * data.shape[1], n * data.shape[3]) + data.shape[4:])
    plt.imshow(data)
    plt.axis('off')
    plt.savefig('visualize-filters.png')
def main():
    """Load the trained net and visualize its conv1 filters."""
    # bug fix: removed a leftover `import pdb; pdb.set_trace()` debugging
    # breakpoint that halted the script on every run.
    net = caffe.Net('test.prototxt',
                    'models/scratch_iter_90000.caffemodel',
                    caffe.TEST)
    # conv1 weights are (out, in, h, w); move channels last for display.
    filters = net.params['conv1'][0].data
    vis_square(filters.transpose(0, 2, 3, 1))


if __name__ == "__main__":
    main()
|
import os
from app import app, db, lm
from flask import Flask, render_template, request, flash, session, redirect, url_for, send_from_directory, g
from forms import ContactForm, SignupForm, SigninForm
from flask.ext.mail import Message, Mail
from flask.ext.login import current_user
from models import db, User
import facebook
from flask_oauth import OAuth
from werkzeug import secure_filename
import urllib
mail = Mail()


@lm.user_loader
def load_user(id):
    """Flask-Login hook: look a user up by primary key."""
    return User.query.get(int(id))


@app.before_request
def before_request():
    """Expose the current user on flask.g and persist it when logged in."""
    g.user = current_user
    if g.user.is_authenticated():
        db.session.add(g.user)
        db.session.commit()
@app.route('/')
def home():
    """Render the landing page."""
    return render_template('home.html')


@app.route('/about')
def about():
    """Render the about page."""
    return render_template('about.html')
@app.route('/contact', methods=['GET', 'POST'])
def contact():
    """Contact form: show it on GET, validate and e-mail the message on POST."""
    form = ContactForm()
    if request.method == 'GET':
        return render_template('contact.html', form=form)
    if form.validate() == False:
        flash('All fields are required')
        return render_template('contact.html', form=form)
    msg = Message(form.subject.data, sender='someone@example.com', recipients=['your@youremail.com'])
    msg.body = """
%s <%s>
%s
""" % (form.subject.data, form.email.data, form.message.data)
    mail.send(msg)
    return render_template('contact.html', success=True)
@app.route('/favicon.ico')
def favicon():
    """Serve the favicon from the static/ico directory."""
    return send_from_directory(os.path.join(app.root_path, 'static'), 'ico/favicon.ico')


@app.errorhandler(404)
def page_not_found(e):
    """Custom 404 page."""
    return render_template('404.html'), 404


# NOTE(review): '/' is also routed to home() above; the first-registered rule
# wins, so this view is effectively shadowed. Kept because url_for('index')
# is referenced elsewhere in this module.
@app.route('/')
def index():
    return render_template('index.html')
@app.route('/signup', methods=['GET', 'POST'])
def signup():
    """Sign-up page: create the user and start a session on a valid POST."""
    form = SignupForm()
    if 'email' in session:
        return redirect(url_for('profile'))
    if request.method == 'GET':
        return render_template('signup.html', form=form)
    if form.validate() == False:
        return render_template('signup.html', form=form)
    newuser = User(form.firstname.data, form.lastname.data, form.email.data, form.password.data)
    db.session.add(newuser)
    db.session.commit()
    session['email'] = newuser.email
    return redirect(url_for('profile'))
@app.route('/signin', methods=['GET', 'POST'])
def signin():
    """Sign-in page: store the e-mail in the session on a valid POST."""
    form = SigninForm()
    if 'email' in session:
        return redirect(url_for('profile'))
    if request.method == 'GET':
        return render_template('signin.html', form=form)
    if form.validate() == False:
        return render_template('signin.html', form=form)
    session['email'] = form.email.data
    return redirect(url_for('profile'))
@app.route('/signout')
def signout():
    """Clear login state; users without a session bounce to sign-in."""
    if 'email' not in session:
        pop_login_session()
        return redirect(url_for('signin'))
    session.pop('email', None)
    pop_login_session()
    return redirect(url_for('home'))
# Facebook Authentication
# SECURITY(review): app id/secret are hard-coded in source; move them to
# configuration and rotate the secret.
FACEBOOK_APP_ID = '691001537654739'
FACEBOOK_APP_KEY = '5db41d64579acb1840d8c3f26476c9ca'

oauth = OAuth()
fb = oauth.remote_app('facebook',
                      base_url='https://graph.facebook.com/',
                      request_token_url=None,
                      access_token_url='/oauth/access_token',
                      authorize_url='https://www.facebook.com/dialog/oauth',
                      consumer_key=FACEBOOK_APP_ID,
                      consumer_secret=FACEBOOK_APP_KEY,
                      request_token_params={'scope': ('email, ')})


@fb.tokengetter
def get_facebook_token():
    """OAuth hook: return the cached Facebook token, if any."""
    return session.get('facebook_token')


def pop_login_session():
    """Drop login state and the Facebook token from the session."""
    session.pop('logged_in', None)
    session.pop('facebook_token', None)


@app.route("/facebook_login")
def facebook_login():
    """Kick off the Facebook OAuth flow."""
    return fb.authorize(callback=url_for('facebook_authorized',
                                         next=request.args.get('next'), _external=True))
@app.route("/facebook_authorized")
@fb.authorized_handler
def facebook_authorized(resp):
    """OAuth callback: log the Facebook user in, creating an account if needed."""
    next_url = request.args.get('next') or url_for('index')
    if resp is None or 'access_token' not in resp:
        return redirect(next_url)
    elif 'email' not in session:
        fb_access_token = resp['access_token']
        graph = facebook.GraphAPI(fb_access_token)
        fb_details = graph.get_object('me')
        fb_photo = graph.get_object('me/picture')  # fetched but currently unused
        firstname = fb_details['first_name']
        lastname = fb_details['last_name']
        email = fb_details['email']
        id = fb_details['id']
        # bug fix: a brand-new User row was previously inserted on *every*
        # Facebook login; reuse the existing account when the e-mail is known.
        user = User.query.filter_by(email=email).first()
        if user is None:
            user = User(firstname, lastname, email, id)
            db.session.add(user)
            db.session.commit()
        session['email'] = user.email
        session['logged_in'] = True
        session['facebook_token'] = (resp['access_token'], '')
    return redirect(next_url)
# Profile
@app.route('/profile')
def profile():
    """Render the signed-in user's profile with placeholder posts."""
    if 'email' not in session:
        return redirect(url_for('signup'))
    user = User.query.filter_by(email=session['email']).first()
    if user is None:
        return redirect(url_for('signin'))
    user = g.user  # NOTE(review): overrides the freshly queried user with g.user
    posts = [  # fake post
        {
            'author': {'firstname': 'John'},
            'body': 'Beautiful day'
        }
    ]
    return render_template('profile.html',
                           title='Profile',
                           user=user,
                           posts=posts,
                           filename='profile.png')
# Database Testing
""" @app.route('/testdb')
def testdb():
if db.session.query("1").from_statement("SELECT 1").all():
return 'It works'
else:
return 'Something is broken.' """
# Uploads
def allowed_file(filename):
    """True if filename has an extension listed in app.config['ALLOWED_EXTENSIONS']."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1]
    return extension in app.config['ALLOWED_EXTENSIONS']
def photo_file(filename):
    """Ignore the supplied name and force the canonical profile-picture name."""
    return 'profile.png'
@app.route('/upload', methods=['GET', 'POST'])
def upload():
    """Accept a profile-photo upload and store it as profile.png."""
    if request.method == 'POST':
        uploaded = request.files['file']  # renamed from `file` (shadowed builtin)
        if uploaded and allowed_file(uploaded.filename):
            # NOTE(review): the secure_filename result is immediately overwritten
            # by photo_file(); kept to preserve the original behavior.
            filename = secure_filename(uploaded.filename)
            filename = photo_file(uploaded.filename)
            uploaded.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
            return redirect(url_for('send_file', filename=filename))
    return redirect(url_for('profile'))
# @app.route('/show/<filename>')
# def uploaded_file(filename):
# filename = '/Users/akhilaryan/developer/newflaskapp/app/uploads/' + filename
# return render_template('profile.html', filename=filename)
@app.route('/upload/<filename>')
def send_file(filename):
    """Render the profile page pointing at the stored profile picture."""
    print(app.config['UPLOAD_FOLDER'])
    return render_template('profile.html', filename='profile.png')
    # return send_from_directory(app.config['UPLOAD_FOLDER'], filename)
|
import pygame
from pygame.locals import *
import sys
class Display():
    """Pygame window that draws an array as vertical bars (sorting visualizer)."""

    def __init__(self, wait):
        self.screen = pygame.display.set_mode((700, 502))
        self.clock = pygame.time.Clock()
        self.wait = wait  # ms to pause after each frame (falsy disables)

    def print(self, array, highlights):
        """Draw one frame; bars whose index is in `highlights` are green."""
        self.screen.fill((0, 0, 0))
        # Bars occupy ~70% of each horizontal slot; the rest is spacing.
        bar_size = (700 / len(array)) / 1.4
        space = 0.4 * bar_size
        x = space // 2
        for i, val in enumerate(array):
            y = round((500 * val) / len(array))
            # green bars for current bars. white for all others
            color = (0, 255, 0) if i in highlights else (255, 255, 255)
            pygame.draw.rect(self.screen, color, pygame.Rect(round(x), 501 - y, round(bar_size), y))
            x += bar_size + space
        pygame.display.update()
        # Allow the window to be closed (or quit with 'q') between frames.
        for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                sys.exit()
            elif event.type == KEYUP and event.key == K_q:
                pygame.quit()
                sys.exit()
        if self.wait:
            pygame.time.wait(self.wait)

    def draw_completed_array(self, array):
        """Sweep left-to-right turning all bars green (~500 ms total)."""
        bar_size = (700 / len(array)) / 1.4
        space = 0.4 * bar_size
        # we have 500 ms for the whole array
        wait_time = 500 // len(array)
        # Redraw once without highlights so stale green bars are cleared first.
        self.print(array, ())
        x = space // 2
        for i, val in enumerate(array):
            y = round((500 * val) / len(array))
            pygame.draw.rect(self.screen, (0, 255, 0), pygame.Rect(round(x), 501 - y, round(bar_size), y))
            x += bar_size + space
            pygame.time.wait(wait_time)
            pygame.display.update()
|
from sacrerouge.metrics.decomposed_rouge.categorizers.categorizer import Categorizer, TupleCategorizer
from sacrerouge.metrics.decomposed_rouge.categorizers.dep import DependencyCategorizer, DependencyVerbRelationsCategorizer
from sacrerouge.metrics.decomposed_rouge.categorizers.ner import NERCategorizer
from sacrerouge.metrics.decomposed_rouge.categorizers.np_chunks import NPChunkCategorizer
from sacrerouge.metrics.decomposed_rouge.categorizers.pos import POSCategorizer
from sacrerouge.metrics.decomposed_rouge.categorizers.stopwords import StopwordCategorizer
|
import os
import re
import random
import hashlib
import hmac
from string import letters
from operator import is_not
from functools import partial
from random import randint
from time import sleep
from datetime import date
from protorpc import messages
import webapp2
import jinja2
from google.appengine.ext import db
from google.appengine.ext import ndb
template_dir = os.path.join(os.path.dirname(__file__), 'templates')
jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_dir),
                               autoescape=True)
secret = open("secret.txt", 'r').read()  # HMAC key for signed cookie values


def render_str(template, **params):
    """Render a Jinja template from the templates/ directory."""
    t = jinja_env.get_template(template)
    return t.render(params)


def make_secure_val(val):
    """Return 'val|hmac(secret, val)' for tamper-evident cookie values."""
    return '%s|%s' % (val, hmac.new(secret, val).hexdigest())


def check_secure_val(secure_val):
    """Return the embedded value if its HMAC signature verifies, else None."""
    val = secure_val.split('|')[0]
    if secure_val == make_secure_val(val):
        return val
class WebHandler(webapp2.RequestHandler):
    """Base handler: template rendering plus signed-cookie login helpers."""

    def write(self, *a, **kw):
        self.response.out.write(*a, **kw)

    def render_str(self, template, **params):
        # Every template gets the current user injected automatically.
        params['user'] = self.user
        return render_str(template, **params)

    def render(self, template, **kw):
        self.write(self.render_str(template, **kw))

    def set_secure_cookie(self, name, val):
        """Set a cookie whose value is HMAC-signed against tampering."""
        cookie_val = make_secure_val(val)
        self.response.headers.add_header(
            'Set-Cookie',
            '%s=%s; Path=/' % (name, cookie_val))

    def read_secure_cookie(self, name):
        """Return the cookie's value when its signature verifies, else falsy."""
        cookie_val = self.request.cookies.get(name)
        return cookie_val and check_secure_val(cookie_val)

    def login(self, user):
        self.set_secure_cookie('user_id', str(user.key().id()))

    def logout(self):
        self.response.headers.add_header('Set-Cookie', 'user_id=; Path=/')

    def initialize(self, *a, **kw):
        """Per-request setup: resolve self.user from the signed user_id cookie."""
        webapp2.RequestHandler.initialize(self, *a, **kw)
        uid = self.read_secure_cookie('user_id')
        self.user = uid and User.by_id(int(uid))
def render_post(response, post):
    """Write a post's subject (bold) and body to the response."""
    response.out.write('<b>' + post.subject + '</b><br>')
    response.out.write(post.content)


def make_salt(length=5):
    """Random letter salt (Python 2: string.letters / xrange)."""
    return ''.join(random.choice(letters) for x in xrange(length))


def make_pw_hash(name, pw, salt=None):
    """Return 'salt,sha256(name+pw+salt)'; generates a salt when none given."""
    if not salt:
        salt = make_salt()
    h = hashlib.sha256(name + pw + salt).hexdigest()
    return '%s,%s' % (salt, h)


def valid_pw(name, password, h):
    """Check a password against a stored 'salt,hash' value."""
    salt = h.split(',')[0]
    return h == make_pw_hash(name, password, salt)


def users_key(group='default'):
    """Datastore ancestor key grouping all User entities."""
    return db.Key.from_path('users', group)
class User(db.Model):
    """Datastore user entity with salted-hash credentials and a presence flag."""
    name = db.StringProperty(required=True)
    pw_hash = db.StringProperty(required=True)
    online = db.BooleanProperty()
    email = db.StringProperty()

    def render(self):
        return render_str("friend.html", f=self)

    def game_render(self):
        return render_str("game-friend.html", f=self)

    @classmethod
    def by_id(cls, uid):
        return cls.get_by_id(uid, parent=users_key())

    @classmethod
    def by_name(cls, name):
        return cls.all().filter('name =', name).get()

    @classmethod
    def register(cls, name, pw, email=None):
        """Build (but do not put) a User with a freshly salted password hash."""
        pw_hash = make_pw_hash(name, pw)
        return User(parent=users_key(),
                    name=name,
                    pw_hash=pw_hash,
                    email=email)

    @classmethod
    def login(cls, name, pw):
        """Return the user and mark them online when the credentials check out."""
        u = cls.by_name(name)
        if u and valid_pw(name, pw, u.pw_hash):
            u.online = True
            u.put()
            return u

    @classmethod
    def logout(cls, u):
        """Mark the user offline."""
        if u:
            u.online = False
            u.put()
class Friends(db.Model):
    """Friendship edge between two users; status False = pending, True = accepted."""
    from_user = db.ReferenceProperty(User, collection_name="from_user")
    to_user = db.ReferenceProperty(User, collection_name="to_user")
    status = db.BooleanProperty()

    def render(self):
        return render_str("friend.html", f=self)

    @classmethod
    def relation(cls, user):
        """Map every related user -> friendship status, from both edge directions."""
        relation = {}
        for to in user.from_user:
            relation[to.to_user] = to.status
        for frm in user.to_user:
            relation[frm.from_user] = frm.status
        return relation

    @classmethod
    def friends(cls, user):
        """Users with an accepted (True) friendship."""
        relation = Friends.relation(user)
        return {k: v for k, v in relation.iteritems() if v is True}.keys()

    @classmethod
    def pending(cls, user):
        """Users with a pending (False) friendship."""
        relation = Friends.relation(user)
        return {k: v for k, v in relation.iteritems() if v is False}.keys()

    @classmethod
    def online(cls, user):
        """Accepted friends currently online, sorted by name."""
        online = list(users for users in Friends.friends(user) if users.online is True)
        return sorted(online, key=lambda user: user.name)

    @classmethod
    def offline(cls, user):
        """Accepted friends currently offline, sorted by name."""
        offline = list(users for users in Friends.friends(user) if users.online is False)
        return sorted(offline, key=lambda user: user.name)

    @classmethod
    # Returns all Friends() objects where the given user is the to_user,
    # i.e. invitations sent *to* them.
    def recieved(cls, user):
        pending_users = Friends.pending(user)
        recieved = [recieve.from_user.filter("to_user =", user).get() for recieve in pending_users]
        return filter(partial(is_not, None), recieved)  # drop Nones when nothing matched

    @classmethod
    # Returns all Friends() objects where the given user is the from_user,
    # i.e. invitations they sent.
    def sent(cls, user):
        pending_users = Friends.pending(user)
        sent = [send.to_user.filter("from_user =", user).get() for send in pending_users]
        return filter(partial(is_not, None), sent)  # drop Nones when nothing matched

    @classmethod
    def delete(cls):
        # NOTE(review): `cls.get` is an unbound method that is never called
        # here — this looks broken; confirm intended semantics before using.
        cls.get.delete()
class MainFront(WebHandler):
    """Landing page of the site."""
    def get(self):
        self.render('front.html')
class FriendsPage(WebHandler):
    """Friend dashboard: online/offline friends plus pending invites in both directions."""
    def get(self):
        if not self.user:
            self.redirect("/login")
            return
        by_name = lambda person: person.name
        invites_in = sorted([f.from_user for f in Friends.recieved(self.user)], key=by_name)
        invites_out = sorted([f.to_user for f in Friends.sent(self.user)], key=by_name)
        self.render("friends.html",
                    online = Friends.online(self.user),
                    offline = Friends.offline(self.user),
                    recieved = invites_in,
                    sent = invites_out)
class NewFriend(WebHandler):
    """Send a friend invitation by username, or auto-accept a counter-invite."""
    def get(self):
        if self.user:
            self.render("newfriend.html")
        else:
            self.redirect("/login")
    def post(self):
        if not self.user:
            # NOTE(review): no return after the redirect, so the handler body
            # keeps executing for anonymous posts -- confirm this is intended.
            self.redirect('/')
        target_input = self.request.get('target')
        if target_input:
            target_user = User.by_name(target_input)
            if target_user:
                if target_user.name != self.user.name:
                    # Existing accepted friendship with the target?
                    accepted = [user for user in Friends.friends(self.user) if user.name == target_user.name]
                    # Pending invite sent *by the target to us*?
                    recieved = [target for target in Friends.recieved(self.user) if target.to_user.name == self.user.name and target.from_user.name == target_user.name]
                    # Pending invite we already sent to the target?
                    sent = [target.to_user for target in Friends.sent(self.user) if target.to_user.name == target_user.name and target.from_user.name == self.user.name]
                    # print [acc.name for acc in accepted]
                    # print [rec.from_user.name for rec in recieved]
                    # print [snt.name for snt in sent]
                    # return
                    if accepted:
                        return self.render("newfriend.html", status="accepted", target=target_input)
                    elif recieved:
                        # Counter-invite: accept the pending invitation instead
                        # of creating a duplicate row.
                        for friendships in recieved:
                            friendships.status = True
                            friendships.put()
                        return self.render("newfriend.html", status="recieved", target=target_input)
                    elif sent:
                        return self.render("newfriend.html", status="past_sent", target=target_input)
                    else:
                        # Fresh invitation: stays pending until the target accepts.
                        f = Friends(from_user = self.user, to_user = target_user, status = False)
                        f.put()
                        return self.render("newfriend.html", status="new_sent", target=target_input)
                else:
                    error = "Cannot send an invite to be friends with yourself."
                    self.render("newfriend.html", error=error)
            else:
                error = "User with the given username, does not exist."
                self.render("newfriend.html", target_input=target_input, error=error)
        else:
            error = "Enter the username, to send an invite to be friends."
            self.render("newfriend.html", target_input=target_input, error=error)
USER_RE = re.compile(r"^[a-zA-Z0-9_-]{3,20}$")
def valid_username(username):
    """Truthy when *username* is 3-20 chars of letters, digits, '_' or '-'."""
    if not username:
        return username
    return USER_RE.match(username)
PASS_RE = re.compile(r"^.{3,20}$")
def valid_password(password):
    """Truthy when *password* is any 3-20 characters."""
    return PASS_RE.match(password) if password else password
EMAIL_RE = re.compile(r'^[\S]+@[\S]+\.[\S]+$')
def valid_email(email):
    """Truthy for a missing email (the field is optional) or anything shaped a@b.c."""
    if not email:
        return True
    return EMAIL_RE.match(email)
class Signup(WebHandler):
    """Validates the signup form; subclasses implement done() for success."""
    def get(self):
        self.render("signup-form.html")

    def post(self):
        self.username = self.request.get('username')
        self.password = self.request.get('password')
        self.verify = self.request.get('verify')
        self.email = self.request.get('email')
        # Echo the non-secret fields back into the form on failure.
        params = dict(username = self.username,
                      email = self.email)
        errors = {}
        if not valid_username(self.username):
            errors['error_username'] = "That's not a valid username."
        if not valid_password(self.password):
            errors['error_password'] = "That wasn't a valid password."
        elif self.password != self.verify:
            errors['error_verify'] = "Your passwords didn't match."
        if not valid_email(self.email):
            errors['error_email'] = "That's not a valid email."
        if errors:
            params.update(errors)
            self.render('signup-form.html', **params)
        else:
            self.done()

    def done(self, *a, **kw):
        raise NotImplementedError
class Register(Signup):
    """Completes signup by creating the account, unless the name is taken."""
    def done(self):
        # Reject duplicate usernames.
        if User.by_name(self.username):
            self.render('signup-form.html', error_username = 'That user already exists.')
        else:
            account = User.register(self.username, self.password, self.email)
            account.online = True
            account.put()
            self.login(account)
            self.redirect('/')
class Login(WebHandler):
    """Username/password login form."""
    def get(self):
        self.render('login-form.html')

    def post(self):
        name = self.request.get('username')
        secret = self.request.get('password')
        account = User.login(name, secret)
        if account:
            self.login(account)
            self.redirect('/')
        else:
            self.render('login-form.html', error = 'Invalid login')
class Logout(WebHandler):
    def get(self):
        # Mark the account offline (if signed in) before clearing the session.
        if self.user:
            User.logout(self.user)
        self.logout()
        self.redirect('/')
class UserStat(WebHandler):
    # NOTE(review): marks the user offline but writes no response and does not
    # clear the session -- looks unfinished; confirm before relying on /user.
    def get(self):
        if self.user:
            User.logout(self.user)
class Battleships(db.Model):
    """Datastore record for one Battleships session."""
    # Participants; player2 is unset for bot games.
    player1 = db.ReferenceProperty(User, collection_name = "Battleships_p1")
    player2 = db.ReferenceProperty(User, collection_name = "Battleships_p2")
    # Per-player ship/hit bookkeeping.
    board_ship1 = db.IntegerProperty()
    board_ship2 = db.IntegerProperty()
    board_hit1 = db.BooleanProperty()
    board_hit2 = db.BooleanProperty()
    ships1 = db.IntegerProperty()
    ships2 = db.IntegerProperty()
    # Board geometry and turn tracking.
    rows = db.IntegerProperty(required = True, default = 10)
    columns = db.IntegerProperty(required=True, default=10)
    turn = db.IntegerProperty(required=True, default=0)
    shiptypes = db.IntegerProperty(required=True, default =5)
    player1_turn = db.BooleanProperty(required=True, default=False)
    player2_turn = db.BooleanProperty(required=True, default=False)
    game_over = db.BooleanProperty(required=True, default=False)
    cpu = db.BooleanProperty() #BOT FUNCTION no bot yet
    # Game state machine value and status message shown to players.
    state = db.IntegerProperty()
    msg = db.StringProperty()
class BattleshipsMenu(WebHandler):
    """Start page for Battleships: name an opponent or play against the bot."""
    def get(self):
        if self.user:
            self.render('battleships-menu.html')
        else:
            self.redirect("/login")
    def post(self):
        if not self.user:
            # NOTE(review): no return after redirect; the handler keeps
            # executing for anonymous posts -- confirm this is intended.
            self.redirect("/login")
        player = self.request.get('player')
        if player!="":
            user_p2 = User.by_name(player)
            if user_p2 is None:
                error = "Player Not Found"
                self.render("battleships-menu.html", error = error)
            elif user_p2.name == self.user.name:
                error = "Cannot play on your own. Try playing with a bot."
                self.render("battleships-menu.html", error = error)
            else:
                # NOTE(review): the Battleships model declares `player1_turn`
                # (not `p1_turn`) and no `waiter` property, so these kwargs do
                # not match the model definition -- confirm/fix before use.
                opp = Battleships(player1 = self.user, player2 = user_p2, cpu = False, p1_turn = True, state = 0, waiter = True)
                opp.put()
                self.redirect('/battleshipsgame/%s' % str(opp.key().id()))
        else:
            # No opponent named: bot game.
            opp = Battleships(player1 = self.user, cpu = True, p1_turn = True, state = 0)
            opp.put()
            self.redirect('/battleshipsgame/%s' % str(opp.key().id()))
class BattleshipsGame(WebHandler):
    """View for one Battleships session (POST handling is incomplete)."""
    def get(self, session_id):
        if self.user:
            sid = db.Key.from_path('Battleships', int(session_id))
            battleships = db.get(sid)
            if battleships:
                # State 6 marks end-of-game; clear the status message.
                if battleships.state == 6:
                    battleships.msg=""
                print "\n\nG:WUBBA LUBBA DUB DUB\n[",self.user.name,"]\n[1]:\t", battleships.msg,"\n[S]:\t", battleships.state,"\n\n"
                print battleships.state
                print "MRPPYBTTHLE"
                player2 = "Bot Bit" if battleships.cpu else battleships.player2.name
                # Only the two participants may view the session.
                if self.user.name == battleships.player1.name or (not battleships.cpu and self.user.name == player2):
                    # NOTE(review): reads battleships.p1_turn, but the model
                    # declares player1_turn -- confirm which name is correct.
                    waiting = not battleships.cpu and (self.user.name == battleships.player1.name and not battleships.p1_turn) or (self.user.name == player2 and battleships.p1_turn) #and battleships.state == 0
                    print "\n\nG1:WUBBA LUBBA DUB DUB\n[",self.user.name,"]\n[1]:\t", battleships.msg,"\n[wait]:\t", waiting,"\n[S]:\t", battleships.state,"\n\n"
                    print "Ren"
                    self.render("battleships-game.html", wait=waiting, state=battleships.state, msg=battleships.msg, player1=battleships.player1.name, player2="Bot Bit" if battleships.cpu else battleships.player2.name)
                    print "Ren"
                    # Throttle polling clients while they wait for the opponent.
                    if battleships.state > 1 or waiting: #and battleships.state < 10:
                        sleep(2)
                else:
                    self.redirect("/login")
            else:
                self.redirect("/battleships")
        else:
            self.redirect("/login")
    def post(self, session_id ):
        def clear(self):
            os.system('tput reset') #clears the terminal window, does not just add new lines but deletes whats been written
        # NOTE(review): the handler loads the entity but does nothing with it;
        # the game's POST logic appears unimplemented.
        sid = db.Key.from_path('Battleships', int(session_id))
        battleships = db.get(sid)
#############################################################################################
class Snakes(db.Model):
    """Datastore record for one Snakes and Ladders session."""
    # Participants; player2 is unset for bot games.
    player1 = db.ReferenceProperty(User, collection_name = "snakes_p1")
    player2 = db.ReferenceProperty(User, collection_name = "snakes_p2")
    # Board positions (square 1-100) of each player.
    score1 = db.IntegerProperty()
    score2 = db.IntegerProperty()
    cpu = db.BooleanProperty()
    p1_turn = db.BooleanProperty()
    # State machine value driving SnakesGame.post, and the status message.
    state = db.IntegerProperty()
    msg = db.StringProperty()
    # Dice value the player "wished" for before rolling.
    rec_wish = db.IntegerProperty()
rec_wish = db.IntegerProperty()
class SnakesMenu(WebHandler):
    """Start page for Snakes and Ladders: pick a friend or play the bot."""
    def get(self):
        if self.user:
            self.render('game-menu.html', game = "Snakes and Ladders", online = Friends.online(self.user), offline = Friends.offline(self.user))
        else:
            self.redirect("/login")
    def post(self):
        if not self.user:
            # NOTE(review): no return after redirect; the handler keeps
            # executing for anonymous posts -- confirm this is intended.
            self.redirect("/login")
        player = self.request.get('player')
        if player!="":
            user_p2 = User.by_name(player)
            if user_p2 is None:
                error = "Player Not Found"
                self.render("game-menu.html", error = error, game = "Snakes and Ladders", online = Friends.online(self.user), offline = Friends.offline(self.user))
            elif user_p2.name == self.user.name:
                error = "Cannot play on your own. Try playing with a bot."
                self.render("game-menu.html", error = error, game = "Snakes and Ladders", online = Friends.online(self.user), offline = Friends.offline(self.user))
            else:
                # NOTE(review): the Snakes model declares no `waiter` property;
                # this kwarg does not match the model definition -- confirm.
                opp = Snakes(player1 = self.user, score1=1, score2=1, player2 = user_p2, cpu = False, p1_turn = True, state = 0, waiter = True)
                opp.put()
                self.redirect('/snakesgame/%s' % str(opp.key().id()))
        else:
            # No opponent named: bot game.
            opp = Snakes(player1 = self.user, score1=1, score2=1, cpu = True, p1_turn = True, state = 0)
            opp.put()
            self.redirect('/snakesgame/%s' % str(opp.key().id()))
class SnakesGame(WebHandler):
    """One Snakes and Ladders session.

    GET renders the board; POST advances a small state machine:
    0 = collect the player's "wish", 1 = roll the dice, 2 = apply snakes and
    ladders, 3 = bot move / hand over to opponent, 4/5/6 = turn- and
    game-end bookkeeping (the x10 values 40/50/60/70 mark "already seen").
    """
    # NOTE(review): height/left name their first parameter `cls` but are
    # ordinary instance methods (called as self.height(...)).
    def height(cls, num):
        # Pixel y-offset of board square *num* (10 squares per row).
        return 735-int((num-1)/10)*79.44444444
    def left(cls, num):
        # Pixel x-offset; alternate rows run right-to-left (boustrophedon).
        ld = 79.44444444 * (9 if int(num%10)==0 else int(num%10)-1)
        ld = (715 - ld) if int((num-1)/10)%2 == 1 else ld
        return 5+ld
    def get(self, session_id):
        if self.user:
            sid = db.Key.from_path('Snakes', int(session_id))
            snakes = db.get(sid)
            if snakes:
                # State 6 marks game end; clear the status message.
                if snakes.state == 6:
                    snakes.msg=""
                # print "\n\nG:WUBBA LUBBA DUB DUB\n[",self.user.name,"]\n[1]:\t", snakes.score1,"\n[2]:\t", snakes.score2,"\n[msg]:\t", snakes.msg,"\n[S]:\t", snakes.state,"\n\n"
                # print snakes.state
                # print "MRPPYBTTHLE"
                # Clamp both positions to the final square.
                snakes.score1 = 100 if snakes.score1>100 else snakes.score1
                snakes.score2 = 100 if snakes.score2>100 else snakes.score2
                snakes.put()
                # Convert board squares to pixel coordinates for the tokens.
                p1_h = self.height(snakes.score1)
                p1_l = self.left(snakes.score1)
                p2_h = self.height(snakes.score2)
                p2_l = self.left(snakes.score2)
                player2 = "Bot Bit" if snakes.cpu else snakes.player2.name
                # Only the two participants may view the session.
                if self.user.name == snakes.player1.name or (not snakes.cpu and self.user.name == player2):
                    waiting = not snakes.cpu and (self.user.name == snakes.player1.name and not snakes.p1_turn) or (self.user.name == player2 and snakes.p1_turn) #and snakes.state == 0
                    # print "\n\nG1:WUBBA LUBBA DUB DUB\n[",self.user.name,"]\n[1]:\t", snakes.score1,"\n[2]:\t", snakes.score2,"\n[msg]:\t", snakes.msg,"\n[wait]:\t", waiting,"\n[S]:\t", snakes.state,"\n\n"
                    # print "Ren"
                    self.render("snakes-game.html", wait=waiting, state=snakes.state, msg=snakes.msg, player1=snakes.player1.name, player2="Bot Bit" if snakes.cpu else snakes.player2.name, score1=snakes.score1, score2=snakes.score2, heid=p1_h, leid=p1_l, heib=p2_h, leib=15+p2_l)
                    # print "Ren"
                    # Throttle polling clients while they wait for the opponent.
                    if snakes.state > 1 or waiting: #and snakes.state < 10:
                        sleep(2)
                else:
                    self.redirect("/login")
            else:
                self.redirect("/snakes")
        else:
            self.redirect("/login")
    #############################################################################
    def post(self, session_id):
        # State 0 -> 1: acknowledge the player's wished dice value.
        def wish_dice(self, db):
            if db.rec_wish > 6 and db.rec_wish!=None:
                db.msg = "Yeah, don't get your hopes up about that one. Rolling</td><td><marquee>...</marquee>"
            elif db.rec_wish < 1 and db.rec_wish!=None:
                db.msg = "Cheer up, I'm sure you'll do better than that. Rolling</td><td><marquee>...</marquee>"
            else:
                db.msg = "One way to find out. Rolling</td><td><marquee>...</marquee>"
            db.state = 1
            db.put()
        # State 1 -> 2: roll, advance the current player's position.
        def roll_dice(self, db, num):
            d = randint(1,6)
            # print "\n\nR1:WUBBA LUBBA DUB DUB\n[1]:\t", db.score1,"\n[2]:\t", db.score2,"\n[S]:\t", db.state,"\n\n"
            if self.user.name == db.player1.name:
                db.score1 += d
                num = db.score1
            elif self.user.name == db.player2.name:
                db.score2 += d
                num = db.score2
            # print "\n\nYO SHANDA\t\n\n"# % snakes.rec_wish if not snakes.rec_wish is None
            # print type(snakes.rec_wish)
            # print "\n\n"
            # print "\n\nR2:WUBBA LUBBA DUB DUB\n[1]:\t", db.score1,"\n[2]:\t", db.score2,"\n[S]:\t", db.state,"\n\n"
            db.msg = "...You rolled a " + str(d) + ". "
            if db.rec_wish:
                if(d == num):
                    db.msg = db.msg + "Lucky you, nice roll."
                elif(abs(num-d) == 1):
                    db.msg = db.msg + "Oof...almost, but not quite."
                else:
                    db.msg = db.msg + "Don't worry; you'll get it next time."
            db.state = 2
            db.put()
        # State 2 -> 3: apply the fixed snakes/ladders transition tables.
        def check_for_snakes_and_ladders(self, db):
            ladders = {1:38,4:14,9:31,21:42,28:84,51:67,71:91,80:100}
            snakes = {98:79,95:75,93:73,87:24,64:60,62:19,17:7}
            db.msg = ""
            # print "\n\nS1:WUBBA LUBBA DUB DUB\n[1]:\t", db.score1,"\n[2]:\t", db.score2,"\n[S]:\t", db.state,"\n\n"
            if self.user.name == db.player1.name:
                if ladders.has_key(db.score1):
                    db.score1 = ladders[db.score1]
                    db.msg = "Its a ladder, Climb up :)"
                elif snakes.has_key(db.score1):
                    db.score1 = snakes[db.score1]
                    db.msg = "Its a snake!!, Come down :("
                db.msg += ' %d more to go. Keep it up!' % int(100-db.score1)
            elif self.user.name == db.player2.name:
                if ladders.has_key(db.score2):
                    db.score2 = ladders[db.score2]
                    db.msg = "Its a ladder, Climb up :)"
                elif snakes.has_key(db.score2):
                    db.score2 = snakes[db.score2]
                    db.msg = "Its a snake!!, Come down :("
                db.msg += ' %d more to go. Keep it up!' % int(100-db.score2)
            # print "\n\nS2:WUBBA LUBBA DUB DUB\n[1]:\t", db.score1,"\n[2]:\t", db.score2,"\n[S]:\t", db.state,"\n\n"
            db.state = 3
            db.put()
        # Bot turn: roll and apply snakes/ladders for player 2.
        def cpu(self, db):
            d = randint(1,6)
            db.score2 += d
            db.score2 = 100 if db.score2 > 100 else db.score2
            ladders = {1:38,4:14,9:31,21:42,28:84,51:67,71:91,80:100}
            snakes = {98:79,95:75,93:73,87:24,64:60,62:19,17:7}
            db.msg = 'Bot Bit rolled a %d, totalling to %d.' % (d, db.score2)
            if ladders.has_key(db.score2):
                db.score2 = ladders[db.score2]
                db.msg += ' Looks like someone is moving up to %d.' % db.score2
            elif snakes.has_key(db.score2):
                db.score2 = snakes[db.score2]
                db.msg += ' Moving down to %d does not seem to have been planned.' % db.score2
            db.msg += ' Bot Bit has %d left to win.' % int(100-db.score2)
            db.put()
            # print "\n\nC:WUBBA LUBBA DUB DUB\n[1]:\t", db.score1,"\n[2]:\t", db.score2,"\n[S]:\t", db.state,"\n\n"
        sid = db.Key.from_path('Snakes', int(session_id))
        snakes = db.get(sid)
        # Compose the end-of-game message for whichever side won.
        def win_output(self, db):
            if db.score1>=db.score2:
                db.msg = "%s has won the game! " % db.player1.name
                if self.user.name == db.player1.name:
                    db.msg += "Well Played!"
                elif not db.cpu:
                    if self.user.name == db.player2.name:
                        db.msg += "Better luck next time."
            else:
                if not db.cpu:
                    db.msg = "%s has won the game! " % db.player2.name
                    if self.user.name == db.player2.name:
                        db.msg += "Well Played!"
                    if self.user.name == db.player1.name:
                        db.msg += "Better luck next time."
                else:
                    db.msg = "Bot Bit has won the game! Better luck next time."
            db.put()
        # waiting = not snakes.cpu and (self.user.name == snakes.player1.name and not snakes.p1_turn) or (self.user.name == snakes.player2.name and snakes.p1_turn)
        # if waiting:
        #     print "TOO OLD"
        #     self.redirect('/snakesgame/%s' % str(snakes.key().id()))
        #     return
        # Record the wished dice value before entering the state machine.
        if snakes.state == 0:
            if self.request.get('wish'):
                snakes.rec_wish = int(self.request.get('wish'))
                snakes.put()
            else:
                self.redirect('/snakesgame/%s' % str(snakes.key().id()))
        p1_h = self.height(snakes.score1)
        p1_l = self.left(snakes.score1)
        p2_h = self.height(snakes.score2)
        p2_l = self.left(snakes.score2)
        player2 = "Bot Bit" if snakes.cpu else snakes.player2.name
        waiting= not snakes.cpu and ((self.user.name == snakes.player1.name and not snakes.p1_turn) or (self.user.name == player2 and snakes.p1_turn))
        #print snakes.state
        if self.user:
            # Only the two participants may drive the game.
            if self.user.name == snakes.player1.name or (not snakes.cpu and self.user.name == snakes.player2.name):
                # print "\n\nCHecker"
                # print (snakes.state == 4 or snakes.state == 5)
                # print snakes.state
                # Someone reached square 100: move to the end-of-game states.
                if (snakes.score1>=100 or snakes.score2>=100) and (snakes.state < 5 or snakes.state == 50):
                    # print "\nHERE 2\n"
                    if snakes.state == 50:
                        snakes.state = 6
                        # print "\n\n", self.user.name, " will give the final turn"
                        snakes.p1_turn = not snakes.p1_turn
                    else:
                        snakes.state = 5
                        win_output(self, snakes)
                    snakes.put()
                    self.redirect('/snakesgame/%s' % str(snakes.key().id()))
                elif snakes.state == 0:
                    # print "\n\nP1:WUBBA LUBBA DUB DUB\n[",self.user.name,"]\n[1]:\t", snakes.score1,"\n[2]:\t", snakes.score2,"\n[msg]:\t", snakes.msg,"\n[wait]:\t", waiting,"\n[S]:\t", snakes.state,"\n\n"
                    wish_dice(self, snakes)
                    self.redirect('/snakesgame/%s' % str(snakes.key().id()))
                elif snakes.state == 1:
                    # print "\n\nP2:WUBBA LUBBA DUB DUB\n[",self.user.name,"]\n[1]:\t", snakes.score1,"\n[2]:\t", snakes.score2,"\n[msg]:\t", snakes.msg,"\n[wait]:\t", waiting,"\n[S]:\t", snakes.state,"\n\n"
                    roll_dice(self, snakes, snakes.rec_wish)
                    self.redirect('/snakesgame/%s' % str(snakes.key().id()))
                elif snakes.state == 2:
                    # print "\n\nP3:WUBBA LUBBA DUB DUB\n[",self.user.name,"]\n[1]:\t", snakes.score1,"\n[2]:\t", snakes.score2,"\n[msg]:\t", snakes.msg,"\n[wait]:\t", waiting,"\n[S]:\t", snakes.state,"\n\n"
                    check_for_snakes_and_ladders(self, snakes)
                    self.redirect('/snakesgame/%s' % str(snakes.key().id()))
                elif snakes.state == 3:
                    # print "\n\nP4:WUBBA LUBBA DUB DUB\n[",self.user.name,"]\n[1]:\t", snakes.score1,"\n[2]:\t", snakes.score2,"\n[msg]:\t", snakes.msg,"\n[wait]:\t", waiting,"\n[S]:\t", snakes.state,"\n\n"
                    if snakes.cpu:
                        cpu(self, snakes)
                        if snakes.score2 >= 100:
                            win_output(self, snakes)
                        self.redirect('/snakesgame/%s' % str(snakes.key().id()))
                    else:
                        snakes.msg = "Waiting for opponent</td><td><marquee>...</marquee>"
                        snakes.state = 4
                        snakes.put()
                        self.redirect('/snakesgame/%s' % str(snakes.key().id()))
                elif snakes.state == 4 or snakes.state == 5 or snakes.state == 6:
                    # print "\nHERE 3\n"
                    #snakes.state = 40 if snakes.state == 4 else 50
                    if snakes.state == 6:
                        win_output(self, snakes)
                    # print "\nHERE 3:\t", snakes.msg,"\n\n"
                    # x10 marks "this state has been displayed once".
                    snakes.state *= 10
                    snakes.rec_wish = None
                    # if not snakes.cpu:
                    #     snakes.msg = "Waiting for opponent</td><td><marquee>...</marquee>"
                    snakes.put()
                    self.redirect('/snakesgame/%s' % str(snakes.key().id()))
                elif snakes.state == 40:
                    # Turn complete: hand over to the other player.
                    if not snakes.cpu:
                        snakes.p1_turn = not snakes.p1_turn
                    snakes.state = 0
                    snakes.put()
                    self.redirect('/snakesgame/%s' % str(snakes.key().id()))
                elif snakes.state == 50:
                    # Bot game over: remove the finished session.
                    if snakes.cpu:
                        snakes.delete()
                    self.redirect("/snakes/")
                elif snakes.state == 60:
                    snakes.state = 70
                    snakes.put()
                    self.redirect('/snakesgame/%s' % str(snakes.key().id()))
                elif snakes.state == 70:
                    # Both players have seen the result: clean up.
                    snakes.delete()
                    self.redirect("/snakes/")
                else:
                    self.redirect("/login")
                # print "\n\nPE:WUBBA LUBBA DUB DUB\n[",self.user.name,"]\n[1]:\t", snakes.score1,"\n[2]:\t", snakes.score2,"\n[msg]:\t", snakes.msg,"\n[wait]:\t", waiting,"\n[S]:\t", snakes.state,"\n\n"
        else:
            self.redirect("/login")
# URL routing table for the whole application.
app = webapp2.WSGIApplication([('/?', MainFront),
                               ('/friends', FriendsPage),
                               ('/newfriend', NewFriend),
                               ('/signup', Register),
                               ('/login', Login),
                               ('/logout', Logout),
                               ('/user', UserStat),
                               ('/battleships', BattleshipsMenu),
                               ('/battleshipsgame/([0-9]+)', BattleshipsGame),
                               ('/snakes/?', SnakesMenu),
                               ('/snakesgame/([0-9]+)', SnakesGame),
                               ],
                              debug=True)
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-11-07 21:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds the ETH_DHCP flag and updates defaults/help text on the LAN fields."""

    dependencies = [
        ('MainAPP', '0046_additionalcalculations_scale'),
    ]

    operations = [
        migrations.AddField(
            model_name='sitesettings',
            name='ETH_DHCP',
            field=models.BooleanField(default=True, help_text='Includes the server in the DHCP pool', verbose_name='Enable DHCP on the LAN network'),
        ),
        migrations.AlterField(
            model_name='sitesettings',
            name='ETH_GATE',
            field=models.GenericIPAddressField(default='1.1.1.1', help_text='This is the gateway IP of the LAN network that is providing the internet access.', protocol='IPv4', verbose_name='Gateway of the LAN network'),
        ),
        migrations.AlterField(
            model_name='sitesettings',
            name='ETH_IP',
            field=models.GenericIPAddressField(default='1.1.1.2', help_text='This is the IP for the LAN network that is providing the internet access.', protocol='IPv4', verbose_name='IP address for the LAN network'),
        ),
    ]
|
# author: Rebecca Ramnauth
# last update: 4 March 2020
from datetime import datetime
import json
import matplotlib.pyplot as plt
import numpy as np
def reaction_rate(items, sender):
    """Average reaction delay of *sender*, in milliseconds.

    For every message by *sender* (except the very first item, which is
    skipped), accumulates the absolute gap between it and the most recent
    message by anybody else, then divides by the total message count.

    Returns 0.0 for an empty message list (the original raised
    ZeroDivisionError there).

    NOTE(review): the denominator is len(items) -- all messages, not just
    the sender's -- kept as-is to preserve the existing metric.
    """
    if not items:
        return 0.0
    other_last_sent = 0
    reaction_sum = 0
    for item in items[1:]:
        if item.get('sender_name') != sender:
            # Remember when the other party last spoke.
            other_last_sent = item.get('timestamp_ms')
        else:
            reaction_sum += abs(other_last_sent - item.get('timestamp_ms'))
    return reaction_sum / len(items)
# NOTE (from original author): this is an acknowledged-incorrect measure of
# typing speed -- it divides the gap between the sender's consecutive
# messages by the length of the current message, then averages and converts
# milliseconds to seconds. Kept as-is apart from crash guards.
def get_typing_speed(messages, sender):
    """Return the (rough) typing speed of *sender* in characters per second.

    Guards added: returns 0.0 when *sender* has no messages (the original
    raised IndexError), and skips messages without text 'content' such as
    stickers/photos (the original raised TypeError/ZeroDivisionError).
    """
    items = filter_messages_by_sender(messages, sender)
    if not items:
        return 0.0
    speed_sum = 0
    timestamp_prev = items[0].get('timestamp_ms')
    for item in items:
        time_difference = timestamp_prev - item.get('timestamp_ms')
        timestamp_prev = item.get('timestamp_ms')
        content = item.get('content')
        if content:
            speed_sum += float(time_difference) / len(content)
    return speed_sum / len(items) / 1000
def filter_messages_by_sender(items, sender):
    """Return only the messages authored by *sender*, preserving order."""
    return [item for item in items if item.get('sender_name') == sender]
def test_plot():
    """Smoke-test matplotlib: a bare titled figure plus a 2x2 grid of axes."""
    bare_fig = plt.figure()  # an empty figure with no axes
    bare_fig.suptitle('No axes on this figure')  # title so we know which figure this is
    grid_fig, axes = plt.subplots(2, 2)
def main():
    """Load the exported chat log and print the reaction rate for 'Bfl Human'."""
    with open('run_1.json', 'r') as handle:
        conversation = json.load(handle)
    messages = conversation.get('messages')
    # Gap/content/sender triples per message (currently unused downstream).
    timespent_content = []
    timestamp_prev = messages[0].get('timestamp_ms')
    for message in messages:
        gap = timestamp_prev - message.get('timestamp_ms')
        timestamp_prev = message.get('timestamp_ms')
        timespent_content.append([gap, message.get('content'), message.get('sender_name')])
    #print("typing speed: ", get_typing_speed(messages, 'Bfl Participant'))
    print(reaction_rate(messages, 'Bfl Human'))
#test_plot()
# print("reaction rate: ", get_reaction_rate(messages, 'Bfl Participant'))
# Standard script entry point.
if __name__ == "__main__":
    main()
from scrapy.http import FormRequest
from scrapy.spider import Spider
from scrapy.utils.response import open_in_browser
import scrapy
class Post_Spider(Spider):
    """Logs into GitHub, then scrapes the account's teams/repos and the open
    pull requests of the ali-gillani/test repository."""
    name = "Post"
    allowed_domains = ["github.com"]
    start_urls = ["https://github.com/login"]
    def parse(self, response):
        # Lift the CSRF token from the login form so the POST is accepted.
        token = response.css('input[name="authenticity_token"]::attr(value)')[0].extract()
        # SECURITY NOTE(review): credentials are hard-coded in source; move
        # them to spider settings or environment variables before sharing.
        data = {'authenticity_token': token,
                'login': 'ali-gillani',
                'password': 'aliali786'}
        yield FormRequest.from_response(response,
                                        formdata=data,
                                        callback=self.parse_repo
                                        )
    def parse_repo(self, response):
        """This will fetch all the teams and repos of the user"""
        open_in_browser(response)
        self.record = {
            'Teams': response.css('span.width-fit::text').extract(),
            'All repositories': response.css('a.d-flex::attr(href)').extract(),
        }
        yield self.record
        yield scrapy.Request(url='https://github.com/ali-gillani/test/pulls', callback=self.parse_pull)
    def parse_pull(self, response):
        """This will fetch all the pull requests in the-lab"""
        open_in_browser(response)
        number_of_elements = len(response.css('a.link-gray-dark::text').extract())
        for number in range(0, number_of_elements):
            pull_request= {
                'Pull name': response.css('a.link-gray-dark::text')[number].extract(),
                'Pull link': response.css('a.link-gray-dark::attr(href)')[number].extract()
            }
            yield pull_request
import pickle
import socket
import sys
import logging
import threading
import time
import random
import Queue
class LeaderElection():
    """Bully-style leader election over TCP port 10040.

    Nodes are ranked by the last octet of their IPv4 address. A node that
    receives an 'initiate' or 'election' message challenges every
    higher-ranked member; if nobody acks, it broadcasts a 'coordinate'
    message claiming leadership. The elected leader's IP is published to
    other threads through elect_queue. Messages are pickled dicts with a
    'cmd' key: 'initiate', 'election', 'election_ack' or 'coordinate'.
    """

    def __init__(self, mlist, elect_queue):
        # Election messages are exchanged over this host/port.
        self.ehost = ''
        self.eport = 10040
        # Shared membership list (owned by the caller).
        self.mlist = mlist
        self.current_node = socket.gethostbyname(socket.gethostname())
        # Queue used to publish the election result to other threads.
        self.elect_queue = elect_queue
        # election_id de-duplicates recurring election runs.
        self.election_id = None
        # Node id is the last octet of the IPv4 address.
        ip_split = self.current_node.split(".")
        self.current_id = int(ip_split[len(ip_split)-1])
        # Protocol flags, read/written across threads (see the lock in run()).
        self.election_in_progress = False
        self.election_ack = False
        self.rcvd_coordination = False
        # Current leader IP and the active election handler thread, if any.
        self.leader = None
        self.election_hdle = None

    # Function to create and bind socket
    def create_socket_bind(self):
        """Create, configure and bind the listening socket; exit on failure."""
        peer_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        peer_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        try:
            peer_socket.bind((self.ehost, self.eport))
        except (socket.error, socket.gaierror) as err_msg:
            logging.exception(err_msg)
            peer_socket.close()
            sys.exit()
        return peer_socket

    def _start_election_run(self, lock):
        """Reset the run flags and spawn the election handler thread."""
        self.election_in_progress = True
        self.election_ack = False
        self.rcvd_coordination = False
        # BUG FIX: the original passed target=self.election_msg_hdler(lock,),
        # which *called* the handler synchronously and handed its None return
        # value to Thread -- the worker thread therefore did nothing and the
        # election ran inline on the receive loop. Pass the callable + args.
        self.election_hdle = threading.Thread(target=self.election_msg_hdler,
                                              args=(lock,))
        self.election_hdle.daemon = True
        self.election_hdle.start()

    # Function to accept connection, process the command from Datanode/client
    def recv_cmd(self, peer_socket, lock):
        """Accept connections and dispatch election protocol commands forever."""
        logging.info("Election thread Waiting to receive")
        while True:
            peer_socket.listen(20)
            (conn_socket, (client_ip, client_port)) = peer_socket.accept()
            msg = conn_socket.recv(8192)
            rcv_msg = pickle.loads(msg)
            # Handle initial leader election /
            # initiate new leader when master node fail is detected
            if rcv_msg['cmd'] == 'initiate':
                logging.info("Received Fail detect/initiate message")
                if self.election_id != rcv_msg['id']:
                    self.current_state = 'election'
                    self.election_id = rcv_msg['id']
                    if self.election_in_progress == False:
                        self._start_election_run(lock)
                conn_socket.sendall("Initiated")
            # Receive and handle election message
            elif rcv_msg['cmd'] == 'election':
                logging.info("Received Election message ")
                ip_split = client_ip.split(".")
                client_id = int(ip_split[len(ip_split)-1])
                # Ack the initiator before (possibly) joining the election.
                send_pkt = pickle.dumps({
                    'cmd': 'election_ack'
                })
                self.send_query_master(send_pkt, (client_ip, int(self.eport)))
                logging.info("Sent Ack to election initiator")
                # Start a new election run only if this election id is new.
                if self.election_id != rcv_msg['id']:
                    self.election_id = rcv_msg['id']
                    if self.election_in_progress == False:
                        self._start_election_run(lock)
                else:
                    logging.info("Duplicate Election Message, No new election run taken")
            # Receive and handle election_ack message
            elif rcv_msg['cmd'] == 'election_ack':
                logging.info("Received Election Ack message ")
                self.election_ack = True
            # Receive and handle coordinate message
            elif rcv_msg['cmd'] == 'coordinate':
                logging.info("Received cordinate message ")
                if self.rcvd_coordination == False:
                    # New leader announced: publish it and end the election.
                    self.elect_queue.put(client_ip)
                    self.leader = client_ip
                    self.rcvd_coordination = True
                    self.election_ack = True
                    self.election_in_progress = False
                    if self.election_hdle is not None:
                        self.election_hdle.join()
                        self.election_hdle = None
            conn_socket.close()
        peer_socket.close()  # unreachable (infinite loop); kept from original

    def election_msg_hdler(self, lock):
        """One election run: challenge higher-id nodes or claim leadership."""
        # ID is defined as the last octet of the IPv4 address.
        ip_split = self.current_node.split(".")
        current_id = int(ip_split[len(ip_split)-1])
        lst = self.mlist.lst
        # Collect members with a higher id than ours.
        vm_list = []
        for i in range(len(lst)):
            host = lst[i]['host']
            ip_split = host.split(".")
            host_id = int(ip_split[len(ip_split)-1])
            if host_id > current_id:
                vm_list.append(host)
        if len(vm_list) > 0:
            # Challenge every higher-id node, then wait for an ack.
            send_pkt = pickle.dumps({
                'cmd': 'election',
                'id': self.election_id
            })
            for each_vm in vm_list:
                node_id = (each_vm, int(self.eport))
                self.send_query(send_pkt, node_id)
            self.election_ack_wait(lock)
        else:
            # No higher-id node exists: this node is the leader; broadcast
            # the coordination message to every member.
            send_pkt = pickle.dumps({
                'cmd': 'coordinate',
                'id': self.election_id
            })
            for i in range(len(lst)):
                host = lst[i]['host']
                node_id = (host, int(self.eport))
                self.send_query(send_pkt, node_id)

    # Wait for AcK message from any one higher order node
    def election_ack_wait(self, lock):
        """Sleep an id-scaled timeout, then ack => wait for leader, no ack => claim leadership."""
        size_mlist = len(self.mlist.lst)
        # Timeout is a function of node_id: lower node_ids wait longer than
        # higher node_ids (fix for thread not yielding).
        mult = 16 - int(self.current_id)
        if mult > 0:
            t1 = mult
        else:
            t1 = 5
        if size_mlist > 0:
            wait_timeout = (2.8 / (2 * size_mlist - 1)) * t1
        else:
            wait_timeout = (0.120) * t1
        # Sleep, wait for ack
        time.sleep(wait_timeout)
        lock.acquire()
        ack_rcvd = self.election_ack
        crd_rcvd = self.rcvd_coordination
        lock.release()
        if ack_rcvd == True and crd_rcvd == False:
            # A higher node answered; wait for its coordination message.
            self.coordination_wait(wait_timeout, lock)
        elif ack_rcvd == False and crd_rcvd == False:
            # Current node is leader: no ack received after the timeout.
            send_pkt = pickle.dumps({
                'cmd': 'coordinate',
                'id': self.election_id
            })
            lst = self.mlist.lst
            # Send coordinate messages to all nodes.
            for i in range(len(lst)):
                host = lst[i]['host']
                node_id = (host, int(self.eport))
                self.send_query(send_pkt, node_id)
        else:
            # Coordination already received; this run is done.
            pass

    # wait to receive leader info
    def coordination_wait(self, wait_timeout, lock):
        """Wait for the coordinate message; restart the election if it never arrives."""
        # Co-ordination timeout is 3 times the wait_ack timeout.
        final_timeout = wait_timeout * 3.0
        time.sleep(final_timeout)
        # If no coordinator message has arrived, initiate another election by
        # sending an election message to ourselves.
        lock.acquire()
        crd_rcvd = self.rcvd_coordination
        lock.release()
        if crd_rcvd == False:
            send_pkt = pickle.dumps({
                'cmd': 'election',
                'id': self.current_node
            })
            node_id = ('', int(self.eport))
            lock.acquire()
            self.election_in_progress = False
            lock.release()
            self.send_query(send_pkt, node_id)

    # Function establishes socket
    # Sends commands to other nodes in election process
    def send_query(self, query, master):
        """Best-effort send of *query* to the (host, port) pair *master*.

        Connection errors are logged and swallowed; always returns ''.
        """
        ret = ''
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.connect(master)
            sock.sendall(query)
            sock.close()
        except (socket.error, socket.gaierror) as err_msg:
            logging.info(str(err_msg))
        return ret

    def send_query_master(self, query, master):
        """Same contract as send_query; kept as a separate name for callers."""
        # Consistency fix: delegate instead of duplicating the socket code.
        return self.send_query(query, master)

    def run(self):
        """Configure logging, bind the election port and start the receive thread."""
        logging.basicConfig(filename = "leader.log", level = logging.INFO, filemode = "w")
        lock = threading.Lock()
        peer_socket = self.create_socket_bind()
        # Start election messaging thread.
        election_thread = threading.Thread(target=self.recv_cmd, args=(peer_socket, lock,))
        election_thread.daemon = True
        election_thread.start()
|
# Interactive console calculator.
#
# Repeatedly prompts for an operation and operand(s), prints the result and
# exits when the user enters '0'.  Non-numeric input is rejected with a
# message instead of crashing; division/modulo by zero is reported rather
# than raising ZeroDivisionError.
name = input('Please, write your name:')

# Dispatch table for the binary operations: symbol -> function of two ints.
binary_ops = {
    '+': lambda a, b: a + b,
    '-': lambda a, b: a - b,
    '*': lambda a, b: a * b,
    '/': lambda a, b: a / b,
    '//': lambda a, b: a // b,
    '%': lambda a, b: a % b,
    '**': lambda a, b: a ** b,
}

while True:
    operator = input(f'{name.capitalize()} Виберіть операцію з +; -; *; /; //; %; **; round; square of number or/'
                     f'if your want exit please push 0:')
    if operator == '0':
        break
    if operator in binary_ops:
        # The '**' branch uses different prompts than the other operators.
        if operator == '**':
            number1 = input('Please, write your digit:')
            number2 = input('Please, write degree of number:')
        else:
            number1 = input('Please, write first digit:')
            number2 = input('Please, write second digit:')
        # BUG FIX: the original '+' branch tested `number1.isdigit` without
        # calling it (always truthy), so non-digit input crashed in int().
        if not (number1.isdigit() and number2.isdigit()):
            print(f'{number1} or {number2} is not digits.')
        elif operator in ('/', '//', '%') and int(number2) == 0:
            # Robustness fix: the original raised ZeroDivisionError here.
            print('Division by zero is not allowed.')
        else:
            result = binary_ops[operator](int(number1), int(number2))
            print(f'{number1} {operator} {number2} = {result}')
    elif operator == 'square of number':
        number1 = input('Please, write your digit:')
        if number1.isdigit():
            print(f'{number1} * {number1} = {int(number1) * int(number1)}')
        else:
            print(f'{number1} or {number1} is not digits.')
    elif operator == 'round':
        raw = input('Please, write your digit:')
        try:
            # BUG FIX: the original called float() unguarded (ValueError on
            # bad input) and its `if number1 == number1:` check was always
            # true, leaving the error branch unreachable.
            print(round(float(raw)))
        except ValueError:
            print(f'{raw} is not digits.')
print('Thank you for a spend your time!')
|
import torch
import torch.nn as nn
import torch.nn.functional as F
def conv3x3x3(in_planes, out_planes, stride):
    """Return a 3x3x3 Conv3d with padding 1 (spatial size preserved at stride 1)."""
    return nn.Conv3d(in_planes, out_planes,
                     kernel_size=3, stride=stride, padding=1)
def upconv3x3x3(in_planes, out_planes, stride):
    """Return a 3x3x3 transposed convolution that upsamples by *stride*.

    Output spatial size is exactly ``stride * input_size``
    ((in-1)*stride - 2*padding + kernel + output_padding).

    BUG FIX: the original ignored *stride* (hard-coded stride=1) while
    keeping output_padding=1, a combination conv_transpose rejects because
    output_padding must be smaller than the stride (or dilation).
    """
    return nn.ConvTranspose3d(
        in_planes,
        out_planes,
        kernel_size=3,
        stride=stride,
        padding=1,
        output_padding=stride - 1)
def conv_block_3d(in_dim, out_dim, activation):
    """Conv3d (3x3x3, stride 1, padding 1) -> BatchNorm3d -> activation."""
    layers = [
        nn.Conv3d(in_dim, out_dim, kernel_size=3, stride=1, padding=1),
        nn.BatchNorm3d(out_dim),
        activation,
    ]
    return nn.Sequential(*layers)
def conv_trans_block_3d(in_dim, out_dim, activation, stride=2):
    """ConvTranspose3d (3x3x3, padding 1, output_padding 1) -> BatchNorm3d -> activation.

    With the default stride of 2 this doubles each spatial dimension.
    """
    upconv = nn.ConvTranspose3d(in_dim, out_dim, kernel_size=3, stride=stride,
                                padding=1, output_padding=1)
    return nn.Sequential(upconv, nn.BatchNorm3d(out_dim), activation)
def max_pooling_3d():
    """Return a 2x2x2 max-pool (stride 2) that halves each spatial dimension."""
    return nn.MaxPool3d(kernel_size=2, stride=2, padding=0)
def conv_block_2_3d(in_dim, out_dim, activation, stride=1):
    """Two stacked 3x3x3 convolutions.

    First conv (via conv_block_3d) includes BN + activation; the second conv
    (stride=*stride*) is followed by BatchNorm3d only, with no activation.
    """
    second_conv = nn.Conv3d(out_dim, out_dim, kernel_size=3,
                            stride=stride, padding=1)
    return nn.Sequential(
        conv_block_3d(in_dim, out_dim, activation),
        second_conv,
        nn.BatchNorm3d(out_dim),
    )
|
import pandas as pd
from tqdm import tqdm
from magics_with_UCM_region import choose_region
def print_to_csv_age(recommender1, recommender2, filename):
    """Write top-10 recommendations for every target user to output/<filename>.csv.

    Warm users get recommender1; cold users get an age-specific TopPop list
    when their age is known, otherwise recommender2.
    """
    range_target_users = list(pd.read_csv('dataset/data_target_users_test.csv')["user_id"])
    # Sets give O(1) membership tests inside the per-user loop; the original
    # scanned Python lists for every user.
    cold_users = set(pd.read_csv('myFiles/coldUsers_id.csv')["cold_users_id"])
    ages = pd.read_csv('dataset/data_UCM_age.csv')
    ages1 = ages.set_index("row", drop=False)
    users_with_age = set(ages["row"])
    # Context manager guarantees the CSV is closed even if a recommender raises.
    with open('output/' + filename + '.csv', mode='w') as output_file:
        print('user_id,item_list', file=output_file)
        for user_id in tqdm(range_target_users):
            if user_id in cold_users:
                if user_id in users_with_age:
                    age_of_user = ages1.loc[user_id, 'col']
                    # BUG FIX: the original never closed these TopPop files.
                    with open('myFiles/TopPopForAge' + '{}'.format(age_of_user) + '.txt') as f:
                        items_rec = f.read().split('\n')
                else:
                    items_rec = recommender2.recommend(user_id, at=10)
            else:
                items_rec = recommender1.recommend(user_id, at=10)
            sarr = [str(a) for a in items_rec]
            print('{},'.format(user_id) + ' '.join(sarr), file=output_file)
def print_to_csv_region(recommender1, recommender2, filename):
    """Write top-10 recommendations for every target user to output/<filename>.csv.

    Warm users get recommender1; cold users with a known region get a
    region-specific TopPop list (choose_region picks one when the user has
    several regions); remaining cold users fall back to recommender2.
    """
    range_target_users = list(pd.read_csv('dataset/data_target_users_test.csv')["user_id"])
    # Sets give O(1) membership tests inside the per-user loop; the original
    # scanned Python lists for every user.
    cold_users = set(pd.read_csv('myFiles/coldUsers_id.csv')["cold_users_id"])
    more_region = list(pd.read_csv('myFiles/users_with_more_regions.csv')["user_id"])
    region = pd.read_csv('dataset/data_UCM_region.csv')
    # One comma-joined string of region ids per (row, data) pair, indexed by user.
    region1 = region.groupby(['row', 'data'])['col'].apply(lambda x: ','.join(x.astype(str))).reset_index()
    region2 = region1.set_index('row', drop=False)
    users_with_region = set(region["row"])
    cold_users_with_more_region = {i for i in more_region if i in cold_users}
    with open('output/' + filename + '.csv', mode='w') as output_file:
        print('user_id,item_list', file=output_file)
        for user_id in tqdm(range_target_users):
            if user_id in cold_users:
                if user_id in users_with_region:
                    if user_id in cold_users_with_more_region:
                        temp = region2.loc[user_id, 'col']
                        # NOTE(review): iterates characters of the joined
                        # string, so region ids are assumed single-digit -- confirm.
                        temp_list = [int(c) for c in temp if c != ',']
                        region_of_user = choose_region(temp_list)
                    else:
                        region_of_user = region2.loc[user_id, 'col']
                    # BUG FIX: the original never closed these TopPop files.
                    with open('myFiles/TopPopForRegion' + '{}'.format(region_of_user) + '.txt') as f:
                        items_rec = f.read().split('\n')
                else:
                    items_rec = recommender2.recommend(user_id, at=10)
            else:
                items_rec = recommender1.recommend(user_id, at=10)
            sarr = [str(a) for a in items_rec]
            print('{},'.format(user_id) + ' '.join(sarr), file=output_file)
def print_to_csv_age_and_region(recommenderHybrid, recommenderTP, recommenderHybridSemiCold, recommenderUserCBF, filename):
    """Write top-10 recommendations per target user to output/<filename>.csv.

    Cold users with both age and region get a combined TopPop list; cold
    users with age only get an age TopPop list; remaining cold users fall
    back to recommenderTP.  Warm and semi-cold users are then handled by
    recommenderHybrid / recommenderHybridSemiCold, overriding any cold-user
    result (mirrors the original control flow).  recommenderUserCBF is kept
    for interface compatibility; it is currently unused.
    """
    range_target_users = list(pd.read_csv('dataset/data_target_users_test.csv')["user_id"])
    # Sets give O(1) membership tests inside the per-user loop; the original
    # scanned Python lists for every user.
    cold_users = set(pd.read_csv('myFiles/coldUsers_id.csv')["cold_users_id"])
    semi_cold_users = set(pd.read_csv('myFiles/semiColdUsers_id.csv')['semi_cold_users_id'])
    warm_users = set(pd.read_csv('myFiles/warm_users_id.csv')['warm_users_id'])
    ages = pd.read_csv('dataset/data_UCM_age.csv')
    ages1 = ages.set_index("row", drop=False)
    users_with_age = set(ages["row"])
    both_list = set(pd.read_csv('region_user_combination/both.csv')['user_with_both'])
    region = pd.read_csv('dataset/data_UCM_region.csv')
    # One comma-joined string of region ids per (row, data) pair, indexed by user.
    region1 = region.groupby(['row', 'data'])['col'].apply(lambda x: ','.join(x.astype(str))).reset_index()
    region2 = region1.set_index('row', drop=False)
    more_region = list(pd.read_csv('myFiles/users_with_more_regions.csv')["user_id"])
    cold_users_with_more_region = {i for i in more_region if i in cold_users}
    with open('output/' + filename + '.csv', mode='w') as output_file:
        print('user_id,item_list', file=output_file)
        for user_id in tqdm(range_target_users):
            if user_id in cold_users:
                if user_id in both_list:
                    age_of_user = ages1.loc[user_id, 'col']
                    if user_id in cold_users_with_more_region:
                        temp = region2.loc[user_id, 'col']
                        # NOTE(review): iterates characters of the joined
                        # string, so region ids are assumed single-digit -- confirm.
                        temp_list = [int(c) for c in temp if c != ',']
                        region_of_user = choose_region(temp_list)
                    else:
                        region_of_user = region2.loc[user_id, 'col']
                    path = 'myCombinations/TopPopForAge' + '{}'.format(age_of_user) + 'AndRegion{}'.format(region_of_user) + '.txt'
                    # BUG FIX: the original never closed these TopPop files.
                    with open(path) as f:
                        items_rec = f.read().split('\n')
                else:
                    if user_id in users_with_age:
                        age_of_user = ages1.loc[user_id, 'col']
                        with open('myFiles/TopPopForAge' + '{}'.format(age_of_user) + '.txt') as f:
                            items_rec = f.read().split('\n')
                    else:
                        items_rec = recommenderTP.recommend(user_id)[0:10]
            # Deliberately not `elif`: warm/semi-cold assignments override any
            # cold-user result, exactly as in the original.  NOTE(review): a
            # user in none of the three sets reuses items_rec from the
            # previous iteration (NameError on the first) -- confirm the sets
            # partition all target users.
            if user_id in warm_users:
                items_rec = recommenderHybrid.recommend(user_id)[0:10]
            if user_id in semi_cold_users:
                items_rec = recommenderHybridSemiCold.recommend(user_id)[0:10]
            sarr = [str(a) for a in items_rec]
            print('{},'.format(user_id) + ' '.join(sarr), file=output_file)
|
# Copyright (C) 2021, RTE (http://www.rte-france.com)
# SPDX-License-Identifier: Apache-2.0
from vm_manager.vm_manager import (
list_vms,
start,
stop,
create,
clone,
remove,
enable_vm,
disable_vm,
is_enabled,
status,
create_snapshot,
remove_snapshot,
list_snapshots,
purge_image,
rollback_snapshot,
list_metadata,
get_metadata,
set_metadata,
)
|
import numpy as np
import theano
import theano.tensor as T
from .initialization import random_init, create_shared
from .initialization import ReLU, tanh, linear, sigmoid
from .basic import Layer, RecurrentLayer
class IterAttentionLayer(Layer):
    """Iterative (multi-hop) attention layer over a sequence of vectors.

    Projects the input sequence with V, optionally conditions on user
    embeddings (or the shared query vector U when none are given), and
    produces a softmax-like weighted sum of the sequence per hop.
    """

    def __init__(self, n_in, n_out):
        # n_in: input feature size; n_out: attention projection size.
        self.n_in = n_in
        self.n_out = n_out
        self.create_parameters()

    def create_parameters(self):
        """Allocate the shared parameter tensors.

        NOTE(review): random_init((n_out)) passes a plain int, not a
        1-tuple -- confirm random_init accepts that; V2 is created but never
        used in multi_hop_forward, and U/a/V1 are attention query/bias/score
        vectors.
        """
        n_in = self.n_in
        n_out = self.n_out
        self.W = create_shared(random_init((n_in, n_out)), name="W")
        self.a = create_shared(random_init((n_out)), name="a")
        self.V = create_shared(random_init((n_out, n_out)), name="V")
        self.V1 = create_shared(random_init((n_out)), name="V1")
        self.V2 = create_shared(random_init((n_out)), name="V2")
        self.U = create_shared(random_init((n_in)), name="U")
        self.lst_params = [self.W, self.a, self.V1, self.V2, self.V, self.U]

    def multi_hop_forward(self, prev_output, user_embs=None, isWord=True, hop=1, masks=None):
        """Run *hop* attention passes over prev_output and return the final
        attended vector(s).

        prev_output -- symbolic tensor, presumably (time, batch, n_in) --
                       TODO confirm against callers.
        user_embs   -- optional per-user embeddings used as attention query;
                       when absent, the shared vector U is the query.
        isWord      -- word level (4-D grouping by user) vs sentence level.
        masks       -- optional validity mask multiplied into the scores.
        """
        W = self.W
        V = self.V
        V1 = self.V1
        U = self.U
        a = self.a
        n_out = self.n_out
        # (time, batch, feat) -> (batch, time, feat).
        doc_vecs = prev_output.dimshuffle(1, 0, 2)
        if isWord and user_embs:
            # Group word rows per user: (users, docs_per_user, time, feat).
            # NOTE(review): `/` on symbolic shapes is true division under
            # Python 3 -- presumably this code targets Python 2 (`//` would
            # be the safe form); confirm.
            doc_vecs = doc_vecs.reshape(
                (user_embs.shape[0], doc_vecs.shape[0] / user_embs.shape[0], doc_vecs.shape[1], doc_vecs.shape[2]))
        s_Rs = []
        # Hop-invariant part of the score input: V-projection plus bias.
        v_tmp_ = T.dot(doc_vecs, V) + a
        for i in range(hop):
            v_tmp = v_tmp_
            if i == 0:
                # First hop: add the query (user embedding or shared U).
                if isWord:
                    if user_embs:
                        v_tmp = v_tmp + \
                            T.dot(user_embs, W).dimshuffle(0, 'x', 'x', 1)
                    else:
                        v_tmp = v_tmp + T.dot(U, W)
                else:
                    if user_embs:
                        v_tmp = v_tmp + \
                            T.dot(user_embs, W).dimshuffle(0, 'x', 1)
                    else:
                        v_tmp = v_tmp + T.dot(U, W)
            else:
                # NOTE(review): p_tmp (previous hop's result) is assigned but
                # never folded into the score -- later hops recompute the
                # same attention; looks unfinished, confirm intent.
                p_tmp = s_Rs[-1]
            # Unnormalized attention scores.
            alpha = T.exp(T.dot(T.tanh(v_tmp), V1))
            if masks is not None:
                if masks.dtype != theano.config.floatX:
                    masks = T.cast(masks, theano.config.floatX)
                if isWord and user_embs:
                    # Reshape the mask to match the per-user grouping above.
                    masks = masks.dimshuffle(1, 0)
                    masks = masks.reshape(
                        (user_embs.shape[0], masks.shape[0] / user_embs.shape[0], masks.shape[1]))
                    alpha = alpha * masks
                else:
                    alpha = alpha * masks.dimshuffle(1, 0)
            if isWord and user_embs:
                # Normalize over the time axis (epsilon avoids divide-by-zero
                # on fully masked rows) and take the weighted sum.
                alpha_S = T.sum(alpha, axis=2)
                alpha = alpha / (alpha_S.dimshuffle(0, 1, 'x') + 1e-5)
                s_Rs.append(
                    T.sum(doc_vecs * alpha.dimshuffle(0, 1, 2, 'x'), axis=2))
            else:
                alpha_S = T.sum(alpha, axis=1)
                alpha = alpha / (alpha_S.dimshuffle(0, 'x') + 1e-5)
                s_Rs.append(
                    T.sum(doc_vecs * alpha.dimshuffle(0, 1, 'x'), axis=1))
        # Result of the last hop; flatten the user grouping back to rows.
        result_vec = s_Rs[-1]
        if isWord and user_embs:
            result_vec = result_vec.reshape(
                (result_vec.shape[0] * result_vec.shape[1], result_vec.shape[2]))
        return result_vec

    @property
    def params(self):
        # Parameters in the fixed order set by create_parameters.
        return self.lst_params

    @params.setter
    def params(self, param_list):
        # Copy values in; lengths must match the layer's own parameter list.
        assert len(param_list) == len(self.lst_params)
        for p, q in zip(self.lst_params, param_list):
            p.set_value(q.get_value())
class CNN(Layer):
    """Non-linear string-CNN layer: *order* stacked linear maps whose partial
    sums are carried across time steps via theano.scan.

    The scan state `hc` packs the per-order running sums c_1..c_order
    followed by the hidden output h, each of width n_out.
    """

    def __init__(self, n_in, n_out, activation=tanh,
                 order=1, clip_gradients=False):
        # order: n-gram width, i.e. how many linear input layers are stacked.
        self.n_in = n_in
        self.n_out = n_out
        self.activation = activation
        self.order = order
        self.clip_gradients = clip_gradients
        internal_layers = self.internal_layers = []
        for i in range(order):
            # One bias-free linear map per order position.
            input_layer = Layer(n_in, n_out, linear, has_bias=False,
                                clip_gradients=clip_gradients)
            internal_layers.append(input_layer)
        # Single shared bias applied before the output activation.
        self.bias = create_shared(random_init((n_out,)), name="bias")

    def forward(self, x, mask, hc):
        """One scan step: update the packed state `hc` given input x.

        mask zeroes out padded positions so they do not pollute the state.
        NOTE(review): h_tm1 and c_im1_t are computed but unused -- presumably
        leftovers; confirm before removing.
        """
        order, n_in, n_out, activation = self.order, self.n_in, self.n_out, self.activation
        layers = self.internal_layers
        if hc.ndim > 1:
            h_tm1 = hc[:, n_out * order:]
        else:
            h_tm1 = hc[n_out * order:]
        lst = []
        for i in range(order):
            # Previous step's running sum for order position i.
            if hc.ndim > 1:
                c_i_tm1 = hc[:, n_out * i:n_out * i + n_out]
            else:
                c_i_tm1 = hc[n_out * i:n_out * i + n_out]
            in_i_t = layers[i].forward(x)
            if i == 0:
                c_i_t = in_i_t
            else:
                # Chain in the (i-1)-th sum from the previous time step.
                c_i_t = in_i_t + c_im1_tm1
            lst.append(T.cast(c_i_t * mask.dimshuffle(0, 'x'), 'float32'))
            c_im1_tm1 = c_i_tm1
            c_im1_t = c_i_t
        h_t = activation(c_i_t + self.bias)
        lst.append(T.cast(h_t * mask.dimshuffle(0, 'x'), 'float32'))
        if hc.ndim > 1:
            return T.concatenate(lst, axis=1)
        else:
            return T.concatenate(lst)

    def forward_all(self, x, masks=None, h0=None, return_c=False, direction=None):
        """Run forward over a whole sequence with theano.scan.

        Returns only the hidden slice h unless return_c is set, in which
        case the full packed state is returned.  `direction` is accepted for
        interface compatibility but unused here.
        """
        if h0 is None:
            if x.ndim > 1:
                h0 = T.zeros(
                    (x.shape[1], self.n_out * (self.order + 1)), dtype=theano.config.floatX)
            else:
                h0 = T.zeros((self.n_out * (self.order + 1),),
                             dtype=theano.config.floatX)
        # BUG FIX (idiom): `masks == None` -> `masks is None`; equality
        # comparison against None on a symbolic variable is fragile.
        if masks is None:
            masks = T.ones((x.shape[0], x.shape[1]),
                           dtype=theano.config.floatX)
        h, _ = theano.scan(
            fn=self.forward,
            sequences=[x, masks],
            outputs_info=[h0]
        )
        if return_c:
            return h
        elif x.ndim > 1:
            return h[:, :, self.n_out * self.order:]
        else:
            return h[:, self.n_out * self.order:]

    @property
    def params(self):
        # All internal layer weights followed by the shared bias.
        return [x for layer in self.internal_layers for x in layer.params] + [self.bias]

    @params.setter
    def params(self, param_list):
        # Distribute values back to the internal layers, bias last.
        start = 0
        for layer in self.internal_layers:
            end = start + len(layer.params)
            layer.params = param_list[start:end]
            start = end
        self.bias.set_value(param_list[-1].get_value())
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 17 20:19:11 2021
@author: UBTOHTS
"""
import sys
from PyQt5 import QtWidgets
import pandas as pd
import sqlite3
from sqlite3 import OperationalError
import os
from main import MainWindow
class lectable(MainWindow):
    """Lecture-table tab logic: saves the Qt table widget to SQLite
    ('sqldata/lectable.db', table 'lecdata'), drops it, and reloads it."""

    def outputshown(self, x):
        """Clear the error box and append message *x* to the lecture log pane."""
        self.ui.tb_error.clear()
        self.ui.lec_error.append(x)
# =============================================================================
# Load the data from the lectable
# =============================================================================
    def loadtbdata(self, enable):
        """Snapshot every cell of lec_table into a DataFrame and persist it
        to the 'lecdata' table of sqldata/lectable.db."""
        if enable:
            self.ui.lec_table.setHorizontalHeaderLabels(["LECTURE NAME","MS-SUB NAME", "MS-LEC NAME", "ALTER NAME"])
            stylesheet = "::section{color:rgb(0,0,0);}"
            self.ui.lec_table.horizontalHeader().setStyleSheet(stylesheet)
            self.ui.lec_table.verticalHeader().setStyleSheet(stylesheet)
            rows = self.ui.lec_table.rowCount()
            print(rows)
            columnss = self.ui.lec_table.columnCount()
            print(columnss)
            # Build one column list per table column, keyed by header text.
            df_list = {}
            for col in range(columnss):
                df_list2 = []
                for row in range(rows):
                    table_item = self.ui.lec_table.item(row, col)
                    df_list2.append('' if table_item is None else str(table_item.text()))
                headerit = self.ui.lec_table.horizontalHeaderItem(col)
                nameit = str(col) if headerit is None else headerit.text()
                df_list[nameit] = df_list2
            df = pd.DataFrame(data=df_list)
            print(df)
            conn = None
            try:
                conn = sqlite3.connect(self.resource_path('sqldata/lectable.db'))
                # NOTE(review): to_sql defaults to if_exists='fail', so a
                # second save raises until deleteeve drops the table -- that
                # failure lands in the except branch below; confirm intended.
                df.to_sql('lecdata', con=conn)
                print('thara bhai work completely')
                lectable.outputshown(self, "Completely, save tha data")
            except Exception as e:
                # BUG FIX: the original had a second, bare `except:` that was
                # unreachable after `except Exception`, and the user-facing
                # error message lived only in that dead branch.
                print("exception rise:- ", e)
                lectable.outputshown(self, "something wrong check again")
            finally:
                # BUG FIX: the original leaked the connection when to_sql raised.
                if conn is not None:
                    conn.close()
            # tablemanage.assigntbdata(self)
# =============================================================================
# Delete the data from the lectable
# =============================================================================
    def deleteeve(self, enable):
        """Drop the persisted 'lecdata' table from sqldata/lectable.db."""
        if enable:
            self.ui.lec_table.setHorizontalHeaderLabels(["LECTURE NAME","MS-SUB NAME", "MS-LEC NAME", "ALTER NAME"])
            stylesheet = "::section{color:rgb(0,0,0);}"
            self.ui.lec_table.horizontalHeader().setStyleSheet(stylesheet)
            self.ui.lec_table.verticalHeader().setStyleSheet(stylesheet)
            conn = None
            try:
                conn = sqlite3.connect(self.resource_path('sqldata/lectable.db'))
                c = conn.cursor()
                c.execute('DROP TABLE lecdata')
                conn.commit()
                c.close()
                print("completly work succesfully")
                lectable.outputshown(self, "Everything delete successul")
            except Exception as E:
                # Typically OperationalError when the table no longer exists.
                # BUG FIX: the original's trailing bare `except:` was dead code.
                lectable.outputshown(self, "TABLE is comepletely deleted Make new one")
                print(E)
            finally:
                # BUG FIX: the original leaked the connection on errors.
                if conn is not None:
                    conn.close()
# =============================================================================
# For refresh the data in table
# =============================================================================
    def refresh(self, enable):
        """Reload all rows of the 'lecdata' table back into lec_table."""
        if enable:
            conn = None
            try:
                self.ui.lec_table.clear()
                self.ui.lec_table.setHorizontalHeaderLabels(["LECTURE NAME","MS-SUB NAME", "MS-LEC NAME", "ALTER NAME"])
                stylesheet = "::section{color:rgb(0,0,0);}"
                self.ui.lec_table.horizontalHeader().setStyleSheet(stylesheet)
                self.ui.lec_table.verticalHeader().setStyleSheet(stylesheet)
                conn = sqlite3.connect(self.resource_path('sqldata/lectable.db'))
                c = conn.cursor()
                sqlstr = 'select * from lecdata'
                tablerow = 0
                results = c.execute(sqlstr)
                # Column 0 of lecdata is the DataFrame index written by
                # to_sql; the visible columns start at row[1].
                for row in results:
                    print(row[1])
                    self.ui.lec_table.setItem(tablerow, 0, QtWidgets.QTableWidgetItem(row[1]))
                    self.ui.lec_table.setItem(tablerow, 1, QtWidgets.QTableWidgetItem(row[2]))
                    self.ui.lec_table.setItem(tablerow, 2, QtWidgets.QTableWidgetItem(row[3]))
                    tablerow += 1
                print("i am doing my work completely")
                c.close()
                print("emit the database which i saved")
                conn.close()
                print("i am close the connection")
                lectable.outputshown(self, "REFRESH COMPLETELY")
            except OperationalError:
                print('Table is not defined mean a table is not exist make own your table')
            except Exception:
                # BUG FIX: was a bare `except:`, which also swallows
                # KeyboardInterrupt/SystemExit.
                print("lode lage bhai")
                lectable.outputshown(self, "something going wrong ramu")
            finally:
                # BUG FIX: the original leaked the connection on errors
                # (sqlite3 tolerates a second close on the success path).
                if conn is not None:
                    conn.close()
# Arbitrary questionnaire: ask for the user's name and echo a greeting.
user_name = input('Введите имя тут: ')
print(f'Привет, {user_name}!')
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'YiJing.ui'
#
# Created by: PyQt5 UI code generator 5.15.1
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_YiJing(object):
def setupUi(self, YiJing):
YiJing.setObjectName("YiJing")
YiJing.resize(1280, 743)
YiJing.setMinimumSize(QtCore.QSize(12, 0))
YiJing.setMaximumSize(QtCore.QSize(1280, 16777215))
self.centralwidget = QtWidgets.QWidget(YiJing)
self.centralwidget.setMinimumSize(QtCore.QSize(1280, 698))
self.centralwidget.setObjectName("centralwidget")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.centralwidget)
self.horizontalLayout.setObjectName("horizontalLayout")
self.tabWidget = QtWidgets.QTabWidget(self.centralwidget)
self.tabWidget.setObjectName("tabWidget")
self.tabBase = QtWidgets.QWidget()
self.tabBase.setObjectName("tabBase")
self.verticalLayout = QtWidgets.QVBoxLayout(self.tabBase)
self.verticalLayout.setSpacing(0)
self.verticalLayout.setObjectName("verticalLayout")
self.centralWidget = QtWidgets.QWidget(self.tabBase)
self.centralWidget.setObjectName("centralWidget")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.centralWidget)
self.horizontalLayout_2.setSpacing(0)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.widgetInput = QtWidgets.QWidget(self.centralWidget)
self.widgetInput.setObjectName("widgetInput")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.widgetInput)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.widgetTime = QtWidgets.QWidget(self.widgetInput)
self.widgetTime.setMaximumSize(QtCore.QSize(16777215, 50))
self.widgetTime.setObjectName("widgetTime")
self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.widgetTime)
self.horizontalLayout_3.setSpacing(10)
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.spinYear = QtWidgets.QSpinBox(self.widgetTime)
self.spinYear.setMinimumSize(QtCore.QSize(0, 0))
self.spinYear.setMinimum(1900)
self.spinYear.setMaximum(2100)
self.spinYear.setObjectName("spinYear")
self.horizontalLayout_3.addWidget(self.spinYear)
self.comboMonth = QtWidgets.QComboBox(self.widgetTime)
self.comboMonth.setObjectName("comboMonth")
self.comboMonth.addItem("")
self.comboMonth.addItem("")
self.comboMonth.addItem("")
self.comboMonth.addItem("")
self.comboMonth.addItem("")
self.comboMonth.addItem("")
self.comboMonth.addItem("")
self.comboMonth.addItem("")
self.comboMonth.addItem("")
self.comboMonth.addItem("")
self.comboMonth.addItem("")
self.comboMonth.addItem("")
self.horizontalLayout_3.addWidget(self.comboMonth)
self.comboDay = QtWidgets.QComboBox(self.widgetTime)
self.comboDay.setObjectName("comboDay")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.comboDay.addItem("")
self.horizontalLayout_3.addWidget(self.comboDay)
self.comboTime = QtWidgets.QComboBox(self.widgetTime)
self.comboTime.setObjectName("comboTime")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.comboTime.addItem("")
self.horizontalLayout_3.addWidget(self.comboTime)
self.verticalLayout_3.addWidget(self.widgetTime, 0, QtCore.Qt.AlignLeft)
self.widgetStandardTime = QtWidgets.QWidget(self.widgetInput)
self.widgetStandardTime.setObjectName("widgetStandardTime")
self.horizontalLayout_10 = QtWidgets.QHBoxLayout(self.widgetStandardTime)
self.horizontalLayout_10.setObjectName("horizontalLayout_10")
self.radBtnBeiJingTime = QtWidgets.QRadioButton(self.widgetStandardTime)
self.radBtnBeiJingTime.setChecked(True)
self.radBtnBeiJingTime.setObjectName("radBtnBeiJingTime")
self.horizontalLayout_10.addWidget(self.radBtnBeiJingTime)
self.radBtnZoneTime = QtWidgets.QRadioButton(self.widgetStandardTime)
self.radBtnZoneTime.setObjectName("radBtnZoneTime")
self.horizontalLayout_10.addWidget(self.radBtnZoneTime)
self.verticalLayout_3.addWidget(self.widgetStandardTime, 0, QtCore.Qt.AlignLeft)
self.widgetName = QtWidgets.QWidget(self.widgetInput)
self.widgetName.setMaximumSize(QtCore.QSize(16777215, 50))
self.widgetName.setObjectName("widgetName")
self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.widgetName)
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.labelName = QtWidgets.QLabel(self.widgetName)
self.labelName.setObjectName("labelName")
self.horizontalLayout_4.addWidget(self.labelName)
self.editName = QtWidgets.QTextEdit(self.widgetName)
self.editName.setMaximumSize(QtCore.QSize(70, 25))
self.editName.setObjectName("editName")
self.horizontalLayout_4.addWidget(self.editName)
self.labelSex = QtWidgets.QLabel(self.widgetName)
self.labelSex.setObjectName("labelSex")
self.horizontalLayout_4.addWidget(self.labelSex)
self.comboSex = QtWidgets.QComboBox(self.widgetName)
self.comboSex.setObjectName("comboSex")
self.comboSex.addItem("")
self.comboSex.addItem("")
self.horizontalLayout_4.addWidget(self.comboSex)
self.widgetIsLunar = QtWidgets.QWidget(self.widgetName)
self.widgetIsLunar.setMaximumSize(QtCore.QSize(16777215, 50))
self.widgetIsLunar.setObjectName("widgetIsLunar")
self.horizontalLayout_5 = QtWidgets.QHBoxLayout(self.widgetIsLunar)
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.radBtnSolar = QtWidgets.QRadioButton(self.widgetIsLunar)
self.radBtnSolar.setChecked(True)
self.radBtnSolar.setObjectName("radBtnSolar")
self.horizontalLayout_5.addWidget(self.radBtnSolar)
self.radBtnLunar = QtWidgets.QRadioButton(self.widgetIsLunar)
self.radBtnLunar.setObjectName("radBtnLunar")
self.horizontalLayout_5.addWidget(self.radBtnLunar)
self.horizontalLayout_4.addWidget(self.widgetIsLunar)
self.verticalLayout_3.addWidget(self.widgetName, 0, QtCore.Qt.AlignLeft)
self.widgetProvince = QtWidgets.QWidget(self.widgetInput)
self.widgetProvince.setObjectName("widgetProvince")
self.horizontalLayout_7 = QtWidgets.QHBoxLayout(self.widgetProvince)
self.horizontalLayout_7.setObjectName("horizontalLayout_7")
self.label_5 = QtWidgets.QLabel(self.widgetProvince)
self.label_5.setObjectName("label_5")
self.horizontalLayout_7.addWidget(self.label_5)
self.comboProvince = QtWidgets.QComboBox(self.widgetProvince)
self.comboProvince.setObjectName("comboProvince")
self.horizontalLayout_7.addWidget(self.comboProvince)
self.label_6 = QtWidgets.QLabel(self.widgetProvince)
self.label_6.setObjectName("label_6")
self.horizontalLayout_7.addWidget(self.label_6)
self.comboCity = QtWidgets.QComboBox(self.widgetProvince)
self.comboCity.setObjectName("comboCity")
self.horizontalLayout_7.addWidget(self.comboCity)
self.widgetCounty = QtWidgets.QWidget(self.widgetProvince)
self.widgetCounty.setObjectName("widgetCounty")
self.horizontalLayout_9 = QtWidgets.QHBoxLayout(self.widgetCounty)
self.horizontalLayout_9.setObjectName("horizontalLayout_9")
self.label_7 = QtWidgets.QLabel(self.widgetCounty)
self.label_7.setObjectName("label_7")
self.horizontalLayout_9.addWidget(self.label_7)
self.comboCounty = QtWidgets.QComboBox(self.widgetCounty)
self.comboCounty.setObjectName("comboCounty")
self.horizontalLayout_9.addWidget(self.comboCounty)
self.horizontalLayout_7.addWidget(self.widgetCounty)
self.verticalLayout_3.addWidget(self.widgetProvince, 0, QtCore.Qt.AlignLeft)
self.widgetWeights = QtWidgets.QWidget(self.widgetInput)
self.widgetWeights.setMaximumSize(QtCore.QSize(16777215, 16777208))
self.widgetWeights.setObjectName("widgetWeights")
self.gridLayout_2 = QtWidgets.QGridLayout(self.widgetWeights)
self.gridLayout_2.setObjectName("gridLayout_2")
self.label_10 = QtWidgets.QLabel(self.widgetWeights)
self.label_10.setObjectName("label_10")
self.gridLayout_2.addWidget(self.label_10, 0, 4, 1, 1)
self.label_14 = QtWidgets.QLabel(self.widgetWeights)
self.label_14.setObjectName("label_14")
self.gridLayout_2.addWidget(self.label_14, 1, 4, 1, 1)
self.label_13 = QtWidgets.QLabel(self.widgetWeights)
self.label_13.setObjectName("label_13")
self.gridLayout_2.addWidget(self.label_13, 1, 2, 1, 1)
self.editHourWeight = QtWidgets.QTextEdit(self.widgetWeights)
self.editHourWeight.setMaximumSize(QtCore.QSize(50, 25))
self.editHourWeight.setObjectName("editHourWeight")
self.gridLayout_2.addWidget(self.editHourWeight, 0, 7, 1, 1)
self.label_11 = QtWidgets.QLabel(self.widgetWeights)
self.label_11.setObjectName("label_11")
self.gridLayout_2.addWidget(self.label_11, 0, 6, 1, 1)
self.label_9 = QtWidgets.QLabel(self.widgetWeights)
self.label_9.setObjectName("label_9")
self.gridLayout_2.addWidget(self.label_9, 0, 2, 1, 1)
self.comboElems = QtWidgets.QComboBox(self.widgetWeights)
self.comboElems.setMaximumSize(QtCore.QSize(50, 16777215))
self.comboElems.setObjectName("comboElems")
self.comboElems.addItem("")
self.comboElems.addItem("")
self.gridLayout_2.addWidget(self.comboElems, 1, 7, 1, 1)
self.editLiuNianWeight = QtWidgets.QTextEdit(self.widgetWeights)
self.editLiuNianWeight.setMaximumSize(QtCore.QSize(50, 25))
self.editLiuNianWeight.setObjectName("editLiuNianWeight")
self.gridLayout_2.addWidget(self.editLiuNianWeight, 1, 3, 1, 1)
self.label_8 = QtWidgets.QLabel(self.widgetWeights)
self.label_8.setObjectName("label_8")
self.gridLayout_2.addWidget(self.label_8, 0, 0, 1, 1)
self.editDayWeight = QtWidgets.QTextEdit(self.widgetWeights)
self.editDayWeight.setMaximumSize(QtCore.QSize(50, 25))
self.editDayWeight.setObjectName("editDayWeight")
self.gridLayout_2.addWidget(self.editDayWeight, 0, 5, 1, 1)
self.label_12 = QtWidgets.QLabel(self.widgetWeights)
self.label_12.setObjectName("label_12")
self.gridLayout_2.addWidget(self.label_12, 1, 0, 1, 1)
self.label = QtWidgets.QLabel(self.widgetWeights)
self.label.setObjectName("label")
self.gridLayout_2.addWidget(self.label, 1, 6, 1, 1)
self.editMonthWeight = QtWidgets.QTextEdit(self.widgetWeights)
self.editMonthWeight.setMaximumSize(QtCore.QSize(50, 25))
self.editMonthWeight.setObjectName("editMonthWeight")
self.gridLayout_2.addWidget(self.editMonthWeight, 0, 3, 1, 1)
self.editLiuYueWeight = QtWidgets.QTextEdit(self.widgetWeights)
self.editLiuYueWeight.setMaximumSize(QtCore.QSize(50, 25))
self.editLiuYueWeight.setObjectName("editLiuYueWeight")
self.gridLayout_2.addWidget(self.editLiuYueWeight, 1, 5, 1, 1)
self.editDaYunWeight = QtWidgets.QTextEdit(self.widgetWeights)
self.editDaYunWeight.setMaximumSize(QtCore.QSize(50, 25))
self.editDaYunWeight.setObjectName("editDaYunWeight")
self.gridLayout_2.addWidget(self.editDaYunWeight, 1, 1, 1, 1)
self.editYearWight = QtWidgets.QTextEdit(self.widgetWeights)
self.editYearWight.setMaximumSize(QtCore.QSize(50, 25))
self.editYearWight.setObjectName("editYearWight")
self.gridLayout_2.addWidget(self.editYearWight, 0, 1, 1, 1)
self.verticalLayout_3.addWidget(self.widgetWeights)
self.widgetWeightsInfo = QtWidgets.QWidget(self.widgetInput)
self.widgetWeightsInfo.setObjectName("widgetWeightsInfo")
self.gridLayout_3 = QtWidgets.QGridLayout(self.widgetWeightsInfo)
self.gridLayout_3.setObjectName("gridLayout_3")
self.lbLiuNianWeight = QtWidgets.QLabel(self.widgetWeightsInfo)
self.lbLiuNianWeight.setObjectName("lbLiuNianWeight")
self.gridLayout_3.addWidget(self.lbLiuNianWeight, 1, 5, 1, 1)
self.lbYearWeight = QtWidgets.QLabel(self.widgetWeightsInfo)
self.lbYearWeight.setObjectName("lbYearWeight")
self.gridLayout_3.addWidget(self.lbYearWeight, 0, 3, 1, 1)
self.label_24 = QtWidgets.QLabel(self.widgetWeightsInfo)
self.label_24.setObjectName("label_24")
self.gridLayout_3.addWidget(self.label_24, 1, 2, 1, 1)
self.lbMonthWeight = QtWidgets.QLabel(self.widgetWeightsInfo)
self.lbMonthWeight.setObjectName("lbMonthWeight")
self.gridLayout_3.addWidget(self.lbMonthWeight, 0, 5, 1, 1)
self.lbDayWeight = QtWidgets.QLabel(self.widgetWeightsInfo)
self.lbDayWeight.setObjectName("lbDayWeight")
self.gridLayout_3.addWidget(self.lbDayWeight, 0, 7, 1, 1)
self.label_22 = QtWidgets.QLabel(self.widgetWeightsInfo)
self.label_22.setObjectName("label_22")
self.gridLayout_3.addWidget(self.label_22, 0, 8, 1, 1)
self.label_17 = QtWidgets.QLabel(self.widgetWeightsInfo)
self.label_17.setObjectName("label_17")
self.gridLayout_3.addWidget(self.label_17, 0, 4, 1, 1)
self.label_15 = QtWidgets.QLabel(self.widgetWeightsInfo)
self.label_15.setObjectName("label_15")
self.gridLayout_3.addWidget(self.label_15, 0, 2, 1, 1)
self.lbDaYunWeight = QtWidgets.QLabel(self.widgetWeightsInfo)
self.lbDaYunWeight.setObjectName("lbDaYunWeight")
self.gridLayout_3.addWidget(self.lbDaYunWeight, 1, 3, 1, 1)
self.lbHourWeight = QtWidgets.QLabel(self.widgetWeightsInfo)
self.lbHourWeight.setObjectName("lbHourWeight")
self.gridLayout_3.addWidget(self.lbHourWeight, 0, 9, 1, 1)
self.label_20 = QtWidgets.QLabel(self.widgetWeightsInfo)
self.label_20.setObjectName("label_20")
self.gridLayout_3.addWidget(self.label_20, 0, 6, 1, 1)
self.label_26 = QtWidgets.QLabel(self.widgetWeightsInfo)
self.label_26.setObjectName("label_26")
self.gridLayout_3.addWidget(self.label_26, 1, 4, 1, 1)
self.label_28 = QtWidgets.QLabel(self.widgetWeightsInfo)
self.label_28.setObjectName("label_28")
self.gridLayout_3.addWidget(self.label_28, 1, 6, 1, 1)
self.label_18 = QtWidgets.QLabel(self.widgetWeightsInfo)
self.label_18.setObjectName("label_18")
self.gridLayout_3.addWidget(self.label_18, 0, 0, 1, 1)
self.lbLiuYueWeight = QtWidgets.QLabel(self.widgetWeightsInfo)
self.lbLiuYueWeight.setObjectName("lbLiuYueWeight")
self.gridLayout_3.addWidget(self.lbLiuYueWeight, 1, 7, 1, 1)
self.verticalLayout_3.addWidget(self.widgetWeightsInfo)
self.widgetBtns = QtWidgets.QWidget(self.widgetInput)
self.widgetBtns.setObjectName("widgetBtns")
self.gridLayout = QtWidgets.QGridLayout(self.widgetBtns)
self.gridLayout.setObjectName("gridLayout")
self.btnReset = QtWidgets.QPushButton(self.widgetBtns)
self.btnReset.setMinimumSize(QtCore.QSize(90, 40))
self.btnReset.setMaximumSize(QtCore.QSize(90, 40))
self.btnReset.setObjectName("btnReset")
self.gridLayout.addWidget(self.btnReset, 2, 2, 1, 1)
self.btnChangeWeight = QtWidgets.QPushButton(self.widgetBtns)
self.btnChangeWeight.setMinimumSize(QtCore.QSize(90, 40))
self.btnChangeWeight.setMaximumSize(QtCore.QSize(90, 40))
self.btnChangeWeight.setObjectName("btnChangeWeight")
self.gridLayout.addWidget(self.btnChangeWeight, 2, 3, 1, 1)
self.btnCalculate = QtWidgets.QPushButton(self.widgetBtns)
self.btnCalculate.setMinimumSize(QtCore.QSize(90, 40))
self.btnCalculate.setMaximumSize(QtCore.QSize(90, 40))
self.btnCalculate.setObjectName("btnCalculate")
self.gridLayout.addWidget(self.btnCalculate, 2, 0, 1, 1)
self.verticalLayout_3.addWidget(self.widgetBtns)
self.widgetBaZiInfo = QtWidgets.QWidget(self.widgetInput)
self.widgetBaZiInfo.setMaximumSize(QtCore.QSize(200, 16777215))
self.widgetBaZiInfo.setObjectName("widgetBaZiInfo")
self.verticalLayout_5 = QtWidgets.QVBoxLayout(self.widgetBaZiInfo)
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.label_16 = QtWidgets.QLabel(self.widgetBaZiInfo)
self.label_16.setMinimumSize(QtCore.QSize(182, 20))
self.label_16.setMaximumSize(QtCore.QSize(182, 20))
self.label_16.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.label_16.setObjectName("label_16")
self.verticalLayout_5.addWidget(self.label_16)
self.labelBaZi = QtWidgets.QLabel(self.widgetBaZiInfo)
self.labelBaZi.setMinimumSize(QtCore.QSize(182, 20))
self.labelBaZi.setMaximumSize(QtCore.QSize(182, 20))
self.labelBaZi.setText("")
self.labelBaZi.setObjectName("labelBaZi")
self.verticalLayout_5.addWidget(self.labelBaZi)
self.label_21 = QtWidgets.QLabel(self.widgetBaZiInfo)
self.label_21.setMinimumSize(QtCore.QSize(182, 20))
self.label_21.setMaximumSize(QtCore.QSize(182, 20))
self.label_21.setObjectName("label_21")
self.verticalLayout_5.addWidget(self.label_21)
self.labelDaYun = QtWidgets.QLabel(self.widgetBaZiInfo)
self.labelDaYun.setMinimumSize(QtCore.QSize(182, 20))
self.labelDaYun.setMaximumSize(QtCore.QSize(182, 20))
self.labelDaYun.setText("")
self.labelDaYun.setObjectName("labelDaYun")
self.verticalLayout_5.addWidget(self.labelDaYun)
self.verticalLayout_3.addWidget(self.widgetBaZiInfo)
self.btnSave = QtWidgets.QPushButton(self.widgetInput)
self.btnSave.setMinimumSize(QtCore.QSize(90, 40))
self.btnSave.setMaximumSize(QtCore.QSize(90, 40))
self.btnSave.setObjectName("btnSave")
self.verticalLayout_3.addWidget(self.btnSave)
self.horizontalLayout_2.addWidget(self.widgetInput, 0, QtCore.Qt.AlignTop)
self.widgetCharts = QtWidgets.QWidget(self.centralWidget)
self.widgetCharts.setObjectName("widgetCharts")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.widgetCharts)
self.verticalLayout_2.setSpacing(0)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.scrollArea = QtWidgets.QScrollArea(self.widgetCharts)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setObjectName("scrollArea")
self.scrollAreaWidgetContents = QtWidgets.QWidget()
self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 784, 1700))
self.scrollAreaWidgetContents.setMinimumSize(QtCore.QSize(750, 1700))
self.scrollAreaWidgetContents.setObjectName("scrollAreaWidgetContents")
self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.scrollAreaWidgetContents)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.widgetRPChart = QtWidgets.QWidget(self.scrollAreaWidgetContents)
self.widgetRPChart.setMinimumSize(QtCore.QSize(750, 700))
self.widgetRPChart.setObjectName("widgetRPChart")
self.horizontalLayout_8 = QtWidgets.QHBoxLayout(self.widgetRPChart)
self.horizontalLayout_8.setContentsMargins(0, 0, 0, -1)
self.horizontalLayout_8.setObjectName("horizontalLayout_8")
self.widget_charts = QtWidgets.QWidget(self.widgetRPChart)
self.widget_charts.setMinimumSize(QtCore.QSize(700, 700))
self.widget_charts.setMaximumSize(QtCore.QSize(700, 700))
self.widget_charts.setObjectName("widget_charts")
self.verticalLayout_7 = QtWidgets.QVBoxLayout(self.widget_charts)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.boxRadar = QtWidgets.QVBoxLayout()
self.boxRadar.setObjectName("boxRadar")
self.verticalLayout_7.addLayout(self.boxRadar)
self.boxPie = QtWidgets.QVBoxLayout()
self.boxPie.setObjectName("boxPie")
self.verticalLayout_7.addLayout(self.boxPie)
self.horizontalLayout_8.addWidget(self.widget_charts)
self.verticalLayout_4.addWidget(self.widgetRPChart, 0, QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.tblOverview = QtWidgets.QTableWidget(self.scrollAreaWidgetContents)
self.tblOverview.setMinimumSize(QtCore.QSize(0, 530))
self.tblOverview.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.tblOverview.setObjectName("tblOverview")
self.tblOverview.setColumnCount(21)
self.tblOverview.setRowCount(17)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(7, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(8, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(9, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(10, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(11, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(12, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(13, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(14, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(15, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setVerticalHeaderItem(16, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(7, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(8, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(9, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(10, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(11, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(12, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(13, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(14, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(15, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(16, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(17, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(18, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(19, item)
item = QtWidgets.QTableWidgetItem()
self.tblOverview.setHorizontalHeaderItem(20, item)
self.tblOverview.horizontalHeader().setVisible(False)
self.tblOverview.verticalHeader().setVisible(False)
self.verticalLayout_4.addWidget(self.tblOverview)
self.lbDetailYear = QtWidgets.QLabel(self.scrollAreaWidgetContents)
font = QtGui.QFont()
font.setPointSize(10)
self.lbDetailYear.setFont(font)
self.lbDetailYear.setText("")
self.lbDetailYear.setObjectName("lbDetailYear")
self.verticalLayout_4.addWidget(self.lbDetailYear, 0, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignVCenter)
self.tblDetail = QtWidgets.QTableWidget(self.scrollAreaWidgetContents)
self.tblDetail.setObjectName("tblDetail")
self.tblDetail.setColumnCount(9)
self.tblDetail.setRowCount(13)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setVerticalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setVerticalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setVerticalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setVerticalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setVerticalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setVerticalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setVerticalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setVerticalHeaderItem(7, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setVerticalHeaderItem(8, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setVerticalHeaderItem(9, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setVerticalHeaderItem(10, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setVerticalHeaderItem(11, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setVerticalHeaderItem(12, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setHorizontalHeaderItem(7, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setHorizontalHeaderItem(8, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
self.tblDetail.setItem(0, 1, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
self.tblDetail.setItem(0, 2, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
self.tblDetail.setItem(0, 3, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
self.tblDetail.setItem(0, 4, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
self.tblDetail.setItem(0, 5, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
self.tblDetail.setItem(0, 6, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
self.tblDetail.setItem(0, 7, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
self.tblDetail.setItem(0, 8, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsDropEnabled|QtCore.Qt.ItemIsUserCheckable|QtCore.Qt.ItemIsEnabled)
self.tblDetail.setItem(1, 0, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(1, 1, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(1, 2, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(1, 3, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(1, 4, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(1, 5, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(1, 6, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(1, 7, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsDropEnabled|QtCore.Qt.ItemIsUserCheckable|QtCore.Qt.ItemIsEnabled)
self.tblDetail.setItem(2, 0, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(2, 1, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsDropEnabled|QtCore.Qt.ItemIsUserCheckable|QtCore.Qt.ItemIsEnabled)
self.tblDetail.setItem(3, 0, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(3, 1, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsDropEnabled|QtCore.Qt.ItemIsUserCheckable|QtCore.Qt.ItemIsEnabled)
self.tblDetail.setItem(4, 0, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(4, 1, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsDropEnabled|QtCore.Qt.ItemIsUserCheckable|QtCore.Qt.ItemIsEnabled)
self.tblDetail.setItem(5, 0, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(5, 1, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsDropEnabled|QtCore.Qt.ItemIsUserCheckable|QtCore.Qt.ItemIsEnabled)
self.tblDetail.setItem(6, 0, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(6, 1, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsDropEnabled|QtCore.Qt.ItemIsUserCheckable|QtCore.Qt.ItemIsEnabled)
self.tblDetail.setItem(7, 0, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(7, 1, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsDropEnabled|QtCore.Qt.ItemIsUserCheckable|QtCore.Qt.ItemIsEnabled)
self.tblDetail.setItem(8, 0, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(8, 1, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsDropEnabled|QtCore.Qt.ItemIsUserCheckable|QtCore.Qt.ItemIsEnabled)
self.tblDetail.setItem(9, 0, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(9, 1, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsDropEnabled|QtCore.Qt.ItemIsUserCheckable|QtCore.Qt.ItemIsEnabled)
self.tblDetail.setItem(10, 0, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(10, 1, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsDropEnabled|QtCore.Qt.ItemIsUserCheckable|QtCore.Qt.ItemIsEnabled)
self.tblDetail.setItem(11, 0, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(11, 1, item)
item = QtWidgets.QTableWidgetItem()
item.setTextAlignment(QtCore.Qt.AlignCenter)
item.setFlags(QtCore.Qt.ItemIsSelectable|QtCore.Qt.ItemIsDragEnabled|QtCore.Qt.ItemIsDropEnabled|QtCore.Qt.ItemIsUserCheckable|QtCore.Qt.ItemIsEnabled)
self.tblDetail.setItem(12, 0, item)
item = QtWidgets.QTableWidgetItem()
self.tblDetail.setItem(12, 1, item)
self.tblDetail.horizontalHeader().setVisible(False)
self.tblDetail.verticalHeader().setVisible(False)
self.verticalLayout_4.addWidget(self.tblDetail)
self.scrollArea.setWidget(self.scrollAreaWidgetContents)
self.verticalLayout_2.addWidget(self.scrollArea)
self.widgetLines = QtWidgets.QWidget(self.widgetCharts)
self.widgetLines.setMinimumSize(QtCore.QSize(810, 250))
self.widgetLines.setMaximumSize(QtCore.QSize(810, 250))
self.widgetLines.setObjectName("widgetLines")
self.verticalLayout_6 = QtWidgets.QVBoxLayout(self.widgetLines)
self.verticalLayout_6.setContentsMargins(0, -1, -1, -1)
self.verticalLayout_6.setObjectName("verticalLayout_6")
self.boxLines = QtWidgets.QVBoxLayout()
self.boxLines.setObjectName("boxLines")
self.verticalLayout_6.addLayout(self.boxLines)
self.verticalLayout_2.addWidget(self.widgetLines)
self.horizontalLayout_2.addWidget(self.widgetCharts)
self.horizontalLayout_2.setStretch(0, 1)
self.verticalLayout.addWidget(self.centralWidget)
self.verticalLayout.setStretch(0, 1)
self.tabWidget.addTab(self.tabBase, "")
self.horizontalLayout.addWidget(self.tabWidget)
YiJing.setCentralWidget(self.centralwidget)
self.statusbar = QtWidgets.QStatusBar(YiJing)
self.statusbar.setObjectName("statusbar")
YiJing.setStatusBar(self.statusbar)
self.menuBar = QtWidgets.QMenuBar(YiJing)
self.menuBar.setGeometry(QtCore.QRect(0, 0, 1280, 23))
self.menuBar.setObjectName("menuBar")
self.menu = QtWidgets.QMenu(self.menuBar)
self.menu.setObjectName("menu")
YiJing.setMenuBar(self.menuBar)
self.menuBar.addAction(self.menu.menuAction())
self.retranslateUi(YiJing)
self.tabWidget.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(YiJing)
def retranslateUi(self, YiJing):
_translate = QtCore.QCoreApplication.translate
YiJing.setWindowTitle(_translate("YiJing", "易经编程"))
self.comboMonth.setItemText(0, _translate("YiJing", "1月"))
self.comboMonth.setItemText(1, _translate("YiJing", "2月"))
self.comboMonth.setItemText(2, _translate("YiJing", "3月"))
self.comboMonth.setItemText(3, _translate("YiJing", "4月"))
self.comboMonth.setItemText(4, _translate("YiJing", "5月"))
self.comboMonth.setItemText(5, _translate("YiJing", "6月"))
self.comboMonth.setItemText(6, _translate("YiJing", "7月"))
self.comboMonth.setItemText(7, _translate("YiJing", "8月"))
self.comboMonth.setItemText(8, _translate("YiJing", "9月"))
self.comboMonth.setItemText(9, _translate("YiJing", "10月"))
self.comboMonth.setItemText(10, _translate("YiJing", "11月"))
self.comboMonth.setItemText(11, _translate("YiJing", "12月"))
self.comboDay.setItemText(0, _translate("YiJing", "1日"))
self.comboDay.setItemText(1, _translate("YiJing", "2日"))
self.comboDay.setItemText(2, _translate("YiJing", "3日"))
self.comboDay.setItemText(3, _translate("YiJing", "4日"))
self.comboDay.setItemText(4, _translate("YiJing", "5日"))
self.comboDay.setItemText(5, _translate("YiJing", "6日"))
self.comboDay.setItemText(6, _translate("YiJing", "7日"))
self.comboDay.setItemText(7, _translate("YiJing", "8日"))
self.comboDay.setItemText(8, _translate("YiJing", "9日"))
self.comboDay.setItemText(9, _translate("YiJing", "10日"))
self.comboDay.setItemText(10, _translate("YiJing", "11日"))
self.comboDay.setItemText(11, _translate("YiJing", "12日"))
self.comboDay.setItemText(12, _translate("YiJing", "13日"))
self.comboDay.setItemText(13, _translate("YiJing", "14日"))
self.comboDay.setItemText(14, _translate("YiJing", "15日"))
self.comboDay.setItemText(15, _translate("YiJing", "16日"))
self.comboDay.setItemText(16, _translate("YiJing", "17日"))
self.comboDay.setItemText(17, _translate("YiJing", "18日"))
self.comboDay.setItemText(18, _translate("YiJing", "19日"))
self.comboDay.setItemText(19, _translate("YiJing", "20日"))
self.comboDay.setItemText(20, _translate("YiJing", "21日"))
self.comboDay.setItemText(21, _translate("YiJing", "22日"))
self.comboDay.setItemText(22, _translate("YiJing", "23日"))
self.comboDay.setItemText(23, _translate("YiJing", "24日"))
self.comboDay.setItemText(24, _translate("YiJing", "25日"))
self.comboDay.setItemText(25, _translate("YiJing", "26日"))
self.comboDay.setItemText(26, _translate("YiJing", "27日"))
self.comboDay.setItemText(27, _translate("YiJing", "28日"))
self.comboDay.setItemText(28, _translate("YiJing", "29日"))
self.comboDay.setItemText(29, _translate("YiJing", "30日"))
self.comboDay.setItemText(30, _translate("YiJing", "31日"))
self.comboTime.setItemText(0, _translate("YiJing", "00:00~00:59"))
self.comboTime.setItemText(1, _translate("YiJing", "01:00~01:59"))
self.comboTime.setItemText(2, _translate("YiJing", "02:00~02:59"))
self.comboTime.setItemText(3, _translate("YiJing", "03:00~03:59"))
self.comboTime.setItemText(4, _translate("YiJing", "04:00~04:59"))
self.comboTime.setItemText(5, _translate("YiJing", "05:00~05:59"))
self.comboTime.setItemText(6, _translate("YiJing", "06:00~06:59"))
self.comboTime.setItemText(7, _translate("YiJing", "07:00~07:59"))
self.comboTime.setItemText(8, _translate("YiJing", "08:00~08:59"))
self.comboTime.setItemText(9, _translate("YiJing", "09:00~09:59"))
self.comboTime.setItemText(10, _translate("YiJing", "10:00~10:59"))
self.comboTime.setItemText(11, _translate("YiJing", "11:00~12:59"))
self.comboTime.setItemText(12, _translate("YiJing", "12:00~12:59"))
self.comboTime.setItemText(13, _translate("YiJing", "13:00~13:59"))
self.comboTime.setItemText(14, _translate("YiJing", "14:00~14:59"))
self.comboTime.setItemText(15, _translate("YiJing", "15:00~15:59"))
self.comboTime.setItemText(16, _translate("YiJing", "16:00~16:59"))
self.comboTime.setItemText(17, _translate("YiJing", "17:00~17:59"))
self.comboTime.setItemText(18, _translate("YiJing", "18:00~18:59"))
self.comboTime.setItemText(19, _translate("YiJing", "19:00~19:59"))
self.comboTime.setItemText(20, _translate("YiJing", "20:00~20:59"))
self.comboTime.setItemText(21, _translate("YiJing", "21:00~21:59"))
self.comboTime.setItemText(22, _translate("YiJing", "22:00~22:59"))
self.comboTime.setItemText(23, _translate("YiJing", "23:00~23:59"))
self.radBtnBeiJingTime.setText(_translate("YiJing", "北京时间"))
self.radBtnZoneTime.setText(_translate("YiJing", "跟据地理位置修改的时间"))
self.labelName.setText(_translate("YiJing", "姓名"))
self.labelSex.setText(_translate("YiJing", "性别"))
self.comboSex.setItemText(0, _translate("YiJing", "女"))
self.comboSex.setItemText(1, _translate("YiJing", "男"))
self.radBtnSolar.setText(_translate("YiJing", "公历"))
self.radBtnLunar.setText(_translate("YiJing", "农历"))
self.label_5.setText(_translate("YiJing", "省份/直辖市"))
self.label_6.setText(_translate("YiJing", "市"))
self.label_7.setText(_translate("YiJing", "县/区"))
self.label_10.setText(_translate("YiJing", "日柱"))
self.label_14.setText(_translate("YiJing", "流月"))
self.label_13.setText(_translate("YiJing", "流年"))
self.editHourWeight.setHtml(_translate("YiJing", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'SimSun\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
self.label_11.setText(_translate("YiJing", "时柱"))
self.label_9.setText(_translate("YiJing", "月柱"))
self.comboElems.setItemText(0, _translate("YiJing", "4"))
self.comboElems.setItemText(1, _translate("YiJing", "5"))
self.editLiuNianWeight.setHtml(_translate("YiJing", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'SimSun\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
self.label_8.setText(_translate("YiJing", "年柱"))
self.editDayWeight.setHtml(_translate("YiJing", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'SimSun\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
self.label_12.setText(_translate("YiJing", "大运"))
self.label.setText(_translate("YiJing", "权重维数"))
self.editMonthWeight.setHtml(_translate("YiJing", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'SimSun\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
self.editLiuYueWeight.setHtml(_translate("YiJing", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'SimSun\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
self.editDaYunWeight.setHtml(_translate("YiJing", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'SimSun\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
self.editYearWight.setHtml(_translate("YiJing", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'SimSun\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
self.lbLiuNianWeight.setText(_translate("YiJing", "2"))
self.lbYearWeight.setText(_translate("YiJing", "1"))
self.label_24.setText(_translate("YiJing", "大运:"))
self.lbMonthWeight.setText(_translate("YiJing", "4"))
self.lbDayWeight.setText(_translate("YiJing", "1"))
self.label_22.setText(_translate("YiJing", "时柱:"))
self.label_17.setText(_translate("YiJing", "月柱:"))
self.label_15.setText(_translate("YiJing", "年柱:"))
self.lbDaYunWeight.setText(_translate("YiJing", "2"))
self.lbHourWeight.setText(_translate("YiJing", "2"))
self.label_20.setText(_translate("YiJing", "日柱:"))
self.label_26.setText(_translate("YiJing", "流年:"))
self.label_28.setText(_translate("YiJing", "流月:"))
self.label_18.setText(_translate("YiJing", "当前权重为:"))
self.lbLiuYueWeight.setText(_translate("YiJing", "1"))
self.btnReset.setText(_translate("YiJing", "重置"))
self.btnChangeWeight.setText(_translate("YiJing", "修改权重"))
self.btnCalculate.setText(_translate("YiJing", "排八字"))
self.label_16.setText(_translate("YiJing", "生辰八字:"))
self.label_21.setText(_translate("YiJing", "大运、流年、流月:"))
self.btnSave.setText(_translate("YiJing", "保存记录"))
item = self.tblOverview.verticalHeaderItem(0)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(1)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(2)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(3)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(4)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(5)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(6)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(7)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(8)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(9)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(10)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(11)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(12)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(13)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(14)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(15)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.verticalHeaderItem(16)
item.setText(_translate("YiJing", "新建行"))
item = self.tblOverview.horizontalHeaderItem(0)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(1)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(2)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(3)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(4)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(5)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(6)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(7)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(8)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(9)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(10)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(11)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(12)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(13)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(14)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(15)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(16)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(17)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(18)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(19)
item.setText(_translate("YiJing", "新建列"))
item = self.tblOverview.horizontalHeaderItem(20)
item.setText(_translate("YiJing", "新建列"))
item = self.tblDetail.verticalHeaderItem(0)
item.setText(_translate("YiJing", "新建行"))
item = self.tblDetail.verticalHeaderItem(1)
item.setText(_translate("YiJing", "一月"))
item = self.tblDetail.verticalHeaderItem(2)
item.setText(_translate("YiJing", "二月"))
item = self.tblDetail.verticalHeaderItem(3)
item.setText(_translate("YiJing", "三月"))
item = self.tblDetail.verticalHeaderItem(4)
item.setText(_translate("YiJing", "四月"))
item = self.tblDetail.verticalHeaderItem(5)
item.setText(_translate("YiJing", "五月"))
item = self.tblDetail.verticalHeaderItem(6)
item.setText(_translate("YiJing", "六月"))
item = self.tblDetail.verticalHeaderItem(7)
item.setText(_translate("YiJing", "七月"))
item = self.tblDetail.verticalHeaderItem(8)
item.setText(_translate("YiJing", "八月"))
item = self.tblDetail.verticalHeaderItem(9)
item.setText(_translate("YiJing", "九月"))
item = self.tblDetail.verticalHeaderItem(10)
item.setText(_translate("YiJing", "十月"))
item = self.tblDetail.verticalHeaderItem(11)
item.setText(_translate("YiJing", "十一月"))
item = self.tblDetail.verticalHeaderItem(12)
item.setText(_translate("YiJing", "十二月"))
item = self.tblDetail.horizontalHeaderItem(0)
item.setText(_translate("YiJing", "新建列"))
item = self.tblDetail.horizontalHeaderItem(1)
item.setText(_translate("YiJing", "大运"))
item = self.tblDetail.horizontalHeaderItem(2)
item.setText(_translate("YiJing", "流年"))
item = self.tblDetail.horizontalHeaderItem(3)
item.setText(_translate("YiJing", "流月"))
item = self.tblDetail.horizontalHeaderItem(4)
item.setText(_translate("YiJing", "金"))
item = self.tblDetail.horizontalHeaderItem(5)
item.setText(_translate("YiJing", "木"))
item = self.tblDetail.horizontalHeaderItem(6)
item.setText(_translate("YiJing", "水"))
item = self.tblDetail.horizontalHeaderItem(7)
item.setText(_translate("YiJing", "火"))
item = self.tblDetail.horizontalHeaderItem(8)
item.setText(_translate("YiJing", "土"))
__sortingEnabled = self.tblDetail.isSortingEnabled()
self.tblDetail.setSortingEnabled(False)
item = self.tblDetail.item(0, 1)
item.setText(_translate("YiJing", "大运"))
item = self.tblDetail.item(0, 2)
item.setText(_translate("YiJing", "流年"))
item = self.tblDetail.item(0, 3)
item.setText(_translate("YiJing", "流月"))
item = self.tblDetail.item(0, 4)
item.setText(_translate("YiJing", "金"))
item = self.tblDetail.item(0, 5)
item.setText(_translate("YiJing", "木"))
item = self.tblDetail.item(0, 6)
item.setText(_translate("YiJing", "水"))
item = self.tblDetail.item(0, 7)
item.setText(_translate("YiJing", "火"))
item = self.tblDetail.item(0, 8)
item.setText(_translate("YiJing", "土"))
item = self.tblDetail.item(1, 0)
item.setText(_translate("YiJing", "一月"))
item = self.tblDetail.item(2, 0)
item.setText(_translate("YiJing", "二月"))
item = self.tblDetail.item(3, 0)
item.setText(_translate("YiJing", "三月"))
item = self.tblDetail.item(4, 0)
item.setText(_translate("YiJing", "四月"))
item = self.tblDetail.item(5, 0)
item.setText(_translate("YiJing", "五月"))
item = self.tblDetail.item(6, 0)
item.setText(_translate("YiJing", "六月"))
item = self.tblDetail.item(7, 0)
item.setText(_translate("YiJing", "七月"))
item = self.tblDetail.item(8, 0)
item.setText(_translate("YiJing", "八月"))
item = self.tblDetail.item(9, 0)
item.setText(_translate("YiJing", "九月"))
item = self.tblDetail.item(10, 0)
item.setText(_translate("YiJing", "十月"))
item = self.tblDetail.item(11, 0)
item.setText(_translate("YiJing", "十一月"))
item = self.tblDetail.item(12, 0)
item.setText(_translate("YiJing", "十二月"))
self.tblDetail.setSortingEnabled(__sortingEnabled)
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tabBase), _translate("YiJing", "基本"))
self.menu.setTitle(_translate("YiJing", "菜单"))
|
# External module imports
import RPi.GPIO as GPIO
import time
import sys
# Validate usage: exactly two arguments are required (pin number, "on"/"off").
if len(sys.argv) != 3:
    print("incorrect number of args")
    sys.exit(1)  # non-zero exit: invocation error (original exit() returned 0)

# Pin definitions: pin number from argv[1], desired state from argv[2].
pin = int(sys.argv[1])
state = (GPIO.HIGH if sys.argv[2] == "on" else GPIO.LOW)
print("setting %d to %s" % (pin, sys.argv[2]))
print(pin)
print(state)
# Pin setup: use physical board pin numbering.
GPIO.setmode(GPIO.BOARD)
# Configure the pin as an output and drive it to the requested state.
GPIO.setup(pin, GPIO.OUT)
GPIO.output(pin, state)
# No GPIO.cleanup() on purpose: leave the pin state latched for the relay.
|
import os
from math import prod
from typing import List, Tuple
from solutions.python.common.files import read_lines, INPUTS_FOLDER
from solutions.python.common.timing import timer
@timer
def multiply_tree_counts_for_several_slopes(area_map: List[str], slopes: List[Tuple[int, int]]) -> int:
    """Count the trees hit along each slope and return the product of the counts."""
    counts_per_slope = (count_trees(area_map, right, down) for right, down in slopes)
    return prod(counts_per_slope)
def count_trees(area_map: List[str], slope_right: int, slope_down: int) -> int:
    """Count '#' cells hit while descending the map along the given slope.

    The map repeats horizontally, so the column wraps modulo the row width.
    """
    width = len(area_map[0])
    # Taking every slope_down-th row; at step k the column is k * slope_right (wrapped).
    return sum(
        row[(step * slope_right) % width] == '#'
        for step, row in enumerate(area_map[::slope_down])
    )
if __name__ == '__main__':
    # Read the puzzle input as raw lines of '.'/'#' characters.
    input_file_path: str = os.path.join(INPUTS_FOLDER, 'day_03', 'input.txt')
    input_list: List[str] = read_lines(input_file_path=input_file_path, line_type=str)
    # Part 1: single slope (right 3, down 1); 211 is this puzzle input's known answer.
    part_1_result: int = multiply_tree_counts_for_several_slopes(area_map=input_list, slopes=[(3, 1)])
    assert part_1_result == 211
    print('Part 1 result :', part_1_result)
    # Part 2: product of tree counts over the five required slopes.
    part_2_result: int = multiply_tree_counts_for_several_slopes(area_map=input_list,
                                                                slopes=[(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)])
    assert part_2_result == 3584591857
    print('Part 2 result :', part_2_result)
|
def genTest():
    """Yield the integers 1, 2, 3 in order (minimal generator demo)."""
    yield from (1, 2, 3)
def genFib():
    """Yield Fibonacci numbers 1, 2, 3, 5, 8, ... indefinitely.

    Bug fixed: the original assigned ``Fib_1 = next`` and then
    ``Fib_2 = Fib_1``, so both trackers collapsed to the same (new) value
    and the sequence doubled (1, 2, 4, 8, ...) instead of following the
    Fibonacci recurrence. Also renamed the local that shadowed the
    builtin ``next``.
    """
    fib_prev = 1   # Fib(n-1)
    fib_prev2 = 0  # Fib(n-2)
    while True:
        # Fib(n) = Fib(n-1) + Fib(n-2)
        current = fib_prev + fib_prev2
        yield current
        # Shift the window: old Fib(n-1) becomes Fib(n-2), Fib(n) becomes Fib(n-1).
        fib_prev, fib_prev2 = current, fib_prev
def allCombo(items):
    """Yield every (bag_1, bag_2) split of *items*.

    Each item independently goes into bag 1, bag 2, or neither, so there are
    3**len(items) assignments. The code in base 3 encodes one assignment:
    digit j is the choice for items[j] (0 -> bag 1, 1 -> bag 2, 2 -> neither).
    """
    for code in range(3 ** len(items)):
        first_bag = []
        second_bag = []
        remaining = code
        for item in items:
            # Peel off the base-3 digit for this item.
            remaining, choice = divmod(remaining, 3)
            if choice == 0:
                first_bag.append(item)
            elif choice == 1:
                second_bag.append(item)
        yield first_bag, second_bag
|
import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
#%%
# Create the Dash application instance.
app = dash.Dash()
# Layout: a text input and an output div (blue border makes it visible when empty).
elements = [
    dcc.Input(id='my-id',value='Initial Text',type='text'),
    html.Div(id='my-div',style={'border':'2px blue solid'})
]
app.layout=html.Div(elements)
# NOTE: callbacks must be registered after app.layout has been assigned.
@app.callback(
    # Output targets the div's children property (the displayed content).
    Output(component_id='my-div', component_property='children'),
    # Inputs are supplied as a list; we watch the text input's value property.
    [Input(component_id='my-id', component_property='value')]
)
def update_output_div(input_value):
    """Echo the current text-input value into the output div."""
    return f"You entered: {input_value}"
if __name__ == '__main__':
    # Start the built-in development server (defaults to http://127.0.0.1:8050).
    app.run_server()
|
import jwt
from flask import jsonify, request
import urls.basic_urls as bu
from models import Car, app, db, Rent, User
from schema import RentSchema
@app.route("/rent", methods=['POST'])
def create_rent():
auth_token = request.args.get('access_token')
try:
keys = bu.decode_auth_token(auth_token)
except jwt.ExpiredSignatureError:
return jsonify({'message': 'Signature expired. Please log in again.'}), 401
except jwt.InvalidTokenError:
return jsonify({'message': 'Invalid token. Please log in again.'}), 401
admin = keys[0]
id = keys[1]
if admin == 1:
return jsonify({'response': "You are not a user"}), 403
data = request.get_json()
if not data:
return jsonify({"response": "No input data provided"}), 400
try:
result = RentSchema().load(data)
except Exception:
return jsonify({'response': "Invalid input"}), 403
if db.session.query(Car.carId).filter_by(carId=result["car_id"]).scalar() is None:
return jsonify({'response': "Invalid car ID found"}), 404
rent = Rent(owner_id=id, car_id=result["car_id"], startT=result["startTime"],
endT=result["endTime"])
db.session.add(rent)
db.session.commit()
return jsonify({'response': "Success"}), 201
@app.route("/rent", methods=['GET'])
def get_rents():
auth_token = request.args.get('access_token')
try:
keys = bu.decode_auth_token(auth_token)
except jwt.ExpiredSignatureError:
return jsonify({'message': 'Signature expired. Please log in again.'}), 401
except jwt.InvalidTokenError:
return jsonify({'message': 'Invalid token. Please log in again.'}), 401
admin = keys[0]
id = keys[1]
if admin == 1:
return jsonify({'response': "You are not a user"}), 403
result = db.session.query(Rent).filter(Rent.owner_id == id).order_by(Rent.rentId).all()
schema = RentSchema(many=True)
dump_data = schema.dump(result)
return jsonify({'response': dump_data}), 200
@app.route("/rent/<int:rentId>/", methods=['DELETE'])
def delete_rent(rentId):
try:
auth_token = request.args.get('access_token')
keys = bu.decode_auth_token(auth_token)
except jwt.ExpiredSignatureError:
return jsonify({'message': 'Signature expired. Please log in again.'}), 401
except jwt.InvalidTokenError:
return jsonify({'message': 'Invalid token. Please log in again.'}), 401
admin = keys[0]
id = keys[1]
if admin == 1:
return jsonify({'response': "You are not a user"}), 403
if db.session.query(Rent.rentId).filter_by(rentId=rentId, owner_id=id).scalar() is None:
return jsonify({'response': "No your`s rents with this ID found"}), 404
db.session.query(Rent).filter(Rent.rentId == rentId).delete()
db.session.commit()
return jsonify({'response': "Success"}), 200
|
''' Rotate an array of size n by d elements '''
'''
Algorithm (reversal method):
Consider arr = [1, 2, 3, 4, 5, 6, 7]
n = 7, d = 2
A = [1, 2] (arr[0] - arr[d-1])
B = [3, 4, 5, 6, 7] (arr[d] - arr[n-1])
Reverse A and B separately:
Ar = [2, 1] and Br = [7, 6, 5, 4, 3]
ArBr = [2, 1, 7, 6, 5, 4, 3]
Reverse the whole array to finish the rotation:
(ArBr)r = [3, 4, 5, 6, 7, 1, 2]
'''
import unittest
def reverseArray(arr, start, end):
    """Reverse arr[start..end] (inclusive) in place."""
    while start < end:
        # Swap the outermost pair and move both cursors inward.
        arr[start], arr[end] = arr[end], arr[start]
        start, end = start + 1, end - 1
def leftRotate(arr, n, d):
    """Rotate arr (length n) left by d positions via three in-place reversals."""
    # Reverse the first d elements, then the remainder, then the whole array.
    for lo, hi in ((0, d - 1), (d, n - 1), (0, n - 1)):
        reverseArray(arr, lo, hi)
class TestRotate(unittest.TestCase):
    """Unit tests for leftRotate."""

    def test_rotate(self):
        """Rotating 7 elements left by 2 moves the first two items to the end."""
        data = [1, 2, 3, 4, 5, 6, 7]
        leftRotate(data, len(data), 2)
        self.assertEqual(data, [3, 4, 5, 6, 7, 1, 2])
if __name__ == '__main__':
    # Run the unit tests when executed directly.
    unittest.main()
|
# Define a function.
# *args collects variable positional arguments — useful when the caller may
# pass any number of extra values.
print("666")
print("44444")
def good_job(salary, bonus, subsidy=4000, *args, **kwargs):
    """Return total pay: salary + bonus + subsidy plus any extra amounts.

    Extra positional amounts arrive via *args and extra named amounts via
    **kwargs; every value is summed into the total.
    """
    total = salary + bonus + subsidy
    total += sum(args)
    total += sum(kwargs.values())
    return total
# Example call; the returned total (12300) is discarded here.
good_job(2000, 3000, 500, 6000,500,aa=100,bb=200)
# Extension: str.split(sep, maxsplit) cuts a string on a separator and returns
# a list; maxsplit defaults to -1, i.e. split on every occurrence.
str1 = "hello ,python,lemon"
str2 = str1.split(',')
print(str2)
# -*- coding: utf-8 -*-
import benzina.native
import gc
import numpy as np
import torch
import torch.utils.data
from torch.utils.data.dataloader import default_collate
from contextlib import suppress
from . import operations as ops
class DataLoader(torch.utils.data.DataLoader):
    """
    Loads images from a :class:`benzina.torch.dataset.Dataset`. Encapsulates a sampler
    and data processing transformations.
    Args:
        dataset (:class:`benzina.torch.dataset.Dataset`): dataset from which to load the
            data.
        shape (int or tuple of ints): set the shape of the samples. Note that
            this does not imply a resize of the image but merely set the shape
            of the tensor in which the data will be copied.
        path (str, optional): path to the archive from which samples will be
            decoded. If not specified, the dataloader will attempt to get it
            from :attr:`dataset`.
        batch_size (int, optional): how many samples per batch to load.
            (default: ``1``)
        shuffle (bool, optional): set to ``True`` to have the data reshuffled
            at every epoch. (default: ``False``)
        sampler (torch.utils.data.Sampler, optional): defines the strategy to
            draw samples from the dataset. If specified, :attr:`shuffle` must
            be ``False``.
        batch_sampler (torch.utils.data.Sampler, optional): like sampler, but
            returns a batch of indices at a time. Mutually exclusive with
            :attr:`batch_size`, :attr:`shuffle`, :attr:`sampler`, and
            :attr:`drop_last`.
        collate_fn (callable, optional): merges a list of samples to form a
            mini-batch.
        drop_last (bool, optional): set to ``True`` to drop the last incomplete
            batch, if the dataset size is not divisible by the batch size. If
            ``False`` and the size of dataset is not divisible by the batch
            size, then the last batch will be smaller. (default: ``False``)
        timeout (numeric, optional): if positive, the timeout value for
            collecting a batch. Should always be non-negative. (default: ``0``)
        device (torch.device, optional): set the device to use. Note that only
            CUDA devices are supported for the moment.
        multibuffering (int, optional): set the size of the multibuffering
            buffer. (default: ``3``)
        seed (int, optional): set the seed for the random transformations.
        bias_transform (:class:`benzina.torch.operations.BiasTransform` or float, optional):
            set the bias transformation. Values to substract a pixel's channels
            with. Note that this transformation is applied before
            :attr:`norm_transform`.
        norm_transform (:class:`benzina.torch.operations.NormTransform` or float or iterable of float, optional):
            set the normalization transformation. Values to multiply a pixel's
            channels with. Note that this transformation is applied after
            :attr:`bias_transform`.
        warp_transform (:class:`benzina.torch.operations.WarpTransform` or iterable of float, optional):
            set the warp transformation or use as the arguments to initialize a
            WarpTransform.
    """
    def __init__(self,
                 dataset,
                 shape,
                 path = None,
                 batch_size = 1,
                 shuffle = False,
                 sampler = None,
                 batch_sampler = None,
                 collate_fn = default_collate,
                 drop_last = False,
                 timeout = 0,
                 device = None,
                 multibuffering = 3,
                 seed = None,
                 bias_transform = None,
                 norm_transform = None,
                 warp_transform = None):
        # Decoding happens on the GPU, so the base loader runs with zero
        # workers and pinned memory.
        super().__init__(dataset,
                         batch_size = batch_size,
                         shuffle = shuffle,
                         sampler = sampler,
                         batch_sampler = batch_sampler,
                         num_workers = 0,
                         collate_fn = collate_fn,
                         pin_memory = True,
                         drop_last = drop_last,
                         timeout = float(timeout),
                         worker_init_fn = None)
        # A bare int means a square output of (shape, shape).
        if isinstance(shape, int):
            shape = (shape, shape)
        # Fall back to the dataset's own archive path when none is given.
        if path is None:
            path = dataset.filename
        # Draw a random 32-bit seed on the CPU when the caller did not supply one.
        if seed is None:
            seed = torch.randint(low = 0,
                                 high = 2**32,
                                 size = (),
                                 dtype = torch.int64,
                                 device = "cpu")
            seed = int(seed)
        # Wrap plain values into constant-transform objects so every transform
        # exposes the same callable interface downstream.
        if not isinstance(warp_transform, ops.WarpTransform):
            warp_transform = ops.ConstantWarpTransform(warp_transform)
        if not isinstance(norm_transform, ops.NormTransform):
            norm_transform = ops.ConstantNormTransform(norm_transform)
        if not isinstance(bias_transform, ops.BiasTransform):
            bias_transform = ops.ConstantBiasTransform(bias_transform)
        self.path = path
        self.device = device
        self.multibuffering = multibuffering
        self.shape = shape
        self.RNG = np.random.RandomState(seed)
        self.warp_transform = warp_transform
        self.color_transform = ops.ConstantColorTransform()
        self.oob_transform = ops.ConstantOOBTransform()
        self.norm_transform = norm_transform
        self.bias_transform = bias_transform
    def __iter__(self):
        # Each iteration gets a fresh iterator holding its own GPU multibuffer.
        return _DataLoaderIter(self)
class _DataLoaderIter:
    """Iterator over a :class:`DataLoader`.

    Owns a multibuffered GPU tensor into which the native decoder writes
    batches asynchronously; batches are pushed ahead of consumption and
    pulled in order.
    """
    def __init__(self, loader):
        assert(loader.multibuffering >= 1)
        # Snapshot everything needed from the loader so the iterator is
        # self-contained.
        self.length = len(loader)
        self.dataset = loader.dataset
        self.dataset_core = benzina.native.DatasetCore(loader.path, len(loader.dataset))
        self.batch_size = loader.batch_size
        self.batch_iter = iter(loader.batch_sampler)
        self.multibuffering = loader.multibuffering
        self.shape = loader.shape
        self.collate_fn = loader.collate_fn
        self.drop_last = loader.drop_last
        self.timeout = loader.timeout
        # Resolve the target device: default / "cuda" means the current CUDA
        # device; strings and ints are converted; torch.device passes through.
        if loader.device is None or loader.device == "cuda":
            self.device = torch.device(torch.cuda.current_device())
        elif isinstance(loader.device, (str, int)):
            self.device = torch.device(loader.device)
        else:
            self.device = loader.device
        # Derive a child RNG so each iterator has an independent stream.
        self.RNG = np.random.RandomState(loader.RNG.randint(2**32))
        self.warp_transform = loader.warp_transform
        self.color_transform = loader.color_transform
        self.oob_transform = loader.oob_transform
        self.norm_transform = loader.norm_transform
        self.bias_transform = loader.bias_transform
        # Allocated lazily on the first __next__ (see init_core).
        self.multibuffer = None
        self.core = None
        self.first_indices = None
        self.stop_iteration = None
    def __del__(self):
        """
        Destroy the iterator and all its resources.
        Because extraneous and circular references can keep the large GPU
        multibuffer tensor allocated indefinitely, we:
        1. Forcibly destroy all our members, thereby losing all of the
           iterator's possible references to the multibuffer and the iterator
           core. Tensor deallocations may or may not happen at this moment.
        2. Invoke the garbage collector, which is capable of identifying
           cyclic trash and removing it. The iterator core object supports
           garbage collection and is capable of breaking all reference cycles
           involving it.
        3. Empty the PyTorch CUDA cache, returning the CUDA memory buffers to
           the allocation pool.
        Because data loaders are not intended to be created extremely often,
        the extra cycles spent here doing this are worth it.
        """
        del self.__dict__
        self.garbage_collect()
    def __iter__(self):
        return self
    def __len__(self):
        return self.length
    def __next__(self):
        # Once exhausted, keep raising the same StopIteration.
        if self.stop_iteration is not None:
            raise self.stop_iteration
        try:
            if self.core_needs_init():
                # First call: size the multibuffer from the first index batch,
                # then prime the pipeline with as many batches as it holds.
                self.pull_first_indices()
                self.init_core()
                self.push_first_indices()
                with suppress(StopIteration):
                    self.fill_core()
            else:
                # Steady state: keep one batch in flight per pull.
                with suppress(StopIteration):
                    self.fill_one_batch()
            return self.pull()
        except StopIteration as si:
            # Remember exhaustion and release GPU resources eagerly.
            self.stop_iteration = si
            self.garbage_collect()
            raise self.stop_iteration
    def core_needs_init(self):
        return self.core is None
    def pull_first_indices(self):
        self.first_indices = next(self.batch_iter)
    def init_core(self):
        """
        Initialize the iterator core.
        From the first batch drawn from the sample iterator, we know the
        maximum batch size. We allocate a multibuffer large enough to
        containing self.multibuffering batches of the maximum size.
        Before we do so, however, we trigger garbage collection and empty
        the tensor cache, in an attempt to ensure circular references
        keeping previous large multibuffers alive have been destroyed.
        """
        self.garbage_collect()
        self.check_or_set_batch_size(self.first_indices)
        # One float32 RGB image slot per (buffer, sample) pair.
        self.multibuffer = torch.zeros([self.multibuffering,
                                        self.batch_size,
                                        3,
                                        self.shape[0],
                                        self.shape[1]],
                                       dtype = torch.float32,
                                       device = self.device)
        self.core = benzina.native.NvdecodeDataLoaderIterCore(
            self.dataset_core,
            str(self.device),
            self.multibuffer,
            self.multibuffer.data_ptr(),
            self.batch_size,
            self.multibuffering,
            self.shape[0],
            self.shape[1],
        )
        # Keep memoryviews alive while the native core reads from them.
        self._memoryviews = [[None] * self.batch_size
                             for i in range(self.multibuffering
                                            if self.multibuffering else 1)]
    def push_first_indices(self):
        # pop() drops our reference once the indices are handed to push().
        self.push(self.__dict__.pop("first_indices"))
    def fill_core(self):
        # Prime the pipeline until every multibuffer slot has a batch in flight.
        while self.core.pushes < self.core.multibuffering:
            self.fill_one_batch()
    def push(self, indices):
        """Submit one batch of dataset indices to the native decoder."""
        self.check_or_set_batch_size(indices)
        # Round-robin slot in the multibuffer for this batch.
        buffer = self.multibuffer[self.core.pushes % self.core.multibuffering][:len(indices)]
        memviews = self._memoryviews[self.core.pushes % self.core.multibuffering]
        indices = [int(i) for i in indices]
        ptrs = [int(buffer[n].data_ptr()) for n in range(len(indices))]
        samples = [self.dataset[i] for i in indices]
        memviews[:len(indices)], auxd, tracks = \
            zip(*[(memoryview(s.input), s.aux, s.track) for s in samples])
        # NOTE(review): for a short final batch (len(indices) < batch_size)
        # this multiplier is negative, so the slice is assigned [] and the
        # list shrinks instead of being None-padded; possibly should be
        # (self.batch_size - len(indices)) — confirm intent.
        memviews[len(indices):] = [None] * (len(indices) - self.batch_size)
        token = (buffer, *self.collate_fn(auxd))
        t_args = (self.shape, self.RNG)
        with self.core.batch(token) as batch:
            for i,ptr,memview,track in zip(indices, ptrs, memviews, tracks):
                with batch.sample(i, ptr, memview, track.sample_location(0),
                                  track.video_configuration_location()):
                    # Per-sample transform parameters are drawn here so the
                    # native decoder applies them during the copy.
                    self.core.setHomography (*self.warp_transform (i, track.shape, *t_args))
                    self.core.selectColorMatrix(*self.color_transform(i, track.shape, *t_args))
                    self.core.setBias (*self.bias_transform (i, track.shape, *t_args))
                    self.core.setScale (*self.norm_transform (i, track.shape, *t_args))
                    self.core.setOOBColor (*self.oob_transform (i, track.shape, *t_args))
    def pull(self):
        # Nothing in flight means the epoch is over.
        if self.core.pulls >= self.core.pushes:
            raise StopIteration
        return self.core.waitBatch(block=True, timeout=self.timeout)
    def fill_one_batch(self):
        self.push(next(self.batch_iter))
    def check_or_set_batch_size(self, indices):
        """Record the batch size on first use; reject larger later batches."""
        iter_batch_size = len(indices)
        if self.batch_size is None:
            self.batch_size = iter_batch_size
        elif self.batch_size < iter_batch_size:
            raise RuntimeError("Batch size expected to be {}, but iterator returned larger batch size {}!"
                               .format(self.batch_size, iter_batch_size))
        elif self.batch_size > iter_batch_size:
            # A short batch is the last one; honor drop_last by ending here.
            if self.drop_last:
                raise StopIteration
    def garbage_collect(self):
        # Drop our references, collect cycles, then return cached CUDA blocks.
        self.core = None
        self.multibuffer = None
        gc.collect()
        torch.cuda.empty_cache()
|
import math
def solve(n):
    """Return the multiple of n at which all ten digits 0-9 have appeared.

    Counting sheep: look at n, 2n, 3n, ... and track every decimal digit
    seen. Returns the multiple (as a string) that completes the set, or
    "INSOMNIA" when the digits can never all appear (n == 0) or do not
    appear within 99 multiples.
    """
    if n == 0:
        return "INSOMNIA"
    digits_seen = set()
    for multiple in range(1, 100):
        current = str(multiple * n)
        digits_seen.update(current)
        if len(digits_seen) == 10:
            return current
    return "INSOMNIA"
name = "storage/emulated/0/codejam/A-large"
fi = open(name + ".in", "r")
fout = open(name + ".out", "w")
numTestCases = int(fi.readline())
print "#TestCases: ", numTestCases
for i in range(0, numTestCases):
line = fi.readline().strip().split(" ")
line = map(int, line)[0]
fout.write("Case #" + str(i + 1) + ": " + solve(line) + "\n")
print "Case #" + str(i + 1) + ": " + solve(line)
fi.close()
fout.close() |
import os.path
from data.base_dataset import BaseDataset, get_transform
import torch.nn.functional as F
from PIL import Image
import pandas as pd
import numpy as np
import torch
class KeyDataset(BaseDataset):
    """Dataset of person images with pose keypoint maps and semantic maps.

    Train mode yields a single image with its pose/semantic data; test mode
    yields (from, to) image pairs for pose transfer.
    """
    def initialize(self, opt):
        """Set up directory paths and the image list from *opt*."""
        self.opt = opt
        self.root = opt.dataroot
        # Both phases use <dataroot>/<phase> for images, <phase>K for pose
        # keypoint maps, and a shared pose connection-map folder.
        if opt.phase == 'train':
            self.dir_P = os.path.join(opt.dataroot, opt.phase)
            self.dir_K = os.path.join(opt.dataroot, opt.phase + 'K')
            self.dir_conn_map = os.path.join(opt.dataroot, 'pose_connect_map')
        elif opt.phase == 'test':
            self.dir_P = os.path.join(opt.dataroot, opt.phase)
            self.dir_K = os.path.join(opt.dataroot, opt.phase + 'K')
            self.dir_conn_map = os.path.join(opt.dataroot, 'pose_connect_map')
        self.dir_SP = opt.dataroot
        # Number of semantic-map channels (one per semantic class).
        self.SP_input_nc = opt.SP_input_nc
        if opt.phase == 'train':
            self.init_categories_train(opt.unpairLst)
        elif opt.phase == 'test':
            self.init_categories_test(opt.pairLst)
        self.transform = get_transform(opt)
    def init_categories_train(self, unpairLst):
        """Load the unpaired image-name list (CSV with an images_name column)."""
        unpairlist = pd.read_csv(unpairLst)
        self.size = len(unpairlist)
        self.imgs = []
        print('Loading data unpairs ...')
        for i in range(self.size):
            img = unpairlist.iloc[i]['images_name']
            self.imgs.append(img)
        print('Loading data unpairs finished ...')
    def init_categories_test(self, pairLst):
        """Load the (from, to) image-pair list (CSV with from/to columns)."""
        pairlist = pd.read_csv(pairLst)
        self.size = len(pairlist)
        self.imgs = []
        print('Loading data pairs ...')
        for i in range(self.size):
            img = [pairlist.iloc[i]['from'], pairlist.iloc[i]['to']]
            self.imgs.append(img)
        print('Loading data pairs finished ...')
    def __getitem__(self, index):
        """Return one sample dict; contents depend on the phase."""
        if self.opt.phase == 'train':
            # person image
            P1_name = self.imgs[index]
            P1_path = os.path.join(self.dir_P, P1_name)
            P1_img = Image.open(P1_path).convert('RGB')
            P1_img = P1_img.resize((176, 256))
            P1 = self.transform(P1_img)
            # pose: keypoint heatmaps plus the limb connection map.
            BP2_path = os.path.join(self.dir_K, P1_name + '.npy')
            BP2_img = np.load(BP2_path)
            PCM2_path = os.path.join(self.dir_conn_map, P1_name + '.npy')
            PCM2_mask = np.load(PCM2_path)
            BP2 = torch.from_numpy(BP2_img).float() #h,w,c
            BP2 = BP2.transpose(2, 0) #c,w,h
            BP2 = BP2.transpose(2, 1) #c,h,w
            PCM2_mask = torch.from_numpy(PCM2_mask).float()
            BP2 = torch.cat([BP2, PCM2_mask], 0)
            # semantic: one-hot channels built from the stored label map.
            SP1_name = self.split_name(P1_name, 'semantic_merge3')
            SP1_path = os.path.join(self.dir_SP, SP1_name)
            SP1_path = SP1_path[:-4] + '.npy'
            SP1_data = np.load(SP1_path)
            SP1 = np.zeros((self.SP_input_nc, 256, 176), dtype='float32')
            # NOTE(review): `parti` is assigned but never used — confirm it
            # can be removed.
            parti = np.random.randint(1, 8)
            for id in range(self.SP_input_nc):
                if id == 6 or id == 7: # arms and legs
                    # Randomly drop arm/leg channels 30% of the time
                    # (augmentation during training).
                    if np.random.random() > 0.7:
                        continue
                SP1[id] = (SP1_data == id).astype('float32')
            return {'P1': P1, 'SP1': SP1, 'BP2': BP2, 'P1_path': P1_name}
        elif self.opt.phase == 'test':
            # person image pair: source P1 and target P2.
            P1_name, P2_name = self.imgs[index]
            P1_path = os.path.join(self.dir_P, P1_name)
            P1_img = Image.open(P1_path).convert('RGB')
            P2_path = os.path.join(self.dir_P, P2_name)
            P2_img = Image.open(P2_path).convert('RGB')
            P1_img = P1_img.resize((176, 256))
            P2_img = P2_img.resize((176, 256))
            P1 = self.transform(P1_img)
            P2 = self.transform(P2_img)
            # pose of the *target* image.
            BP2_path = os.path.join(self.dir_K, P2_name + '.npy')
            PCM2_path = os.path.join(self.dir_conn_map, P2_name + '.npy')
            PCM2_mask = np.load(PCM2_path)
            BP2_img = np.load(BP2_path)
            BP2 = torch.from_numpy(BP2_img).float()
            BP2 = BP2.transpose(2, 0) #c,w,h
            BP2 = BP2.transpose(2, 1) #c,h,w
            PCM2_mask = torch.from_numpy(PCM2_mask).float()
            BP2 = torch.cat([BP2, PCM2_mask], 0)
            # semantic map of the *source* image; no random dropping at test time.
            SP1_name = self.split_name(P1_name, 'semantic_merge3')
            SP1_path = os.path.join(self.dir_SP, SP1_name)
            SP1_path = SP1_path[:-4] + '.npy'
            SP1_data = np.load(SP1_path)
            SP1 = np.zeros((self.SP_input_nc, 256, 176), dtype='float32')
            for id in range(self.SP_input_nc):
                SP1[id] = (SP1_data == id).astype('float32')
            return {'P1': P1, 'SP1': SP1, 'P2': P2, 'BP2': BP2, 'P1_path': P1_name, 'P2_path': P2_name}
    def __len__(self):
        # Same list length in both phases.
        if self.opt.phase == 'train':
            return self.size
        elif self.opt.phase == 'test':
            return self.size
    def name(self):
        return 'KeyDataset'
    def split_name(self,str,type):
        """Rebuild the on-disk path of *type* data from a flat fashion image name.

        Decomposes names like 'fashionWOMEN...id0000012345_1front.jpg' into
        the nested folder layout. NOTE(review): the parameters `str`/`type`
        and locals `list`/`id` shadow builtins within this method.
        """
        list = []
        list.append(type)
        # 'WO' right after the 'fashion' prefix marks WOMEN (5 chars) vs MEN (3).
        if (str[len('fashion'):len('fashion') + 2] == 'WO'):
            lenSex = 5
        else:
            lenSex = 3
        list.append(str[len('fashion'):len('fashion') + lenSex])
        # Everything between the sex segment and the 'id0...' marker is the category.
        idx = str.rfind('id0')
        list.append(str[len('fashion') + len(list[1]):idx])
        id = str[idx:idx + 10]
        list.append(id[:2]+'_'+id[2:])
        pose = str[idx + 10:]
        list.append(pose[:4]+'_'+pose[4:])
        head = ''
        for path in list:
            head = os.path.join(head, path)
        return head
|
import matplotlib.pyplot as plt
from numpy import cos, arange
# Sample points; start at 0.01 rather than 0 to avoid a divide-by-zero
# (cos(0**2)/0) at the first element — the original produced a runtime
# warning and an inf value there.
t = arange(0.01, 5.0, 0.01)
x = arange(0.01, 5.0, 0.01)
# Plot cos(x^2)/x against t.
plt.plot(t, cos(x**2)/x)
plt.show()
|
import argparse
import numpy as np
import sys
import json
import models
import torch
import torch.nn as nn
from torchvision import transforms
import torch.nn.functional as F
import base
from torch.utils.data import DataLoader, Dataset
import os
from tqdm import tqdm
import PIL
import nibabel as nib
from utils.metrics import eval_metrics, AverageMeter
import cv2 as cv
def main():
    """Evaluate a trained CCT segmentation model on one site's test set.

    Loads the config and checkpoint named on the command line, runs inference
    over the site's test dataset, accumulates pixel-accuracy / IoU / Dice
    metrics, and writes prediction images (plus optional contour overlays)
    and a metrics summary under outputs/<site>/<experiment>/.
    """
    # get the argument from parser
    args = parse_arguments()
    # CONFIG -> assert if config is here
    assert args.config
    config = json.load(open(args.config))
    # DATA
    testdataset = base.testDataset(args.site)
    loader = DataLoader(testdataset, batch_size=1, shuffle=False, num_workers=0)
    num_classes = testdataset.num_classes
    # MODEL: evaluation uses the supervised branch only.
    config['model']['supervised'] = True; config['model']['semi'] = False
    encoder = models.model.Encoder(True)
    model = models.model.CCT(encoder, num_classes=num_classes, conf=config['model'], testing=True)
    map_location = args.map
    checkpoint = torch.load(args.model, map_location)
    if map_location == 'cpu':
        # Strip the DataParallel 'module.' prefix so keys match a bare model.
        for key in list(checkpoint['state_dict'].keys()):
            if 'module.' in key:
                checkpoint['state_dict'][key.replace('module.', '')] = checkpoint['state_dict'][key]
                del checkpoint['state_dict'][key]
    try:
        model.load_state_dict(checkpoint['state_dict'], strict=True)
    except Exception as e:
        # Fall back to a partial load when some modules are absent.
        print(f'Some modules are missing: {e}')
        model.load_state_dict(checkpoint['state_dict'], strict=False)
    model.float()
    model.eval()
    if args.map == 'gpu':
        model.cuda()
    check_directory(args.site, args.experiment)
    # LOOP OVER THE DATA
    tbar = tqdm(loader, ncols=100)
    total_loss_val = AverageMeter()
    total_inter, total_union = 0, 0
    total_correct, total_label = 0, 0
    total_dice = 0
    count = 0
    for index, data in enumerate(tbar):
        image, label, image_id = data
        if args.map == 'gpu':
            image = image.cuda()
        # PREDICT
        with torch.no_grad():
            output = model(image)
            correct, labeled, inter, union, dice = eval_metrics(output, label, num_classes, args.map)
            total_inter, total_union = total_inter + inter, total_union + union
            total_correct, total_label = total_correct + correct, total_label + labeled
            # Running sample-weighted average of the Dice score.
            total_dice = ((count * total_dice) + (dice * output.size(0))) / (count + output.size(0))
            count += output.size(0)
            # np.spacing(1) guards against division by zero.
            pixAcc = 1.0 * total_correct / (np.spacing(1) + total_label)
            IoU = 1.0 * total_inter / (np.spacing(1) + total_union)
            mIoU = IoU.mean()
            mdice = total_dice.mean()
            seg_metrics = {"Pixel_Accuracy": np.round(pixAcc, 3), "Mean_IoU": np.round(mIoU, 3),
                           "Mean_dice": np.round(mdice, 3),
                           "Class_IoU": dict(zip(range(num_classes), np.round(IoU, 3))),
                           "Class_dice": dict(zip(range(num_classes), np.round(total_dice, 3)))}
        tbar.set_description('EVAL | Loss: {:.3f}, PixelAcc: {:.2f}, Mean IoU: {:.2f} Mean Dice {:.2f} |'.format(
            total_loss_val.average, pixAcc, mIoU, mdice))
        # Class map = argmax over the class dimension.
        output = torch.argmax(output, dim=1)
        # NOTE(review): .numpy() requires CPU tensors — this presumably fails
        # when args.map == 'gpu' without a .cpu() first; confirm.
        prediction = output.numpy()
        label = label.numpy()
        predictions = batch_scale(prediction)
        labels = batch_scale(label)
        if args.overlay:
            prediction_contours = batch_contour(predictions)
            label_contours = batch_contour(labels)
        # SAVE RESULTS
        for i in range(predictions.shape[0]):
            prediction_im = PIL.Image.fromarray(predictions[i])
            prediction_im.save(f'outputs/{args.site}/{args.experiment}/{image_id[i]}_prediction.png')
            label_im = PIL.Image.fromarray(labels[i])
            label_im.save(f'outputs/{args.site}/{args.experiment}/{image_id[i]}_label.png')
        if args.overlay:
            image = image.numpy()
            image = np.squeeze(image, axis=1)
            image = batch_scale(image)
            palette = contour_palette(testdataset.site)
            # Draw ground-truth and prediction contours on copies of the input.
            for i in range(image.shape[0]):
                image_gt = cv.cvtColor(image[i].copy(), cv.COLOR_GRAY2RGB)
                image_pred = cv.cvtColor(image[i].copy(), cv.COLOR_GRAY2RGB)
                cv.drawContours(image_gt, label_contours[i], -1, (palette[0], palette[1], palette[2]), 1)
                cv.drawContours(image_pred, prediction_contours[i], -1, (palette[0], palette[1], palette[2]), 1)
                cv.imwrite(f'outputs/{args.site}/{args.experiment}/{image_id[i]}_label_overlay.png', image_gt)
                cv.imwrite(f'outputs/{args.site}/{args.experiment}/{image_id[i]}_prediction_overlay.png', image_pred)
    # Persist the final metrics (one "key: value" per line).
    with open(f'outputs/{args.site}/{args.experiment}/test.txt', 'w') as f:
        for k, v in list(seg_metrics.items()):
            f.write("%s\n" % (k + ':' + f'{v}'))
def _str2bool(value):
    """Parse a command-line boolean: accepts true/false, 1/0, yes/no (case-insensitive)."""
    if isinstance(value, bool):
        return value
    lowered = value.lower()
    if lowered in ('true', '1', 'yes', 'y'):
        return True
    if lowered in ('false', '0', 'no', 'n'):
        return False
    raise argparse.ArgumentTypeError(f'Boolean value expected, got {value!r}')
def parse_arguments():
    """Build and parse the command-line arguments for testing.

    Returns:
        argparse.Namespace with config, model, site, map, experiment, overlay.
    """
    parser = argparse.ArgumentParser(description='PyTorch Training')
    parser.add_argument('--config', default='configs/config.json', type=str,
                        help='Path to the config file')
    parser.add_argument('--model', default=None, type=str,
                        help='Path to the trained .pth model')
    parser.add_argument('--site', default="BIDMC", type=str,
                        help='site to test')
    parser.add_argument('--map', default="cpu", type=str,
                        help='map location')
    parser.add_argument('--experiment', default=None, type=str,
                        help='experiment name')
    # Fixed: type=bool is an argparse pitfall — bool("False") is True because
    # any non-empty string is truthy, so "--overlay False" enabled overlays.
    # _str2bool keeps the existing "--overlay True/False" CLI working.
    parser.add_argument('--overlay', default=False, type=_str2bool,
                        help='return original image with overlay of the ground truth and predicted segmentation')
    args = parser.parse_args()
    return args
def check_directory(site, experiment):
    """Ensure the output directory outputs/<site>/<experiment> exists.

    A single makedirs with exist_ok=True creates all missing parents and is
    race-free, unlike the original exists()-then-makedirs chain.
    """
    os.makedirs(f'outputs/{site}/{experiment}', exist_ok=True)
def batch_scale(image):
    """Scale each 2-D slice of a batch to the 0-255 range and return uint8.

    Constant (zero-range) slices are special-cased: a constant value of 2 is
    presumably a label map, so it is halved first to land on 255 — confirm
    against the label encoding used by the callers.
    """
    for i, img in enumerate(image):
        # Value range of this slice; zero means the slice is constant.
        a = np.amax(img) - np.amin(img)
        if a == 0:
            if np.amax(img) == 2:
                img = img / 2
                image[i, :, :] = 255 * img
            else:
                image[i, :, :] = 255 * img
        else:
            # Min-max normalize to [0, 1] before scaling to [0, 255].
            img = (img - np.amin(img)) / (np.amax(img) - np.amin(img))
            image[i, :, :] = 255 * img
    return np.uint8(image)
def batch_contour(image):
    """Find contours in each slice of a 3-D image stack.

    Returns a list with one tuple of contours (the first element of the
    cv.findContours result) per slice.
    """
    return [cv.findContours(img_slice, cv.RETR_TREE, cv.CHAIN_APPROX_NONE)[0]
            for img_slice in image]
def denormalize(image, mean, std):
    """Undo a mean/std normalisation: return image * std + mean."""
    return image * std + mean
def contour_palette(site):
    """Return the contour colour (BGR triplet) assigned to a dataset site.

    Raises KeyError for an unknown site name.
    """
    site_colours = {
        'ISBI': [0, 0, 255],
        'ISBI_15': [0, 255, 0],
        'I2CVB': [255, 0, 0],
        'BIDMC': [0, 125, 255],
        'HK': [0, 255, 255],
        'UCL': [255, 0, 125],
    }
    return site_colours[site]
# Script entry point: run the evaluation pipeline defined in main() above.
if __name__ == '__main__':
    main()
|
from threading import Thread
import os
from queue import Queue
def returnName(name):
    """Print the given name to stdout (despite the name, returns nothing)."""
    print(name)
# Spawn one worker thread per CPU core, start them all, then wait for each
# to finish.
if __name__ == '__main__':
    workers = [Thread(target=returnName, args=("Bruce Wayne",))
               for _ in range(os.cpu_count())]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
|
from django.db import models
from django.contrib.auth.models import User
from user.models import Department
# class Comment(models.Model):
# content_object = models.ForeignKey(Homework, on_delete=models.DO_NOTHING)
#
# text = models.TextField()
# comment_time = models.DateTimeField(auto_now_add=True)
# user = models.ForeignKey(User, on_delete=models.DO_NOTHING)
#
# class Meta:
# ordering = ['-comment_time']
#
#
# class Teacher_Comment(models.Model):
# content_object = models.ForeignKey(Teacher, on_delete=models.DO_NOTHING)
#
# text = models.TextField()
# comment_time = models.DateTimeField(auto_now_add=True)
# user = models.ForeignKey(User, on_delete=models.DO_NOTHING)
#
# class Meta:
# ordering = ['-comment_time']
class questionsSearched(models.Model):
    """Tracks how often a question has been searched, per department."""
    # The question text (verbose_name: "question").
    questionsName = models.TextField(verbose_name="问题")
    # How many times this question has been searched.
    numSearched = models.IntegerField(default=0, verbose_name="次数")
    # Department (hospital unit) the question belongs to.
    department = models.ForeignKey(Department, on_delete=models.DO_NOTHING, verbose_name="科室")
    # Set once when the row is created (first search).
    timeFirstSearched = models.DateTimeField(auto_now_add=True, verbose_name="第一次被查询")
    # Refreshed on every save (most recent search).
    timeLastSearched = models.DateTimeField(auto_now=True, verbose_name="最后一次被查询")
class thesis(models.Model):
    """A thesis/paper reference: title, a single keyword, and a link."""
    title = models.CharField(max_length=100)
    key_word = models.CharField(max_length=16)
    link = models.TextField()
|
import os
import argparse
import random
import numpy as np
from utils.misc import get_datetime, str2bool
from utils.file import copy, prepare_dirs, write_record
from tqdm import tqdm
# Chinese display names for the 40 CelebA attributes, in the dataset's
# canonical attribute order (list index i-1 corresponds to 1-based attr i).
celeba_attr_names = [
    '新双颊胡须', '柳叶眉', '吸引人', '眼袋', '秃头',
    '刘海', '大嘴唇', '大鼻子', '黑发', '金发',
    '模糊', '棕发', '浓眉', '圆胖', '双下巴',
    '戴眼镜', '山羊胡子', '灰发', '浓妆', '高颧骨',
    '男性', '微张嘴巴', '八字胡', '细眼睛', '无胡子',
    '椭圆脸', '苍白皮肤', '尖鼻子', '高发际线', '红润双颊',
    '络腮胡', '微笑', '直发', '卷发', '戴耳环',
    '戴帽子', '涂唇膏', '戴项链', '戴领带', '年轻人']
# Negated counterparts of celeba_attr_names, used when an attribute is
# selected with a negative index (i.e. absence of the attribute).
celeba_attr_names_opposite = [
    '无新双颊胡须', '非柳叶眉', '不吸引人', '无眼袋', '非秃头',
    '无刘海', '非大嘴唇', '非大鼻子', '非黑发', '非金发',
    '清晰', '非棕发', '非浓眉', '非圆胖', '非双下巴',
    '无眼镜', '非山羊胡子', '非灰发', '非浓妆', '非高颧骨',
    '女性', '非微张嘴巴', '非八字胡', '非细眼睛', '有胡子',
    '非椭圆脸', '非苍白皮肤', '非尖鼻子', '非高发际线', '非红润双颊',
    '非络腮胡', '非微笑', '非直发', '非卷发', '无耳环',
    '无帽子', '无唇膏', '无项链', '无领带', '非年轻人']
def process_labels(cfg, attr_file):
    """Read a CelebA-style attribute annotation file.

    File layout: line 0 is the sample count, line 1 the attribute header,
    then one "<image> <+1/-1>..." row per sample.

    Returns:
        attr_matrix: (num_attrs, num_samples) array of +1/-1 labels.
        attr_dict: maps "a<i>" to the sample indices where attribute i is +1.
    """
    with open(os.path.join(cfg.dataset_path, attr_file)) as fh:
        rows = fh.read().splitlines()
    num_samples = int(rows[0])
    num_attrs = len(rows[1].strip().split(' '))
    labels = np.zeros((num_samples, num_attrs))
    for row_idx in range(num_samples):
        values = rows[row_idx + 2].strip().split()[1:]
        assert len(values) == num_attrs
        labels[row_idx] = values
    labels = labels.T
    positives = {f"a{attr_idx}": np.where(labels[attr_idx] == 1)[0]
                 for attr_idx in range(num_attrs)}
    return labels, positives
def main(cfg):
    """Split CelebA(-HQ) images into train/test folders per attribute combo.

    Args:
        cfg: Namespace with dataset_path, output_path, seed, is_hq,
             ratio or test_num, and optional fixed_num.
    """
    random.seed(cfg.seed)
    os.makedirs(cfg.output_path)
    if cfg.is_hq:
        attr_file = "CelebAMask-HQ-attribute-anno.txt"
        src_path = "CelebA-HQ-img"
    else:
        attr_file = "list_attr_celeba.txt"
        src_path = "images"
    record_path = os.path.join(cfg.output_path, 'README.txt')
    # BUG FIX: previously recorded the module-level global `args`; use the
    # cfg parameter so main() also works when called with another namespace.
    write_record(str(cfg.__dict__), record_path)
    attr_matrix, attr_dict = process_labels(cfg, attr_file)
    selected_attrs = [[32], [-32]]  # smiling / not smiling (1-based attr 32)
    attr_names = []
    if len(attr_names) == 0:
        # Build a human-readable folder name for each attribute combination.
        attr_names = [','.join([(celeba_attr_names[a - 1] if a > 0 else celeba_attr_names_opposite[-a - 1]) for a in x])
                      for x in selected_attrs]
    indices_list = []
    max_sample_num = 0
    for i in range(len(attr_names)):
        selected_attr = selected_attrs[i]
        # A sample qualifies only if it matches every attribute in the combo
        # (+a means the attribute is present, -a means absent).
        indices = np.ones((attr_matrix.shape[1],), dtype=bool)
        for a in selected_attr:
            indices *= attr_matrix[abs(a) - 1] == (1 if a > 0 else -1)
        write_record(f"{attr_names[i]}({selected_attrs[i]}): {indices.sum()}", record_path)
        max_sample_num = max(max_sample_num, indices.sum())
        indices_list.append(np.where(indices)[0])
    if cfg.fixed_num:
        max_sample_num = cfg.fixed_num
    input(f"Press enter to start splitting dataset...")
    src_path = os.path.join(cfg.dataset_path, src_path)
    for indices, attr_name in zip(indices_list, attr_names):
        indices = indices[:max_sample_num]
        if cfg.test_num:
            train_num = len(indices) - cfg.test_num
        else:
            train_num = int(len(indices) * (1 - cfg.ratio))
        random.shuffle(indices)
        train_indices = indices[:train_num]
        test_indices = indices[train_num:]
        train_dst_path = os.path.join(cfg.output_path, 'train', attr_name)
        test_dst_path = os.path.join(cfg.output_path, 'test', attr_name)
        prepare_dirs([train_dst_path, test_dst_path])
        for i in tqdm(train_indices):
            # HQ images are named "<index>.jpg"; original CelebA filenames
            # are 1-based and zero-padded to six digits.
            filename = f"{i}.jpg" if cfg.is_hq else f"{i + 1:06}.jpg"
            copy(filename, src_path, train_dst_path)
        for i in tqdm(test_indices):
            filename = f"{i}.jpg" if cfg.is_hq else f"{i + 1:06}.jpg"
            copy(filename, src_path, test_dst_path)
# Script entry point: parse CLI options and run the dataset split.
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataset_path', type=str, default=r"D:\Data\CelebAMask-HQ")
    parser.add_argument('--save_path', type=str, default=r"D:\Data\celeba_splits")
    # Default run name is the current timestamp.
    parser.add_argument('--save_name', type=str, default=get_datetime(True))
    parser.add_argument('--seed', type=int, default=0)
    # Fraction of samples reserved for testing; ignored when --test_num is set.
    parser.add_argument('--ratio', type=float)
    parser.add_argument('--test_num', type=int, default=1412)
    parser.add_argument('--is_hq', type=str2bool, default=True)
    parser.add_argument('--fixed_num', type=int)
    args = parser.parse_args()
    args.output_path = os.path.join(args.save_path, args.save_name)
    main(args)
|
from django.core.management.base import BaseCommand
import os
import logging
import numpy as np
from acacia.meetnet.models import Well
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """Check raw logger data against the QC3 quality-control criteria.

    Writes one CSV row per monitoring file to QC3.csv, counting samples that
    violate each QC3 rule: water level below the tube bottom (3a), filter
    bottom (3b) or sensor (3c), or above ground level (3d) or tube top (3e).
    """
    args = ''
    help = 'check raw data for QC3'

    def handle(self, *args, **options):
        # Cache of converted air-pressure series keyed by baro series, so a
        # series shared by several wells is fetched/converted only once.
        baros = {}
        with open('QC3.csv','w') as csv:
            csv.write('screen,file,from,to,points,3a-buis,3b-filter,3c-sensor,3d-vol,3e-overloop\n')
            for well in Well.objects.all():
                if not hasattr(well,'meteo') or well.meteo.baro is None:
                    logger.error('No air pressure defined for well {well}'.format(well=well))
                    continue
                baroseries = well.meteo.baro
                logger.info('well {well}, air pressure = {pressure}'.format(well=well,pressure=baroseries.meetlocatie().name))
                if baroseries in baros:
                    baro = baros[baroseries]
                else:
                    baro = baroseries.to_pandas()
                    # if baro datasource = KNMI then convert from hPa to cm H2O
                    dsbaro = baroseries.datasource()
                    if dsbaro:
                        gen = dsbaro.generator
                        if 'knmi' in gen.name.lower() or 'knmi' in gen.classname.lower():
                            # divide by local gravity (default 9.80638 m/s2)
                            baro = baro / (well.g or 9.80638)
                    baros[baroseries] = baro
                barostart = baro.index[0]
                baroend = baro.index[-1]
                for screen in well.screen_set.all():
                    logger.info(unicode(screen))
                    for pos in screen.loggerpos_set.order_by('start_date'):
                        for sf in pos.monfile_set.order_by('start_date'):
                            try:
                                fname = os.path.basename(sf.file.name)
                                df = sf.get_data()
                                if isinstance(df,dict):
                                    # take the first series when get_data returns a dict
                                    df = df.itervalues().next()
                                if df is None or df.empty:
                                    logger.warning('File {} skipped: no data'.format(fname))
                                    continue
                                # de-duplicate timestamps (keep last) and sort
                                data = df['PRESSURE'].dropna().groupby(df.index).last().sort_index()
                                # BUG FIX: dataend was read from data.index[0]
                                # (the first sample) instead of index[-1], so
                                # the overlap checks below were meaningless.
                                dataend = data.index[-1]
                                if dataend < barostart:
                                    logger.warning('File {} skipped: no air pressure data available before {}'.format(fname,barostart))
                                    continue
                                datastart = data.index[0]
                                if datastart > baroend:
                                    logger.warning('File {} skipped: no air pressure data available after {}'.format(fname,baroend))
                                    continue
                                if datastart < barostart:
                                    logger.warning('File {} only partly checked: no air pressure data available before {}'.format(fname,barostart))
                                if dataend > baroend:
                                    logger.warning('File {} only partly checked: no air pressure data available after {}'.format(fname,baroend))
                                # interpolate air pressure onto the data's timestamps
                                adata, abaro = data.align(baro)
                                abaro = abaro.interpolate(method='time')
                                abaro = abaro.reindex(data.index)
                                # blank out values outside the baro series' range
                                abaro[:barostart] = np.NaN
                                abaro[baroend:] = np.NaN
                                # compensate raw pressure for air pressure
                                data = data - abaro
                                data.dropna(inplace=True)
                                zmaaiveld = screen.well.maaiveld
                                zsensor = pos.refpnt - pos.depth
                                ztopbuis = screen.refpnt
                                zbottombuis = screen.refpnt - screen.depth
                                zbottomfilter = screen.refpnt - screen.bottom
                                # water level = compensated pressure (cm) / 100 + sensor elevation
                                level = data / 100.0 + zsensor
                                qc3a = level[level < zbottombuis].count()
                                qc3b = level[level < zbottomfilter].count()
                                qc3c = level[level < zsensor].count()
                                qc3d = level[level > zmaaiveld].count()
                                qc3e = level[level > ztopbuis].count()
                                txt = ','.join(map(str,[screen,fname,sf.start,sf.stop,sf.rows,
                                    qc3a, qc3b, qc3c, qc3d, qc3e]))
                                logger.debug(txt)
                                csv.write('{}\n'.format(txt))
                            except Exception:
                                # BUG FIX: was a bare 'except: pass' that hid
                                # every failure; keep the best-effort loop but
                                # record what went wrong.
                                logger.exception('QC3 check failed for a file of {}'.format(screen))
|
import pandas as pd
import numpy as np
from app.users_orm import Users, add_user, users_get_all
from app.posts_orm import Posts, add_post
from app.groups_orm import Groups, add_group, groups_get_all
from app.user_subscribes_to_group_orm import UserSubscribes_toGroup, add_user_subscriptions
from app import db
from sqlalchemy.exc import IntegrityError
file_name = '/home/ubuntu/Documents/Praxisprojekt/german_reddit_submissions_20k.pickle'
top_n_user = 1000
def insert_test_reddit():
    """Load the most active reddit users, their subreddits, posts and
    subscriptions from the pickled submissions dump into the database.

    DB ids are assumed to be assigned sequentially from 1 in insertion
    order, matching the enumeration order of the name->id maps below.
    """
    df = pd.read_pickle(file_name)
    # User: drop deleted authors, keep only the top_n_user most active.
    df = df[df['author'] != '[deleted]']
    count_sub = df['author'].value_counts()
    top_user = list(count_sub.index[:top_n_user])
    df = df[df['author'].isin(top_user)]
    for u in top_user:
        user = Users(u, '123')
        add_user(user)
    # PERF FIX: map names to (1-based) DB ids once instead of calling
    # list.index() inside the loops below (O(n) per lookup -> O(n^2) total).
    user_id_by_name = {name: idx + 1 for idx, name in enumerate(top_user)}
    # Groups
    count_reddits = df['subreddit'].value_counts()
    reddit_groups = list(count_reddits.index)
    for g in reddit_groups:
        group = Groups(g, 1)
        add_group(group)
    group_id_by_name = {name: idx + 1 for idx, name in enumerate(reddit_groups)}
    # Posts
    for i, row in df.iterrows():
        text = str(row['title']) + ' ' + str(row['selftext'])
        post = Posts(text,
                     user_id_by_name[str(row['author'])],
                     group_id_by_name[str(row['subreddit'])])
        add_post(post)
    # User Subscriptions: one entry per (user, subreddit) the user posted in.
    for u in top_user:
        user_profile = df[df['author'] == u]
        user_groups = list(user_profile['subreddit'].unique())
        user_id = user_id_by_name[u]
        for g_name in user_groups:
            user_subscription = UserSubscribes_toGroup(user_id, group_id_by_name[g_name])
            add_user_subscriptions(user_subscription)
def _add_embedded_test_user(username, group_names):
    """Create a test user whose embedding is the mean of the named groups'
    embeddings, and subscribe them to each group.

    Duplicate inserts are tolerated (IntegrityError -> rollback), so the
    function is idempotent across repeated runs.
    """
    groups = [Groups.query.filter_by(name=name).first() for name in group_names]
    group_embeddings = [g.embedding for g in groups]
    new_user = Users(username, '123')
    new_user.embedding = list(map(float, list(np.mean(np.array(group_embeddings), axis=0))))
    try:
        add_user(new_user)
    except IntegrityError:
        # User already exists -- keep going and just (re)subscribe them.
        db.session.rollback()
    user = Users.query.filter_by(username=new_user.username).first()
    for g in groups:
        try:
            new_user_sub = UserSubscribes_toGroup(user.id, g.id)
            add_user_subscriptions(new_user_sub)
        except IntegrityError:
            # Subscription already exists.
            db.session.rollback()


def insert_test_user():
    """Insert the two canned test users.

    Refactored: the two verbatim-duplicated blocks (YouTube/influencer user
    and Alt. Fakten/AFD user) now share the _add_embedded_test_user helper.
    """
    # Youtube/ Influencer User
    _add_embedded_test_user('test_user1', ['Papaplatte', 'PietSmiet', 'Klengan'])
    # Alt. Fakten/ AFD
    _add_embedded_test_user('test_user2', ['AFD', 'Volksverpetzer', 'alt_fakten'])
|
import boto3
import time
import sys
# AWS profile name is the first CLI argument.
profile=sys.argv[1]
# Any region can list regions; us-east-1 is used only for bootstrapping.
region_session = boto3.Session(region_name='us-east-1', profile_name=profile)
r = region_session.client('ec2')
# All region names visible to this account/profile.
regions = [region['RegionName'] for region in r.describe_regions()['Regions']]
def checkVolumes(region):
    """Count EBS volumes in `region` that are unattached (status 'available')."""
    session = boto3.Session(region_name=region, profile_name=profile)
    client = session.client('ec2')
    available = client.describe_volumes(
        Filters=[{'Name': 'status', 'Values': ['available']}])["Volumes"]
    return len(available)
def checkSnapshots(region):
    """Count EBS snapshots owned by this account in `region`."""
    client = boto3.Session(region_name=region, profile_name=profile).client('ec2')
    owned = client.describe_snapshots(OwnerIds=['self'])["Snapshots"]
    return len(owned)
def checkAttachedVolume(region):
    """Count reservations in `region` whose first instance has more than two
    block devices attached."""
    client = boto3.Session(region_name=region, profile_name=profile).client('ec2')
    reservations = client.describe_instances()['Reservations']
    return sum(
        1 for reservation in reservations
        if len(reservation["Instances"][0]['BlockDeviceMappings']) > 2)
def checkStoppedInstances(region):
    """Count reservations in `region` that contain stopped instances."""
    client = boto3.Session(region_name=region, profile_name=profile).client('ec2')
    stopped = client.describe_instances(
        Filters=[{'Name': 'instance-state-name', 'Values': ['stopped']}])['Reservations']
    return len(stopped)
def checkEIPs(region):
    """Count Elastic IPs in `region` not associated with any instance."""
    client = boto3.Session(region_name=region, profile_name=profile).client('ec2')
    unattached = client.describe_addresses(
        Filters=[{'Name': 'instance-id', "Values": [""]}])["Addresses"]
    return len(unattached)
def main():
    """Emit the requested account metric in Graphite plaintext format.

    The metric name is the second CLI argument. Each branch sums a
    per-region check over every region.

    Refactored: the five copy-pasted accumulate-over-regions loops now share
    one helper, and the Python-2-only `print` statements were rewritten as
    single-argument print() calls, which behave identically under Python 2
    and are valid Python 3.
    """
    def _total(counter):
        # Sum a per-region check function over all discovered regions.
        return sum(counter(reg) for reg in regions)

    metric = sys.argv[2]
    #metric="unattached-eips"
    if metric == "snapshots":
        print("AWSAccountMetrics.{}.snapshotCount {} {}".format(profile, _total(checkSnapshots), time.time()))
    elif metric == "volumes":
        print("AWSAccountMetrics.{}.volumeCount {} {}".format(profile, _total(checkVolumes), time.time()))
    elif metric == "attached-volumes":
        print("AWSAccountMetrics.{}.attachedVolumesGreaterThanTwo {} {}".format(profile, _total(checkAttachedVolume), time.time()))
    elif metric == "stopped-instances":
        print("AWSAccountMetrics.{}.stoppedInstances {} {}".format(profile, _total(checkStoppedInstances), time.time()))
    elif metric == "unattached-eips":
        print("AWSAccountMetrics.{}.unattachedEIPCount {} {}".format(profile, _total(checkEIPs), time.time()))
# Runs immediately on execution (this script has no __main__ guard).
main()
|
import numpy as np
import urllib.request
# url with dataset (Pima Indians Diabetes: 8 feature columns + 1 label column)
url = "http://archive.ics.uci.edu/ml/machine-learning-databases/pima-indians-diabetes/pima-indians-diabetes.data"
# download the file
raw_data = urllib.request.urlopen(url)
# load the CSV file as a numpy matrix
dataset = np.loadtxt(raw_data, delimiter=",")
# separate the data from the target attributes
# BUG FIX: the slice 0:7 silently dropped the 8th feature (column index 7);
# the label lives in column 8, so the features are columns 0..7 -> 0:8.
X = dataset[:, 0:8]
y = dataset[:, 8]
print(X)
print(y)
from django.contrib import admin
# Register your models here.
from .models import Tutor, Tutee
#admin.site.register(Tutor)
#admin.site.register(Tutee)
# Define the admin class
class TutorAdmin(admin.ModelAdmin):
    """Admin configuration for Tutor: list columns, filters, and grouped
    edit-form sections (contact, subjects, availability)."""
    list_display = ('name', 'email', 'mobile_no', 'subjects', 'grade', 'timeslot', 'days')
    list_filter = ('subjects', 'grade')
    fieldsets = (
        ('Tutor', {
            'fields': ('name', 'email', 'mobile_no')
        }),
        ('Subjects', {
            'fields': ('subjects', 'grade')
        }),
        ('Availability', {
            'fields': ('timeslot', 'days')
        }),
    )
# Register the admin class with the associated model
admin.site.register(Tutor, TutorAdmin)
# Define the admin class
class TuteeAdmin(admin.ModelAdmin):
    """Admin configuration for Tutee: list columns and grouped edit-form
    sections (mirrors TutorAdmin, minus the grade field)."""
    list_display = ('name', 'email', 'mobile_no', 'subjects', 'timeslot', 'days')
    fieldsets = (
        # BUG FIX: this section was labelled 'Tutor' -- a copy-paste from
        # TutorAdmin; it configures the Tutee form.
        ('Tutee', {
            'fields': ('name', 'email', 'mobile_no')
        }),
        ('Subjects', {
            # Tuple for consistency with TutorAdmin (was a list).
            'fields': ('subjects',)
        }),
        ('Availability', {
            'fields': ('timeslot', 'days')
        }),
    )
# Register the admin class with the associated model
admin.site.register(Tutee, TuteeAdmin)
|
import urllib2
import time
import datetime
stocks_to_pull = ['AAPL', 'GOOG', 'MSFT', 'CMG', 'AMZN', 'EBAY', 'TSLA']
def pullDataPart3(stock):
    """Fetch one year of daily quotes for `stock` from the Yahoo chart API
    and append the CSV data rows to <stock>.txt."""
    try:
        # Escaped backslashes: the old 'C:\Users\...' literal relied on
        # Python 2 leaving unknown escapes intact and is a SyntaxError on
        # Python 3; the doubled form yields the identical string on both.
        path = 'C:\\Users\\B40904\\Documents\\Personal\\PythonFinanceCharts\\'
        file_line = path+stock+'.txt'
        url_to_visit = 'http://chartapi.finance.yahoo.com/instrument/1.0/'+stock+'/chartdata;type=quote;range=1y/csv'
        source_code = urllib2.urlopen(url_to_visit).read()
        split_source = source_code.split('\n')
        # BUG FIX: the output file was reopened for every matching row and
        # never closed, leaking file handles; open once and let the context
        # manager close it.
        with open(file_line, 'a') as save_file:
            for line in split_source:
                split_line = line.split(',')
                # Data rows have 6 comma-separated fields; header rows
                # (containing 'values') are skipped.
                if (len(split_line) == 6) and ('values' not in line):
                    save_file.write(line+'\n')
        print('Pulled ' + stock)
        print('sleeping')
        time.sleep(1)
    except Exception as e:
        # 'as' syntax replaces the Python-2-only 'except Exception,e'.
        print('main loop ' + str(e))
def pullDataPart4(stock):
    """Append intraday quote rows newer than the last saved timestamp for
    `stock` to <stock>.txt, then sleep 10 seconds."""
    print('Currently pulling ' + stock)
    print(str(datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')))
    url_to_visit = 'http://chartapi.finance.yahoo.com/instrument/1.0/'+stock+'/chartdata;type=quote;range=1d/csv'
    # Escaped backslashes (see pullDataPart3): valid on Python 2 and 3.
    path = 'C:\\Users\\B40904\\Documents\\Personal\\PythonFinanceCharts\\'
    file_line = path+stock+'.txt'
    # Gets the last timestamp already on disk so only newer rows are appended.
    try:
        # BUG FIX: the read handle was never closed; use a with-block.
        with open(file_line, 'r') as existing:
            split_existing = existing.read().split('\n')
        most_recent_line = split_existing[-2]  # Last line is blank
        last_unix = most_recent_line.split(',')[0]
    except Exception:
        # First run for this stock (file missing/empty): accept every row.
        last_unix = 0
    source_code = urllib2.urlopen(url_to_visit).read()
    split_source = source_code.split('\n')
    with open(file_line, 'a') as save_file:
        for line in split_source:
            if 'values' not in line:
                split_line = line.split(',')
                if len(split_line) == 6:
                    # Only append rows strictly newer than the saved tail.
                    if int(split_line[0]) > int(last_unix):
                        save_file.write(line+'\n')
    print('Pulled ' + stock)
    print('sleeping')
    print(str(datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S')))
    time.sleep(10)
def part5():
    """Poll the intraday data endlessly: refresh every tracked ticker, then
    pause 10 seconds between tickers (never returns)."""
    while True:
        for ticker in stocks_to_pull:
            pullDataPart4(ticker)
            time.sleep(10)
# Kick off the endless polling loop.
part5()
# NOTE: unreachable -- part5() never returns.
print 'Done!'
from re import findall
class Date(object):
    """A simple date-time value (year, month, day, hour, minute).

    Supports rich comparison, parsing from 'YYYY-MM-DD/hh:mm' strings via
    stringToDate, formatting via str(), and validity checking via isVaild
    (name typo kept for backward compatibility).
    """
    def __init__(self, year = 0, month = 0, day = 0, hour = 0, minute = 0):
        self.__year = year
        self.__month = month
        self.__day = day
        self.__hour = hour
        self.__minute = minute
    @property
    def year(self):
        return self.__year
    @property
    def month(self):
        return self.__month
    @property
    def day(self):
        return self.__day
    @property
    def hour(self):
        return self.__hour
    @property
    def minute(self):
        return self.__minute
    @year.setter
    def year(self, year):
        self.__year = year
    @month.setter
    def month(self, month):
        self.__month = month
    @day.setter
    def day(self, day):
        self.__day = day
    @hour.setter
    def hour(self, hour):
        self.__hour = hour
    @minute.setter
    def minute(self, minute):
        self.__minute = minute
    def __str__(date):
        # Format as 'YYYY-MM-DD/hh:mm'; returns None for non-Date input
        # (original fall-through behaviour preserved).
        if isinstance(date, Date):
            return '%04d-%02d-%02d/%02d:%02d' % (date.__year, date.__month, date.__day, date.__hour, date.__minute)
    def stringToDate(str):
        """Parse 'YYYY-MM-DD/hh:mm' into a Date; unmatched input yields an
        all-zero Date. Called as Date.stringToDate(text)."""
        date = Date()
        date_dict = findall(pattern="([\\d]{4})-([\\d]{2})-([\\d]{2})/([\\d]{2}):([\\d]{2})", string = str)
        if date_dict:
            date_dict = date_dict[0]
            # BUG FIX: convert the regex captures to int (they are strings,
            # which broke comparisons and the '%d' formatting in __str__),
            # and fix the 'data_dict' typo that raised NameError on minute.
            date.year = int(date_dict[0])
            date.month = int(date_dict[1])
            date.day = int(date_dict[2])
            date.hour = int(date_dict[3])
            date.minute = int(date_dict[4])
        return date
    def __lt__(self, date):
        # Lexicographic comparison of the component lists.
        if isinstance(date, Date):
            return [self.year, self.month, self.day, self.hour, self.minute] < [date.year, date.month, date.day, date.hour, date.minute]
        else:
            return False
    def __eq__(self, date):
        if isinstance(date, Date):
            # BUG FIX: 'self.huor' typo raised AttributeError.
            return (self.year == date.year and self.month == date.month and
                    self.day == date.day and self.minute == date.minute and
                    self.hour == date.hour)
        else:
            return False
    def __ne__(self, date):
        # BUG FIX: previously returned False for non-Date operands, making
        # both == and != false at once; delegate to __eq__ instead.
        return not self == date
    def __le__(self, date):
        if isinstance(date, Date):
            # BUG FIX: was 'self == date and self < date' -- always False.
            return self == date or self < date
        else:
            return False
    def __gt__(self, date):
        if isinstance(date, Date):
            return self != date and (not self < date)
        else:
            return False
    def __ge__(self, date):
        if isinstance(date, Date):
            # BUG FIX: was 'self == date and self > date' -- always False.
            return self == date or self > date
        else:
            return False
    def isVaild(date):
        """Return True if `date` is a plausible Gregorian date-time
        (year 1000-9999, leap years honoured). Called as Date.isVaild(d)."""
        if isinstance(date, Date):
            # BUG FIX: the old code wrote 29/28 into __leapDict at index
            # `date.month`, permanently corrupting the day counts of
            # arbitrary months; compute February's length without mutation.
            if date.month == 2 and (date.year % 400 == 0 or (date.year % 100 != 0 and date.year % 4 == 0)):
                days_in_month = 29
            elif 1 <= date.month <= 12:
                days_in_month = Date.__leapDict[date.month]
            else:
                days_in_month = 0
            return (date.year >= 1000 and date.year <= 9999 and
                    date.month >= 1 and date.month <= 12 and
                    date.day >= 1 and date.day <= days_in_month and
                    date.hour >= 0 and date.hour <= 23 and
                    date.minute >= 0 and date.minute <= 59)
        else:
            return False
    # Days per month for non-leap years; index 0 is unused padding.
    __leapDict = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
|
# setuptools script
# Use `python setup.py sdist` to compile to dist/
import setuptools
# Read the long description from the README for the package index page.
with open('README.md', 'r', encoding='utf-8') as fh:
    long_description = fh.read()
# Package metadata; sources live under src/ (src-layout).
setuptools.setup(
    name='demuxfb',
    # Date of Facebook 'Download Your Information' data archive creation this
    # is built against.
    version='2020-12-15',
    author='Nicholas Killeen',
    author_email='nicholas.killeen2@gmail.com',
    description='Parse Facebook Conversation Archives',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/nick-killeen/demuxfb',
    license='MIT',
    package_dir={'': 'src'},
    packages=setuptools.find_packages(include=['demuxfb', 'demuxfb.*']),
    classifiers=[
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
    ],
    keywords='facebook messages data',
    python_requires='>=3.8',
)
|
#!/usr/bin/python3
# -*- config:utf-8 -*-
import bs4
import datetime
import requests
import re
import yaml
# Load connection settings (url, usr, pas, optional sst/set/sbt) from YAML.
with open('config.yaml',) as fh:
    c = yaml.load(fh.read(), Loader=yaml.FullLoader)
# NOTE(review): s, r, b and f below are never referenced again -- they look
# like leftovers from interactive exploration; only `c` feeds Yeyasu.
s = requests.Session()
r = s.get(url=c['url'])
b = bs4.BeautifulSoup(r.text, 'html5lib')
f = b.select_one('form#new_user')
class Yeyasu():
    """Client for a web-based attendance (timesheet) system.

    Scrapes the login form, authenticates, reads monthly work summaries and
    posts updated start/end/break times. All links are resolved relative to
    the configured login URL.

    NOTE(review): the standard-time defaults read the module-level config
    dict ``c`` directly, so the class is not fully self-contained.
    """
    def __init__(
        self,
        url='https:/',
        user='',
        password='',
    ):
        # One HTTP session reused for login and all subsequent requests.
        self.session=requests.Session()
        # NOTE(review): self.post appears unused by the rest of the class.
        self.post = {}
        self.login_url = url
        self.user = user
        self.password = password
        # Site root = scheme://host (the trailing '/' is stripped).
        self.site_root = self.relative_path_to_url('/', url)[:-1]
        # Standard working hours; overridable via config keys sst/set/sbt.
        self.standard_start_time = '09:00' if not 'sst' in c else c['sst']
        self.standard_end_time = '18:00' if not 'set' in c else c['set']
        self.standard_break_time = '1:00' if not 'sbt' in c else c['sbt']
    def relative_path_to_url(self, path='/', url=''):
        """Resolve `path` (protocol-relative, absolute, or bare) against
        `url` (or the stored site root) into an absolute URL."""
        if not url:
            url = self.site_root
        _r = re.search('^(https?)://([^/]+)', url, flags=re.I)
        proto = _r.group(1)
        domain = _r.group(2)
        if re.search('^//', path):
            # protocol-relative ("//host/...")
            return proto + ':' + path
        elif re.search('^/', path):
            # absolute path on the same host
            return proto + '://' + domain + path
        #else:
        return proto + '://' + domain + '/' + path
    def create_post_from_toppage(self):
        '''
        - create/reuse session.
        - download toppage.
        - generate post string from form, IDs
        - return [postdata, session]
        '''
        self.request = self.session.get(url=self.login_url)
        self.bs = bs4.BeautifulSoup(self.request.text, 'html5lib')
        self.form = self.bs.select_one('form#new_user')
        self.login_form = {}
        # Copy every named field (hidden CSRF tokens included) into the body.
        for tag in self.form.select('[name]'):
            if tag.has_attr('value'):
                self.login_form[tag['name']] = tag['value']
            else:
                self.login_form[tag['name']] = ''
        self.login_form['user[login_id]'] = self.user
        self.login_form['user[password]'] = self.password
        #print(self.form['action'], self.login_form)
        # The form's action attribute becomes the real login endpoint.
        self.login_url = self.relative_path_to_url(self.form['action'])
        return self.login_form
    def login(self):
        """Submit the scraped login form; returns the parsed response page."""
        self.create_post_from_toppage()
        return bs4.BeautifulSoup(
            self.session.post(url=self.login_url, data=self.login_form).text,
            'html5lib')
    def print_monthly_summary(self, date="2021/04"):
        """
        date format:
            yyyy.mm[.dd]
        Returns {} when `date` cannot be parsed; otherwise a dict keyed by
        day-of-month string with link/start/end/break/total for each row of
        the month's timesheet table.
        """
        _r = re.search('^\s*([0-9]{4})[^0-9]([0-9]{2})', date)
        if not _r:
            return {}
        yyyy = _r.group(1)
        mm =_r.group(2)
        _l = {}
        _url = self.site_root + "/works/%s-%s" % (yyyy, mm)
        #print(_url, self.site_root + "/works/%s-%s" % (yyyy, mm))
        self.bs = bs4.BeautifulSoup(self.session.get(url=_url).text, 'html5lib')
        #print(self.bs.prettify())
        summary = {}
        for _i in self.bs.form.select('table tr'):
            # date
            _d = _i.select_one('td.cellDate span.date')
            # href
            _h = _i.select_one('td.cellDate div.view_work a[href]')
            if not _h:
                _h = "UnEditable or Accepted"
            else:
                #_h = self.site_root + "/" + _h['href'].strip()
                _h = self.relative_path_to_url(_h['href'].strip())
            # start
            _s = _i.select_one('td.cellTime.cellTime01.cellBreak.view_work div.item01 span')
            # end
            _e = _i.select_one('td.cellTime.cellTime02.view_work div.item01')
            # break time
            _b = _i.select_one('td.cellTime.cellTime07.cellBtime.view_work')
            # total time
            _t = _i.select_one('td.cellTime.cellTime08.view_work')
            if not (_d and _s and _e and _b and _t):
                # lack date or href or ....
                continue
            _d = _d.text.strip()
            _s = _s.text.strip()
            _e = _e.text.strip()
            _b = _b.text.strip()
            _t = _t.text.strip()
            summary[_d] = {'link': _h, 'start': _s, 'end': _e, 'break': _b, 'total': _t,}
        """
        print(summary)
        print('Year,%s,Month,%s,' % (yyyy, mm))
        print('Date,Start,End,Breaks,Total,Link')
        for _i in sorted(summary.keys()):
            print('%s,%s,%s,%s,%s,%s' % (
                _i,
                summary[_i]['start'],
                summary[_i]['end'],
                summary[_i]['break'],
                summary[_i]['total'],
                summary[_i]['link']))
        """
        return summary
    def str_hhmm_2_int_sssss(self, _t='2:34') -> int:
        """Convert 'h:mm' to seconds (m*60 + h*3600).

        NOTE(review): on no match the input is returned unchanged, so the
        annotated -> int is not always honoured; callers rely on this.
        """
        _r = re.search(r'^([0-9]{1,}):([0-9]{1,}$)', _t)
        if _r:
            _h, _m = [int(x) for x in _r.groups()]
            return int(60*_m + 60**2*_h)
        else:
            return _t
    def int_sssss_2_str_hhmm(self, _t='12345') -> str:
        """Convert seconds to 'HH:MM'; None passes through unchanged."""
        if _t == None:
            return _t
        _m = int(int(_t) / 60)
        _h = int(_m / 60)
        _m = int(_m % 60)
        return "%02d:%02d" % (_h, _m)
    def update_attendance(self, command=''):
        '''
        command format:
            [YYYY/MM/][D]D,[k|[int],[int][,int]][,][# comments]
        example(core time between 09:00 and 18:00):
            2021/04/01,,+30 # 09:00 <-> 18:30(+30min)
            2021/04/02,-20,300,60 # 08:40(-20min) <-> 24:00(+300min) break 2:00(+60min)
            2021/04/03,, # do nothing
            4, # do nothing
            2021/04/06,08:40,24:00, # 08:40(-20min) <-> 24:00(+300min)
            2021/04/05,k # set rest day *** not implemented ***
        Returns None when the command is unparsable or changes nothing;
        otherwise posts the edited form and returns an empty tuple.
        '''
        _r = re.search(
            r'^\s*((([0-9]{4})[^0-9])([0-9]{1,2})[^0-9])?([0-9]{1,2}),(([0-9]{1,2}:[0-9]{1,2})|([-+]?[0-9]+))?' \
            r'(,(([0-9]{1,2}:[0-9]{1,2})|([-+]?[0-9]+))?)?(,(([0-9]{1,2}:[0-9]{1,2})|([-+]?[0-9]+))?)?',
            command,
            flags=re.I,)
        '''
        1: ((([0-9]{4})[^0-9])([0-9]{1,2})[^0-9])? # 2021/04/, 2020-12-
        2: (([0-9]{4})[^0-9]) # 2021/, 2023-
        3: ([0-9]{4}) # [Year:Opt] 2004
        4: ([0-9]{1,2}) # [Month:Opt] 0, 12
        5: ([0-9]{1,2}) # [Date:Mand] 11, 3, 31
        6: (([-+]?[0-9]+)|([0-9]{1,2}:[0-9]{1,2}))? # 20, -45, -300, +30, 9:15, 13:0
        7: ([0-9]{1,2}:[0-9]{1,2}) # [AbsStartHour:Opt] 9:15, 13:0 , 08:00
        8: ([-+]?[0-9]+) # [RelativeStartHour:Opt] 20, -45, -300, +30
        9: (,(([-+]?[0-9]+)|([0-9]{1,2}:[0-9]{1,2}))?)? # ,20 ,-45 ,-300 ,+30 ,9:15 ,13:0 ,
        10: (([-+]?[0-9]+)|([0-9]{1,2}:[0-9]{1,2}))? # 20, -45, -300, +30, 9:15, 13:0
        11: ([0-9]{1,2}:[0-9]{1,2}) # [AbsEndHour:Opt] 18:15, 20:0 , 23:00
        12: ([-+]?[0-9]+) # [RelativeEndHour:Opt] 20, -45, -300, +30
        13: (,(([-+]?[0-9]+)|([0-9]{1,2}:[0-9]{1,2}))?)? # ,20 ,-45 ,-300 ,+30 ,9:15 ,13:0 ,
        14: (([-+]?[0-9]+)|([0-9]{1,2}:[0-9]{1,2}))? # 20, -45, -300, +30, 9:15, 13:0
        15: ([0-9]{1,2}:[0-9]{1,2}) # [AbsBreakTime:Opt] 0:30, 2:20
        16: ([-+]?[0-9]+) # [RelativeBreakTime:Opt] 20, -45, -300, +30
        ## "2021/04/01,,+30" -> ['2021/04/', '2021/', '2021', '04', '01', None, None, None, ',+30', '+30', None, '+30', None, None, None, None]
        '''
        if not _r:
            """Unrecognized command"""
            return None
        # now
        _now = datetime.datetime.now()
        # year
        if _r.group(3):
            _yyyy = int(_r.group(3))
        else:
            """ fill current year,if not specified. """
            _yyyy = _now.year
        # month
        if _r.group(4):
            _mm = int(_r.group(4))
        else:
            """ fill current month, if not specified. """
            _mm = _now.month
        # date (mandatory)
        _dd = int(_r.group(5))
        # start time
        if _r.group(7) != None:
            _st = self.str_hhmm_2_int_sssss(_r.group(7))
        elif _r.group(8) != None:
            _st = int(_r.group(8)) * 60 + self.str_hhmm_2_int_sssss(self.standard_start_time)
        else:
            """ Not specifiied, so do not modify start-time """
            _st = None
        # end time
        if _r.group(11) != None:
            _et = self.str_hhmm_2_int_sssss(_r.group(11))
        elif _r.group(12) != None:
            _et = int(_r.group(12)) * 60 + self.str_hhmm_2_int_sssss(self.standard_end_time)
        else:
            """ Not specifiied, so do not modify end-time """
            _et = None
        # break time
        if _r.group(15) != None:
            _bt = self.str_hhmm_2_int_sssss(_r.group(15))
        elif _r.group(16) != None:
            _bt = int(_r.group(16)) * 60 + self.str_hhmm_2_int_sssss(self.standard_break_time)
        else:
            """ Not specifiied, so do not modify break-time """
            _bt = None
        if _st == None and _et == None and _bt == None:
            """ command does nothing """
            return None
        # Fetch the month's summary to find the edit link for the target day.
        _s = self.print_monthly_summary("%04d/%02d" % (_yyyy, _mm))
        _b = bs4.BeautifulSoup(
            self.session.get(
                url=_s["%02d" % _dd]['link']).text,
            'html5lib')
        _f = _b.select_one('form[enctype="multipart/form-data"]')
        # action
        action = self.relative_path_to_url(_f["action"])
        # inputs
        _form = {}
        # Wrap each named field in a <fake> tag so each field's inputs can be
        # inspected as a group below.
        [ft.wrap(_b.new_tag('fake')) for ft in _f.select('[name]')]
        for tag in _f.select('fake'):
            '''
            inputs habitts:
                valueless hidden -> set("") / except name=holiday
                unchecked checkbox -> no entry
            '''
            if tag.select_one('input[type=checkbox]'):
                # input[type=checkbox]
                continue
            elif tag.select_one('input[disabled=disabled]'):
                # input[type=*][disabled=disabled]
                continue
            elif tag.select_one('input[type=hidden][name][value]'):
                # input[hidden]
                _form[tag.select_one('input[type=hidden][name][value]')['name']] = (None, tag.select_one(
                    'input[type=hidden][name][value]')['value'])
            elif tag.select_one('select[name] option[selected=selected][value]'):
                # select option[selected]
                _form[tag.select_one('select[name]')['name']] = (None, tag.select_one(
                    'select option[selected=selected][value]')['value'])
            elif tag.select_one('input[type=text][value]'):
                # input[text]
                _form[tag.select_one('input[type=text][name][value]')['name']] = (None, tag.select_one(
                    'input[type=text][name][value]')['value'])
            else:
                #print("ERR", tag.prettify())
                _form[tag.select_one('[name]')['name']] = (None, '')
        # NOTE(review): several keys below lack the "work[" prefix and
        # "next_day_break_1_start]" has a stray bracket -- looks like a
        # copy/paste slip; confirm against the live form before relying on
        # them. The commit value is raw ISO-2022-JP bytes, kept verbatim.
        overwriting_form = {
            "holiday": "false",
            "commit": "$BEPO?$9$k(B",
            "work[next_day_start]": "",
            "work[next_day_end]": "",
            "next_day_break_1_start]": "",
            "next_day_break_1_end": "",
            "next_day_break_2_start": "",
            "next_day_break_2_end": "",
        }
        for i in overwriting_form.keys():
            _form[i] = (None, overwriting_form[i])
        remove_keys = ["add_application"]
        for i in remove_keys:
            del _form[i]
        if _st != None:
            _form['work[start_at_str]'] = (None, self.int_sssss_2_str_hhmm(_st))
        if _et != None:
            _form['work[end_at_str]'] = (None, self.int_sssss_2_str_hhmm(_et))
        if _bt != None:
            # Extra break is encoded as break-2 starting at the standard end
            # time, lasting the delta beyond the standard break time.
            _form['work[break_2_start_at_str]'] = (None, self.standard_end_time)
            _form['work[break_2_end_at_str]'] = (None, self.int_sssss_2_str_hhmm(
                self.str_hhmm_2_int_sssss(self.standard_end_time)
                + _bt
                - self.str_hhmm_2_int_sssss(self.standard_break_time)))
        ret = bs4.BeautifulSoup(
            self.session.post(url=action, data=_form).text,
            'html5lib')
        return ()
# Log in with the configured credentials and replay the attendance commands
# listed in `data`, one command per line.
y = Yeyasu(url=c['url'], user=c['usr'], password=c['pas'])
y.login()
#y.update_attendance("2021/04/02,,300,60")
#y.update_attendance("2021/04/05,,30,")
#y.update_attendance("2021/04/06,-10,30,20")
data = '''2021/04/02,,300,60
2021/04/05,,30,
2021/04/06,-10,30,20
'''
for l in data.splitlines():
    y.update_attendance(l)
exit()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 29 10:04:58 2019
@author: seth
"""
from LSTMClass import LSTMClass as LSTMPrediction
from ProphetClass import ProhetClass as ProhetPrediction
import matplotlib.pyplot as plt
from matplotlib import rcParams
from gather_data import get_data
import numpy as np
def build_plots():
    """
    Makes plots formatted so each prediction line is on the same graph
    and formats the graph to be easily displayed in the readme.
    Makes a class for each item stock for each time frame and combines the
    results to the same figure.

    Saves one prediction comparison figure per (stock, horizon) pair and one
    historical-price figure per stock under ../img/EDA/.
    """
    google, microsoft, apple, jnj, amazon = get_data()
    stocks = [['GOOGL', google], ['MSFT', microsoft], ['AAPL', apple],
              ['JNJ', jnj], ['AMZN', amazon]]
    rcParams.update({'figure.autolayout': True})
    plt.style.use('fivethirtyeight')
    horizons = (7, 14, 30)
    # (data, LSTM label, Prophet label, display/filename label).
    # NOTE(review): 'APPL' and 'GOOG' look like typos for 'AAPL'/'GOOGL',
    # but they are preserved because the saved image filenames and titles
    # (referenced by the readme) depend on them — confirm before renaming.
    series = [
        (apple, 'APPL', 'AAPL', 'APPL'),
        (amazon, 'AMZN', 'AMZN', 'AMZN'),
        (google, 'GOOGL', 'GOOGL', 'GOOG'),
        (jnj, 'JNJ', 'JNJ', 'JNJ'),
        (microsoft, 'MSFT', 'MSFT', 'MSFT'),
    ]
    # Replaces 30 hand-written constructor statements with a data-driven loop.
    comparisons = []
    for frame, lstm_label, prophet_label, display in series:
        lstm_models = [LSTMPrediction(frame, lstm_label, d) for d in horizons]
        prophet_models = [ProhetPrediction(frame, prophet_label, d)
                          for d in horizons]
        comparisons.extend(zip(lstm_models, prophet_models,
                               [display] * len(horizons), horizons))
    for lstm_model, prophet_model, label, days in comparisons:
        # Future index 0..days; past index covering the 2*days history window.
        findex = np.arange(0, days+1)
        pindex = np.arange(-(days*2)+1, 1)
        lstm_line = lstm_model.predictions[-(days+1):].values
        prophet_line = prophet_model.forecast.yhat[-(days+1):].values
        history = prophet_model.train.y[-days*2:]
        actual = lstm_model.actuals
        fig, ax = plt.subplots(figsize=(12, 8))
        plt.autoscale()
        plt.tight_layout(pad=3)
        plt.plot(pindex, history, 'b')
        plt.plot(findex, lstm_line, 'r')
        plt.plot(findex, prophet_line, 'g')
        plt.plot(findex, actual, 'b')
        # Dashed vertical line marks "today" (prediction start).
        plt.axvline(x=0, color='k', linestyle='dashed')
        plt.legend(['History', 'LSTM', 'Prophet', 'Actual'], prop={'size': 25})
        plt.xlabel('Days Out (From Today)')
        plt.ylabel('Value (US$)')
        plt.title(label + ' Predictions ' + str(days) + ' days')
        plt.savefig('../img/EDA/' + label + '_' + str(days) + '.png')
        plt.close()
    for symbol, frame in stocks:
        fig, ax = plt.subplots(figsize=(12, 8))
        plt.autoscale()
        plt.tight_layout(pad=3)
        plt.plot(frame.close, 'k')
        plt.title(symbol + ' Historical Prices')
        plt.xlabel('Date')
        plt.ylabel('Value (US$)')
        plt.savefig('../img/EDA/' + symbol + '_EDA.png')
        plt.close()
|
from django.db import models
from django.contrib.auth.models import User
from django.http import JsonResponse
class Response():
    """
    Handle API Responses.

    Wraps a payload, an HTTP status code and an optional message in the
    JSON envelope produced by ``get_obj``.
    """
    def __init__(self, data, status_code=200, message=None):
        self.data = data
        self.status_code = status_code
        # BUG FIX: ``message`` was accepted but silently discarded.
        self.message = message

    def get_obj(self):
        """Return the response as a Django ``JsonResponse``."""
        payload = {
            'status_code': self.status_code,
            'data': self.data,
        }
        # Keep the envelope backward-compatible: only add the key when a
        # message was actually supplied.
        if self.message is not None:
            payload['message'] = self.message
        return JsonResponse(payload)
import random
import config as Cg
import csv
from copy import deepcopy
import os
def genRuns(low,
            num_blocks,
            num_study_items):
    """Build a randomized study list and per-block test lists.

    :param low: pool of stimulus filenames; shuffled and consumed in place.
    :param num_blocks: number of test blocks to build.
    :param num_study_items: studied ("OLD") items per block; an equal number
        of lures ("NEW") is drawn per block as well.
    :return: dict with a flat "study" list and a "test" list of blocks.
    """
    random.shuffle(low)
    study = []
    lure = []
    # First half of each block's draw becomes studied items, second half lures.
    for i in range(num_blocks):
        for y in range(num_study_items*2):
            temp = low.pop()
            tempS = {'stim': os.path.join(Cg.STIM_PATHS, temp)}
            # BUTTON_BOX_KEYS[0] Is assumed left, and [1] is assumed right.
            if y < num_study_items:
                tempS['corr_resp'] = Cg.BUTTON_BOX_KEYS[0:2]
                tempS['cata'] = "OLD"
                study.append(tempS)
            else:
                tempS['corr_resp'] = Cg.BUTTON_BOX_KEYS[2:4]
                tempS['cata'] = "NEW"
                lure.append(tempS)
    random.shuffle(lure)
    random.shuffle(study)
    # BUG FIX: ``xrange`` is Python-2 only; ``range`` works on both 2 and 3.
    testing_blocks = [[] for x in range(num_blocks)]
    # Scatter the lures over blocks, never overfilling a block past
    # num_study_items lures.
    for x in range(len(lure)):
        temp = lure.pop()
        i = random.randint(0, num_blocks-1)
        while(len(testing_blocks[i]) >= num_study_items):
            i = random.randint(0, num_blocks-1)
        testing_blocks[i].append(temp)
    # Then scatter copies of the studied items on top, capping each block at
    # num_study_items*2 total entries.
    stemp = deepcopy(study)
    for y in range(len(stemp)):
        temp = stemp.pop()
        i = random.randint(0, num_blocks-1)
        while(len(testing_blocks[i]) >= num_study_items*2):
            i = random.randint(0, num_blocks-1)
        testing_blocks[i].append(temp)
    for z in range(len(testing_blocks)):
        random.shuffle(testing_blocks[z])
    random.shuffle(study)
    # (Removed a dead initial ``exp`` placeholder dict that was always
    # overwritten before use.)
    return {"study": study,
            "test": testing_blocks}
def getDefaultLocalizer():
    """Load localizer trials from Cg.LOCALIZER_FILE as a list of dicts,
    resolving each trial's 'stim' entry to a full path under Cg.STIM_PATHS.
    """
    with open(Cg.LOCALIZER_FILE) as f:
        trials = [dict(row)
                  for row in csv.DictReader(f, skipinitialspace=True)]
    for trial in trials:
        trial['stim'] = os.path.join(Cg.STIM_PATHS, trial['stim'])
    # BUG FIX: ``print type(...)`` is Python-2-only syntax; the function
    # form below works on both Python 2 and 3.
    print(type(trials[0]['presentation']))
    return trials
|
# -*- coding: utf-8 -*-
"""Main module."""
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""doc for _main.py - """
import logging
logger = logging.getLogger(__name__)
# NOTE(review): calling basicConfig at import time forces DEBUG logging on
# every importer of this module — confirm that is intended for a library.
logging.basicConfig(level=logging.DEBUG)
import sys
import argparse
import re
import os
import subprocess
import distutils.sysconfig as sysconfig
from traceback import print_exc as xp
import pdb
from pip_stripper._baseutils import (
set_cpdb,
set_rpdb,
ppp,
debugObject,
cpdb,
fill_template,
rpdb,
sub_template,
)
from pip_stripper.writers import ScanWriter
from pip_stripper.matching import Matcher
from pip_stripper.pip import ClassifierPip
from pip_stripper.trackimports import ClassifierImport
from pip_stripper.common import Command
from pip_stripper.builder import Builder
from yaml import safe_load as yload, dump
if __name__ == "__main__":
    # Enable conditional pdb tracing via --cpdb before anything else runs.
    set_cpdb(cpdb, remove=True)

# Directory holding this script; used to locate the bundled templates.
dn_script = os.path.dirname(__file__)
if not dn_script:
    # __file__ can be a bare filename when launched from its own directory.
    # BUG FIX: the original referenced ``dn_cwd_start``, which is never
    # defined anywhere in this module (NameError); use the cwd instead.
    fnp_script = os.path.join(os.getcwd(), sys.argv[0])
    dn_script = os.path.dirname(fnp_script)
class Main(object):
    """ manages batch"""
    # Class-level option cache; appears unused in this module.
    di_opt = {}
    def __repr__(self):
        return self.__class__.__name__
    def _get_fnp(self, subject):
        # Resolve a well-known path: the bundled template directory, or a
        # filename configured under vars.filenames joined to the workdir.
        try:
            if subject == "templatedir":
                return os.path.join(dn_script, "templates")
            else:
                fn = self.config["vars"]["filenames"][subject]
                return os.path.join(self.workdir, fn)
        except (Exception,) as e:
            if cpdb():
                pdb.set_trace()
            raise
    def __init__(self, options):
        # ``options`` is the argparse.Namespace built by getOptParser().
        try:
            self.options = options
            pwd = os.getcwd()
            self.workdir = self.options.workdir or pwd
            self.config = None
            fnp_config = self.options.config
            if self.options.init:
                # --init: create the config file (if absent), then load it.
                fnp_config = self._initialize(fnp_config)
            if not fnp_config:
                # No explicit --config: probe workdir then cwd for the
                # default config filename.
                for dn in [self.workdir, pwd]:
                    fnp_config = os.path.join(dn, self.FN_CONFIG)
                    try:
                        with open(fnp_config) as fi:
                            self.config = yload(fi)
                        break
                    except (IOError,) as e:
                        pass
                else:
                    # for/else: none of the candidate files could be opened.
                    msg = "missing configuration file. perhaps you wanted to use the --init option to create one?"
                    print(msg)
                    sys.exit(1)
            else:
                with open(fnp_config) as fi:
                    self.config = yload(fi)
            self.scan = not self.options.noscan
            #
            # Template variables: scandir plus one "filenames_<key>" entry
            # per configured filename.
            self.vars = dict()
            self.vars["scandir"] = self.workdir
            sectionname = "filenames"
            section = self.config["vars"][sectionname]
            for k, v in section.items():
                self.vars.update(**{"%s_%s" % (sectionname, k): v})
            # Collaborators of the scan/classify/match/build pipeline.
            self.import_classifier = ClassifierImport(self)
            self.scanwriter = ScanWriter(self)
            self.matcher = Matcher()
            self.builder = Builder(self)
        except (ValueError,) as e:
            raise
        except (Exception,) as e:
            if cpdb():
                pdb.set_trace()
            raise
    def process(self):
        # Full pipeline: optional scan, classify imports, classify pips,
        # write the scan file, then optionally build requirements files.
        try:
            if self.scan:
                self.scanner = Scanner(self)
                self.scanner.run()
            self.import_classifier.run()
            # Feed every discovered import name to the matcher.
            for name in self.import_classifier.packagetracker.di_packagename:
                self.matcher.imp.feed(name)
            pips = self.pip_classifier = ClassifierPip(self)
            for set_ in pips.di_bucket.values():
                [self.matcher.pip.feed(name) for name in set_]
            pips.run(self.import_classifier.packagetracker)
            # for name in self.li_pip:
            self.scanwriter.write()
            if self.options.build:
                self.builder.process()
        except (Exception,) as e:
            if cpdb():
                pdb.set_trace()
            raise
    DN = os.path.dirname(__file__)
    FN_CONFIG = "pip-stripper.yaml"
    _s_stdlib = None
    @property
    def s_stdlib(self):
        """load the std lib import names"""
        # Lazily computed; augmented with config's extra_stdlib entries.
        if self._s_stdlib is None:
            self._s_stdlib = liststdlib()
            self._s_stdlib |= set(self.config.get("extra_stdlib", []))
        return self._s_stdlib
    _aliases = _imp2pip = None
    @property
    def imp2pip(self):
        """uses the aliases to look up import name to pip name """
        if self._imp2pip is None:
            # Inverted view of ``aliases`` (pip name -> import name).
            self._imp2pip = {v: k for k, v in self.aliases.items()}
        return self._imp2pip
    @property
    def aliases(self):
        # Matched aliases, with config-level hardcoded_aliases overriding.
        if self._aliases is None:
            self._aliases = Matcher.match_all(self)
            # self._aliases = self.matcher.di_pip_imp.copy()
            self._aliases.update(**self.config.get("hardcoded_aliases", {}))
        return self._aliases
    # ``pip2imp`` is the same mapping as ``aliases``.
    pip2imp = aliases
    _raw_imports = None
    @property
    def raw_imports(self):
        # Raw lines of the grep-ed imports file, loaded on demand.
        if self._raw_imports is None:
            fnp = self._get_fnp("imports")
            with open(fnp) as fi:
                self._raw_imports = fi.readlines()
        return self._raw_imports
    _all_imports = None
    @property
    def all_imports(self):
        """loads the grep-ed import scans on demand"""
        if self._all_imports is None:
            self._all_imports = set(
                self.import_classifier.packagetracker.di_packagename
            )
        return self._all_imports
    _all_freezes = None
    _all_pips = None
    @property
    def all_freezes(self):
        if self._all_freezes is None:
            # this triggers the pips which what populates
            # the freezes...
            self.all_pips
        return self._all_freezes
    @property
    def all_pips(self):
        """loads the pip freeze output on demand"""
        if self._all_pips is None:
            self._all_pips = set()
            self._all_freezes = {}
            fnp = self._get_fnp("freeze")
            with open(fnp) as fi:
                for line in fi.readlines():
                    try:
                        packagename = self.pip_classifier.parse_requirement_line(line)
                    except (ValueError,) as e:
                        # Skip unparseable requirement lines but keep going.
                        logger.warning("could not parse packagename on %s" % (line))
                        continue
                    self._all_pips.add(packagename)
                    self._all_freezes[packagename] = line.strip()
        return self._all_pips
    @classmethod
    def getOptParser(cls):
        # Build the argparse CLI parser for the console entry point.
        parser = argparse.ArgumentParser()
        dest = "config"
        parser.add_argument(
            "--" + dest,
            action="store",
            help="config file. if not provided will look for %s in --workdir, current directory "
            % (cls.FN_CONFIG),
        )
        dest = "noscan"
        default = False
        parser.add_argument(
            "--" + dest,
            default=default,
            action="store_true",
            help="don't scan to classify packages. build phase will re-use existing pip-stripper.scan.yaml. [%s]. "
            % (default),
        )
        dest = "build"
        default = False
        parser.add_argument(
            "--" + dest,
            default=default,
            action="store_true",
            help="read pip-stripper.scan.yaml to create requirements.prod/dev.txt [%s]"
            % (default),
        )
        dest = "init"
        parser.add_argument(
            "--" + dest,
            action="store_true",
            help="initialize the config file (as %s) if it doesn't exist"
            % (cls.FN_CONFIG),
        )
        dest = "workdir"
        parser.add_argument(
            "--" + dest,
            action="store",
            help="work directory [defaults to config file's value or current directory]",
        )
        dest = "verbose"
        default = False
        parser.add_argument(
            "--" + dest,
            default=default,
            action="store_true",
            help="verbose mode. adds extra zzz_debug: entry to pip-stripper.scan.yaml [%s]"
            % (default),
        )
        return parser
    def _initialize(self, fnp_config):
        """--init option handling"""
        # Create the config file from the packaged template unless one
        # already exists; returns the config file path either way.
        try:
            fnp_config = fnp_config or os.path.join(self.workdir, self.FN_CONFIG)
            if os.path.isfile(fnp_config):
                print(
                    "pip-stripper configuration file exists already at @ %s. leaving it alone"
                    % (fnp_config)
                )
                return fnp_config
            # load the template file
            fnp_template = os.path.join(self.DN, "templates/pip-stripper.yaml")
            with open(fnp_template) as fi:
                tmpl = fi.read()
            seed = fill_template(tmpl, self)
            with open(fnp_config, "w") as fo:
                fo.write(seed)
            print("pip-stripper configuration generated @ %s" % (fnp_config))
            return fnp_config
        except (Exception,) as e:
            if cpdb():
                pdb.set_trace()
            raise
def liststdlib():
    """
    Collect top-level module names found in the standard-library directory.

    pretty grungy code, will need a rework
    """
    std_lib = sysconfig.get_python_lib(standard_lib=True)
    listed = set()
    prefix_len = len(std_lib) + 1
    for dirpath, _dirs, filenames in os.walk(std_lib):
        for fname in filenames:
            if fname == "__init__.py" or fname[-3:] != ".py":
                continue
            # Strip the stdlib prefix and the ".py" suffix, normalise
            # backslashes, and keep only the top path component.
            rel = os.path.join(dirpath, fname)[prefix_len:-3].replace("\\", ".")
            listed.add(rel.split("/")[0])
    return listed
class Scanner(object):
    """Runs the configured scan tasks (via Command) for a Main manager."""

    def __init__(self, mgr):
        self.mgr = mgr
        # Per-class config section, keyed by the class name ("Scanner").
        self.config = self.mgr.config.get(self.__class__.__name__)
        self.tasknames = self.config["tasknames"]

    def run(self):
        """Execute each configured task through the Command wrapper."""
        try:
            for taskname in self.tasknames:
                config = self.mgr.config.get("Command")["tasks"][taskname]
                command = Command(self.mgr, taskname, config)
                command.run()
            # NOTE(review): removed a dead ``fnp_out`` computation that
            # joined workdir with the "liststdlib" filename but was never
            # used afterwards.
        except (Exception,) as e:
            if cpdb():
                pdb.set_trace()
            raise
def main(args=None):
    """the console_scripts entry point"""
    argv = sys.argv[1:] if args is None else args
    options = Main.getOptParser().parse_args(argv)
    Main(options).process()
# Script entry point when executed directly (packaging uses main() instead).
if __name__ == "__main__":
    # conditional pdb.trace()-ing with --cpdb on command line
    set_cpdb(cpdb, remove=True)
    main()
|
"""drafts.py
Implements drafts behavior for dexterity types"""
import zope.interface
from plone.app.drafts.interfaces import IDraft, IDrafting
from plone.z3cformbuttonoverrides.interfaces import IButtonAndHandlerSubscriber
#Custom Behavior Button Marker Interfaces
class IDraftAutoSaveBehavior(zope.interface.Interface):
    """Marker interface to enable autosave of draft if kss ajax validation
    is enabled. The default is not to auto save.
    Note: This is set by an opt-in behavior statement.
    """
class IDraftSubmitBehavior(IButtonAndHandlerSubscriber):
    """Marker interface to enable a custom submit button and handler override.
    This is automatically set when creating a draft.
    """
class IDraftCancelBehavior(IButtonAndHandlerSubscriber):
    """Marker interface to enable a custom cancel button and handler override.
    This is automatically set when creating a draft.
    """
class IDraftSaveBehavior(IButtonAndHandlerSubscriber):
    """Marker interface to enable a custom 'save draft' button and handler
    override. This is set by an opt-in behavior statement.
    """
class IZ3cDraft(IDraft):
    """Marker interface to indicate a z3c.form draft is present.
    """
class IZ3cDrafting(IDrafting):
    """Marker interface to indicate a z3c.form draft is currently being
    created; but is not yet complete.
    """
class IDraftableField(zope.interface.Interface):
    """Marker interface to indicate a field is draftable.
    """
class IZ3cFormDataContext(zope.interface.Interface):
    """Indirection to help determine where draft forms store their data.
    This is a multi-adapter on ``(context, request, form)``. The context and
    request are the same as ``form.context`` and ``form.request``, but these
    discriminators allow the data context to be customised depending on
    the context or request.
    The default implementation simply returns ``form.context``.
    """
#class IDictDraftProxy(zope.interface.Interface):
# """Marker interface for the draft proxy where the proxy contains dict
# context.
# """
|
from flask import Flask, request
from flask_restful import Resource, Api
from flask_cors import CORS
import scrapy
# Flask application with CORS enabled and flask_restful routing attached.
app = Flask(__name__)
CORS(app)
api = Api(app)
class DominiosIgnorados(Resource):
    """Endpoints for the list of domains to ignore while crawling."""

    def post(self):
        """Register a new ignored domain taken from the JSON body's 'url'."""
        try:
            dados = request.get_json()
            # FIX: the return value was bound to an unused local ``x``.
            scrapy.addDominiosIgnorados(dados['url'])
            return {'success': "Cadastro realizado"}, 200
        except Exception as e:
            return {"error": str(e)}, 400

    def get(self):
        """List every ignored domain."""
        try:
            return {'success': scrapy.getDominiosIgnorados()}, 200
        except Exception as e:
            return {"error": str(e)}, 400
class DominiosIgnoradosRemove(Resource):
    """Deletion endpoint for a single ignored domain."""

    def delete(self, url_id):
        """Remove the ignored domain identified by ``url_id``."""
        try:
            scrapy.delDominiosIgnorados(url_id)
        except Exception as e:
            return {"error": str(e)}, 400
        return {'success': "Url removida"}, 200
class Resultado(Resource):
    """Read endpoint for the results of one search."""

    def get(self, pesquisa_id):
        """Return all results recorded for the given search id."""
        try:
            resultados = scrapy.retornaResultadosPesquisa(pesquisa_id)
        except Exception as e:
            return {"error": str(e)}, 400
        return {'success': resultados}, 200
class Pesquisa(Resource):
    """Create and list search terms."""

    def post(self):
        """Register a search term and, if newly created, kick off the crawl."""
        try:
            body = request.get_json()
            item_pesquisa = scrapy.cadastraPesquisa(body['termo'], body['user_id'])
            # Only launch the crawl when the term was actually created.
            if item_pesquisa['status'] == "cadastrado com sucesso":
                scrapy.getPesquisa(body['termo'], item_pesquisa['item_pesquisa'])
            return {'success': {"status": item_pesquisa['status'],
                                "itemPesquisa": item_pesquisa}}, 200
        except Exception as e:
            return {"error": str(e)}, 400

    def get(self):
        """List every registered search."""
        try:
            return {'success': scrapy.retornaPesquisas()}, 200
        except Exception as e:
            return {"error": str(e)}, 400
class DadosUrl(Resource):
    """Trigger data collection for a single URL."""

    def post(self):
        try:
            dados = request.get_json()
            scrapy.coletaDadosUrl(dados['url_id'])
            return {'success': "coletado"}, 200
        except Exception as e:
            # Consistency fix: every other resource surfaces errors as a
            # JSON 400 instead of letting them become an unhandled 500.
            return {"error": str(e)}, 400
class DadosTermo(Resource):
    """Trigger data collection for every result of a search term."""

    def post(self):
        try:
            dados = request.get_json()
            scrapy.getDadosPesquisa(dados['termo_id'])
            return {'success': "coletado"}, 200
        except Exception as e:
            # Consistency fix: match the sibling resources' JSON 400 error
            # handling rather than raising an unhandled 500.
            return {"error": str(e)}, 400
class ReprocessaPesquisaFalha(Resource):
    """Re-run the processing of a search that previously failed."""

    def get(self, termo_id):
        # BUG FIX: the first parameter was misspelled ``sel``; it still
        # worked positionally but shadowed the conventional ``self``.
        scrapy.getDadosResultadoFalha(termo_id)
        return {'success': "reprocessado"}, 200
# Route table. NOTE(review): Resultado is the only route without a trailing
# slash — confirm that asymmetry is intentional.
api.add_resource(DominiosIgnorados, '/dominios_ignorar/')
api.add_resource(DominiosIgnoradosRemove, '/dominios_ignorar/<int:url_id>/')
api.add_resource(Pesquisa, '/pesquisa/')
api.add_resource(Resultado, '/resultado/<int:pesquisa_id>')
api.add_resource(DadosUrl, '/coletar_dados_url/')
api.add_resource(DadosTermo, '/coletar_dados_termo/')
api.add_resource(ReprocessaPesquisaFalha,'/reprocessa_pesquisa_falha/<int:termo_id>/')
|
#Input: [[10,20],[30,200],[400,50],[30,20]]
#Output: 110
# Explanation:
# The first person goes to city A for a cost of 10.
# The second person goes to city A for a cost of 30.
# The third person goes to city B for a cost of 50.
# The fourth person goes to city B for a cost of 20.
# The total minimum cost is 10 + 30 + 50 + 20 = 110 to have half the people interviewing in each city.
class Solution:
    # BUG FIX: the annotation referenced ``List`` without importing it from
    # ``typing``, which raises NameError when the class body executes.
    # Quoting the annotation keeps the hint without requiring the import.
    def twoCitySchedCost(self, costs: "List[List[int]]") -> int:
        """Minimum total cost to fly exactly half the people to each city.

        ``costs[i] == [a, b]`` is person i's cost to city A / city B.
        Sorting by ``a - b`` puts the people who benefit most from city A
        first; send the first half to A and the rest to B.
        Note: sorts ``costs`` in place.
        """
        costs.sort(key=lambda x: x[0] - x[1])
        total = 0
        n = len(costs) // 2
        for i in range(n):
            total += costs[i][0] + costs[i + n][1]
        return total
|
import unittest
import os
import requests
# Must be set before importing bg_agg so the app loads the test config.
os.environ["CONFIG_PATH"] = "bg_agg.config.TestingConfig"
from bg_agg import app, models
from bg_agg.database import Base, engine, session
class TestApp(unittest.TestCase):
    """Exercises the Review/Product/Reviewer models against the test DB."""

    def setUp(self):
        self.client = app.test_client()
        # Create the schema fresh before every test.
        Base.metadata.create_all(engine)

    def tearDown(self):
        session.close()
        Base.metadata.drop_all(engine)

    def test_new_review(self):
        """Two reviews of two products by one reviewer persist as expected."""
        author = models.Reviewer(display_name="test_user", critic=False)
        catan = models.Product(
            name="Settlers of Catan",
            publisher="Mayfair Games",
            release="1995",
            player_num="2-4",
            image="https://images-na.ssl-images-amazon.com/images/I/615H5ZMhB7L._SX425_.jpg")
        catan_review = models.Review(
            raw_score="buy", score=5.0, summary="You should buy this game",
            review="You should definitely buy this game. I would give it a 5.0 out of 5.0.",
            source="http://randomsource.com",
            product=catan,
            reviewer=author)
        pandemic = models.Product(
            name="Pandemic",
            publisher="Z-Man Games",
            release="2007",
            player_num="2-5",
            image=" http://25.media.tumblr.com/qgIb8tERiqn3b75revfkdxWxo1_500.jpg")
        pandemic_review = models.Review(
            raw_score="1", score=1.0, summary="You should not buy this game",
            review="You should definitely not buy this game. I would give it a 1.0 out of 5.0.",
            source="http://randomsource.com",
            product=pandemic,
            reviewer=author)
        session.add_all([catan_review, pandemic_review])
        session.commit()
        self.assertEqual(len(session.query(models.Review).all()), 2)
        self.assertEqual(len(session.query(models.Product).all()), 2)
        self.assertEqual(len(session.query(models.Reviewer).all()), 1)
        # TODO(?) May want to add to this test to make sure the data being
        # put in is the data that comes out and validate that nothing funky
        # or unexpected happens.
class TestBGGIntegration(unittest.TestCase):
    """Smoke-tests that the public BoardGameGeek XML API endpoints respond."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_bgg_topgames_endpoint(self):
        response = requests.get("https://www.boardgamegeek.com/xmlapi2/hot?type=boardgame")
        self.assertEqual(response.status_code, 200)

    def test_bgg_gameid_endpoint(self):
        response = requests.get("https://www.boardgamegeek.com/xmlapi2/thing?id=1")
        self.assertEqual(response.status_code, 200)
|
class Solution(object):
    """Phone-keypad letter combinations (LeetCode 17)."""

    def __init__(self):
        # Accumulator for the DFS results of the current call.
        self.ans = []

    def letterCombinations(self, digits):
        """
        Return every letter string spelled by ``digits`` on a phone keypad.

        :type digits: str
        :rtype: List[str]
        """
        alpha = {"2": "abc", "3": "def", "4": "ghi", "5": "jkl",
                 "6": "mno", "7": "pqrs", "8": "tuv", "9": "wxyz"}
        # BUG FIX: reset the accumulator so repeated calls on the same
        # instance do not append onto the previous call's results.
        self.ans = []

        def dfs(path, idx):
            # BUG FIX: removed leftover debug prints of path/idx that ran
            # on every recursion step.
            if len(path) == len(digits):
                self.ans.append(path)
                return
            for c in alpha[digits[idx]]:
                dfs(path + c, idx + 1)

        if not digits:
            return []
        if len(digits) == 1:
            return [c for c in alpha[digits[0]]]
        for c in alpha[digits[0]]:
            dfs(c, 1)
        return self.ans
# Smoke run: exercise the solver (the returned list is discarded).
s = Solution()
s.letterCombinations("234")
|
import joblib
import numpy as np
import random
from Agents import modelFreeAgent
from Agents.Collections import ExperienceReplay
from Agents.Collections.TransitionFrame import TransitionFrame
class DeepQ(modelFreeAgent.ModelFreeAgent):
    """Deep Q-Network agent.

    Maintains an online Q-network and a periodically synchronized target
    network, training the online network on minibatches sampled from an
    experience-replay buffer.
    """
    displayName = 'Deep Q'
    newParameters = [modelFreeAgent.ModelFreeAgent.Parameter('Batch Size', 1, 256, 1, 32, True, True, "The number of transitions to consider simultaneously when updating the agent"),
                     modelFreeAgent.ModelFreeAgent.Parameter('Memory Size', 1, 655360, 1, 1000, True, True, "The maximum number of timestep transitions to keep stored"),
                     modelFreeAgent.ModelFreeAgent.Parameter('Target Update Interval', 1, 100000, 1, 200, True, True, "The distance in timesteps between target model updates")]
    parameters = modelFreeAgent.ModelFreeAgent.parameters + newParameters

    def __init__(self, *args):
        # The trailing len(newParameters) args belong to this class; the
        # rest are forwarded to the parent agent.
        paramLen = len(DeepQ.newParameters)
        super().__init__(*args[:-paramLen])
        self.batch_size, self.memory_size, self.target_update_interval = [int(arg) for arg in args[-paramLen:]]
        self.model = self.buildQNetwork()
        self.target = self.buildQNetwork()
        empty_state = self.get_empty_state()
        self.memory = ExperienceReplay.ReplayBuffer(self, self.memory_size, TransitionFrame(empty_state, -1, 0, empty_state, False))
        self.total_steps = 0
        # All-ones action masks so predict() returns q-values for every action.
        self.allMask = np.full((1, self.action_size), 1)
        self.allBatchMask = np.full((self.batch_size, self.action_size), 1)

    def choose_action(self, state):
        """Pick an action greedily from the online network's q-values."""
        qval = self.predict(state, False)
        epsilon = self.min_epsilon + (self.max_epsilon - self.min_epsilon) * np.exp(-self.decay_rate * self.time_steps)
        # TODO: Put epsilon at a level near this. Exploration is currently
        # disabled, so the computed epsilon above is unused.
        # if random.random() > epsilon:
        action = np.argmax(qval)
        # else:
        #     action = self.state_size.sample()
        return action

    def sample(self):
        """Draw (batch indexes, transitions) from replay memory."""
        return self.memory.sample(self.batch_size)

    def addToMemory(self, state, action, reward, new_state, done):
        """Append a single transition to the replay buffer."""
        self.memory.append_frame(TransitionFrame(state, action, reward, new_state, done))

    def remember(self, state, action, reward, new_state, done=False):
        """Store a transition and, once enough are buffered, train on a batch.

        Returns the training loss (0 until the buffer holds 2*batch_size
        transitions).
        """
        self.addToMemory(state, action, reward, new_state, done)
        loss = 0
        if len(self.memory) < 2*self.batch_size:
            return loss
        batch_idxes, mini_batch = self.sample()
        X_train, Y_train = self.calculateTargetValues(mini_batch)
        loss = self.model.train_on_batch(X_train, Y_train)
        # If the memory is a PrioritizedReplayBuffer, refresh the priority of
        # the sampled transitions with their TD error.
        if (isinstance(self.memory, ExperienceReplay.PrioritizedReplayBuffer)):
            td_errors = self.compute_loss(mini_batch, np.amax(Y_train, axis = 1))
            for idx, td_error in zip(batch_idxes, td_errors):
                self.memory.update_error(idx, td_error)
        self.updateTarget()
        return loss

    def updateTarget(self):
        """Sync target weights from the online model every
        target_update_interval calls (after warm-up), then count the step."""
        if self.total_steps >= 2*self.batch_size and self.total_steps % self.target_update_interval == 0:
            self.target.set_weights(self.model.get_weights())
            print("target updated")
        self.total_steps += 1

    def predict(self, state, isTarget):
        """Return q-values for one state from the target or online network."""
        shape = (1,) + self.state_size
        state = np.reshape(state, shape)
        if isTarget:
            result = self.target.predict([state, self.allMask])
        else:
            result = self.model.predict([state, self.allMask])
        return result

    def update(self):
        pass

    def reset(self):
        pass

    def create_one_hot(self, vector_length, hot_index):
        """One-hot vector of length vector_length; hot_index == -1 → all zeros."""
        output = np.zeros((vector_length))
        if hot_index != -1:
            output[hot_index] = 1
        return output

    def buildQNetwork(self):
        """Build the two-input (state, action-mask) Q-network."""
        from tensorflow.python.keras.optimizer_v2.adam import Adam
        from tensorflow.keras.models import Model
        from tensorflow.keras.layers import Dense, Input, Flatten, multiply
        inputA = Input(shape=self.state_size)
        inputB = Input(shape=(self.action_size,))
        x = Flatten()(inputA)
        x = Dense(24, input_dim=self.state_size, activation='relu')(x)  # fully connected
        x = Dense(24, activation='relu')(x)
        x = Dense(self.action_size, activation='linear')(x)
        # Multiplying by the mask zeroes q-values for unselected actions.
        outputs = multiply([x, inputB])
        model = Model(inputs=[inputA, inputB], outputs=outputs)
        # NOTE(review): ``lr=`` is the legacy Keras spelling of
        # ``learning_rate=``; kept for compatibility with the pinned TF.
        model.compile(loss='mse', optimizer=Adam(lr=0.001))
        return model

    def calculateTargetValues(self, mini_batch):
        """Build (X_train, Y_train) for train_on_batch from a minibatch."""
        X_train = [np.zeros((self.batch_size,) + self.state_size), np.zeros((self.batch_size,) + (self.action_size,))]
        next_states = np.zeros((self.batch_size,) + self.state_size)
        for index_rep, transition in enumerate(mini_batch):
            # (Removed an unused tuple-unpack of the transition fields.)
            X_train[0][index_rep] = transition.state
            X_train[1][index_rep] = self.create_one_hot(self.action_size, transition.action)
            next_states[index_rep] = transition.next_state
        Y_train = np.zeros((self.batch_size,) + (self.action_size,))
        qnext = self.target.predict([next_states, self.allBatchMask])
        qnext = np.amax(qnext, 1)
        for index_rep, transition in enumerate(mini_batch):
            if transition.is_done:
                Y_train[index_rep][transition.action] = transition.reward
            else:
                Y_train[index_rep][transition.action] = transition.reward + qnext[index_rep] * self.gamma
        # BUG FIX: removed leftover debug prints that dumped the full
        # X_train/Y_train arrays to stdout on every single training step.
        return X_train, Y_train

    def compute_loss(self, mini_batch, q_target: list = None):
        """
        Computes the loss of each sample in the mini_batch. The loss is
        calculated as the TD Error of the Q-Network Will use the given
        list of q_target value if provided instead of calculating.
        :param mini_batch: is the mini batch to compute the loss of.
        :param q_target: is a list of q_target values to use in the
        calculation of the loss. This is optional. The q_target values
        will be calculated if q_target is not provided.
        :type q_target: list
        """
        # Get the states from the batch.
        states = np.zeros((self.batch_size,) + self.state_size)
        for batch_idx, transition in enumerate(mini_batch):
            states[batch_idx] = transition.state
        # Get the actions from the batch.
        actions = [transition.action for transition in mini_batch]
        # If q_target is None, calculate the target q-values with the
        # target QNetwork.
        if (q_target is None):
            next_states = np.zeros((self.batch_size,) + self.state_size)
            for batch_idx, transition in enumerate(mini_batch):
                next_states[batch_idx] = transition.next_state
            rewards = [transition.reward for transition in mini_batch]
            is_dones = np.array([transition.is_done for transition in mini_batch]).astype(float)
            q_target = self.target.predict([next_states, self.allBatchMask])
            q_target = rewards + (1 - is_dones) * self.gamma * np.amax(q_target, 1)
        # Get from the current q-values from the QNetwork.
        q = self.model.predict([states, self.allBatchMask])
        q = np.choose(actions, q.T)
        # Calculate and return the loss (TD Error).
        loss = (q_target - q) ** 2
        return loss

    def apply_hindsight(self):
        """Forward the hindsight relabeling pass to the buffer, when the
        memory is a HindsightReplayBuffer."""
        if (isinstance(self.memory, ExperienceReplay.HindsightReplayBuffer)):
            self.memory.apply_hindsight()

    def __deepcopy__(self, memodict={}):
        pass

    def save(self, filename):
        """Persist the online model weights, tagged with the agent name."""
        mem = self.model.get_weights()
        joblib.dump((DeepQ.displayName, mem), filename)

    def load(self, filename):
        """Restore weights saved by save(); refuses mismatched agent names."""
        name, mem = joblib.load(filename)
        if name != DeepQ.displayName:
            print('load failed')
        else:
            self.model.set_weights(mem)
            self.target.set_weights(mem)

    def memsave(self):
        """Return the online model weights."""
        return self.model.get_weights()

    def memload(self, mem):
        """Load the given weights into both the online and target models."""
        self.model.set_weights(mem)
        self.target.set_weights(mem)
class DeepQPrioritized(DeepQ):
    """DeepQ variant backed by a prioritized experience-replay buffer."""
    displayName = 'Deep Q Prioritized'
    newParameters = [DeepQ.Parameter('Alpha', 0.00, 1.00, 0.001, 0.60, True, True, "The amount of prioritization that gets used.")]
    parameters = DeepQ.parameters + newParameters

    def __init__(self, *args):
        n_extra = len(DeepQPrioritized.newParameters)
        super().__init__(*args[:-n_extra])
        # The trailing argument is the prioritization exponent alpha.
        self.alpha = float(args[-n_extra])
        blank = self.get_empty_state()
        self.memory = ExperienceReplay.PrioritizedReplayBuffer(
            self, self.memory_size,
            TransitionFrame(blank, -1, 0, blank, False),
            alpha=self.alpha)
class DeepQHindsight(DeepQ):
    """DeepQ variant backed by a hindsight experience-replay buffer."""
    displayName = 'Deep Q Hindsight'
    newParameters = []
    parameters = DeepQ.parameters + newParameters

    def __init__(self, *args):
        # No extra parameters: forward everything to DeepQ unchanged.
        # (Removed an unused ``paramLen`` local.)
        super().__init__(*args)
        empty_state = self.get_empty_state()
        self.memory = ExperienceReplay.HindsightReplayBuffer(self, self.memory_size, TransitionFrame(empty_state, -1, 0, empty_state, False))
|
# hithere.py
# Prompt for the user's three name parts and greet them.
name = input("What is your name?")
second = input("What about your second name")
lastname = input("Whats your last name?")
print("Hello there!")
print(f"{name} {second} {lastname}")
|
# copied from elastic/ansible-elasticsearch
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
def filter_reserved(users_role=None):
    """Return the names of users/roles whose metadata marks them reserved.

    :param users_role: mapping of user/role name -> details dict (as returned
        by the Elasticsearch security API); defaults to an empty mapping.
    :return: list of names where ``metadata._reserved`` is truthy.
    """
    # BUG FIX: a mutable default argument ({}) is shared across calls;
    # use a None sentinel instead.
    if users_role is None:
        users_role = {}
    reserved = []
    for user_role, details in list(users_role.items()):
        if (
            "metadata" in details
            and "_reserved" in details["metadata"]
            and details["metadata"]["_reserved"]
        ):
            reserved.append(user_role)
    return reserved
class FilterModule(object):
    """Ansible filter plugin entry point."""

    def filters(self):
        """Expose the filters provided by this module."""
        return {"filter_reserved": filter_reserved}
|
from flask import render_template,redirect,session
from app.models import *
from . import main
import functools
import hashlib
# Password hashing helper.
def setPassword(password):
    """Return the hex MD5 digest of ``password``.

    SECURITY(review): unsalted MD5 is unsuitable for password storage;
    consider migrating to a salted KDF — note that existing stored hashes
    would need migrating too.
    """
    digest = hashlib.md5()
    digest.update(password.encode())
    return digest.hexdigest()
from flask import request
@main.route("/register/",methods=["GET","POST"])
def register():
    """Registration page: on POST, create a user from the submitted form."""
    if request.method == "POST":
        form = request.form
        new_user = User()
        new_user.user_name = form.get("username")
        new_user.email = form.get("email")
        # Store the hashed password, never the plaintext.
        new_user.password = setPassword(form.get("password"))
        new_user.save()
    return render_template("register.html")
def loginValid(fun):
    """Decorator: allow the view only for an authenticated user, otherwise
    redirect to /login/. The username cookie must match both the DB record
    and the server-side session."""
    @functools.wraps(fun)
    def inner(*args,**kwargs):
        cookie_name = request.cookies.get('username')
        user_id = request.cookies.get("id","0")
        user = User.query.get(int(user_id))
        if user and user.user_name == cookie_name and cookie_name == session.get("username"):
            return fun(*args,**kwargs)
        return redirect("/login/")
    return inner
@main.route("/login/",methods=["GET","POST"])
def login():
    # Login view. NOTE: render_template(..., **locals()) passes every local
    # name (including ``error``) into the template context, so the variable
    # names in this function are part of the template contract.
    error = ""
    if request.method == "POST":
        form_data = request.form
        email = form_data.get("email")
        password = form_data.get("password")
        user = User.query.filter_by(email = email).first()
        if user:
            db_password = user.password
            # Hash the submitted password the same way register() stored it.
            password = setPassword(password)
            if password == db_password:
                response = redirect("/index/")
                # Identity is kept both client-side (cookies) and
                # server-side (session); loginValid checks they agree.
                response.set_cookie("username",user.user_name)
                response.set_cookie("email",user.email)
                response.set_cookie("id",str(user.id))
                session["username"] = user.user_name
                return response
            else:
                error = "密码错误"
        else:
            error = "用户不存在"
    return render_template("login.html",**locals())
@main.route("/logout/")
def logout():
    """Clear the auth cookies and session, then redirect to the login page."""
    response = redirect("/login/")
    response.delete_cookie("username")
    response.delete_cookie("email")
    response.delete_cookie("id")
    # BUG FIX: ``del session["username"]`` raised KeyError when hitting
    # /logout/ without being logged in; pop() tolerates a missing key.
    session.pop("username", None)
    return response
@main.route("/base/")
def base():
    # Renders the shared layout template directly (handy for styling checks).
    return render_template("base.html")
@main.route("/index/")
@loginValid
def index():
    # **locals() forwards this function's locals to the template context.
    return render_template("index.html",**locals())
from .get_Time import Calendar
import datetime
@main.route("/userinfo/")
@loginValid
def userinfo():
    # Month calendar for the current month; ``calendar``, ``now`` and
    # ``month`` all reach the template via **locals(), so their names are
    # part of the template contract.
    calendar = Calendar().return_month()
    now = datetime.datetime.now()
    month = now.month
    return render_template("userinfo.html",**locals())
@main.route("/leave/",methods=["GET","POST"])
@loginValid
def leave():
    """Leave-request form: GET shows the form, POST stores a new request."""
    if request.method == "POST":
        form = request.form
        record = Leave()
        # The requester id comes from the auth cookie, not the form.
        record.request_id = request.cookies.get("id")
        record.request_name = form.get("request_username")
        record.request_type = form.get("request_type")
        record.request_start_time = form.get("request_start_time")
        record.request_end_time = form.get("request_end_time")
        record.request_days = form.get("request_days")
        record.request_phone = form.get("request_phone")
        record.request_description = form.get("request_description")
        # Status "0" — presumably "pending"; confirm against the approval
        # workflow before relying on it.
        record.request_status = "0"
        record.save()
        return redirect('/leave_list/1/')
    return render_template("leave.html")
from .cut_page import Pager
@main.route("/leave_list/<int:page>/")
@loginValid
def leave_list(page):
leaves = Leave.query.all()
pager = Pager(leaves,2)
page_data = pager.page_data(page)
return render_template("leave_list.html",**locals())
from app import api
from flask_restful import Resource
@api.resource("/Api/leave/")
class LeaveApi(Resource):
def __init__(self):
"""定义返回的格式"""
super(LeaveApi, self).__init__()
self.result = {
"version":"1.0",
"data":""
}
def set_data(self,leave):
"""定义返回的数据"""
result_data = {
"request_name":leave.request_name,
"request_type":leave.request_type,
"request_start_time":leave.request_start_time,
"request_end_time":leave.request_end_time,
"request_days":leave.request_days,
"request_description":leave.request_description,
"request_phone":leave.request_phone,
}
return result_data
def get(self):
"""处理get请求
"""
data = request.args #获取请求的数据
id = data.get("id") #获取id
if id :
leave = Leave.query.get(int(id))
result_data = self.set_data(leave)
else: #id不存在 返回所有数据
leaves = Leave.query.all()
result_data = []
for leave in leaves:
result_data.append(self.set_data(leave))
self.result["data"] = result_data
return self.result
def post(self):
"""这是post请求,负责保存数据"""
data = request.form
request_id = data.get("request_id")
request_name = data.get("request_name")
request_type = data.get("request_type")
request_start_time = data.get("request_start_time")
request_end_time = data.get("request_end_time")
request_days = data.get("request_days")
request_description = data.get("request_description")
request_phone = data.get("request_phone")
leave = Leave()
leave.request_id = request_id
leave.request_name = request_name
leave.request_type = request_type
leave.request_start_time = request_start_time
leave.request_end_time = request_end_time
leave.request_days = request_days
leave.request_description = request_description
leave.request_phone = request_phone
leave.request_status = "0"
leave.save()
self.result["data"] = self.set_data(leave)
return self.result
def put(self):
"""put请求,负责修改数据"""
data = request.form #请求数据,类字典对象
id = data.get("id") #data里面的id
leave = Leave.query.get(int(id)) #在数据库里面找到
for key,value in data.items():
if key != "id":
setattr(leave,key,value)
leave.save()
self.result["data"] = self.set_data(leave)
return self.result
def delete(self):
"""delete请求,负责删除数据"""
data = request.form
id = data.get("id")
leave = Leave.query.get(int(id))
leave.delete()
self.result["data"] = "%s 删除成功"%id
return self.result
|
diccionario ={"España": "Madrid", "Portugal": "Lisboa", "Francia": "Paris"}
# Walk the country -> capital pairs, quizzing the player on each one.
# "Francia" is the last entry, so it triggers the closing message.
for pais, ciudad in diccionario.items():
    # BUG FIX: the original used input(print(...)); print() returns None,
    # so input(None) displayed a stray "None" prompt after the question.
    # Build the prompt string directly instead.
    respuesta = input("Cual es la capital de " + pais + "? ")
    # Right answer: congratulate and move on (special message on the last one).
    if respuesta == ciudad and pais != "Francia":
        print("Has acertado!, Pasemos a la siguiente pregunta: ")
    elif respuesta == ciudad and pais == "Francia":
        print("Enhorabuena has acertado todas las capitales, adios")
    # Wrong answer: keep repeating the same question until it is right.
    else:
        while True:
            print("La respuesta es incorrecta, intentalo de nuevo:")
            respuesta = input("Cual es la capital de " + pais + "? ")
            if respuesta == ciudad:
                if pais != "Francia":
                    print("has acertado!, Pasemos a la siguiente pregunta:")
                else:
                    print("Enhorabena, has acertado todas las capitales, adios")
                break
|
#!/usr/bin/env python3
#
#
# Diamond Hunt Marketplace Analyzer
# Author: Samuel Pua (kahkin@gmail.com)
#
##############################################
import json
import requests
import numpy
import sys
from statsmodels.stats.weightstats import DescrStatsW
from collections import OrderedDict
from operator import itemgetter
import datetime
from dateutil.tz import tzlocal
################# Config ###################
latestFile="latest.txt"  # report output file, overwritten on every run
tresholdPercentile=0.5  # quantile used as the "potential high" sell price (name typo kept: referenced below)
stepper=1000*60 #minute stepper: resampling interval in milliseconds
woodList=["logs", "oakLogs", "willowLogs", "mapleLogs", "stardustLogs", "strangeLogs", "ancientLogs"]
energyList=[1,2,5,10,20,30,50]  # energy cost per log, parallel to woodList -- TODO confirm values
################ Functions ##################
def allPrint(f, stuff=""):
    """Echo *stuff* to stdout and append it, newline-terminated, to the
    open file handle *f* (a tee-style report helper)."""
    print(stuff)
    f.write(stuff)
    f.write("\n")
############## Start Analysis ################
#open config file to find out the market-data URL
with open("config.json") as urlfile:
    configbefore=urlfile.read()
configafter=json.loads(configbefore)
# Report file for this run.  NOTE(review): deliberately left open here and
# closed at the very end of the script -- every allPrint() call writes to it.
latestFileHandler=open(latestFile, "w")
# Fetch the price history; OrderedDict preserves the chronological key order.
r = requests.get(configafter['url'])
marketPrice = json.loads(r.text, object_pairs_hook=OrderedDict)
allPrint(latestFileHandler,"Market Prices retrieved from myjson.com with "+str(len(marketPrice))+" items")
# Timestamps are millisecond epoch keys; take the newest one from "logs".
lastestTimestamp = float(list(marketPrice["logs"].keys())[-1])/1000.0
latestDatetime = datetime.datetime.fromtimestamp(lastestTimestamp, tzlocal())
allPrint(latestFileHandler,"Latest Price: "+str(latestDatetime.strftime("%I:%M %p %z %d-%b-%Y")))
# Pause so the user can check the data freshness before the long report.
sys.stdout.write("Press any key to continue...")
input()
allPrint(latestFileHandler,)
allPrint(latestFileHandler,)
profits = OrderedDict()         # absolute profit potential per item
profitsPercent = OrderedDict()  # profit potential relative to current price
for item in marketPrice:
    # Resample each item's irregular price history onto a regular grid of
    # `stepper` milliseconds, linearly interpolating between real records.
    times = []
    prices = []
    weights = []
    for stamp in marketPrice[item]:
        if len(times) == 0:
            # first record: seed the series
            prev_stamp = stamp
            times.append(float(stamp))
            prices.append(float(marketPrice[item][stamp]))
        else:
            # insert interpolated samples until we reach the real record
            while float(stamp) > times[-1] + stepper:
                totalTimeDiff = float(stamp) - float(prev_stamp)
                timePassed = (times[-1] + stepper - float(prev_stamp)) / totalTimeDiff
                times.append(times[-1] + stepper)
                # BUG FIX: the previous price was not wrapped in float() like
                # every other price read in this script, which raises a
                # TypeError when the JSON stores prices as strings.
                newPrice = float(marketPrice[item][prev_stamp]) + timePassed * (float(marketPrice[item][stamp]) - float(marketPrice[item][prev_stamp]))
                prices.append(newPrice)
                # NOTE(review): the original comments describe a 0.5-before /
                # 0.5-after weighting scheme, but every weight appended is 1.
                # Preserved as-is -- confirm intent.
                if len(times) == 2:
                    weights.append(1)
                if len(times) > 2:
                    weights.append(1)
            # append the real record
            times.append(float(stamp))
            prices.append(float(marketPrice[item][stamp]))
            # register the actual time change
            prev_stamp = stamp
            if len(times) == 2:
                weights.append(1)
            if len(times) > 2:
                weights.append(1)
    # weight for the last sample
    weights.append(1)
    # ----- per-item report -----
    allPrint(latestFileHandler, "Analysis for " + item)
    allPrint(latestFileHandler, "=============" + "=" * len(item))
    allPrint(latestFileHandler, "Data count: " + str(len(times)))
    stats = DescrStatsW(prices, weights)
    allPrint(latestFileHandler, "Weighted Average Price: " + "{:,.2f}".format(stats.mean))
    allPrint(latestFileHandler, "Weighted Stdev: " + "{:,.2f}".format(stats.std))
    allPrint(latestFileHandler)
    allPrint(latestFileHandler, "Percentiles:")
    allPrint(latestFileHandler, "5% : " + "{:,.2f}".format(stats.quantile(0.05, False)[0]))
    allPrint(latestFileHandler, "15% : " + "{:,.2f}".format(stats.quantile(0.15, False)[0]))
    allPrint(latestFileHandler, "50% : " + "{:,.2f}".format(stats.quantile(0.50, False)[0]))
    allPrint(latestFileHandler, "85% : " + "{:,.2f}".format(stats.quantile(0.85, False)[0]))
    allPrint(latestFileHandler, "95% : " + "{:,.2f}".format(stats.quantile(0.95, False)[0]))
    allPrint(latestFileHandler)
    # Hoisted: the threshold quantile was computed three times before.
    targetPrice = stats.quantile(tresholdPercentile, False)[0]
    profits[item] = round(targetPrice, 2) - round(prices[-1], 2)
    profitsPercent[item] = (round(targetPrice, 2) - round(prices[-1], 2)) / prices[-1]
    allPrint(latestFileHandler, "Current Price : " + "{:,.2f}".format(prices[-1]))
    allPrint(latestFileHandler, "Potential High Price : " + "{:,.2f}".format(targetPrice))
    allPrint(latestFileHandler, "Potential Profit : " + "{:,.2f}".format(profits[item]) + " (" + "{:,.2f}".format(profitsPercent[item] * 100) + "%)")
    allPrint(latestFileHandler, "\n\n")
allPrint(latestFileHandler,"Profit Analysis")
allPrint(latestFileHandler,"===============")
profitsPercent=OrderedDict(sorted(profitsPercent.items(), key=itemgetter(1), reverse=True))
for item in profitsPercent:
allPrint(latestFileHandler,item+" "*(30-len(item))+":"+"{:.2f}".format(profitsPercent[item]*100)+"% ("+"{:,.2f}".format(profits[item])+")")
allPrint(latestFileHandler,"\n")
allPrint(latestFileHandler,"Wood Analysis")
allPrint(latestFileHandler,"=============")
for i in range(len(woodList)):
output=woodList[i]
output=output+": "
woodPrice=list(marketPrice[woodList[i]].values())[-1]
output=output+"{:,.2f}".format(woodPrice)+" ("
output=output+"{:,.2f}".format(woodPrice / energyList[i])+" coins per energy)"
allPrint(latestFileHandler,output)
allPrint(latestFileHandler,)
################ Cleaning Up ################
latestFileHandler.close()
|
import common
result_file = open("results.json", "a")
#CFI
# micro-snake
protection_time = common.measure_protection_time(["./compile.sh", "inputs/snake.bc", "CFI-build/snake", "snake_sens_list.txt"])
print('CFI snake protection time ' + str(protection_time))
runtime_overhead = common.measure_runtime_overhead(["python", "inputs/ptypipe.py", "inputs/micro-snake.in", "input_programs/snake"], ["python", "inputs/ptypipe.py", "inputs/micro-snake.in", "CFI-build/snake"])
print('runtime overhead ' + str(runtime_overhead) + '%')
size_overhead = common.measure_binary_overhead("input_programs/snake", "CFI-build/snake")
print('size overhead ' + str(size_overhead) + '%')
memory_overhead = common.measure_memory_overhead(["python", "inputs/ptypipe.py", "inputs/micro-snake.in", "input_programs/snake"], ["python", "inputs/ptypipe.py", "inputs/micro-snake.in", "CFI-build/snake"])
print('memory overhead ' + str(memory_overhead) + '%')
snippet = common.create_snippet('micro-snake', 'CFI', protection_time, runtime_overhead, 0, size_overhead)
result_file.write(snippet)
# csnake
protection_time = common.measure_protection_time(["./compile.sh", "inputs/csnake.bc", "CFI-build/csnake", "c_snake_sens_list.txt"])
print('CFI csnake protection time ' + str(protection_time))
runtime_overhead = common.measure_runtime_overhead(["python", "inputs/ptypipe.py", "inputs/c-snake.in", "input_programs/csnake"], ["python", "inputs/ptypipe.py", "inputs/c-snake.in", "CFI-build/csnake"])
print('runtime overhead ' + str(runtime_overhead) + '%')
size_overhead = common.measure_binary_overhead("input_programs/csnake", "CFI-build/csnake")
print('size overhead ' + str(size_overhead) + '%')
memory_overhead = common.measure_memory_overhead(["python", "inputs/ptypipe.py", "inputs/c-snake.in", "input_programs/csnake"], ["python", "inputs/ptypipe.py", "inputs/c-snake.in", "CFI-build/csnake"])
print('memory overhead ' + str(memory_overhead) + '%')
snippet = common.create_snippet('csnake', 'CFI', protection_time, runtime_overhead, 0, size_overhead)
result_file.write(snippet)
# tetris
protection_time = common.measure_protection_time(["./compile.sh", "inputs/tetris.bc", "CFI-build/tetris","tetris_sens_list.txt"])
print('CFI tetris protection time ' + str(protection_time))
runtime_overhead = common.measure_runtime_overhead(["python", "inputs/ptypipe.py", "inputs/tetris.in", "input_programs/tetris"], ["python", "inputs/ptypipe.py", "inputs/tetris.in", "CFI-build/tetris"])
print('runtime overhead ' + str(runtime_overhead) + '%')
size_overhead = common.measure_binary_overhead("input_programs/tetris", "CFI-build/tetris")
print('size overhead ' + str(size_overhead) + '%')
memory_overhead = common.measure_memory_overhead(["python", "inputs/ptypipe.py", "inputs/tetris.in", "input_programs/tetris"], ["python", "inputs/ptypipe.py", "inputs/tetris.in", "CFI-build/tetris"])
print('memory overhead ' + str(memory_overhead) + '%')
snippet = common.create_snippet('tetris', 'CFI', protection_time, runtime_overhead, 0, size_overhead)
result_file.write(snippet)
result_file.close()
|
import numpy as np
EXP = 'exponential'  # canonical name of the exponential variogram model
GAU = 'gaussian'     # canonical name of the gaussian variogram model
def exp_vario(h, a=1.0, sill=1.0):
    """Exponential variogram: sill * (1 - exp(-h / a)) for lag distance h."""
    decay = np.exp(-h / a)
    return sill * (1.0 - decay)
def gauss_vario(h, a=1.0, sill=1.0):
    """Gaussian variogram: sill * (1 - exp(-h^2 / a^2)) for lag distance h."""
    exponent = (-h**2) / (a**2)
    return sill * (1.0 - np.exp(exponent))
def dist(p1, p2):
    """Euclidean distance between 2-D points p1 and p2 (indexable [x, y])."""
    dx = p1[0] - p2[0]
    dy = p1[1] - p2[1]
    return (dx**2 + dy**2)**0.5
class mfgrid():
    """Rectilinear (MODFLOW-style) grid.

    Holds the origin offsets (xoff, yoff), the row/column counts, the
    per-column widths (delr) and per-row heights (delc), and a rotation
    angle (stored only -- not applied to coordinates).
    """
    def __init__(self, xoff, yoff, nrow, ncol, delr, delc, rotation=0.0):
        """delr/delc may be scalars (uniform spacing) or sequences of
        length ncol / nrow respectively."""
        self.xoff = float(xoff)
        self.yoff = float(yoff)
        self.nrow = int(nrow)
        self.ncol = int(ncol)
        # promote scalar spacings to full-length arrays
        if isinstance(delr, int):
            delr = float(delr)
        if isinstance(delr, float):
            delr = np.zeros(self.ncol) + delr
        assert len(delr) == self.ncol
        self.delr = delr
        if isinstance(delc, int):
            delc = float(delc)
        if isinstance(delc, float):
            delc = np.zeros(self.nrow) + delc
        assert len(delc) == self.nrow
        self.delc = delc
        self.rotation = float(rotation)

    def xnode_locations(self):
        """Return the x coordinate of each column's cell center."""
        # cumulative right-edge coordinate of every column (offset included)
        edges = np.cumsum(self.delr) + self.xoff
        # BUG FIX: the original added self.xoff a second time here (the
        # cumsum above already includes it), shifting every node by xoff.
        return [edges[j] - (self.delr[j] / 2.0) for j in range(self.ncol)]

    def ynode_locations(self):
        """Return the y coordinate of each row's cell center."""
        edges = np.cumsum(self.delc) + self.yoff
        # BUG FIX: same double-offset defect as xnode_locations.
        return [edges[i] - (self.delc[i] / 2.0) for i in range(self.nrow)]

    def node_locations(self):
        """Return an (nrow*ncol, 2) array of [x, y] cell centers, ordered
        x-major (all y values for the first x, then the next x, ...)."""
        xs = self.xnode_locations()
        ys = self.ynode_locations()
        points = [[x, y] for x in xs for y in ys]
        return np.array(points)

    def write_grid(self, filename):
        """Write the grid spec (counts, origin/rotation, spacings) to a
        text file."""
        # BUG FIX: the original used '{0:10.0f}'.format(self,nrow, ...) --
        # a comma typo for self.nrow that raised NameError at call time.
        # Also: the file handle was never closed on error; use `with`.
        with open(filename, 'w') as f:
            f.write(' {0:10.0f} {1:10.0f}\n'.format(self.nrow, self.ncol))
            f.write(' {0:15.6e} {1:15.6e} {2:15.6e}\n'.format(self.xoff, self.yoff, self.rotation))
            for c in self.delc:
                f.write(' {0:15.6e}'.format(c))
            f.write('\n')
            for r in self.delr:
                f.write(' {0:15.6e}'.format(r))
            f.write('\n')
class geostat():
    """Geostatistical helper: builds a dense covariance matrix from a
    variogram model over a set of nodes and exposes a truncated
    Karhunen-Loeve (KL) forward/back transform."""
    def __init__(self, a, sill, vtype, nodes, nugget=0.0):
        """a: variogram range; sill: variogram sill; nugget: nugget effect;
        vtype: matched case-insensitively against 'exp.../gau...';
        nodes: (n, 2) array of node coordinates."""
        self.a = float(a)
        self.sill = float(sill)
        self.nugget = float(nugget)
        # lazily-built state
        self.eigvals = None
        self.eigvecs = None
        self.cov = None
        self.forward = None
        self.back = None
        self.nodes = nodes
        if 'EXP' in vtype.upper():
            self.vtype = EXP
            self.vario = exp_vario
        elif 'GAU' in vtype.upper():
            self.vtype = GAU
            self.vario = gauss_vario
        else:
            # BUG FIX: the original referenced the undefined name GUA here,
            # so this error path crashed with a NameError instead.
            raise TypeError('only ' + EXP + ' or ' + GAU + ' variograms are supported')

    def build_covariance(self):
        """Fill self.cov with the symmetric (sill+nugget - variogram)
        covariance matrix over all node pairs."""
        nnodes = self.nodes.shape[0]
        cov = np.zeros((nnodes, nnodes)) - 1.0e+10  # sentinel fill
        # diagonal: full variance
        for p in range(nnodes):
            cov[p, p] = self.sill + self.nugget
        # fill in the upper triangle along rows
        for i in range(nnodes):
            for j in range(i + 1, nnodes):
                d = dist(self.nodes[j], self.nodes[i])
                v = self.vario(d, sill=self.sill, a=self.a)
                cov[i, j] = (self.sill + self.nugget) - v
        # replicate across the diagonal
        for i in range(nnodes):
            for j in range(i + 1, nnodes):
                cov[j, i] = cov[i, j]
        self.cov = cov

    def eig(self):
        """Decompose the covariance matrix via SVD, building it first if
        needed, and cache eigvecs/eigvals."""
        # BUG FIX: `self.cov == None` compares elementwise once cov is an
        # ndarray and then raises ValueError in `if`; use an identity test.
        if self.cov is None:
            self.build_covariance()
        # BUG FIX: np.linalg.linalg.svd is a private module path; use the
        # public np.linalg.svd (same routine).
        u, s, vt = np.linalg.svd(self.cov)
        self.eigvecs = u
        # NOTE(review): for a symmetric PSD matrix the singular values *are*
        # the eigenvalues; squaring them here looks suspect but is kept
        # as-is to preserve existing results -- confirm intent.
        self.eigvals = s**2

    def build_forward_kl(self, itrunc):
        """forward = eigvals^-0.5 * eigvecs^T, truncated to itrunc modes."""
        # BUG FIX: `== None` on an ndarray raises; use `is None`.
        if self.eigvecs is None:
            self.eig()
        forward = self.eigvecs[:, :itrunc].copy()
        for i in range(itrunc):
            forward[:, i] *= 1.0 / (np.sqrt(self.eigvals[i]))
        forward = forward.transpose()
        self.forward = forward

    def build_back_kl(self, itrunc):
        """back = eigvecs * eigvals^0.5, truncated to itrunc modes."""
        # BUG FIX: `== None` on an ndarray raises; use `is None`.
        if self.eigvecs is None:
            self.eig()
        back = self.eigvecs[:, :itrunc].copy()
        for i in range(itrunc):
            back[:, i] *= np.sqrt(self.eigvals[i])
        self.back = back
"""
This module sets up the meetups model and all it's functionality
"""
import os
from flask import jsonify
from app.api.v2.models.base_model import BaseModel, AuthenticationRequired
class Meetup(BaseModel):
    """Meetup record backed by the 'meetups' table; wraps BaseModel CRUD."""

    def __init__(self, meetup=None, database=os.getenv('FLASK_DATABASE_URI')):
        """Optionally hydrate the instance from a *meetup* payload dict.

        BUG FIX: the default was a mutable dict literal (meetup={}), a
        shared-mutable-default hazard; None is used instead and any falsy
        value means "no payload" (backward compatible with {}).
        NOTE(review): the `database` default is evaluated once at import
        time -- confirm that is intended.
        """
        self.base_model = BaseModel('meetups', database)
        if meetup:
            self.location = meetup['location']
            self.images = meetup['images']
            self.topic = meetup['topic']
            self.description = meetup['description']
            self.schedule = meetup['happeningOn']
            self.tags = meetup['tags']
            self.id = meetup['id']

    def save_meetup(self):
        """Persist this meetup; the payload's `id` is stored as authorId."""
        meetup_item = dict(
            authorId=self.id,
            topic=self.topic,
            description=self.description,
            schedule=self.schedule,
            location=self.location,
            tags=self.tags,
            images=self.images
        )
        keys = ", ".join(meetup_item.keys())
        values = tuple(meetup_item.values())
        self.base_model.add_item(keys, values)

    def rsvp_meetup(self, rsvp):
        """Insert an RSVP row (dict of column -> value) into 'rsvps'."""
        keys = ", ".join(rsvp.keys())
        values = tuple(rsvp.values())
        self.base_model.add_item(keys, values, 'rsvps')

    def cancel_rsvp(self, id):
        """Cancel a rsvp.  TODO: not implemented yet (intentional no-op)."""

    def fetch_rsvps(self, fields, condition):
        """Fetch *fields* from the RSVP rows matching *condition*."""
        return self.base_model.grab_items(fields, 'rsvps', condition)

    def fetch_meetups(self, fields):
        """Fetch *fields* for every meetup."""
        return self.base_model.grab_all_items(f'{fields}', "True = True")

    def fetch_specific_meetup(self, column, condition):
        """Fetch a single meetup by *column* / *condition*."""
        return self.base_model.grab_items_by_name(column, condition)

    def update_meetup(self, id, updates):
        """Apply field *updates* to meetup *id*; 404 payload when absent.

        SECURITY NOTE(review): values are interpolated directly into SQL
        fragments below -- a SQL-injection risk if `updates` is untrusted;
        prefer parameterized queries in BaseModel.
        """
        pairs_dict = {
            "topic": f"topic = '{updates['topic']}'",
            "description": f"description = '{updates['description']}'",
            "location": f"location = '{updates['location']}'",
            "images": f"images = '{updates['images']}'",
            "happeningOn": f"schedule = '{updates['happeningOn']}'",
            "tags": f"tags = '{updates['tags']}'"
        }
        pairs = ", ".join(pairs_dict.values())
        if self.fetch_specific_meetup('id', f"id = {id}"):
            return self.base_model.update_item(pairs, f"id = {id}")
        else:
            return jsonify({
                "error": "Meetup not found or does not exist!",
                "status": 404
            })

    def delete_meetup(self, id):
        """Delete meetup *id*; return an error dict when it is absent."""
        if self.fetch_specific_meetup('id', f"id = {id}"):
            return self.base_model.delete_item(f"id = {id}")
        else:
            return {
                "error": "Meetup not found or does not exist!"
            }
|
"""Add chinook models
Revision ID: 13d5b7bf4214
Revises: b7f884f5fc23
Create Date: 2020-06-17 22:23:58.202580
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '13d5b7bf4214'
down_revision = 'b7f884f5fc23'
branch_labels = None
depends_on = None
def upgrade():
    """Create the chinook schema: independent lookup tables first (Artist,
    Employee, Genre, MediaType, Playlist), then dependent tables in
    foreign-key order (Album, Customer, Invoice, Track, InvoiceLine,
    PlaylistTrack), with an index on every FK column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('Artist',
    sa.Column('ArtistId', sa.Integer(), nullable=False),
    sa.Column('Name', sa.String(length=120), nullable=True),
    sa.PrimaryKeyConstraint('ArtistId')
    )
    # Employee is self-referential via ReportsTo.
    op.create_table('Employee',
    sa.Column('EmployeeId', sa.Integer(), nullable=False),
    sa.Column('LastName', sa.String(length=20), nullable=False),
    sa.Column('FirstName', sa.String(length=20), nullable=False),
    sa.Column('Title', sa.String(length=30), nullable=True),
    sa.Column('ReportsTo', sa.Integer(), nullable=True),
    sa.Column('BirthDate', sa.DateTime(), nullable=True),
    sa.Column('HireDate', sa.DateTime(), nullable=True),
    sa.Column('Address', sa.String(length=70), nullable=True),
    sa.Column('City', sa.String(length=40), nullable=True),
    sa.Column('State', sa.String(length=40), nullable=True),
    sa.Column('Country', sa.String(length=40), nullable=True),
    sa.Column('PostalCode', sa.String(length=10), nullable=True),
    sa.Column('Phone', sa.String(length=24), nullable=True),
    sa.Column('Fax', sa.String(length=24), nullable=True),
    sa.Column('Email', sa.String(length=60), nullable=True),
    sa.ForeignKeyConstraint(['ReportsTo'], ['Employee.EmployeeId'], ),
    sa.PrimaryKeyConstraint('EmployeeId')
    )
    op.create_index(op.f('ix_Employee_ReportsTo'), 'Employee', ['ReportsTo'], unique=False)
    op.create_table('Genre',
    sa.Column('GenreId', sa.Integer(), nullable=False),
    sa.Column('Name', sa.String(length=120), nullable=True),
    sa.PrimaryKeyConstraint('GenreId')
    )
    op.create_table('MediaType',
    sa.Column('MediaTypeId', sa.Integer(), nullable=False),
    sa.Column('Name', sa.String(length=120), nullable=True),
    sa.PrimaryKeyConstraint('MediaTypeId')
    )
    op.create_table('Playlist',
    sa.Column('PlaylistId', sa.Integer(), nullable=False),
    sa.Column('Name', sa.String(length=120), nullable=True),
    sa.PrimaryKeyConstraint('PlaylistId')
    )
    # Dependent tables from here on (each FK target already exists).
    op.create_table('Album',
    sa.Column('AlbumId', sa.Integer(), nullable=False),
    sa.Column('Title', sa.String(length=160), nullable=False),
    sa.Column('ArtistId', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['ArtistId'], ['Artist.ArtistId'], ),
    sa.PrimaryKeyConstraint('AlbumId')
    )
    op.create_index(op.f('ix_Album_ArtistId'), 'Album', ['ArtistId'], unique=False)
    op.create_table('Customer',
    sa.Column('CustomerId', sa.Integer(), nullable=False),
    sa.Column('FirstName', sa.String(length=40), nullable=False),
    sa.Column('LastName', sa.String(length=20), nullable=False),
    sa.Column('Company', sa.String(length=80), nullable=True),
    sa.Column('Address', sa.String(length=70), nullable=True),
    sa.Column('City', sa.String(length=40), nullable=True),
    sa.Column('State', sa.String(length=40), nullable=True),
    sa.Column('Country', sa.String(length=40), nullable=True),
    sa.Column('PostalCode', sa.String(length=10), nullable=True),
    sa.Column('Phone', sa.String(length=24), nullable=True),
    sa.Column('Fax', sa.String(length=24), nullable=True),
    sa.Column('Email', sa.String(length=60), nullable=False),
    sa.Column('SupportRepId', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['SupportRepId'], ['Employee.EmployeeId'], ),
    sa.PrimaryKeyConstraint('CustomerId')
    )
    op.create_index(op.f('ix_Customer_SupportRepId'), 'Customer', ['SupportRepId'], unique=False)
    op.create_table('Invoice',
    sa.Column('InvoiceId', sa.Integer(), nullable=False),
    sa.Column('CustomerId', sa.Integer(), nullable=False),
    sa.Column('InvoiceDate', sa.DateTime(), nullable=False),
    sa.Column('BillingAddress', sa.String(length=70), nullable=True),
    sa.Column('BillingCity', sa.String(length=40), nullable=True),
    sa.Column('BillingState', sa.String(length=40), nullable=True),
    sa.Column('BillingCountry', sa.String(length=40), nullable=True),
    sa.Column('BillingPostalCode', sa.String(length=10), nullable=True),
    sa.Column('Total', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.ForeignKeyConstraint(['CustomerId'], ['Customer.CustomerId'], ),
    sa.PrimaryKeyConstraint('InvoiceId')
    )
    op.create_index(op.f('ix_Invoice_CustomerId'), 'Invoice', ['CustomerId'], unique=False)
    op.create_table('Track',
    sa.Column('TrackId', sa.Integer(), nullable=False),
    sa.Column('Name', sa.String(length=200), nullable=False),
    sa.Column('AlbumId', sa.Integer(), nullable=True),
    sa.Column('MediaTypeId', sa.Integer(), nullable=False),
    sa.Column('GenreId', sa.Integer(), nullable=True),
    sa.Column('Composer', sa.String(length=220), nullable=True),
    sa.Column('Milliseconds', sa.Integer(), nullable=False),
    sa.Column('Bytes', sa.Integer(), nullable=True),
    sa.Column('UnitPrice', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.ForeignKeyConstraint(['AlbumId'], ['Album.AlbumId'], ),
    sa.ForeignKeyConstraint(['GenreId'], ['Genre.GenreId'], ),
    sa.ForeignKeyConstraint(['MediaTypeId'], ['MediaType.MediaTypeId'], ),
    sa.PrimaryKeyConstraint('TrackId')
    )
    op.create_index(op.f('ix_Track_AlbumId'), 'Track', ['AlbumId'], unique=False)
    op.create_index(op.f('ix_Track_GenreId'), 'Track', ['GenreId'], unique=False)
    op.create_index(op.f('ix_Track_MediaTypeId'), 'Track', ['MediaTypeId'], unique=False)
    op.create_table('InvoiceLine',
    sa.Column('InvoiceLineId', sa.Integer(), nullable=False),
    sa.Column('InvoiceId', sa.Integer(), nullable=False),
    sa.Column('TrackId', sa.Integer(), nullable=False),
    sa.Column('UnitPrice', sa.Numeric(precision=10, scale=2), nullable=False),
    sa.Column('Quantity', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['InvoiceId'], ['Invoice.InvoiceId'], ),
    sa.ForeignKeyConstraint(['TrackId'], ['Track.TrackId'], ),
    sa.PrimaryKeyConstraint('InvoiceLineId')
    )
    op.create_index(op.f('ix_InvoiceLine_InvoiceId'), 'InvoiceLine', ['InvoiceId'], unique=False)
    op.create_index(op.f('ix_InvoiceLine_TrackId'), 'InvoiceLine', ['TrackId'], unique=False)
    # Association table: composite PK of (PlaylistId, TrackId).
    op.create_table('PlaylistTrack',
    sa.Column('PlaylistId', sa.Integer(), nullable=False),
    sa.Column('TrackId', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['PlaylistId'], ['Playlist.PlaylistId'], ),
    sa.ForeignKeyConstraint(['TrackId'], ['Track.TrackId'], ),
    sa.PrimaryKeyConstraint('PlaylistId', 'TrackId')
    )
    op.create_index(op.f('ix_PlaylistTrack_TrackId'), 'PlaylistTrack', ['TrackId'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Drop the chinook schema in the exact reverse order of upgrade()
    (dependent tables and their indexes first, lookup tables last)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_PlaylistTrack_TrackId'), table_name='PlaylistTrack')
    op.drop_table('PlaylistTrack')
    op.drop_index(op.f('ix_InvoiceLine_TrackId'), table_name='InvoiceLine')
    op.drop_index(op.f('ix_InvoiceLine_InvoiceId'), table_name='InvoiceLine')
    op.drop_table('InvoiceLine')
    op.drop_index(op.f('ix_Track_MediaTypeId'), table_name='Track')
    op.drop_index(op.f('ix_Track_GenreId'), table_name='Track')
    op.drop_index(op.f('ix_Track_AlbumId'), table_name='Track')
    op.drop_table('Track')
    op.drop_index(op.f('ix_Invoice_CustomerId'), table_name='Invoice')
    op.drop_table('Invoice')
    op.drop_index(op.f('ix_Customer_SupportRepId'), table_name='Customer')
    op.drop_table('Customer')
    op.drop_index(op.f('ix_Album_ArtistId'), table_name='Album')
    op.drop_table('Album')
    op.drop_table('Playlist')
    op.drop_table('MediaType')
    op.drop_table('Genre')
    op.drop_index(op.f('ix_Employee_ReportsTo'), table_name='Employee')
    op.drop_table('Employee')
    op.drop_table('Artist')
    # ### end Alembic commands ###
|
# _*_ coding: utf-8 _*_
from loop import Loop
from channel import Channel, Socket
class Client(object):
    """Minimal TCP client wiring a Socket, an event Loop and a Channel.

    Subclasses override on_msg_in / on_msg_sent / on_error for behaviour.
    """
    def __init__(self, addr, coder=None):
        # addr: peer address later passed to Channel.connect();
        # coder: optional message encoder/decoder handed to Channel.
        self.sock = Socket()
        self.addr = addr
        self.loop = Loop()
        self.ch = Channel(self.sock, self.loop, coder)
        # Route channel events to the overridable hooks below.
        self.ch.set_read_callback(self.on_msg_in)
        self.ch.set_write_callback(self.on_msg_sent)
        self.ch.set_error_callback(self.on_error)
    def start(self):
        """Connect to addr and run the event loop (blocks)."""
        self.ch.connect(self.addr)
        self.loop.loop()
    def on_msg_in(self, msg):
        """Hook: called for each inbound message. Default: no-op."""
        pass
    def on_msg_sent(self):
        """Hook: called after a message is written out. Default: no-op."""
        pass
    def on_error(self):
        """Hook: called on a channel error. Default: no-op."""
        pass
    def send(self, msg):
        """Queue *msg* for sending on the channel."""
        self.ch.send(msg)
from errors import *
from tools import *
tls_mode = "normal"
tls_ciphers_v12_paranoid = (
"ECDHE-RSA-CHACHA20-POLY1305",
"ECDHE-RSA-AES256-GCM-SHA384",
"ECDHE-RSA-AES128-GCM-SHA256",
"ECDHE-RSA-AES256-SHA384",
"ECDHE-RSA-AES128-SHA256",
)
tls_ciphers_v12_normal = (
"ECDHE-RSA-CHACHA20-POLY1305",
"ECDHE-RSA-AES256-GCM-SHA384",
"ECDHE-RSA-AES256-SHA384",
"ECDHE-RSA-AES128-GCM-SHA256",
"ECDHE-RSA-AES128-SHA256",
"DHE-RSA-AES256-GCM-SHA384",
"DHE-RSA-AES256-SHA256",
"DHE-RSA-AES128-GCM-SHA256",
"DHE-RSA-AES128-SHA256",
"ECDHE-RSA-AES256-SHA",
"DHE-RSA-AES256-SHA",
"ECDHE-RSA-AES128-SHA",
"DHE-RSA-AES128-SHA",
"ECDHE-RSA-DES-CBC3-SHA",
"EDH-RSA-DES-CBC3-SHA",
"AES256-GCM-SHA384",
"AES256-SHA256",
"AES128-GCM-SHA256",
"AES128-SHA256",
"AES256-SHA",
"AES128-SHA",
"DES-CBC3-SHA",
)
tls_ciphers_v12_backdoor = (
# "AES256-GCM-SHA384",
"AES256-SHA256",
# "AES128-GCM-SHA256",
"AES128-SHA256",
"AES256-SHA",
"AES128-SHA",
# "DES-CBC3-SHA",
)
tls_ciphers_v13_all = (
"TLS_CHACHA20_POLY1305_SHA256",
"TLS_AES_256_GCM_SHA384",
"TLS_AES_128_GCM_SHA256",
)
tls_ciphers_v13_paranoid = tls_ciphers_v13_all
tls_ciphers_v13_normal = tls_ciphers_v13_all
tls_ciphers_v13_backdoor = tls_ciphers_v13_all
tls_options_paranoid = (
"no-tlsv12",
"no-tlsv11",
"no-tlsv10",
"no-sslv3",
"no-tls-tickets",
"no-ssl-reuse",
"strict-sni",
)
tls_options_normal = (
"no-tlsv11",
"no-tlsv10",
"no-sslv3",
"no-tls-tickets",
)
tls_options_backdoor = tls_options_normal
# HTTP status-code groups used when hardening frontend/backend responses.
http_status_codes = {
	"content_strict" : (200, 204,),
	"content_standard" : (200, 201, 202, 204, 206,),
	"redirect" : (301, 302, 303, 307, 308,),
	"caching" : (304,),
	"not_found" : (404,),
	"get_strict" : (200,),
	"get_standard" : (200, 204, 206,),
	"get_redirect" : (301, 302,),
	"get_caching" : (304,),
	"post_strict" : (200, 201, 202, 204,),
	"post_standard" : (200, 201, 202, 204,),
	"post_redirect" : (303,),
	"post_caching" : (),
}
# Derive each "harden_allowed_*" group by concatenating its base groups
# (same order and contents as writing the tuples out by hand).
for _key, _groups in (
		("harden_allowed_paranoid", ("content_strict", "caching")),
		("harden_allowed_strict", ("content_strict", "redirect", "caching")),
		("harden_allowed_standard", ("content_standard", "redirect", "caching")),
		("harden_allowed_get_paranoid", ("get_strict", "get_caching")),
		("harden_allowed_get_strict", ("get_strict", "get_redirect", "get_caching")),
		("harden_allowed_get_standard", ("get_standard", "get_redirect", "get_caching")),
		("harden_allowed_post_paranoid", ("post_strict", "post_caching")),
		("harden_allowed_post_strict", ("post_strict", "post_redirect", "post_caching")),
		("harden_allowed_post_standard", ("post_standard", "post_redirect", "post_caching")),
) :
	_codes = []
	for _group in _groups :
		_codes.extend (http_status_codes[_group])
	http_status_codes[_key] = tuple (_codes)
# MIME types eligible for on-the-fly compression: textual content, JSON/XML
# payloads, SVG/ICO images and every common web-font container.
compression_content_types = (
	"text/html",
	"text/css",
	"application/javascript", "text/javascript",
	"application/xml", "text/xml",
	"application/xhtml+xml",
	"application/rss+xml", "application/atom+xml",
	"application/json", "text/json",
	"text/plain",
	"text/csv",
	"text/tab-separated-values",
	"image/svg+xml",
	"image/vnd.microsoft.icon", "image/x-icon",
	"font/collection",
	"font/otf", "application/font-otf", "application/x-font-otf", "application/x-font-opentype",
	"font/ttf", "application/font-ttf", "application/x-font-ttf", "application/x-font-truetype",
	"font/sfnt", "application/font-sfnt", "application/x-font-sfnt",
	"font/woff", "application/font-woff", "application/x-font-woff",
	"font/woff2", "application/font-woff2", "application/x-font-woff2",
	"font/eot", "application/font-eot", "application/x-font-eot", "application/vnd.ms-fontobject",
)
# Log output format selector: "json" or the plain-text formats below.
logging_type = "json"
# haproxy log-format strings (text flavour); the leading "{tcp:...}" /
# "{http:...}" token is a schema version tag.  These are runtime strings --
# do not reflow or edit them.
logging_tcp_format_text = """{tcp:20161201:01} f-id:%f b-id:%b,%s c-sck:%ci,%cp f-sck:%fi,%fp b-sck:%bi,%bp s-sck:%si,%sp i-sz:%U o-sz:%B w:%Ts.%ms,%Tw,%Tc,%Tt f-cnt:%ac,%fc,%bc,%sc,%rc,%ts b-cnt:%bq,%sq g-cnt:%lc,%rt ssl:%sslv,%sslc ssl-x:%[ssl_fc],%[ssl_fc_protocol],%[ssl_fc_cipher],%[ssl_fc_unique_id,hex],%[ssl_fc_session_id,hex],%[ssl_fc_is_resumed],%[ssl_fc_has_sni],[%{Q}[ssl_fc_sni]],[%{Q}[ssl_fc_alpn]],[%{Q}[ssl_fc_npn]] ssl-xf:%[ssl_fc],%[ssl_f_version],%[ssl_f_sha1,hex],[%{Q}[ssl_f_s_dn]] ssl-xc:%[ssl_c_used],%[ssl_c_version],%[ssl_c_sha1,hex],%[ssl_c_verify],%[ssl_c_err],%[ssl_c_ca_err],%[ssl_c_ca_err_depth],[%{Q}[ssl_c_s_dn]],[%{Q}[ssl_c_i_dn]]"""
logging_http_format_text = """{http:20161201:01} h-v:[%{Q}HV] h-m:[%{Q}HM] h-p:[%{Q}HP] h-q:[%{Q}HQ] h-s:%ST f-id:%f b-id:%b,%s c-sck:%ci,%cp f-sck:%fi,%fp b-sck:%bi,%bp s-sck:%si,%sp h-r-id:[%{Q}ID] h-i-hdr:[%{Q}hrl] h-o-hdr:[%{Q}hsl] h-i-ck:[%{Q}CC] h-o-ck:[%{Q}CS] i-sz:%U o-sz:%B w:%Ts.%ms,%Tq,%Tw,%Tc,%Tr,%Tt f-cnt:%ac,%fc,%bc,%sc,%rc,%tsc b-cnt:%bq,%sq g-cnt:%lc,%rt ssl:%sslv,%sslc ssl-x:%[ssl_fc],%[ssl_fc_protocol],%[ssl_fc_cipher],%[ssl_fc_unique_id,hex],%[ssl_fc_session_id,hex],%[ssl_fc_is_resumed],%[ssl_fc_has_sni],[%{Q}[ssl_fc_sni]],[%{Q}[ssl_fc_alpn]],[%{Q}[ssl_fc_npn]] ssl-xf:%[ssl_fc],%[ssl_f_version],%[ssl_f_sha1,hex],[%{Q}[ssl_f_s_dn]] ssl-xc:%[ssl_c_used],%[ssl_c_version],%[ssl_c_sha1,hex],%[ssl_c_verify],%[ssl_c_err],%[ssl_c_ca_err],%[ssl_c_ca_err_depth],[%{Q}[ssl_c_s_dn]],[%{Q}[ssl_c_i_dn]]"""
def _expand_logging_format_json (_format, _parameters) :
	"""Expand a JSON log-format template into a haproxy log-format string.

	*_format* is a sequence of (key, value) pairs; each value is written in
	a small prefix DSL (see _expand_value below).  *_parameters* is the
	substitution context for "$token" expansion via expand_token().
	Returns a single "{ "key":value, ... }" format string.

	NOTE(review): uses the Python 2 builtin `basestring` -- this module is
	py2-only as written.
	"""
	def _expand_value (_value) :
		# Prefix DSL for string values:
		#   "=..."  -> emit the rest verbatim
		#   "'..."  -> emit wrapped in double quotes (JSON string)
		#   "+..."  -> emit unquoted (JSON number/bool)
		# after "'" or "+", the payload itself may start with:
		#   "%..."  -> haproxy log variable (quoted form uses %{Q})
		#   "@..."  -> haproxy sample fetch, wrapped as %[...]
		#   "'..."  -> literal text
		#   "$..."  -> parameter reference expanded via expand_token()
		# Lists expand element-wise into a JSON array.
		if isinstance (_value, basestring) :
			_token = list ()
			if _value.startswith ("=") :
				_token.append (_value[1:])
			elif _value.startswith ("'") or _value.startswith ("+") :
				_quote = (_value[0] == "'")
				_value = _value[1:]
				if _quote :
					_token.append ("\"")
				if _value.startswith ("%") :
					# %{Q} asks haproxy to quote/escape the expanded value
					if _quote :
						_token.append ("%{Q}")
					else :
						_token.append ("%")
					_token.append (_value[1:])
				elif _value.startswith ("@") :
					_token.append ("%[")
					_token.append (_value[1:])
					_token.append ("]")
				elif _value.startswith ("'") :
					_token.append (_value[1:])
				elif _value.startswith ("$") :
					_value = expand_token (_value, _parameters)
					_token.append (_value)
				else :
					raise_error ("62d234ed", _value)
				if _quote :
					_token.append ("\"")
				_token = "".join (_token)
		elif isinstance (_value, list) :
			_token = list ()
			_token.append ("[")
			_sub_tokens = [_expand_value (_value) for _value in _value]
			_sub_tokens = ",".join (_sub_tokens)
			_token.append (_sub_tokens)
			_token.append ("]")
			_token = "".join (_token)
		else :
			raise_error ("2336219b", _value)
		return _token
	# Assemble the top-level JSON object, one "key":value member per pair.
	_tokens = list ()
	for _key, _value in _format :
		_value = _expand_value (_value)
		_token = ["\"", _key, "\"", ":", _value]
		_token = "".join (_token)
		_tokens.append (_token)
	_tokens = ", ".join (_tokens)
	_tokens = "{ " + _tokens + " }"
	return _tokens
logging_tcp_format_json = None
logging_http_format_json_template = [
("s", "''20230324:01"),
("ss", "'$logging_http_format_subschema"),
("t", "=%Ts.%ms"),
("f_id", "'%f"), #!
("b_id", "'%b"), #!
("s_id", "'%s"), #!
("h_v", "'%HV"), #!
("h_vm", "+@fc_http_major"),
("h_s", "+%ST"),
("h_m0", "'%HM"), #!
("h_u0", "'%HU"), #!
("h_p0", "'%HPO"), #!
("h_q0", "'%HQ"), #!
("h_i0", "'%ID"), #!
("h_t0", "'%trg"), #!
# FIXME: Make this configurable!
("h_h", "'@var(txn.logging_http_host),json()"),
# FIXME: Make this configurable!
("h_m", "'@var(txn.logging_http_method),json()"),
("h_p", "'@var(txn.logging_http_path),json()"),
("h_q", "'@var(txn.logging_http_query),json()"),
# FIXME: Make this configurable!
("h_r_i", "'@var(txn.logging_http_request),json()"),
("h_r_s", "'@var(txn.logging_http_session),json()"),
# FIXME: Make this configurable!
("h_f_h", "'@var(txn.logging_http_forwarded_host),json()"),
("h_f_f", "'@var(txn.logging_http_forwarded_for),json()"),
("h_f_p", "'@var(txn.logging_http_forwarded_proto),json()"),
# FIXME: Make this configurable!
("h_h_a", "'@var(txn.logging_http_agent),json()"),
("h_h_r", "'@var(txn.logging_http_referrer),json()"),
("h_h_l", "'@var(txn.logging_http_location),json()"),
("h_h_ct", "'@var(txn.logging_http_content_type),json()"),
("h_h_ce", "'@var(txn.logging_http_content_encoding),json()"),
("h_h_cl", "'@var(txn.logging_http_content_length),json()"),
("h_h_cc", "'@var(txn.logging_http_cache_control),json()"),
("h_h_cv", "'@var(txn.logging_http_cache_etag),json()"),
("h_i_hdr", "'%hrl"), #!
("h_o_hdr", "'%hsl"), #!
("h_i_ck", "'%CC"), #!
("h_o_ck", "'%CS"), #!
("h_o_comp", ["'@res.comp", "'@res.comp_algo"]),
("c_sck", ["'%ci", "'%cp"]),
("f_sck", ["'%fi", "'%fp"]),
("b_sck", ["'%bi", "'%bp"]),
("s_sck", ["'%si", "'%sp"]),
("ts", "'%tsc"),
("f_err", "'@fc_err"),
("b_err", "'@bc_err"),
("i_sz", "+%U"),
("o_sz", "+%B"),
("w", ["+%Tt", "+%Tq", "+%Ta"]),
("w_x", ["+%Th", "+%Ti", "+%TR", "+%Tw", "+%Tc", "+%Tr", "+%Td"]),
("cnt", ["+%ac", "+%fc", "+%bc", "+%bq", "+%sc", "+%sq", "+%rc", "+%rt", "+%lc"]),
("ssl", ["'%sslv", "'%sslc"]),
("ssl_f", [
"'@ssl_fc,json()",
"'@ssl_fc_err,json()",
"'@ssl_fc_protocol,json()",
"'@ssl_fc_cipher,json()",
"'@ssl_fc_unique_id,hex()",
"'@ssl_fc_session_id,hex()",
"'@ssl_fc_is_resumed,json()",
"'@ssl_fc_alpn,json()",
"'@ssl_fc_npn,json()",
"'@ssl_fc_sni,json()",
]),
("ssl_b", [
"'@ssl_bc,json()",
"'@ssl_bc_err,json()",
"'@ssl_bc_protocol,json()",
"'@ssl_bc_cipher,json()",
"'@ssl_bc_unique_id,hex()",
"'@ssl_bc_session_id,hex()",
"'@ssl_bc_is_resumed,json()",
"'@ssl_bc_alpn,json()",
"'@ssl_bc_npn,json()",
]),
("ssl_xf", [
"'@ssl_fc,json()",
"'@ssl_f_version,json()",
"'@ssl_f_key_alg,json()",
"'@ssl_f_sig_alg,json()",
"'@ssl_f_sha1,hex",
"'@ssl_f_s_dn,json()",
"'@ssl_f_i_dn,json()",
]),
("ssl_xc", [
"'@ssl_c_used,json()",
"'@ssl_c_version,json()",
"'@ssl_c_key_alg,json()",
"'@ssl_c_sig_alg,json()",
"'@ssl_c_sha1,hex",
"'@ssl_c_s_dn,json()",
"'@ssl_c_i_dn,json()",
"'@ssl_c_verify,json()",
"'@ssl_c_err,json()",
"'@ssl_c_ca_err,json()",
"'@ssl_c_ca_err_depth,json()",
]),
("stick", [
"'@sc0_conn_cur()",
"'@sc0_conn_cnt()",
"'@sc0_conn_rate()",
"'@sc0_sess_cnt()",
"'@sc0_sess_rate()",
"'@sc0_http_req_cnt()",
"'@sc0_http_req_rate()",
"'@sc0_http_err_cnt()",
"'@sc0_http_err_rate()",
"'@sc0_kbytes_in()",
"'@sc0_bytes_in_rate()",
"'@sc0_kbytes_out()",
"'@sc0_bytes_out_rate()",
]),
]
# Bind the HTTP JSON log format as a one-argument callable so the `parameters`
# table below can expand it lazily with the final parameters mapping.
# NOTE: the previous `lambda (_parameters) : ...` used the Python 2-only
# "sublist" parameter syntax (removed by PEP 3113); a plain parameter is
# equivalent under Python 2 and also valid under Python 3.
def logging_http_format_json (_parameters) :
	return _expand_logging_format_json (logging_http_format_json_template, _parameters)
parameters = {
"proxy_identifier" : parameters_get ("daemon_node"),
"frontend_enabled" : True,
"frontend_http_bind_endpoint" : parameters_get ("defaults_frontend_http_bind_endpoint"),
"frontend_http_bind_endpoint_tls" : parameters_get ("defaults_frontend_http_bind_endpoint_tls"),
"frontend_max_connections_active_count" : parameters_choose_if_false (parameters_get ("frontend_bind_minimal"),
parameters_get ("defaults_frontend_max_connections_active_count")),
"frontend_max_connections_backlog_count" : parameters_choose_if_false (parameters_get ("frontend_bind_minimal"),
parameters_get ("defaults_frontend_max_connections_backlog_count")),
"frontend_bind_options" : (
parameters_choose_if (
parameters_get ("frontend_bind_defer_accept"),
"defer-accept"),
parameters_choose_if_non_null (
parameters_get ("frontend_bind_mss"),
("mss", parameters_get ("frontend_bind_mss"))),
parameters_choose_if_non_null (
parameters_get ("frontend_max_connections_active_count"),
("maxconn", parameters_get ("frontend_max_connections_active_count"))),
parameters_choose_if_non_null (
parameters_get ("frontend_max_connections_backlog_count"),
("backlog", parameters_get ("frontend_max_connections_backlog_count"))),
parameters_choose_if (
parameters_get ("frontend_accept_proxy_enabled"),
"accept-proxy"),
parameters_choose_if_non_null (
parameters_get ("frontend_bind_interface"),
("interface", parameters_get ("frontend_bind_interface"))),
),
"frontend_bind_mss" : parameters_choose_if_false (parameters_get ("frontend_bind_minimal"), 1400),
"frontend_bind_defer_accept" : parameters_not (parameters_get ("frontend_bind_minimal")),
"frontend_bind_tls_certificate" : parameters_choose_if_false (parameters_get ("frontend_bind_tls_minimal"),
parameters_path_base_join ("daemon_paths_configurations_tls", "default.pem")),
"frontend_bind_tls_certificate_rules" : parameters_choose_if_false (parameters_get ("frontend_bind_tls_minimal"),
parameters_path_base_join ("daemon_paths_configurations_tls", "default.conf")),
"frontend_bind_tls_options" : parameters_choose_if_false (parameters_get ("frontend_bind_tls_minimal"), (
parameters_get ("frontend_bind_options"),
parameters_get ("frontend_bind_tls_options_actual"),
)),
"frontend_bind_interface" : None,
# FIXME: Rename this!
"frontend_bind_tls_options_actual" : (
parameters_get ("frontend_tls_options"),
parameters_choose_match (
parameters_get ("tls_verify_client"),
(None, None),
("none", ("verify", "none")),
("optional", ("verify", "optional")),
("required", ("verify", "required"))),
parameters_choose_if_non_null (
parameters_get ("frontend_tls_ciphers_v12_descriptor"),
("ciphers", parameters_get ("frontend_tls_ciphers_v12_descriptor"))),
parameters_choose_if_non_null (
parameters_get ("frontend_tls_ciphers_v13_descriptor"),
("ciphersuites", parameters_get ("frontend_tls_ciphers_v13_descriptor"))),
),
"frontend_tls_mode" : None,
"frontend_tls_ciphers_v12" : parameters_choose_match (
parameters_get ("frontend_tls_mode"),
(None, None),
("normal", parameters_get ("tls_ciphers_v12_normal")),
("paranoid", parameters_get ("tls_ciphers_v12_paranoid")),
("backdoor", parameters_get ("tls_ciphers_v12_backdoor")),
),
"frontend_tls_ciphers_v13" : parameters_choose_match (
parameters_get ("frontend_tls_mode"),
(None, None),
("normal", parameters_get ("tls_ciphers_v13_normal")),
("paranoid", parameters_get ("tls_ciphers_v13_paranoid")),
("backdoor", parameters_get ("tls_ciphers_v13_backdoor")),
),
"frontend_tls_ciphers_v12_descriptor" : parameters_choose_if_non_null ("frontend_tls_ciphers_v12", parameters_join (":", parameters_get ("frontend_tls_ciphers_v12"))),
"frontend_tls_ciphers_v13_descriptor" : parameters_choose_if_non_null ("frontend_tls_ciphers_v13", parameters_join (":", parameters_get ("frontend_tls_ciphers_v13"))),
"frontend_tls_options" : parameters_choose_match (
parameters_get ("frontend_tls_mode"),
(None, (
# parameters_get ("tls_options"),
parameters_get ("tls_pem_descriptor"),
parameters_get ("tls_alpn_descriptor"),
parameters_get ("tls_npn_descriptor"),
parameters_get ("tls_options_extra"))),
("normal", (
# parameters_get ("tls_options_normal"),
parameters_get ("tls_pem_descriptor"),
parameters_get ("tls_alpn_descriptor"),
parameters_get ("tls_npn_descriptor"),
parameters_get ("tls_options_extra"))),
("paranoid", (
# parameters_get ("tls_options_paranoid"),
parameters_get ("tls_pem_descriptor"),
parameters_get ("tls_alpn_descriptor"),
parameters_get ("tls_npn_descriptor"),
parameters_get ("tls_options_extra"))),
("backdoor", (
# parameters_get ("tls_options_backdoor"),
parameters_get ("tls_pem_descriptor"),
parameters_get ("tls_alpn_descriptor"),
parameters_get ("tls_npn_descriptor"),
parameters_get ("tls_options_extra"))),
),
"frontend_monitor_enabled" : True,
"frontend_monitor_path" : parameters_get ("heartbeat_proxy_path"),
"frontend_monitor_fail_acl" : "FALSE",
# FIXME: `monitor-net` was removed!
# "frontend_monitor_network" : "0.0.0.0/0",
"frontend_stats_enabled" : True,
"frontend_stats_token" : "beb36ad8a85568b7e89e314b2e03244f",
"frontend_stats_path" : parameters_format ("%s%s", parameters_get ("haproxy_internals_path_prefix"), parameters_get ("frontend_stats_token")),
"frontend_stats_auth_realm" : parameters_get ("daemon_identifier"),
"frontend_stats_auth_credentials" : None,
"frontend_stats_admin_acl" : None,
"frontend_stats_version" : True,
"frontend_stats_modules" : False,
"frontend_stats_refresh" : 6,
"frontend_accept_proxy_enabled" : False,
"frontend_capture_length" : 1024,
"frontend_http_keep_alive_mode" : "keep-alive",
"frontend_http_keep_alive_timeout" : None,
"frontend_http_stick_source" : parameters_get ("samples_client_ip_method"),
"frontend_http_stick_track" : True,
"frontend_tcp_stick_source" : parameters_get ("samples_client_ip_method"),
"frontend_tcp_stick_track" : True,
"backend_enabled" : True,
"backend_check_enabled" : parameters_get ("backend_check_configure"),
"backend_forward_enabled" : parameters_get ("backend_forward_configure"),
"backend_http_host" : None,
"backend_http_check_enabled" : parameters_get ("backend_check_enabled"),
"backend_http_check_request_method" : "GET",
"backend_http_check_request_uri" : parameters_get ("heartbeat_server_path"),
"backend_http_check_request_version" : "HTTP/1.1",
"backend_http_check_request_host" : parameters_get ("backend_http_host"),
"backend_http_check_expect_matcher" : "status",
"backend_http_check_expect_pattern" : "200",
"backend_server_min_connections_active_count" : parameters_math ("//", parameters_get ("backend_server_max_connections_active_count"), 4, True),
"backend_server_max_connections_active_count" : None,
"backend_server_max_connections_queue_count" : parameters_math ("*", parameters_get ("backend_server_max_connections_active_count"), 4, True),
"backend_server_max_connections_full_count" : parameters_math ("//", parameters_get ("backend_server_max_connections_queue_count"), 8, True),
"backend_server_check_interval_normal" : None,
"backend_server_check_interval_rising" : None,
"backend_server_check_interval_failed" : None,
# FIXME: Apply formulas as in case of defaults!
"backend_server_timeout_activity" : None,
"backend_server_timeout_activity_server" : None,
"backend_server_timeout_activity_client" : None,
"backend_server_timeout_activity_tunnel" : None,
"backend_server_timeout_connect" : None,
"backend_server_timeout_fin" : None,
"backend_server_timeout_queue" : None,
"backend_server_timeout_check" : None,
"backend_server_timeout_tarpit" : None,
"backend_server_timeout_request" : None,
"backend_server_timeout_keep_alive" : None,
"backend_http_keep_alive_mode" : "server-close",
"backend_http_keep_alive_reuse" : "never",
"backend_http_keep_alive_timeout" : None,
"backend_http_keep_alive_pool" : None,
"backend_balance" : None,
"server_enabled" : True,
"server_min_connections_active_count" : parameters_math ("//", parameters_get ("server_max_connections_active_count"), 4, True),
"server_max_connections_active_count" : parameters_get ("backend_server_max_connections_active_count"),
"server_max_connections_queue_count" : parameters_math ("*", parameters_get ("server_max_connections_active_count"), 4, True),
"server_check_enabled" : parameters_get ("backend_check_enabled"),
"server_send_proxy_enabled" : False,
"server_send_proxy_version" : "v1",
"server_tcp_min_connections_active_count" : parameters_get ("server_min_connections_active_count"),
"server_tcp_max_connections_active_count" : parameters_get ("server_max_connections_active_count"),
"server_tcp_max_connections_queue_count" : parameters_get ("server_max_connections_queue_count"),
"server_tcp_check_enabled" : parameters_get ("server_check_enabled"),
"server_tcp_send_proxy_enabled" : parameters_get ("server_send_proxy_enabled"),
"server_tcp_send_proxy_version" : parameters_get ("server_send_proxy_version"),
"server_tcp_options" : (
parameters_choose_if (
parameters_get ("server_tcp_check_enabled"),
"check"),
parameters_choose_if (
parameters_get ("server_tcp_check_enabled"),
("observe", "layer4")),
parameters_choose_if_non_null (
parameters_get ("server_tcp_min_connections_active_count"),
("minconn", parameters_get ("server_tcp_min_connections_active_count"))),
parameters_choose_if_non_null (
parameters_get ("server_tcp_max_connections_active_count"),
("maxconn", parameters_get ("server_tcp_max_connections_active_count"))),
parameters_choose_if_non_null (
parameters_get ("server_tcp_max_connections_queue_count"),
("maxqueue", parameters_get ("server_tcp_max_connections_queue_count"))),
parameters_choose_if (
parameters_get ("server_tcp_send_proxy_enabled"),
(
parameters_choose_match (
parameters_get ("server_tcp_send_proxy_version"),
(True, "send-proxy"),
("v1", "send-proxy"),
("v2", "send-proxy-v2"),
("v2-ssl", "send-proxy-v2-ssl"),
("v2-ssl-cn", "send-proxy-v2-ssl-cn"),
),
parameters_choose_if (parameters_get ("server_tcp_check_enabled"), "check-send-proxy"))),
),
"server_http_min_connections_active_count" : parameters_get ("server_min_connections_active_count"),
"server_http_max_connections_active_count" : parameters_get ("server_max_connections_active_count"),
"server_http_max_connections_queue_count" : parameters_get ("server_max_connections_queue_count"),
"server_http_check_enabled" : parameters_get ("server_check_enabled"),
"server_http_send_proxy_enabled" : parameters_get ("server_send_proxy_enabled"),
"server_http_send_proxy_version" : parameters_get ("server_send_proxy_version"),
"server_http_protocol" : None,
"server_http_options" : (
parameters_choose_if (
parameters_get ("server_http_check_enabled"),
"check"),
parameters_choose_if (
parameters_get ("server_http_check_enabled"),
("observe", "layer7")),
parameters_choose_if_non_null (
parameters_get ("server_http_min_connections_active_count"),
("minconn", parameters_get ("server_http_min_connections_active_count"))),
parameters_choose_if_non_null (
parameters_get ("server_http_max_connections_active_count"),
("maxconn", parameters_get ("server_http_max_connections_active_count"))),
parameters_choose_if_non_null (
parameters_get ("server_http_max_connections_queue_count"),
("maxqueue", parameters_get ("server_http_max_connections_queue_count"))),
parameters_choose_if_non_null (
parameters_get ("server_http_protocol"),
("proto", parameters_get ("server_http_protocol"))),
parameters_choose_if_non_null (
parameters_get ("server_http_protocol"),
parameters_choose_if (
parameters_get ("server_http_check_enabled"),
("check-proto", parameters_get ("server_http_protocol")))),
parameters_choose_if (
parameters_get ("server_http_send_proxy_enabled"),
(
parameters_choose_match (
parameters_get ("server_http_send_proxy_version"),
(True, "send-proxy"),
("v1", "send-proxy"),
("v2", "send-proxy-v2"),
("v2-ssl", "send-proxy-v2-ssl"),
("v2-ssl-cn", "send-proxy-v2-ssl-cn"),
),
parameters_choose_if (parameters_get ("server_http_check_enabled"), "check-send-proxy"))),
),
"server_tls_enabled" : False,
"server_tls_sni" : None,
"server_tls_alpn" : None,
"server_tls_verify" : True,
"server_tls_ca_file" : None,
"server_check_tls_sni" : None,
"server_check_tls_alpn" : None,
"server_tls_options" :
parameters_choose_if (
parameters_get ("server_tls_enabled"),
(
"ssl",
parameters_choose_if_non_null (
parameters_get ("server_tls_ca_file"),
("ca-file", parameters_get ("server_tls_ca_file"))),
parameters_choose_if_non_null (
parameters_get ("server_tls_sni"),
("sni", parameters_get ("server_tls_sni"))),
parameters_choose_if_non_null (
parameters_get ("server_tls_alpn"),
("alpn", parameters_get ("server_tls_alpn"))),
parameters_choose_if (
parameters_get ("server_check_enabled"),
"check-ssl"),
parameters_choose_if_non_null (
parameters_get ("server_check_tls_sni"),
parameters_choose_if (
parameters_get ("server_check_enabled"),
("check-sni", parameters_get ("server_check_tls_sni")))),
parameters_choose_if_non_null (
parameters_get ("server_check_tls_alpn"),
parameters_choose_if (
parameters_get ("server_check_enabled"),
("check-alpn", parameters_get ("server_check_tls_alpn")))),
parameters_choose_if (
parameters_get ("server_tls_verify"),
("verify", "required"),
("verify", "none")),
)
),
"server_check_interval_normal" : None,
"server_check_interval_rising" : None,
"server_check_interval_failed" : None,
"server_resolvers" : parameters_get ("defaults_server_resolvers"),
"server_resolvers_prefer" : parameters_get ("defaults_server_resolvers_prefer"),
"server_resolvers_options" : parameters_get ("defaults_server_resolvers_options"),
"server_options" : (
parameters_choose_match (
parameters_get ("backend_mode"),
("tcp", parameters_get ("server_tcp_options")),
("http", parameters_get ("server_http_options"))),
parameters_get ("server_tls_options"),
parameters_choose_if_non_null (parameters_get ("server_check_interval_normal"), ("inter", parameters_get ("server_check_interval_normal"))),
parameters_choose_if_non_null (parameters_get ("server_check_interval_rising"), ("fastinter", parameters_get ("server_check_interval_rising"))),
parameters_choose_if_non_null (parameters_get ("server_check_interval_failed"), ("downinter", parameters_get ("server_check_interval_failed"))),
parameters_choose_if_non_null (parameters_get ("server_resolvers"), ("resolvers", parameters_get ("server_resolvers"))),
parameters_choose_if_non_null (parameters_get ("server_resolvers_prefer"), ("resolve-prefer", parameters_get ("server_resolvers_prefer"))),
parameters_choose_if_non_null (parameters_get ("server_resolvers_options"), ("resolve-opts", parameters_join (",", parameters_get ("server_resolvers_options")))),
),
"defaults_frontend_http_bind_endpoint" : "ipv4@0.0.0.0:80",
"defaults_frontend_http_bind_endpoint_tls" : "ipv4@0.0.0.0:443",
"defaults_frontend_max_connections_active_count" : parameters_math ("//", parameters_get ("global_max_connections_count"), 2),
"defaults_frontend_max_connections_backlog_count" : parameters_math ("//", parameters_get ("defaults_frontend_max_connections_active_count"), 4),
"defaults_frontend_max_sessions_rate" : parameters_math ("*", parameters_get ("defaults_frontend_max_connections_active_count"), 4),
"defaults_server_min_connections_active_count" : parameters_math ("//", parameters_get ("defaults_server_max_connections_active_count"), 4),
"defaults_server_max_connections_active_count" : 32,
"defaults_server_max_connections_queue_count" : parameters_math ("*", parameters_get ("defaults_server_max_connections_active_count"), 4),
"defaults_server_max_connections_full_count" : parameters_math ("//", parameters_get ("defaults_server_max_connections_queue_count"), 8),
"defaults_server_check_interval_normal" : 60,
"defaults_server_check_interval_rising" : parameters_math ("//", parameters_get ("defaults_server_check_interval_normal"), 30),
"defaults_server_check_interval_failed" : parameters_math ("//", parameters_get ("defaults_server_check_interval_normal"), 3),
"defaults_server_check_count_rising" : 8,
"defaults_server_check_count_failed" : 4,
"defaults_server_check_count_errors" : parameters_get ("defaults_server_check_count_failed"),
"defaults_server_resolvers" : None,
"defaults_server_resolvers_prefer" : None,
"defaults_server_resolvers_options" : None,
"defaults_timeout_activity" : 30,
"defaults_timeout_activity_server" : parameters_math ("*", parameters_get ("defaults_timeout_activity"), 2),
"defaults_timeout_activity_client" : parameters_get ("defaults_timeout_activity"),
"defaults_timeout_activity_tunnel" : parameters_math ("*", parameters_get ("defaults_timeout_activity"), 6),
"defaults_timeout_connect" : 6,
"defaults_timeout_fin" : 6,
"defaults_timeout_queue" : 30,
"defaults_timeout_check" : 6,
"defaults_timeout_tarpit" : parameters_get ("defaults_timeout_queue"),
"defaults_timeout_request" : 30,
"defaults_timeout_keep_alive" : 60,
"defaults_compression_content_types" : compression_content_types,
"defaults_compression_offload" : True,
"global_max_connections_count" : 1024 * 8,
"global_max_connections_rate" : parameters_math ("//", parameters_get ("global_max_connections_count"), 16),
"global_max_sessions_rate" : parameters_math ("*", parameters_get ("global_max_connections_rate"), 4),
"global_max_tls_connections_count" : parameters_math ("//", parameters_get ("global_max_connections_count"), 2),
"global_max_tls_connections_rate" : parameters_math ("//", parameters_get ("global_max_tls_connections_count"), 16),
"global_max_pipes" : parameters_math ("//", parameters_get ("global_max_connections_count"), 2),
"tls_enabled" : True,
"tls_ca_base" : parameters_choose_if (parameters_get ("tls_enabled"), parameters_choose_if (parameters_get ("tls_ca_base_enabled"), parameters_path_base_join ("daemon_paths_configurations_tls", "ca"))),
"tls_ca_file" : parameters_choose_if (parameters_get ("tls_enabled"), parameters_choose_if (parameters_get ("tls_ca_file_enabled"), parameters_path_base_join ("daemon_paths_configurations_tls", "ca.pem"))),
"tls_ca_verify_file" : parameters_choose_if (parameters_get ("tls_enabled"), parameters_choose_if (parameters_get ("tls_ca_verify_file_enabled"), parameters_path_base_join ("daemon_paths_configurations_tls", "ca-verify.pem"))),
"tls_ca_sign_file" : parameters_choose_if (parameters_get ("tls_enabled"), parameters_choose_if (parameters_get ("tls_ca_sign_file_enabled"), parameters_path_base_join ("daemon_paths_configurations_tls", "ca-sign.pem"))),
"tls_crt_base" : parameters_choose_if (parameters_get ("tls_enabled"), parameters_choose_if (parameters_get ("tls_crt_base_enabled"), parameters_path_base_join ("daemon_paths_configurations_tls", "certificates"))),
"tls_crt_file" : parameters_choose_if (parameters_get ("tls_enabled"), parameters_choose_if (parameters_get ("tls_crt_file_enabled"), parameters_path_base_join ("daemon_paths_configurations_tls", "certificates.pem"))),
"tls_dh_params" : parameters_choose_if (parameters_get ("tls_enabled"), parameters_choose_if (parameters_get ("tls_dh_params_enabled"), parameters_path_base_join ("daemon_paths_configurations_tls", "dh-params.pem"))),
"tls_ca_base_enabled" : False,
"tls_ca_file_enabled" : False,
"tls_ca_verify_file_enabled" : False,
"tls_ca_sign_file_enabled" : False,
"tls_crt_base_enabled" : False,
"tls_crt_file_enabled" : False,
"tls_dh_params_enabled" : True,
"tls_mode" : tls_mode,
"tls_ciphers_v12" : parameters_choose_match (
parameters_get ("tls_mode"),
("normal", parameters_get ("tls_ciphers_v12_normal")),
("paranoid", parameters_get ("tls_ciphers_v12_paranoid")),
("backdoor", parameters_get ("tls_ciphers_v12_backdoor")),
),
"tls_ciphers_v12_descriptor" : parameters_join (":", parameters_get ("tls_ciphers_v12")),
"tls_ciphers_v12_normal" : tls_ciphers_v12_normal,
"tls_ciphers_v12_paranoid" : tls_ciphers_v12_paranoid,
"tls_ciphers_v12_backdoor" : tls_ciphers_v12_backdoor,
"tls_ciphers_v13" : parameters_choose_match (
parameters_get ("tls_mode"),
("normal", parameters_get ("tls_ciphers_v13_normal")),
("paranoid", parameters_get ("tls_ciphers_v13_paranoid")),
("backdoor", parameters_get ("tls_ciphers_v13_backdoor")),
),
"tls_ciphers_v13_descriptor" : parameters_join (":", parameters_get ("tls_ciphers_v13")),
"tls_ciphers_v13_normal" : tls_ciphers_v13_normal,
"tls_ciphers_v13_paranoid" : tls_ciphers_v13_paranoid,
"tls_ciphers_v13_backdoor" : tls_ciphers_v13_backdoor,
"tls_options" : parameters_choose_match (
parameters_get ("tls_mode"),
("normal", (
parameters_get ("tls_options_normal"),
parameters_get ("tls_pem_descriptor"),
parameters_get ("tls_alpn_descriptor"),
parameters_get ("tls_npn_descriptor"),
parameters_get ("tls_options_extra"))),
("paranoid", (
parameters_get ("tls_options_paranoid"),
parameters_get ("tls_pem_descriptor"),
parameters_get ("tls_alpn_descriptor"),
parameters_get ("tls_npn_descriptor"),
parameters_get ("tls_options_extra"))),
("backdoor", (
parameters_get ("tls_options_backdoor"),
parameters_get ("tls_pem_descriptor"),
parameters_get ("tls_alpn_descriptor"),
parameters_get ("tls_npn_descriptor"),
parameters_get ("tls_options_extra"))),
),
"tls_options_normal" : tls_options_normal,
"tls_options_paranoid" : tls_options_paranoid,
"tls_options_backdoor" : tls_options_backdoor,
"tls_options_extra" : (
parameters_choose_if (parameters_get ("tls_sni_strict"), "strict-sni"),
parameters_get ("tls_options_custom"),
),
"tls_options_custom" : None,
"tls_pem_enabled" : True,
"tls_pem_descriptor" : parameters_choose_if (
parameters_get ("tls_pem_enabled"),
(
parameters_choose_if_non_null (parameters_get ("tls_crt_file"), ("crt", parameters_get ("tls_crt_file"))),
parameters_choose_if_non_null (parameters_get ("tls_crt_base"), ("crt-base", parameters_get ("tls_crt_base"))),
parameters_choose_if_non_null (parameters_get ("tls_ca_file"), ("ca-file", parameters_get ("tls_ca_file"))),
parameters_choose_if_non_null (parameters_get ("tls_ca_base"), ("ca-base", parameters_get ("tls_ca_base"))),
parameters_choose_if_non_null (parameters_get ("tls_ca_sign_file"), ("ca-sign-file", parameters_get ("tls_ca_sign_file"))),
)),
"tls_alpn_enabled" : False,
"tls_alpn_descriptor" : parameters_choose_if (parameters_get ("tls_alpn_enabled"), ("alpn", parameters_join (",", parameters_get ("tls_alpn_protocols")))),
"tls_alpn_protocols" : ("h2,http/1.1", "http/1.0"),
"tls_npn_enabled" : False,
"tls_npn_descriptor" : parameters_choose_if (parameters_get ("tls_npn_enabled"), ("npn", parameters_join (",", parameters_get ("tls_npn_protocols")))),
"tls_npn_protocols" : ("h2,http/1.1", "http/1.0"),
"tls_sni_strict" : False,
"tls_curves" : parameters_join (",", ("X25519:P-256",)),
"tls_verify_client" : None,
"geoip_enabled" : False,
"geoip_map" : parameters_path_base_join ("daemon_paths_configurations_maps", "geoip.txt"),
"bogons_map" : parameters_path_base_join ("daemon_paths_configurations_maps", "bogons.txt"),
"bots_map" : parameters_path_base_join ("daemon_paths_configurations_maps", "bots.txt"),
"daemon_node" : "localhost",
"daemon_name" : "haproxy",
"daemon_identifier" : parameters_format ("%s@%s", parameters_get ("daemon_name"), parameters_get ("daemon_node")),
"daemon_description" : "[]",
"daemon_user" : "haproxy",
"daemon_group" : parameters_get ("daemon_user"),
"daemon_pid" : parameters_path_base_join ("daemon_paths_runtime", "haproxy.pid"),
"daemon_chroot" : parameters_path_base_join ("daemon_paths_runtime", "haproxy.chroot"),
"daemon_chroot_enabled" : False,
"daemon_ulimit" : 65536,
"daemon_threads_count" : 1,
"daemon_threads_affinity" : None,
"daemon_socket" : parameters_choose_if (True, parameters_format ("unix@%s", parameters_path_base_join ("daemon_paths_runtime", "haproxy.sock"))),
"daemon_paths_configurations" : "/etc/haproxy",
"daemon_paths_configurations_tls" : parameters_path_base_join ("daemon_paths_configurations", "tls"),
"daemon_paths_configurations_maps" : parameters_path_base_join ("daemon_paths_configurations", "maps"),
"daemon_paths_runtime" : "/run",
"daemon_paths_states_prefix" : parameters_path_base_join ("daemon_paths_runtime", "haproxy-states"),
"daemon_paths_state_global" : parameters_path_base_join ("daemon_paths_runtime", "haproxy.state"),
"syslog_1_enabled" : True,
"syslog_1_endpoint" : "/dev/log",
"syslog_1_protocol" : parameters_get ("syslog_protocol"),
"syslog_2_enabled" : False,
"syslog_2_endpoint" : "127.0.0.1:514",
"syslog_2_protocol" : parameters_get ("syslog_protocol"),
"syslog_p_enabled" : False,
"syslog_p_endpoint" : "127.0.0.1:514",
"syslog_p_protocol" : parameters_get ("syslog_protocol"),
"syslog_pg_enabled" : parameters_choose_if (parameters_get ("syslog_p_enabled"), False, True),
# NOTE: Preferred protocol should be `rfc5424`!
# If there are issues, use `rfc3164` and set `syslog_source_node` to `None`.
"syslog_protocol" : "rfc5424",
"syslog_source_node" : parameters_get ("daemon_node"),
"syslog_source_tag" : "haproxy",
"logging_type" : logging_type,
"logging_tcp_type" : parameters_get ("logging_type"),
"logging_tcp_format_text" : logging_tcp_format_text,
"logging_tcp_format_json" : logging_tcp_format_json,
"logging_tcp_format" : parameters_choose_match (
parameters_get ("logging_tcp_type"),
("text", parameters_get ("logging_tcp_format_text")),
("json", parameters_get ("logging_tcp_format_json")),
("default", None),
),
"logging_http_type" : parameters_get ("logging_type"),
"logging_http_format_text" : logging_http_format_text,
"logging_http_format_json" : logging_http_format_json,
"logging_http_format_subschema" : "default",
"logging_http_format" : parameters_choose_match (
parameters_get ("logging_http_type"),
("text", parameters_get ("logging_http_format_text")),
("json", parameters_get ("logging_http_format_json")),
("default", None),
),
"logging_http_variable_method" : "txn.logging_http_method",
"logging_http_variable_host" : "txn.logging_http_host",
"logging_http_variable_path" : "txn.logging_http_path",
"logging_http_variable_query" : "txn.logging_http_query",
"logging_http_variable_forwarded_host" : "txn.logging_http_forwarded_host",
"logging_http_variable_forwarded_for" : "txn.logging_http_forwarded_for",
"logging_http_variable_forwarded_proto" : "txn.logging_http_forwarded_proto",
"logging_http_variable_agent" : "txn.logging_http_agent",
"logging_http_variable_referrer" : "txn.logging_http_referrer",
"logging_http_variable_location" : "txn.logging_http_location",
"logging_http_variable_content_type" : "txn.logging_http_content_type",
"logging_http_variable_content_encoding" : "txn.logging_http_content_encoding",
"logging_http_variable_content_length" : "txn.logging_http_content_length",
"logging_http_variable_cache_control" : "txn.logging_http_cache_control",
"logging_http_variable_cache_etag" : "txn.logging_http_cache_etag",
"logging_http_variable_request" : "txn.logging_http_request",
"logging_http_variable_session" : "txn.logging_http_session",
"logging_http_variable_action" : "txn.logging_http_action",
"logging_http_header_forwarded_host" : "X-Forwarded-Host",
"logging_http_header_forwarded_for" : "X-Forwarded-For",
"logging_http_header_forwarded_proto" : "X-Forwarded-Proto",
"logging_http_header_forwarded_proto_method" : "ssl_fc",
"logging_http_header_forwarded_port" : "X-Forwarded-Port",
"logging_http_header_forwarded_server_ip" : "X-Forwarded-Server-Ip",
"logging_http_header_forwarded_server_port" : "X-Forwarded-Server-Port",
"logging_http_header_request" : parameters_get ("http_tracking_request_header"),
"logging_http_header_session" : parameters_get ("http_tracking_session_header"),
"logging_http_header_action" : "X-HA-HTTP-Action",
"logging_geoip_country_variable" : "txn.logging_geoip_country",
"error_pages_enabled" : True,
"error_pages_codes" : (400, 401, 403, 404, 405, 408, 410, 429, 500, 501, 502, 503, 504,),
"error_pages_store" : parameters_path_base_join ("daemon_paths_configurations", "errors"),
"error_pages_store_http" : parameters_path_base_join ("error_pages_store", "http"),
"error_pages_store_html" : parameters_path_base_join ("error_pages_store", "html"),
"internals_path_prefix" : "/__/",
"internals_rules_order_allow" : -9920,
"internals_rules_order_deny" : -9910,
"internals_netfilter_mark_allowed" : None,
"internals_netfilter_mark_denied" : None,
"haproxy_internals_path_prefix" : parameters_format ("%s%s", parameters_get ("internals_path_prefix"), "haproxy/"),
"heartbeat_server_path" : parameters_format ("%s%s", parameters_get ("internals_path_prefix"), "heartbeat"),
"heartbeat_proxy_path" : parameters_format ("%s%s", parameters_get ("internals_path_prefix"), "heartbeat-proxy"),
"heartbeat_self_path" : parameters_format ("%s%s", parameters_get ("internals_path_prefix"), "heartbeat-haproxy"),
"authenticate_path" : parameters_format ("%s%s", parameters_get ("internals_path_prefix"), "authenticate"),
"error_pages_path_prefix" : parameters_format ("%s%s", parameters_get ("internals_path_prefix"), "errors/"),
"whitelist_path" : parameters_format ("%s%s", parameters_get ("internals_path_prefix"), "whitelist"),
"whitelist_netfilter_mark_allowed" : None,
"whitelist_netfilter_mark_denied" : None,
"http_tracking_session_cookie" : "X-HA-Session-Id",
"http_tracking_session_cookie_max_age" : 2419200,
"http_tracking_session_header" : "X-HA-Session-Id",
"http_tracking_session_variable" : "txn.http_tracking_session",
"http_tracking_request_header" : "X-HA-Request-Id",
"http_tracking_request_variable" : "txn.http_tracking_request",
"http_tracking_enabled_variable" : "txn.http_tracking_enabled",
"http_tracking_excluded_variable" : "txn.http_tracking_excluded",
"http_authenticated_header" : "X-HA-Authenticated",
"http_authenticated_cookie" : "X-HA-Authenticated",
"http_authenticated_cookie_max_age" : 3600,
"http_authenticated_path" : parameters_get ("authenticate_path"),
"http_authenticated_query" : "__authenticate",
"http_authenticated_variable" : "txn.http_authenticated",
"http_authenticated_netfilter_mark" : None,
"http_debug_enabled_variable" : "txn.http_debugging_enabled",
"http_debug_excluded_variable" : "txn.http_debugging_excluded",
"http_debug_timestamp_header" : "X-HA-Timestamp",
"http_debug_frontend_header" : "X-HA-Frontend",
"http_debug_backend_header" : "X-HA-Backend",
"http_debug_counters_header" : "X-HA-Counters",
"http_errors_marker" : "X-Ha-Error-Proxy",
"http_errors_method" : "X-HA-Error-Method",
"http_errors_status" : "X-HA-Error-Status",
"http_harden_level" : "standard",
"http_harden_allowed_methods_paranoid" : ("GET"),
"http_harden_allowed_methods_strict" : ("GET"),
"http_harden_allowed_methods_standard" : ("HEAD", "GET", "OPTIONS"),
"http_harden_allowed_methods_extra" : None,
"http_harden_allowed_methods" : parameters_choose_match (
parameters_get ("http_harden_level"),
("paranoid", (parameters_get ("http_harden_allowed_methods_paranoid"), parameters_get ("http_harden_allowed_methods_extra"))),
("strict", (parameters_get ("http_harden_allowed_methods_strict"), parameters_get ("http_harden_allowed_methods_extra"))),
("standard", (parameters_get ("http_harden_allowed_methods_standard"), parameters_get ("http_harden_allowed_methods_extra"))),
),
"http_harden_allowed_status_codes_paranoid" : http_status_codes["harden_allowed_paranoid"],
"http_harden_allowed_status_codes_strict" : http_status_codes["harden_allowed_strict"],
"http_harden_allowed_status_codes_standard" : http_status_codes["harden_allowed_standard"],
"http_harden_allowed_status_codes_extra" : (
parameters_get ("http_harden_allowed_get_status_codes_extra"),
parameters_get ("http_harden_allowed_post_status_codes_extra"),
),
"http_harden_allowed_status_codes" : (
parameters_choose_match (
parameters_get ("http_harden_level"),
("paranoid", (parameters_get ("http_harden_allowed_status_codes_paranoid"), parameters_get ("http_harden_allowed_status_codes_extra"))),
("strict", (parameters_get ("http_harden_allowed_status_codes_strict"), parameters_get ("http_harden_allowed_status_codes_extra"))),
("standard", (parameters_get ("http_harden_allowed_status_codes_standard"), parameters_get ("http_harden_allowed_status_codes_extra"))),
),
parameters_choose_if (parameters_get ("http_harden_allowed_not_found"), http_status_codes["not_found"]),
parameters_choose_if (parameters_get ("http_harden_allowed_redirect"), http_status_codes["redirect"]),
),
"http_harden_allowed_not_found" : False,
"http_harden_allowed_redirect" : False,
"http_harden_allowed_get_status_codes_paranoid" : http_status_codes["harden_allowed_get_paranoid"],
"http_harden_allowed_get_status_codes_strict" : http_status_codes["harden_allowed_get_strict"],
"http_harden_allowed_get_status_codes_standard" : http_status_codes["harden_allowed_get_standard"],
"http_harden_allowed_get_status_codes_extra" : None,
"http_harden_allowed_get_status_codes" : (
parameters_choose_match (
parameters_get ("http_harden_level"),
("paranoid", (parameters_get ("http_harden_allowed_get_status_codes_paranoid"), parameters_get ("http_harden_allowed_get_status_codes_extra"))),
("strict", (parameters_get ("http_harden_allowed_get_status_codes_strict"), parameters_get ("http_harden_allowed_get_status_codes_extra"))),
("standard", (parameters_get ("http_harden_allowed_get_status_codes_standard"), parameters_get ("http_harden_allowed_get_status_codes_extra"))),
),
parameters_choose_if (parameters_get ("http_harden_allowed_get_not_found"), http_status_codes["not_found"]),
parameters_choose_if (parameters_get ("http_harden_allowed_get_redirect"), http_status_codes["redirect"]),
),
"http_harden_allowed_get_not_found" : parameters_get ("http_harden_allowed_not_found"),
"http_harden_allowed_get_redirect" : parameters_get ("http_harden_allowed_redirect"),
"http_harden_allowed_post_status_codes_paranoid" : http_status_codes["harden_allowed_post_paranoid"],
"http_harden_allowed_post_status_codes_strict" : http_status_codes["harden_allowed_post_strict"],
"http_harden_allowed_post_status_codes_standard" : http_status_codes["harden_allowed_post_standard"],
"http_harden_allowed_post_status_codes_extra" : None,
"http_harden_allowed_post_status_codes" : (
parameters_choose_match (
parameters_get ("http_harden_level"),
("paranoid", (parameters_get ("http_harden_allowed_post_status_codes_paranoid"), parameters_get ("http_harden_allowed_post_status_codes_extra"))),
("strict", (parameters_get ("http_harden_allowed_post_status_codes_strict"), parameters_get ("http_harden_allowed_post_status_codes_extra"))),
("standard", (parameters_get ("http_harden_allowed_post_status_codes_standard"), parameters_get ("http_harden_allowed_post_status_codes_extra"))),
),
parameters_choose_if (parameters_get ("http_harden_allowed_post_not_found"), http_status_codes["not_found"]),
parameters_choose_if (parameters_get ("http_harden_allowed_post_redirect"), http_status_codes["redirect"]),
),
"http_harden_allowed_post_not_found" : parameters_get ("http_harden_allowed_not_found"),
"http_harden_allowed_post_redirect" : parameters_get ("http_harden_allowed_redirect"),
"http_harden_hsts_enabled" : True,
"http_harden_hsts_interval" : parameters_choose_match (
parameters_get ("http_harden_level"),
("paranoid", 4 * 4 * 365 * 24 * 3600),
("strict", 4 * 365 * 24 * 3600),
("standard", 28 * 24 * 3600),
),
"http_harden_hsts_descriptor" : parameters_format ("max-age=%d", parameters_get ("http_harden_hsts_interval")),
"http_harden_csp_descriptor" : "upgrade-insecure-requests",
"http_harden_fp_descriptor" : "accelerometer 'none'; ambient-light-sensor 'none'; autoplay 'none'; camera 'none'; display-capture 'none'; document-domain 'none'; encrypted-media 'none'; fullscreen 'none'; geolocation 'none'; gyroscope 'none'; magnetometer 'none'; microphone 'none'; midi 'none'; payment 'none'; picture-in-picture 'none'; publickey-credentials-get 'none'; sync-xhr 'none'; usb 'none'; xr-spatial-tracking 'none'",
"http_harden_referrer_descriptor" : "strict-origin-when-cross-origin",
"http_harden_frames_descriptor" : "SAMEORIGIN",
"http_harden_cto_descriptor" : "nosniff",
"http_harden_xss_descriptor" : "1; mode=block",
"http_harden_coop_descriptor" : "same-origin",
"http_harden_corp_descriptor" : "same-origin",
"http_harden_coep_descriptor" : "unsafe-none",
"http_harden_netfilter_mark_allowed" : None,
"http_harden_netfilter_mark_denied" : None,
"http_harden_enabled_variable" : "txn.http_harden_enabled",
"http_harden_excluded_variable" : "txn.http_harden_excluded",
"http_harden_headers_extended" : True,
"http_hardened_header" : "X-HA-Hardened",
"http_drop_caching_enabled_variable" : "txn.http_drop_caching_enabled",
"http_drop_caching_excluded_variable" : "txn.http_drop_caching_excluded",
"http_force_caching_enabled_variable" : "txn.http_force_caching_enabled",
"http_force_caching_excluded_variable" : "txn.http_force_caching_excluded",
"http_drop_cookies_enabled_variable" : "txn.http_drop_cookies_enabled",
"http_drop_cookies_excluded_variable" : "txn.http_drop_cookies_excluded",
"http_force_cors_enabled_variable" : "txn.http_force_cors_enabled",
"http_force_cors_excluded_variable" : "txn.http_force_cors_excluded",
"http_force_cors_allowed_variable" : "txn.http_force_cors_allowed",
"http_force_cors_origin_variable" : "txn.http_force_cors_origin",
"http_force_cors_origin_present_variable" : "txn.http_force_cors_origin_present",
"http_force_cors_options_present_variable" : "txn.http_force_cors_options_present",
"http_ranges_allowed_variable" : "txn.http_ranges_allowed",
"letsencrypt_backend_identifier" : "letsencrypt",
"letsencrypt_server_ip" : "127.0.0.1",
"letsencrypt_server_port" : 445,
"letsencrypt_server_endpoint" : parameters_format ("ipv4@%s:%d", parameters_get ("letsencrypt_server_ip"), parameters_get ("letsencrypt_server_port")),
"letsencrypt_frontend_rules_order" : -9100,
"letsencrypt_frontend_routes_order" : -9100,
"letsencrypt_path" : "/.well-known/acme-challenge",
"varnish_backend_identifier" : "varnish",
"varnish_downstream_ip" : "127.0.0.1",
"varnish_downstream_port" : 6083,
"varnish_downstream_endpoint" : parameters_format ("ipv4@%s:%d", parameters_get ("varnish_downstream_ip"), parameters_get ("varnish_downstream_port")),
"varnish_downstream_send_proxy_enabled" : parameters_get ("varnish_send_proxy_enabled"),
"varnish_upstream_ip" : "127.0.0.1",
"varnish_upstream_port" : 6081,
"varnish_upstream_endpoint" : parameters_format ("ipv4@%s:%d", parameters_get ("varnish_upstream_ip"), parameters_get ("varnish_upstream_port")),
"varnish_upstream_send_proxy_enabled" : parameters_get ("varnish_send_proxy_enabled"),
"varnish_management_ip" : "127.0.0.1",
"varnish_management_port" : 6082,
"varnish_management_endpoint" : parameters_format ("ipv4@%s:%d", parameters_get ("varnish_management_ip"), parameters_get ("varnish_management_port")),
"varnish_frontend_rules_order" : -5100,
"varnish_frontend_routes_order" : -5100,
"varnish_drop_caching_enabled" : False,
"varnish_drop_cookies_enabled" : False,
"varnish_internals_path_prefix" : parameters_format ("%s%s", parameters_get ("internals_path_prefix"), "varnish/"),
"varnish_internals_rules_order_allow" : parameters_get ("internals_rules_order_allow"),
"varnish_internals_rules_order_deny" : parameters_get ("internals_rules_order_deny"),
"varnish_heartbeat_enabled" : True,
"varnish_heartbeat_path" : parameters_format ("%s%s", parameters_get ("varnish_internals_path_prefix"), "heartbeat"),
"varnish_heartbeat_interval" : 1,
"varnish_min_connections_active_count" : parameters_math ("//", parameters_get ("varnish_max_connections_active_count"), 4, True),
"varnish_max_connections_active_count" : parameters_math ("//", parameters_get ("frontend_max_connections_active_count"), 4, True),
"varnish_max_connections_queue_count" : parameters_math ("*", parameters_get ("varnish_max_connections_active_count"), 4, True),
"varnish_max_connections_full_count" : parameters_math ("//", parameters_get ("varnish_max_connections_queue_count"), 8, True),
"varnish_keep_alive_reuse" : "always",
"varnish_keep_alive_mode" : "keep-alive",
"varnish_keep_alive_timeout" : 3600,
"varnish_send_proxy_enabled" : False,
"samples_via_tls_method" : "ssl_fc",
"samples_client_ip_method" : "src",
"minimal_configure" : False,
"only_frontends_and_backends" : parameters_get ("minimal_configure"),
"minimal_global_configure" : parameters_get ("minimal_configure"),
"minimal_defaults_configure" : parameters_get ("minimal_configure"),
"minimal_frontend_configure" : parameters_get ("minimal_configure"),
"minimal_backend_configure" : parameters_get ("minimal_configure"),
"global_configure" : parameters_and (
parameters_not (parameters_get ("only_frontends_and_backends")),
parameters_not (parameters_get ("minimal_global_configure"))),
"global_identity_configure" : parameters_not (parameters_get ("minimal_global_configure")),
"global_daemon_configure" : parameters_not (parameters_get ("minimal_global_configure")),
"global_connections_configure" : parameters_not (parameters_get ("minimal_global_configure")),
"global_checks_configure" : parameters_not (parameters_get ("minimal_global_configure")),
"global_compression_configure" : parameters_not (parameters_get ("minimal_global_configure")),
"global_tls_configure" : parameters_not (parameters_get ("minimal_global_configure")),
"global_tune_configure" : parameters_not (parameters_get ("minimal_global_configure")),
"global_tune_buffers_configure" : parameters_get ("global_tune_configure"),
"global_tune_sockets_configure" : parameters_get ("global_tune_configure"),
"global_tune_tls_configure" : parameters_get ("global_tune_configure"),
"global_tune_http_configure" : parameters_get ("global_tune_configure"),
"global_tune_http2_configure" : parameters_get ("global_tune_configure"),
"global_stats_configure" : parameters_not (parameters_get ("minimal_global_configure")),
"global_logging_configure" : parameters_not (parameters_get ("minimal_global_configure")),
"global_logging_quiet" : True,
"global_state_configure" : parameters_and (parameters_not (parameters_get ("minimal_global_configure")), parameters_get ("state_configure")),
"global_experimental_configure" : parameters_not (parameters_get ("minimal_global_configure")),
"global_experimental_enabled" : False,
"global_http_uri_length_max" : 4 * 1024,
"global_http_headers_count_max" : 64,
"global_http2_headers_table_size" : 16 * 1024,
"global_http2_window_initial_size" : 128 * 1024,
"global_http2_streams_count_max" : 128,
"global_compression_rate_max" : 0,
"global_compression_cpu_max" : 25,
"global_compression_mem_max" : 128,
"global_compression_level_max" : 9,
"global_buffers_size" : 128 * 1024,
"global_buffers_rewrite" : 16 * 1024,
"global_buffers_count_max" : 4096,
"global_buffers_count_reserved" : 16,
"defaults_configure" : parameters_and (
parameters_not (parameters_get ("only_frontends_and_backends")),
parameters_not (parameters_get ("minimal_defaults_configure"))),
"defaults_connections_configure" : parameters_not (parameters_get ("minimal_defaults_configure")),
"defaults_timeouts_configure" : parameters_not (parameters_get ("minimal_defaults_configure")),
"defaults_servers_configure" : parameters_not (parameters_get ("minimal_defaults_configure")),
"defaults_http_configure" : parameters_not (parameters_get ("minimal_defaults_configure")),
"defaults_compression_configure" : parameters_not (parameters_get ("minimal_defaults_configure")),
"defaults_errors_configure" : parameters_not (parameters_get ("minimal_defaults_configure")),
"defaults_stats_configure" : parameters_not (parameters_get ("minimal_defaults_configure")),
"defaults_logging_configure" : parameters_not (parameters_get ("minimal_defaults_configure")),
"defaults_state_configure" : parameters_and (parameters_not (parameters_get ("minimal_defaults_configure")), parameters_get ("state_configure")),
"frontend_minimal" : parameters_get ("minimal_frontend_configure"),
"frontend_bind_minimal" : parameters_get ("frontend_minimal"),
"frontend_bind_tls_minimal" : parameters_get ("frontend_bind_minimal"),
"frontend_configure" : parameters_not (parameters_get ("frontend_minimal")),
"frontend_connections_configure" : parameters_get ("frontend_configure"),
"frontend_timeouts_configure" : parameters_get ("frontend_configure"),
"frontend_http_configure" : parameters_get ("frontend_configure"),
"frontend_compression_configure" : parameters_get ("frontend_configure"),
"frontend_stick_configure" : parameters_get ("frontend_configure"),
"frontend_monitor_configure" : parameters_get ("frontend_configure"),
"frontend_logging_configure" : parameters_get ("frontend_configure"),
"frontend_stats_configure" : parameters_get ("frontend_configure"),
"backend_minimal" : parameters_get ("minimal_backend_configure"),
"backend_configure" : parameters_not (parameters_get ("backend_minimal")),
"backend_connections_configure" : parameters_get ("backend_configure"),
"backend_timeouts_configure" : parameters_get ("backend_configure"),
"backend_servers_configure" : parameters_get ("backend_configure"),
"backend_check_configure" : parameters_get ("backend_configure"),
"backend_forward_configure" : parameters_get ("backend_configure"),
"state_configure" : parameters_not (parameters_get ("minimal_configure")),
"sections_extra_separation" : parameters_not (parameters_get ("minimal_configure")),
}
|
# Read a list of integers from one line of input and report the smallest
# and largest values.
print("Enter the data to list:")
l = [int(x) for x in input().split()]
print(l)
# min()/max() replace the original manual scan, whose `else` branch
# overwrote `large` with ANY element that merely wasn't smaller than
# `small` — e.g. for [5, 1, 3] it reported 3 as the largest.
small = min(l)
large = max(l)
print("The smallest number is:", small)
# ("lagrest" typo in the output message fixed.)
print("The largest number is:", large)
def fuel(start):
    """Yield the chain of fuel requirements for a module of mass *start*.

    Each step requires mass // 3 - 2 units; iteration stops once the
    requirement drops to zero or below (that fuel needs no further fuel).
    """
    mass = start
    while True:
        mass = mass // 3 - 2
        if mass <= 0:
            break
        yield mass
# Total fuel requirement: sum the fuel chain of every module mass
# listed (one per line) in the "input1" file.
with open("input1") as handle:
    total = sum(sum(fuel(int(line))) for line in handle)
print(total)
#컴퓨팅 사고력 카피체크_프로그램
import glob
import chardet
import difflib
import os
import sys
from multiprocessing.pool import ThreadPool
import multiprocessing
# class for unionfind
class disjointSet:
    """Union-find over student-ID strings, used to group copied files.

    ``elements`` maps an ID to ``[parent, rank]``.  Once ``print`` starts
    merging, a root's parent slot accumulates a comma-joined list of member
    IDs, so only the first 8 characters of the parent string identify the
    actual root ID.  This assumes student IDs are exactly 8 characters
    long — TODO confirm against the input file naming scheme.
    """
    def __init__(self):
        # id -> [parent-string, rank]
        self.elements = {}
    def makeSet(self, x):
        # Register x as its own singleton set (no-op if already present).
        if x not in self.elements:
            self.elements[x] = [x, 0]
        return
    def find(self, x):
        # x is a root when its parent string starts with x itself.
        if self.elements[x][0][:8] == x:
            return x
        # Path compression: re-point x directly at its root.
        # NOTE(review): this recurses with the FULL parent string as a key;
        # it only works while parent slots hold plain IDs — verify that
        # find() is never reached for entries already merged by print().
        self.elements[x][0] = self.find(self.elements[x][0])
        return self.elements[x][0][:8]
    def union(self, x, y):
        # Union by rank; on a tie, y attaches under x and x's rank grows.
        xRoot = self.find(x)
        yRoot = self.find(y)
        if xRoot == yRoot:
            return
        if self.elements[xRoot][1] < self.elements[yRoot][1]:
            self.elements[xRoot][0] = yRoot
        elif self.elements[xRoot][1] > self.elements[yRoot][1]:
            self.elements[yRoot][0] = xRoot
        else:
            self.elements[yRoot][0] = xRoot
            self.elements[xRoot][1] += 1
    def print(self, fd):
        # NOTE: shadows the builtin name `print` as a method (kept as-is);
        # calls inside still resolve to the builtin.
        # Pass 1: fold every non-root ID into its root's parent slot,
        # building a comma-separated member list per copy group.
        for k, l in sorted(self.elements.items()):
            if l[0][:8] != k:
                new_k = self.find(k)
                self.elements[new_k][0] += ', ' + k
                del self.elements[k]
        # Pass 2: emit one line per remaining group.
        for k, l in self.elements.items():
            print(l[0], file=fd)
copy_pair = disjointSet()  # groups of files detected as copies
file_list = []  # original source files (folder/filename)
file_list_removed = []  # comment-stripped copies of the sources
pair_list=[]  # unused leftover — kept as-is
def commentRemove( idx ):
    # Strip comments from file_list[idx], writing the result to
    # "주석제거_<original path>" using the file's detected encoding.
    eni = chardet.detect(open(file_list[idx], 'rb').read())['encoding']
    fr = open(file_list[idx], "r", encoding=eni)
    # Open the output file with the same character set.
    fw = open("주석제거_" + file_list[idx], "w", encoding=eni)
    flag = False; # True while inside a ''' / """ block comment
    for aLine in fr:
        buffer = [] # non-comment characters of the current line
        # Count block-comment delimiters appearing on this line.
        count1 = aLine.count("\'\'\'")
        count2 = aLine.count("\"\"\"")
        if (count1 > 0) or (count2 > 0):
            if flag == False:
                flag = True
                # Opened and closed on the same line: not inside a block.
                if count1 > 1 or count2 > 1:
                    flag = False
                continue
            elif flag == True:
                flag = False
                continue
        if flag == False: # ordinary code line (not inside a block comment)
            # Copy characters to buffer until an unquoted '#' is found.
            flag_string = False # whether we are inside a string literal
            # NOTE(review): the toggle treats ' and " interchangeably, so
            # mixed quoting can misclassify a '#'; kept as-is.
            for c in range(len(aLine)):
                if aLine[c] == '\'' or aLine[c] == '\"':
                    if flag_string == False:
                        flag_string = True
                    else:
                        flag_string = False
                if aLine[c] == '#' and flag_string == False:
                    buffer.append("\n")
                    break
                buffer.append(aLine[c])
            fw.write("".join(buffer))
    fr.close()
    fw.close()
def copyCheck(idx):
    # Compare file idx against every earlier file j < idx; on the first
    # match above the threshold, union their student IDs and stop.
    for j in range(idx):
        # Detect each file's character set (e.g. UTF-8, EUC-KR, ...).
        eni = chardet.detect(open(file_list_removed[idx], 'rb').read())['encoding']
        enj = chardet.detect(open(file_list_removed[j], 'rb').read())['encoding']
        # Open with the detected encodings and measure similarity via difflib.
        f_r = difflib.SequenceMatcher(
            a=open(file_list_removed[idx], encoding=eni).read(),
            b=open(file_list_removed[j], encoding=enj).read()
        ).ratio()
        # 0 <= f_r <= 1: 0 means completely different, 1 means identical.
        # Above the user threshold, extract each student ID from the file
        # name (text between the last '-' and the second-to-last ']' —
        # presumably "...-<id>]..." — TODO confirm the naming scheme).
        if f_r >= copy_percentage:
            file_i = file_list_removed[idx].split(']')[-2].split('-')[-1]
            file_j = file_list_removed[j].split(']')[-2].split('-')[-1]
            #pair_list.append((file_i,file_j))
            copy_pair.makeSet(file_i)
            copy_pair.makeSet(file_j)
            copy_pair.union(file_i, file_j)
            break
# --- Interactive driver: three passes over the target folder -------------
# (User-facing prompts/messages and file names are Korean; kept verbatim.)
folder_name = input("Folder Name : ")
copy_percentage = float(input("Copy Percentage(1~100) : ")) / 100
print("------------------------------------------------------------")
print("폴더 내 파일의 주석제거를 시작합니다.")
print("------------------------------------------------------------")
if not os.path.isdir('주석제거_' + folder_name):
    os.mkdir('주석제거_' + folder_name)
# Pass 1: strip comments from every file into 주석제거_<folder>.
for s in glob.glob(folder_name + '/*'):
    file_list.append(s)
file_list.sort()
# Strip each file concurrently (I/O-bound, so threads are fine); map()
# blocks until done, so the explicit close()/join() of the original
# (redundant inside the `with`) are dropped.
with ThreadPool(multiprocessing.cpu_count()) as pool1:
    pool1.map(commentRemove, range(len(file_list)))
print("\n------------------------------------------------------------")
print("폴더 내 파일의 주석제거가 끝났습니다. 카피체크를 시작합니다.")
print("------------------------------------------------------------")
# Pass 2: pairwise similarity check over the comment-stripped files.
folder_name_removed = '주석제거_' + folder_name
for s in glob.glob(folder_name_removed + '/*'):
    file_list_removed.append(s)
file_list_removed.sort()
with ThreadPool(multiprocessing.cpu_count()) as pool2:
    pool2.map(copyCheck, range(len(file_list_removed)))
# Write the grouped copy sets; `with` guarantees the handle is closed.
with open(folder_name + "_카피체크_학번.txt", 'w') as f_result:
    copy_pair.print(f_result)
print("\n------------------------------------------------------------")
print("카피체크가 끝났습니다. 학번과 비밀번호를 매칭중입니다.")
print("------------------------------------------------------------")
print("잠시만 기다려 주세요.")
# Pass 3: replace each student ID with its password from password.csv.
# (The original leaked f_dict and f_pass — they were never closed;
# fixed by using `with` blocks.)
password_dict = {}
with open("password.csv", "r", encoding='utf-8') as f_dict:
    for aLine in f_dict.readlines():
        temp = aLine.replace('\n', '').split(',')
        password_dict[temp[0]] = temp[1]
with open(folder_name + "_카피체크_학번.txt", "r") as f_pass, \
        open(folder_name + "_카피체크_비번.txt", "w") as f_final:
    for aLine in f_pass.readlines():
        tmp_pass = aLine.replace('\n', '').split(', ')
        print("Copy set : ", end='', file=f_final)
        for i in range(len(tmp_pass)):
            print(password_dict[tmp_pass[i]], end=' ', file=f_final)
        print(file=f_final)
print("------------------------------------------------------------")
print("카피체크가 모두 끝났습니다. 종료하려면 아무 키나 누르세요.")
print("------------------------------------------------------------")
input()
|
import numpy as np
def lumpy_backround(dim=(64, 64), nbar=200, dc=10, lump_function="GaussLmp", pars=(1, 10),
                    discretize_lumps_positions=False, rng=None):
    """Generate a "lumpy background": a DC offset plus randomly placed lumps.

    (The name keeps the original "backround" spelling so callers don't break.)

    :param dim: Output image dimensions, interpreted as (rows, columns).
        Can be a 2D tuple or an int (converted to a square image).
    :param nbar: Mean number of lumps; the actual count is Poisson-distributed.
    :param dc: DC offset of the output image.
    :param lump_function: Either 'GaussLmp' or 'CircLmp', for Gaussian or circular lumps.
    :param pars: (magnitude, stddev) for 'GaussLmp';
                 (magnitude, radius) for 'CircLmp'.
    :param discretize_lumps_positions: If True, all positions are ints, else floats.
    :param rng: Optional output value range; an int means (0, rng), else (lo, hi).
    :raises Exception: If lump_function is not recognized.
    :return: (image, n, lumps_pos)
        image: generated image with lumps
        n: number of lumps
        lumps_pos: (x, y) position of every lump — x along columns, y along
        rows, matching create_lumps_pos_matrix's image[y, x] convention.
    """
    # Assume square image if dim is an integer.
    if isinstance(dim, int):
        dim = (dim, dim)
    image = dc * np.ones(dim)
    n = np.random.poisson(nbar)
    lumps_pos = []
    for _ in range(n):
        # Uniform random position: x spans the columns (dim[1]), y the rows
        # (dim[0]).  For square images this consumes the same random values
        # in the same order as the original code.
        if discretize_lumps_positions:
            pos = (int(np.random.rand() * dim[1]), int(np.random.rand() * dim[0]))
        else:
            pos = (np.random.rand() * dim[1], np.random.rand() * dim[0])
        lumps_pos.append(pos)
        # Grid of offsets (x - pos_x, y - pos_y) whose shape follows the
        # image shape.  The original built a (dim[1], dim[0]) grid, which
        # crashed on broadcasting whenever dim[0] != dim[1].
        x, y = np.meshgrid(np.arange(dim[1]) - pos[0],
                           np.arange(dim[0]) - pos[1])
        # Generate a lump centered at pos.
        if lump_function == "GaussLmp":
            lump = pars[0] * np.exp(-0.5 * (x ** 2 + y ** 2) / (pars[1] ** 2))
        elif lump_function == "CircLmp":
            lump = pars[0] * ((x ** 2 + y ** 2) <= (pars[1] ** 2))
        else:
            raise Exception("Unknown lump function '{}'".format(lump_function))
        # Add the lump to the image.
        image = image + lump
    # Rescale the image to the requested range.
    if rng is not None:
        # If range is an int, assume rng goes from 0.
        if isinstance(rng, int):
            rng = (0, rng)
        min_v = image.min()
        max_v = image.max()
        if min_v == max_v:  # Avoid dividing by zero
            image = rng[0] * np.ones(dim)
        else:
            image = (image - min_v) / (max_v - min_v) * (rng[1] - rng[0]) + rng[0]
    return image, n, lumps_pos
def create_lumps_pos_matrix(lumps_pos, dim=(64, 64), discrete_lumps_positions=False):
    """Build a matrix marking every lump position with total weight 1 each.

    :param lumps_pos: (x, y) position of every lump; x indexes columns
        (second axis), y indexes rows (first axis).
    :param dim: Output image dimensions, interpreted as (rows, columns).
        Can be a 2D tuple or an int (converted to a square image).
    :param discrete_lumps_positions: If True, positions are floored to ints;
        otherwise each unit weight is bilinearly split over the 4 neighbors.
    :return: matrix with lumps positions
    """
    # Assume square image if dim is an integer.
    if isinstance(dim, int):
        dim = (dim, dim)
    image = np.zeros(dim)
    for pos in lumps_pos:
        if discrete_lumps_positions:
            image[int(pos[1]), int(pos[0])] += 1
        else:
            # Bilinear split: fractional distances to the four surrounding
            # integer grid points; the four weights sum to 1.
            x = pos[0]
            xl_pos = int(x)
            xh_pos = xl_pos + 1
            xl = x - xl_pos      # weight toward the high-x neighbor
            xh = xh_pos - x      # weight toward the low-x neighbor
            y = pos[1]
            yl_pos = int(y)
            yh_pos = yl_pos + 1
            yl = y - yl_pos
            yh = yh_pos - y
            image[yl_pos, xl_pos] += yh * xh
            # Bounds fix: x indexes the second axis (dim[1]) and y the first
            # (dim[0]); the original compared x against dim[0] and y against
            # dim[1], mishandling edge lumps on non-square images.
            if xh_pos < dim[1]:
                image[yl_pos, xh_pos] += yh * xl
            if yh_pos < dim[0]:
                image[yh_pos, xl_pos] += yl * xh
            if xh_pos < dim[1] and yh_pos < dim[0]:
                image[yh_pos, xh_pos] += yl * xl
    return image
if __name__ == "__main__":
    # Smoke test: generate a small 5x5 lumpy image and its position matrix.
    demo_kwargs = dict(
        dim=5,
        nbar=2,
        dc=0,
        lump_function="GaussLmp",
        pars=(1, 1),
        discretize_lumps_positions=False,
        rng=(0, 255),
    )
    image, n, lumps_pos = lumpy_backround(**demo_kwargs)
    image_pos = create_lumps_pos_matrix(dim=demo_kwargs["dim"], lumps_pos=lumps_pos)
    print("N:", n)
    print("Lumps position:", lumps_pos)
    print("Image:\n", image)
    print("Position matrix:\n", image_pos)
|
import Information_Retrieval as IR
from tkinter.filedialog import askopenfilename
from PIL import Image
from tkinter import messagebox
import os
# Tkinter import that works under both Python 2 and Python 3 names.
try:
    # Python2
    import Tkinter as tk
except ImportError:
    # Python3
    import tkinter as tk
window = tk.Tk()
# Center the root window, then hide it so only the dialog shows.
window.eval('tk::PlaceWindow %s center' % window.winfo_toplevel())
window.withdraw()
if messagebox.askyesno('Question', "Click yes if you want to test image retrieval program, otherwise click no"):
    window.deiconify()
    window.destroy()
    window.quit()
    # Fresh hidden root used only for the file-open dialog.
    root = tk.Tk()
    root.withdraw()
    root.update()
    # get a series of gif images you have in the working folder
    # or use full path, or set directory to where the images are
    image_files = []
    img_path = askopenfilename(filetypes=[("Image File", "*.jpg")])
    if img_path:
        input_img = Image.open(img_path)
        input_img.show()
        # Query the retrieval backend for visually similar images.
        images = IR.get_similar(input_img)
        for img_name in images:
            modified_name = os.path.splitext(img_name)[0]
            img = Image.open('Dataset/' + img_name)
            # Convert each match to PNG so the slideshow widget can load it.
            IR.png_converter(img, modified_name)
            image_files.append("Dataset/PNG/" + modified_name + ".png")
        root.destroy()
        # set milliseconds time between slides
        delay = 3500
        # upper left corner coordinates of app window
        x = 100
        y = 50
        # Run the slideshow of retrieved images.
        app = IR.App(image_files, x, y, delay)
        app.show_slides()
        app.run()
    else:
        exit(0)
# NOTE(review): with the source's indentation lost, these trailing lines are
# reconstructed at module level — i.e. the cleanup path when the user answers
# "no" (the "yes" branch exits or blocks in the slideshow) — TODO confirm.
window.deiconify()
window.destroy()
window.quit()
|
__all__ = ()
from scarletio import RichAttributeErrorBaseType
# Global registry of trivia categories, keyed by their sequential identifier.
CATEGORIES = {}
class TriviaCategory(RichAttributeErrorBaseType):
    """
    Represents a trivia category.
    
    Attributes
    ----------
    id : `int`
        The category's identifier.
    items : `tuple` of ``TriviaItem``
        Possibilities suggested to the user. The 0th is always the correct one.
    name : `str`
        The category's name.
    """
    __slots__ = ('id', 'items', 'name')
    
    def __init__(self, name, items):
        """
        Creates a new trivia category with the given items and registers it
        into ``CATEGORIES``.
        
        Parameters
        ----------
        name : `str`
            The name of the category.
        items : `tuple` of ``TriviaItem``
            Items under the category.
        """
        # Identifiers are sequential, starting from 1.
        category_id = len(CATEGORIES) + 1
        self.id = category_id
        self.items = items
        self.name = name
        CATEGORIES[category_id] = self
    
    def __repr__(self):
        """
        Returns the trivia category's representation.
        """
        repr_parts = ['<', self.__class__.__name__]
        # No comma straight after the type name (the original produced
        # "<TriviaCategory, name = ..."); label also fixed to "items".
        repr_parts.append(' name = ')
        repr_parts.append(repr(self.name))
        repr_parts.append(', items = ')
        repr_parts.append(repr(self.items))
        repr_parts.append('>')
        return ''.join(repr_parts)
|
#!/bin/python3
import sys
# Collect the first names of people with a gmail address, print them sorted.
N = int(input().strip())
L = []
for _ in range(N):
    firstName, emailID = input().strip().split(' ')
    # Membership test replaces the equivalent `.find(...) != -1` check.
    if '@gmail.com' in emailID:
        L.append(firstName)
for tmp in sorted(L):
    print(tmp)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-19 23:29
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration; applies after accounts/0087.
    dependencies = [
        ('accounts', '0087_auto_20170120_0201'),
    ]
    operations = [
        # New integer flag on User, defaulting to 0.
        migrations.AddField(
            model_name='user',
            name='active_is_structure',
            field=models.IntegerField(default=0),
        ),
        # NOTE(review): the default datetime was frozen at generation time
        # (2017-01-20); a callable default would track "now" instead.
        # Kept as-is — historical migrations should not be edited.
        migrations.AlterField(
            model_name='userprofile',
            name='key_expires',
            field=models.DateTimeField(blank=True, default=datetime.datetime(2017, 1, 20, 2, 29, 5, 945000), null=True),
        ),
    ]
|
from Prac_06.guitar import Guitar
def main():
    """Interactively collect guitars, then list them, flagging vintage ones."""
    guitars = []
    print("My Guitars!")
    name = input("Name: ")
    while name != "":
        year = int(input("Year: "))
        cost = float(input("Cost: $"))
        new_guitar = Guitar(name, year, cost)
        guitars.append(new_guitar)
        print(new_guitar, "added.\n")
        name = input("Name: ")
    if not guitars:
        print("Why you no have guitar, go go go and get one!")
    else:
        print("\nThese are mine, you are not allowed to touch. But you may look")
        for number, guitar in enumerate(guitars, 1):
            vintage_note = "(vintage)" if guitar.is_vintage() else ""
            print("Guitar {}: {} ({}), worth ${} {}".format(number, guitar.name, guitar.year,
                                                            guitar.cost, vintage_note))


main()
|
from Purchase import Purchase
class Splitter:
    """Parses a form-encoded purchase request and feeds it into a Purchase."""

    def split_request(self, post_body):
        """Parse *post_body* into (phones, phone_lines, inter_connections).

        Expects urlencoded bytes with three '&'-separated fields in fixed
        order: a '%2C'-separated phone list ('+' encodes a space), a
        phone-line count, and a boolean flag.  Also creates the matching
        Purchase as a side effect.
        """
        print(post_body)  # debug trace of the raw request body (kept)
        categories = post_body.decode("utf-8").split("&")
        # Value is everything after the first '=' of each field.
        phones = categories[0].replace('+', ' ')[categories[0].index("=") + 1:].split("%2C")
        phone_lines = categories[1][categories[1].index("=") + 1:]
        # Direct comparison replaces the `False if ... == "false" else True`
        # anti-idiom; any value other than "false" counts as true.
        inter_connections = categories[2][categories[2].index("=") + 1:] != "false"
        self.create_purchase(phones, phone_lines, inter_connections)
        return phones, phone_lines, inter_connections

    def create_purchase(self, phones, phone_lines, inter_connections):
        """Build a Purchase: one selection per phone, one increment per line."""
        purchase = Purchase()
        for phone in phones:
            purchase.select_cellphone(phone)
        for _ in range(int(phone_lines)):
            purchase.inc_total_phone_lines()
        purchase.internet_package(inter_connections)
        purchase.buy_message()
#coding:utf8
import os
import json
import sys
import re
import MySQLdb
import time
reload(sys)
from jobs import utils
from jobs.majorposition import get_position
import codecs
sys.setdefaultencoding('utf8')
import pdb
# Wall-clock start marker (time.clock() is Python-2 era; removed in Py3.8).
start = time.clock()
# postdct = get_position.get_pos()
def get_position_meta():
    """Load position names from position_meta.txt into a lookup dict.

    Every line becomes a key mapped to '1'; the dict is used purely for
    membership tests by callers.
    """
    position_dct = {}
    with codecs.open('position_meta.txt') as file:
        lines = file.readlines()
        for linet in lines:
            line = linet[:-2]  # drops the last two chars -- presumably '\r\n'; TODO confirm
            uline = unicode(line)  # Python 2 `unicode` builtin
            position_dct[uline] = '1'
    return position_dct
# position_dct = get_position_meta()
def get_salary_prob(key, salaryprobdct, salarys):
    """Naive-Bayes-style score for class *key* given a pair of job records.

    :param key: class index into salaryprobdct (0..6 in the caller)
    :param salaryprobdct: nested count dict: salaryprobdct[key][table][value],
        with a 'total' count inside each table and at the top level
    :param salarys: two (industry, salary) tuples
    :return: prior * P(salary1) * P(salary2).  The industry terms are computed
        for parity with the original experiment but not multiplied in.
    """
    def _prob(table, value):
        # Relative frequency with a tiny epsilon fallback for unseen values.
        # `value in bucket` replaces dict.has_key(), which was removed in
        # Python 3 (and `in` works identically on Python 2).
        bucket = salaryprobdct[key][table]
        if value in bucket:
            return float(bucket[value]) / bucket['total']
        return 0.0001 / bucket['total']

    salary1prob = _prob('salary1', salarys[0][1])
    salary2prob = _prob('salary2', salarys[1][1])
    # Kept so malformed input still raises KeyError exactly as before, even
    # though the final score ignores the industry terms.
    industry1prob = _prob('industry1', salarys[0][0])
    industry2prob = _prob('industry2', salarys[1][0])
    total = float(salaryprobdct[key]['total']) / salaryprobdct['total']
    # total = total*(salary1prob + salary2prob + industry1prob + industry2prob)
    return total * salary1prob * salary2prob
# Batch-classify pairs of (industry, salary) rows pulled from MySQL and store
# the best-scoring class for each pair.  Python 2 only (xrange / iteritems /
# print statements).
try:
    conn = MySQLdb.connect(host='localhost', user='root', passwd='123456', db='jobs', use_unicode=True, charset='utf8')
    cur = conn.cursor()
    # Force utf8 on every MySQL session variable so non-ASCII text round-trips.
    cur.execute('set character_set_client=utf8')
    cur.execute('set character_set_connection=utf8')
    cur.execute('set character_set_database=utf8')
    cur.execute('set character_set_results=utf8')
    cur.execute('set character_set_server=utf8')
    sql = 'select industry, salary from work_sizetest'
    cur.execute(sql)
    salaryprobdct = utils.read_rst('salaryprobdct')
    worklst = cur.fetchall()
    i = 0
    result = []
    # Rows are consumed two at a time: 20000 pairs -- assumes the table holds
    # at least 40000 rows; TODO confirm against work_sizetest.
    for j in xrange(20000):
        salarys = worklst[i:i+2]
        i += 2
        salary_prob = {}
        # Score each of the 7 candidate classes for this pair.
        for key in range(7):
            # pdb.set_trace()
            salary_prob[key] = get_salary_prob(key, salaryprobdct, salarys)
        # pdb.set_trace()
        sortedprob = sorted(salary_prob.iteritems(), key=lambda jj:jj[1], reverse=True)
        # for prob in sortedprob:
        #     print prob[0] + str(prob[1])
        result.append(sortedprob[0][0])  # keep only the best-scoring class id
    utils.store_rst(result, 'salary')
    conn.commit()
    conn.close()
except Exception as e:
    # Best-effort cleanup; the connection is closed twice on the happy path
    # error case is tolerated by MySQLdb.
    conn.close()
    print e
end = time.clock()
print (end - start)
from structlog import get_logger
logger = get_logger()
class Manage:
    """Create/drop the service's database schema via SQLAlchemy metadata."""
    def __init__(self, config, engine):
        # config: mapping with 'uri' and 'schema'; engine: SQLAlchemy engine.
        self._config = config
        self._engine = engine
    def drop(self):
        """Drop all tables; on postgres, drop the whole schema with CASCADE."""
        db_connection = self._config['uri']
        db_schema = self._config['schema']
        # fix-up the postgres schema:
        from ras_common_utils.ras_database.base import Base
        Base.metadata.schema = db_schema if db_connection.startswith('postgres') else None
        logger.info("Dropping database tables.")
        if db_connection.startswith('postgres'):
            # NOTE(review): schema name is interpolated into raw SQL --
            # assumed to come from trusted configuration.
            logger.info("Dropping schema {}.".format(db_schema))
            self._engine.execute("DROP SCHEMA IF EXISTS {} CASCADE".format(db_schema))
        else:
            Base.metadata.drop_all(self._engine)
        logger.info("Ok, done.")
    def create(self):
        """Create the schema (postgres only) and then all mapped tables."""
        db_connection = self._config['uri']
        db_schema = self._config['schema']
        # fix-up the postgres schema:
        from ras_common_utils.ras_database.base import Base
        if db_connection.startswith('postgres'):
            # Point every mapped table at the configured schema before create_all.
            for t in Base.metadata.sorted_tables:
                t.schema = db_schema
        logger.info("Creating database with uri '{}'".format(db_connection))
        if db_connection.startswith('postgres'):
            logger.info("Creating schema {}.".format(db_schema))
            self._engine.execute("CREATE SCHEMA IF NOT EXISTS {}".format(db_schema))
        logger.info("Creating database tables.")
        Base.metadata.create_all(self._engine)
        logger.info("Ok, database tables have been created.")
|
#! /usr/bin/env python3
"""Firmware implementing echoing line inputs."""
import sys
def main():
    """Print a short banner, then echo every stdin line back forever."""
    print("Starting RIOT Ctrl")
    print("This example will echo")
    while True:
        line = input()
        print(line)


if __name__ == "__main__":
    sys.exit(main())
|
#!/usr/bin/python3
# coding: utf-8
"""
Programme : fichierCsv.py version : 1.0
Auteur : H. Dugast
Date : 02-05-2017
Matériel utilisé : ordinateur sous windows (avec wing ide par exemple)
Fonctionnement programme :
Manipulation de données dans un fichier au format CSV
"""
import os
import csv
import time
pathFich = "../releve/" # chemin relatif du dossier contenant le fichier csv
nomChamp = ("Capteur", "Valeur", "Unite")
listeReleve = ( ("Temperat. ext.", 8, "degC"), ("Temperat. int.", 19.5, "degC"),
("Pression", 1021,"mbar") )
def fich_creerNom():
    """Build today's log-file name.

    Returns a string of the form releve_AAAA_MM_JJ.csv,
    e.g. releve_2017_05_02 for 2 May 2017.
    """
    today = time.localtime()
    return "releve_{:04d}_{:02d}_{:02d}.csv".format(
        today.tm_year, today.tm_mon, today.tm_mday)
# Path and name of the file to open (created if missing).
pathAndFile = pathFich + fich_creerNom()
try:
    # Append mode creates the file at the given location if it does not exist.
    with open(pathAndFile, 'a', newline='\n', encoding='utf-8') as fich:
        writer = csv.writer(fich, delimiter = ';')
        if os.path.getsize(pathAndFile) == 0:  # empty file: write the header first
            writer.writerow(nomChamp)
        for releve in listeReleve:
            writer.writerow(releve)
    # (fix) the explicit fich.close() calls were removed: the `with` blocks
    # already close the file on exit.
    # Read back and display the CSV content.
    with open(pathAndFile, 'r', newline='', encoding='utf-8') as fich:
        reader = csv.reader(fich)
        for row in reader:
            print(row)
except FileNotFoundError as err:
    print(err)
except PermissionError as err:
    print(str(err) + '. Fichier ouvert par une application ?')
except Exception:
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
    print("Erreur opération fichier")
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# To run in terminal
# $ cd /home/james/Documents/Edoc/3Nohtyp/Python_By_Example/Example_051
# $ python3 Example_051.py
# $ python3 Example_051.py <Input.txt >Output.txt
"""
Python by Example: Learning to Program in 150 Challenges by Nichola Lacey
051
Using the song “10 green bottles”, display the lines “There are [num] green bottles
hanging on the wall, [num] green bottles hanging on the wall, and if 1 green bottle
should accidentally fall”. Then ask the question “how many green bottles will be
hanging on the wall?” If the user answers correctly, display the message “There will be
[num] green bottles hanging on the wall”. If they answer incorrectly, display the
message “No, try again” until they get it right. When the number of green bottles gets
down to 0, display the message “There are no more green bottles hanging on the wall”.
"""
print(__doc__)

num = 10
while num != 0 :
    # One verse for the current bottle count.
    print(f'There are {num} green bottles hanging on the wall, '
          f'{num} green bottles hanging on the wall, '
          f'and if 1 green bottle should accidentally fall.')
    print()
    num -= 1
    guess = int(input('How many green bottles will be hanging on the wall?'))
    print()
    # Bug fix: the original accepted a wrong answer and moved on, and its
    # `elif num == 0` branch was unreachable (guess == num / guess != num
    # already cover every case), so the closing message never printed.
    # Per the challenge text, re-ask until the answer is correct.
    while guess != num :
        print('No, try again.')
        print()
        guess = int(input('How many green bottles will be hanging on the wall?'))
        print()
    print(f'There will be {num} green bottles hanging on the wall.')
    print()
print('There are no more green bottles hanging on the wall.')
print()
|
from django.db import models
import datetime
# Create your models here.
class Bucket(models.Model):
    """A named container for Todo items."""

    name = models.CharField("Bucket Name", max_length=200)
    # Bug fix: pass the callable, not its result.  `datetime.datetime.now()`
    # was evaluated once at import/migration time, so every new row shared
    # that stale timestamp.  Django calls a callable default per insert.
    datetime = models.DateTimeField("Date TIme", default=datetime.datetime.now)

    def __str__(self):
        return self.name
class Todo(models.Model):
    """A single task belonging to a Bucket."""

    bucket = models.ForeignKey(Bucket, related_name="todo_bucket", on_delete=models.CASCADE)
    name = models.CharField("Todo", max_length=200)
    details = models.TextField("Details")
    target = models.DateTimeField("Date TIme", null=True, blank=True)
    # Bug fix: pass the callable, not its result.  `datetime.datetime.now()`
    # was evaluated once at import time, freezing the "creation" time of every
    # row at server start.  Django calls a callable default per insert.
    created_datetime = models.DateTimeField("Date TIme", default=datetime.datetime.now)
    completed = models.BooleanField(default=False)

    def __str__(self):
        return self.name
|
# -*- coding: utf-8 -*-
from apollo.factory import create_celery_app
# The Celery app must exist before the task modules below are imported,
# because importing each module registers its tasks against this instance --
# the "late" imports are deliberate, not an oversight.
celery = create_celery_app()
from apollo.formsframework.tasks import update_submissions
from apollo.messaging.tasks import send_messages, send_email
from apollo.participants.tasks import import_participants
from apollo.locations.tasks import import_locations
from apollo.submissions.tasks import init_submissions
# -*- coding: utf-8 -*-
# Created on Mon Jul 17 2018 15:35:57
# Author: WuLC
# EMail: liangchaowu5@gmail.com
class Solution(object):
    def binaryGap(self, N):
        """Return the longest distance between two consecutive set bits of N.

        :type N: int
        :rtype: int
        """
        longest = 0
        previous = -1
        # Walk the binary digits LSB-first; positions count from bit 0.
        for position, bit in enumerate(bin(N)[2:][::-1]):
            if bit == '1':
                if previous != -1:
                    longest = max(longest, position - previous)
                previous = position
        return longest
|
"""
Time Based Key-Value Store
Create a time based key-value store class TimeMap, that supports two operations.
1. set(string key, string value, int timestamp)
Stores the key and value, along with the given timestamp.
2. get(string key, int timestamp)
Returns a value such that set(key, value, timestamp_prev) was called previously,
with timestamp_prev <= timestamp.
If there are multiple such values, it returns the one with the largest timestamp_prev.
If there are no values, it returns the empty string ("").
Note: The timestamps for all TimeMap.set operations are strictly increasing.
"""
from collections import defaultdict
from typing import Tuple, List
class TimeMap:
    """Time-keyed store: get(key, t) returns the value set at the largest
    timestamp <= t (timestamps arrive strictly increasing)."""

    def __init__(self):
        # key -> list of (timestamp, value), already sorted because set()
        # is called with strictly increasing timestamps.
        self.hashmap = defaultdict(list)

    def set(self, key: str, value: str, timestamp: int) -> None:
        """Record *value* for *key* at *timestamp* (appended in order)."""
        self.hashmap[key].append((timestamp, value))

    def bin_search(
        self, values: List[Tuple[int, str]], timestamp: int
    ) -> Tuple[int, str]:
        """Binary-search *values* for the pair with the largest stored
        timestamp <= *timestamp*; return (timestamp, "") when none exists."""
        lo, hi = 0, len(values) - 1
        while lo < hi:
            mid = (lo + hi) >> 1
            stamp = values[mid][0]
            if stamp == timestamp:
                return values[mid]
            if stamp > timestamp:
                hi = mid - 1
            else:
                lo = mid + 1
        # lo now brackets the answer: it is either values[lo] itself or its
        # left neighbour.
        if values[lo][0] <= timestamp:
            return values[lo]
        if lo > 0 and values[lo - 1][0] <= timestamp:
            return values[lo - 1]
        return timestamp, ""

    def get(self, key: str, timestamp: int) -> str:
        """Return the value in effect at *timestamp*, or "" for unknown keys."""
        if key not in self.hashmap:
            return ""
        return self.bin_search(self.hashmap[key], timestamp)[1]
"""
["TimeMap","set","set","get","get","get","get","get"]
[[],["love","high",10],["love","low",20],["love",5],["love",10],["love",15],["love",20],["love",25]]
"""
if __name__ == "__main__":
    # Smoke tests mirroring the example calls quoted in the docstring above.
    tm = TimeMap()
    tm.set("foo", "bar", 1)
    assert tm.get("foo", 1) == "bar"
    assert tm.get("foo", 3) == "bar"
    tm.set("foo", "bar2", 4)
    assert tm.get("foo", 4) == "bar2"
    assert tm.get("foo", 5) == "bar2"
    tm2 = TimeMap()
    tm2.set("love", "high", 10)
    tm2.set("love", "low", 20)
    assert tm2.get("love", 5) == ""
    assert tm2.get("love", 10) == "high"
    assert tm2.get("love", 15) == "high"
    assert tm2.get("love", 20) == "low"
    assert tm2.get("love", 25) == "low"
|
import tensorflow as tf
import vectorize_graph
import numpy
import random
tf.set_random_seed(7)
# Hyper-parameters
learning_rate = 0.001
training_epochs = 100
#batch_size = 100
display_step = 1  # print the loss every `display_step` epochs
# Network parameters
n_hidden_1 = 3 # 1st hidden layer width
n_hidden_2 = 3 # 2nd hidden layer width (that layer is commented out below)
n_input = 5 # input feature length -- the old "MNIST 28*28" comment was a copy-paste leftover; data comes from vectorize_graph
n_classes = 1 # single sigmoid output unit
# tf Graph input (TF1 placeholders, fed one sample at a time in the training loop)
x = tf.placeholder("float", [None, n_input])
y = tf.placeholder("float", [None, n_classes])
def multilayer_perceptron(x, weights, biases):
    """Forward pass: one ReLU hidden layer feeding a sigmoid output layer.

    weights/biases are dicts of tf.Variables keyed 'h1'/'b1'/'out'.
    """
    hidden = tf.nn.relu(tf.add(tf.matmul(x, weights['h1']), biases['b1']))
    logits = tf.matmul(hidden, weights['out']) + biases['out']
    return tf.nn.sigmoid(logits)
weights = {
    'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),
    'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),
    'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))
}
biases = {
    'b1': tf.Variable(tf.random_normal([n_hidden_1])),
    'b2': tf.Variable(tf.random_normal([n_hidden_2])),
    'out': tf.Variable(tf.random_normal([n_classes]))
}
# NOTE(review): 'out' is sized [n_hidden_2, n_classes] but the forward pass
# feeds it from layer_1 (width n_hidden_1); works only because the two widths
# are both 3 -- confirm before changing either.
tf_threshold=tf.Variable(0.8)
# Construct model
pred = multilayer_perceptron(x, weights, biases)
# Define loss and optimizer
#cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(pred, y))
#cost = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(pred, y))
cost = tf.reduce_mean(tf.square(pred-y))
#cost= tf.reduce_mean(tf.nn.softmax(pred))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
# Initializing the variables (TF1 API, deprecated name)
init = tf.initialize_all_variables()
cursor=0
# Launch the graph
with tf.Session() as sess:
    sess.run(init)
    # First 450 graphs for training, the rest for evaluation (below).
    D=vectorize_graph.vectorize_graph('../graphs_all/')
    U=vectorize_graph.flatten(D[:450])
    data=U
    total_batch=len(data)
    # Training cycle
    for epoch in range(training_epochs):
        avg_cost = 0.
        #total_batch = int(len(data)/batch_size)
        # Loop over all batches
        random.shuffle(data)
        cursor=0
        #for i in range(len(data)):
        while cursor< len(data):
            #batch_x, batch_y = mnist.train.next_batch(batch_size)
            #if cursor>total_batch:
            #    random.shuffle(data)
            #    cursor=0
            #batch_x,batch_y=data[cursor:cursor+20]
            # Slices of 20 (feature, label) pairs per mini-batch.
            batch_x=numpy.array([i[0] for i in data[cursor:cursor+20]])
            #print(batch_x)
            batch_y=numpy.array([i[1] for i in data[cursor:cursor+20]])
            #batch_y=data[1][cursor:cursor+20]
            cursor+=20
            # Run optimization op (backprop) and cost op (to get loss value)
            # one sample at a time.
            ctot=0
            for i in range(len(batch_x)):
                _, c = sess.run([optimizer, cost], feed_dict={x: batch_x[i],
                                                              y: batch_y[i]})
                ctot+=c
            # Compute average loss -- NOTE(review): only the *last* sample's
            # cost `c` is accumulated, not the batch total `ctot`.
            #tf.Print(weights['h1'],[weights['h1']])
            avg_cost += c / total_batch
        # Display logs per epoch step
        if epoch % display_step == 0:
            print( "Epoch:", '%04d' % (epoch+1), "cost=", \
                "{:.9f}".format(avg_cost))
    print ("Optimization Finished!")
    #tot=0
    #for i in D[50:]:
    #    true_count+=(eval_correct, feed_dict=feed_dict)
    # Test model on the held-out graphs.
    eval_dat=D[450:]#[:50]
    tot=0
    c_w=0
    for i in range(len(eval_dat)):
        #b_x=numpy.append(b_x,eval_dat[i][0],axis=0)
        #b_y=numpy.append(b_y,eval_dat[i][1],axis=0)
        b_x=eval_dat[i][0]#numpy.array([i[0] for i in eval_dat])
        #print(b_x.shape)
        #b_x=eval_dat[i][0]
        b_y=eval_dat[i][1]#numpy.array([i[1] for i in eval_dat])
        #b_y=eval_dat[i][1]
        correct_prediction=tf.equal(tf.argmax(pred,0),tf.argmax(y,0))
        #correct_prediction=tf.argmax(pred,0)#tf.greater_equal(pred,tf_threshold)
        #c=pred.eval({x:b_x,y:b_y})
        #print (c)
        # c=tf.argmax(pred,0).eval({x:b_x,y:b_y})
        c=correct_prediction.eval({x:b_x,y:b_y})
        tot+=1
        # Count a hit only when the label vector is non-zero; TODO confirm
        # this matches the intended accuracy definition.
        if c[0]==True and numpy.count_nonzero(b_y)!=0:
            c_w+=1
        #if c[0]==True:break
    print( c_w/tot)
    # print (b_y.shape)
    # correct_prediction = tf.equal(tf.argmax(pred, 1), y)
    # # Calculate accuracy
    # accuracy = tf.reduce_mean(tf.cast(correct_prediction, "float"))
    # print ("Accuracy:", accuracy.eval(feed_dict={x:b_x , y:b_y }))
from controller.DisciplineController import DisciplineController
from controller.GradeController import GradeController
from controller.StudentController import StudentController
from domain.Discipline import Discipline
from domain.Grade import Grade
from domain.Student import Student
from repository.DisciplineRepository import DisciplineRepository
from repository.GradeRepository import GradeRepository
from repository.StudentRepository import StudentRepository
class UI:
    """Console front-end for the students/disciplines/grades catalogue."""
    def printMenu(self):
        """Print the numbered option menu."""
        s = "\n \t \t MENU \n"
        s += "\n \t 1. Print students."
        s += "\n \t 2. Add student."
        s += "\n \t 3. Remove student."
        s += "\n \t 4. Update student."
        s += "\n \t 5. Print disciplines."
        s += "\n \t 6. Add discipline"
        s += "\n \t 7. Remove discipline."
        s += "\n \t 8. Update discipline."
        s += "\n \t 9. Add grade."
        s += "\n \t 10. Print grades."
        s += "\n \t 0. Exit."
        print(s)
    def mainMenu(self):
        """Build the repositories, seed demo data and run the command loop."""
        studentRepo = StudentRepository()
        disciplineRepo = DisciplineRepository()
        gradeRepo = GradeRepository()
        # Demo fixtures so every menu option has data to operate on.
        studentRepo.add(Student(1, "Andrei"))
        studentRepo.add(Student(2, "Diana"))
        studentRepo.add(Student(3, "Tudor"))
        studentRepo.add(Student(4, "Alex"))
        studentRepo.add(Student(5, "Daniel"))
        studentRepo.add(Student(6, "Alin"))
        studentRepo.add(Student(7, "Sergiu"))
        studentRepo.add(Student(8, "Mihai"))
        studentRepo.add(Student(9, "Monica"))
        studentRepo.add(Student(10, "Denisa"))
        disciplineRepo.add(Discipline(1, "Fundaments of Programming"))
        disciplineRepo.add(Discipline(2, "Object oriented Programming"))
        disciplineRepo.add(Discipline(3, "Advanced Methods of Programming"))
        disciplineRepo.add(Discipline(4, "Computer Networks"))
        disciplineRepo.add(Discipline(5, "Computer's Architecture"))
        disciplineRepo.add(Discipline(6, "Operating Systems"))
        disciplineRepo.add(Discipline(7, "Database Management"))
        disciplineRepo.add(Discipline(8, "Mobile Applications"))
        disciplineRepo.add(Discipline(9, "Web Programming"))
        disciplineRepo.add(Discipline(10, "Artificial Intelligence"))
        gradeRepo.add(Grade(1, 1, 10))
        gradeRepo.add(Grade(1, 1, 7))
        gradeRepo.add(Grade(1, 1, 8))
        gradeRepo.add(Grade(1, 2, 9))
        gradeRepo.add(Grade(1, 3, 9))
        gradeRepo.add(Grade(1, 4, 9))
        gradeRepo.add(Grade(1, 5, 9))
        gradeRepo.add(Grade(2, 5, 10))
        gradeRepo.add(Grade(2, 6, 10))
        gradeRepo.add(Grade(2, 7, 10))
        gradeRepo.add(Grade(2, 8, 10))
        gradeRepo.add(Grade(2, 9, 10))
        gradeRepo.add(Grade(2, 10, 10))
        gradeRepo.add(Grade(3, 7, 6))
        gradeRepo.add(Grade(4, 3, 5))
        gradeRepo.add(Grade(5, 4, 7))
        studentCtrl = StudentController(studentRepo)
        disciplineCtrl = DisciplineController(disciplineRepo)
        gradeCtrl = GradeController(gradeRepo)
        while True:
            self.printMenu()
            # NOTE(review): int() raises ValueError on non-numeric input here
            # (the read* helpers below guard against it, this prompt does not).
            cmd = int(input("Enter command: "))
            if cmd == 0:
                break
            elif cmd == 1:
                for s in studentCtrl.getAll():
                    print(str(s))
            elif cmd == 2:
                s = UI.readStudent()
                studentCtrl.add(s)
            elif cmd == 3:
                # Removing a student also removes that student's grades.
                s = UI.readStudentID()
                studentCtrl.remove(s.getStudentID())
                gradeCtrl.removeByStudent(s.getStudentID())
            elif cmd == 4:
                s = UI.readStudent()
                studentCtrl.update(s)
            elif cmd == 5:
                for d in disciplineCtrl.getAll():
                    print(str(d))
            elif cmd == 6:
                d = UI.readDiscipline()
                disciplineCtrl.add(d)
            elif cmd == 7:
                # Removing a discipline also removes its grades.
                d = UI.readDisciplineID()
                disciplineCtrl.remove(d.getDisciplineID())
                gradeCtrl.removeByDiscipline(d.getDisciplineID())
            elif cmd == 8:
                d = UI.readDiscipline()
                disciplineCtrl.update(d)
            elif cmd == 9:
                g = UI.readGrade()
                gradeCtrl.add(g)
            elif cmd == 10:
                for g in gradeCtrl.getAll():
                    print(str(g))
            else:
                print("Invalid command!")
    @staticmethod
    def readStudent():
        """
        Reads a student
        """
        try:
            id = int(input("\n \t Student ID: "))
            name = input("\n \t Student name: ")
            return Student(id, name)
        except ValueError:
            # Sentinel student on bad numeric input.
            return Student(0, '')
    @staticmethod
    def readStudentID():
        """
        Reads a student
        """
        try:
            id = int(input("\n \t Student ID: "))
            return Student(id, '')
        except ValueError:
            return Student(0, '')
    @staticmethod
    def readDiscipline():
        """
        Reads a discipline
        """
        try:
            id = int(input("\n \t Discipline ID: "))
            name = input("\n \t Discipline name: ")
            return Discipline(id, name)
        except ValueError:
            return Discipline(0, '')
    @staticmethod
    def readDisciplineID():
        """
        Reads a discipline
        """
        try:
            id = int(input("\n \t Discipline ID: "))
            return Discipline(id, '')
        except ValueError:
            return Discipline(0, '')
    @staticmethod
    def readGrade():
        """
        Reads a grade of a discipline and student
        """
        try:
            dID = int(input("\n \t Discipline ID: "))
            sID = int(input("\n \t Student ID: "))
            grade = int(input("\n \t Grade: "))
            return Grade(dID, sID, grade)
        except ValueError:
            return Grade(0, 0, 0)
|
#!/usr/bin/env python3
import os
import pandas as pd
import xlsxwriter
def save(result, x):
    """
    Write the concatenated dataframes to an Excel workbook.

    :param result: concatenated dataframes
    :param x: number of dataframes (used in the output file name)
    """
    out_name = 'New %s files.xlsx' % x
    excel_writer = pd.ExcelWriter(out_name, engine='xlsxwriter')
    result.to_excel(excel_writer)
    excel_writer.save()
files = []
path = '.'
filenames = os.listdir(path)
for filename in filenames:
    # Collect every spreadsheet whose name starts with 'Output'.
    if filename.startswith('Output'):
        fil = pd.read_excel(filename)
        files.append(fil)
a = 0
# Concatenate and save the collected dataframes in chunks of 11.
for x in range(1, len(files) + 1):
    if x % 11 == 0:
        result = pd.concat(files[a:x], ignore_index=True)
        save(result, x)
        a += 11
    elif x == len(files):
        # Final partial chunk (fewer than 11 files remaining).
        result = pd.concat(files[a:x], ignore_index=True)
        save(result, x)
|
import os
import sys
import re
from collections import defaultdict, deque
def breadth_search(graph, start):
    """Breadth-first traversal of *graph* from the vertex *start*.

    Prints each vertex in visit order and returns the set of visited vertices.

    Bug fix: `deque(start)` seeded the queue with the *characters* of the
    start label (e.g. '10' -> deque(['1', '0'])), since deque iterates its
    argument; the start vertex must be wrapped in a list: `deque([start])`.
    """
    visited, queue = set(), deque([start])
    while queue:
        vertex = queue.popleft()
        if vertex not in visited:
            visited.add(vertex)
            queue.extend(graph[vertex])
            print(vertex)
    return visited
if __name__ == '__main__':
    # Usage: script <edge-list file> <start vertex>
    graph = defaultdict(list)
    with open(sys.argv[1]) as f:
        for line in f:
            # First number on the line is the vertex, the rest its neighbours.
            line_lst = re.findall(r'\d+',line)
            graph[line_lst[0]] += line_lst[1:]
    breadth_search(graph, sys.argv[2])
|
import dash
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
import pandas as pd
import plotly.graph_objs as go
from dash.dependencies import Input, Output
import dash_table
from webapp import app
import ctransforms
df = ctransforms.df
# Empty container; update_graph() below fills it with the scatter figure.
layout = html.Div(
    id='table-paging-with-graph-container',
    className="five columns"
)
@app.callback(Output('table-paging-with-graph-container', "children"),
              [Input('rating-95', 'value')
               , Input('price-slider', 'value')
               , Input('month-slider', 'value')
               , Input('day-slider', 'value')
               , Input('location', 'value')
               ])
def update_graph(ratingcheck, prices ,month, day, location):
    """Filter the cases dataframe by the range sliders and location picker,
    then return a total_cases-vs-date scatter plot."""
    dff = df
    # Each slider delivers a (low, high) pair.
    low = prices[0]
    high = prices[1]
    jan=month[0]
    dec=month[1]
    mind=day[0]
    maxd=day[1]
    dff1=[]
    if 'All' in location:
        dff=ctransforms.df
    else:
        for i in location:
            dff = ctransforms.df
            # NOTE(review): comparing the *list* `location` against a single
            # option dict looks always-False here -- confirm intent.
            if location=={'label': 'All', 'value': 'All'}:
                dff=dff
            else:
                dff1.append(dff.loc[(dff['location']==i)])
        dff = pd.concat(dff1)
    # NOTE(review): the 'price-slider' values are applied to the 'year'
    # column -- presumably the slider was repurposed; verify against layout.
    dff = dff.loc[(dff['year'] >= low) & (dff['year'] <= high)]
    dff = dff.loc[(dff['month'] >= jan) & (dff['month'] <= dec)]
    dff = dff.loc[(dff['day'] >= mind) & (dff['day'] <= maxd)]
    trace1 = go.Scattergl(x = dff['date']
                          , y = dff['total_cases']
                          , text=dff['location']
                          , mode='markers'
                          , opacity=0.7
                          , marker={
                              'size': 8
                              , 'line': {'width': 0.5, 'color': 'white'}
                          }
                          , name='total_cases vs date'
                          , hoverinfo='x+y+text'
                          )
    return html.Div([
        dcc.Graph(
            id='rating-price'
            , figure={
                'data': [trace1
                         # dict(
                         #    x=df['price'],
                         #    y=df['rating'],
                         #    #text=df[df['continent'] == i]['country'],
                         #    mode='markers',
                         #    opacity=0.7,
                         #    marker={
                         #        'size': 8,
                         #        'line': {'width': 0.5, 'color': 'white'}
                         #    },
                         #    name='Price v Rating'
                         #)
                         ],
                'layout': dict(
                    xaxis={'title': 'date'},
                    yaxis={'title': 'total_cases'},
                    margin={'l': 40, 'b': 40, 't': 10, 'r': 10},
                    legend={'x': 0, 'y': 1},
                    hovermode='closest'
                )
            }
        )
    ])
|
# Classify the numbers 1..20 as even/odd (output text is Estonian).
for number in range(1, 21):
    if number % 2 != 0:
        print(number, "on paaritu arv")
    else:
        print(number, "on paarisarv")
# bbc-text.csv
import csv
import tensorflow as tf
from tensorflow.keras import layers, Input, regularizers
from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.sequence import pad_sequences
import tensorflow_datasets as tfds
import numpy as np
import matplotlib.pyplot as mpplot
# Get Data
def remove_stopwords(text, stopwords):
    """Return *text* with every whitespace-separated word found in
    *stopwords* removed, re-joined with single spaces."""
    kept = [word for word in text.split() if word not in stopwords]
    return ' '.join(kept)
# Bug fix: the original referenced an undefined name `stopwords`, raising
# NameError at runtime.  Standard English stop-word list used by the BBC-text
# exercise; adjust if the course variant differs.
stopwords = [
    "a", "about", "above", "after", "again", "against", "all", "am", "an",
    "and", "any", "are", "as", "at", "be", "because", "been", "before",
    "being", "below", "between", "both", "but", "by", "could", "did", "do",
    "does", "doing", "down", "during", "each", "few", "for", "from",
    "further", "had", "has", "have", "having", "he", "her", "here", "hers",
    "herself", "him", "himself", "his", "how", "i", "if", "in", "into",
    "is", "it", "its", "itself", "let's", "me", "more", "most", "my",
    "myself", "nor", "of", "on", "once", "only", "or", "other", "ought",
    "our", "ours", "ourselves", "out", "over", "own", "same", "she",
    "should", "so", "some", "such", "than", "that", "the", "their",
    "theirs", "them", "themselves", "then", "there", "these", "they",
    "this", "those", "through", "to", "too", "under", "until", "up",
    "very", "was", "we", "were", "what", "when", "where", "which", "while",
    "who", "whom", "why", "with", "would", "you", "your", "yours",
    "yourself", "yourselves",
]

# Load the BBC-text CSV: column 0 is the category label, column 1 the article.
texts, labels = [], []
with open('/tmp/bbc-text.csv', 'r') as file:
    reader = csv.reader(file)
    next(reader)  # skip the header row
    for row in reader:
        labels.append(row[0])
        texts.append(remove_stopwords(row[1].lower().strip(), stopwords))
# Prepare Data
def texts_to_sequences(texts, max_length, tokenizer=None, oov_token=None):
    """Tokenize *texts* and pad/truncate every sequence to *max_length*.

    A new Tokenizer is fitted on *texts* unless one is supplied.
    Returns (padded sequences, vocabulary size incl. the padding index,
    tokenizer).
    """
    if tokenizer is None:
        tokenizer = Tokenizer(oov_token=oov_token)
        tokenizer.fit_on_texts(texts)
    num_words = len(tokenizer.word_index) + 1
    sequences = tokenizer.texts_to_sequences(texts)
    padded = pad_sequences(
        sequences, maxlen=max_length, padding='post', truncating='post')
    return padded, num_words, tokenizer
max_length = 120
x_data, num_words, tokenizer = texts_to_sequences(texts, max_length)
# Labels are encoded by tokenizing them to length-1 sequences; num_classes is
# therefore the label-vocabulary size + 1 (index 0 is never produced).
y_data, num_classes, _ = texts_to_sequences(labels, 1)
# Model
embedding_dim = 100
model = tf.keras.models.Sequential([
    Input(shape=(None,)),
    layers.Embedding(num_words, embedding_dim),
    # layers.Conv1D(64, 5, activation='relu'),
    # layers.MaxPooling1D(4),
    # layers.Bidirectional(layers.LSTM(32, return_sequences=True)),
    layers.Bidirectional(layers.LSTM(32)),
    layers.Dense(32, activation='relu'),
    # layers.Dropout(0.5),
    layers.Dense(num_classes, activation='softmax')
])
# Integer labels -> sparse categorical cross-entropy.
model.compile(
    optimizer='rmsprop',
    loss='sparse_categorical_crossentropy',
    metrics=['acc']
)
histories = []
def callback_of_stop_training(condition, message='Cancel training...'):
    """Return a Keras callback that halts training as soon as
    condition(logs) is truthy at the end of an epoch."""
    class _Stopper(tf.keras.callbacks.Callback):
        def on_epoch_end(self, epoch, logs=None):
            if condition(logs):
                print('\n{}'.format(message))
                self.model.stop_training = True
    return _Stopper()
# Stop once train and validation accuracy both exceed 80%.
stop_training = callback_of_stop_training(
    lambda logs: logs.get('acc') > 0.8 and logs.get('val_acc') > 0.8
)
history = model.fit(x_data, y_data, shuffle=True, validation_split=0.2, epochs=50, callbacks=[stop_training])
histories.append(history.history)
# History
def flat_histories(histories):
    """Merge a list of Keras history dicts into a single dict mapping each
    metric name to the concatenation of its values across runs."""
    merged = {}
    for record in histories:
        for metric, values in record.items():
            merged.setdefault(metric, []).extend(values)
    return merged
def plot_history(history, metrics=('loss',)):
    """Plot the named metrics from a flattened history dict on one figure."""
    mpplot.figure(figsize=(10, 6))
    epochs = range(len(history[metrics[0]]))
    for metric in metrics:
        mpplot.plot(epochs, history[metric], label=metric)
    mpplot.legend()
my_history = flat_histories(histories)
plot_history(my_history, ('acc', 'val_acc'))
# Save Model
model.save('model_q11_1.h5')
|
# -*- coding:utf-8 -*-
from typing import List
class Solution:
    def PredictTheWinner(self, nums: List[int]) -> bool:
        """Return True when the first player can finish with at least as many
        points as the second, both playing optimally.

        margin[i][j] holds the best score lead the player to move can secure
        on the sub-array nums[i..j].
        """
        n = len(nums)
        if n < 2:
            return True
        margin = [[0] * n for _ in range(n)]
        for k in range(n):
            margin[k][k] = nums[k]
        # Fill intervals bottom-up: taking an end yields its value minus the
        # opponent's best margin on the remainder.
        for i in range(n - 2, -1, -1):
            for j in range(i + 1, n):
                take_left = nums[i] - margin[i + 1][j]
                take_right = nums[j] - margin[i][j - 1]
                margin[i][j] = max(take_left, take_right)
        return margin[0][n - 1] >= 0
if __name__ == '__main__':
    # [1, 5, 2] is the classic losing case for the first player -> False.
    nums = [1, 5, 2]
    ans = Solution().PredictTheWinner(nums)
    print(ans)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.