text stringlengths 38 1.54M |
|---|
import requests
from bs4 import BeautifulSoup
from oauth2client.service_account import ServiceAccountCredentials
import gspread
import time
# __init__
# NOTE(review): placeholder — replace with the real spreadsheet key before running.
gc_key ='your gc_key'
# set oauth
# OAuth scopes gspread needs for Sheets + Drive access.
scope = ['https://spreadsheets.google.com/feeds',
         'https://www.googleapis.com/auth/drive']
# Service-account credentials are read from a local JSON key file.
credentials = ServiceAccountCredentials.from_json_keyfile_name('ALCUKPracticeChecker.json', scope)
gc = gspread.authorize(credentials)
# open and select worksheet
wks = gc.open_by_key(gc_key)
def excel_style(row, col):
    """Convert a 1-based row/column pair to an Excel-style cell name (e.g. 'B7').

    Supports columns up to two letters (1..702).
    Based on http://code.activestate.com/recipes/578941-spread-sheet-style-column-enumeration-letter-to-nu/
    """
    high, low = divmod(col - 1, 26)
    prefix = chr(high - 1 + ord('A')) if high else ''
    return prefix + chr(low + ord('A')) + str(row)
def writeProbdb(id, idx):
    """Scrape the solved-problem list for BOJ user `id` and build a 0/1 column.

    Fills the gspread cell range for column `idx` of the probDB sheet
    (rows 1000..26000) with 1 for each solved problem number and 0 for the
    gaps, then returns the cell list. Does NOT call update_cells() itself.

    NOTE(review): the original duplicated this entire body in an
    `if idx == 0:` branch; both branches were byte-identical, so a single
    code path is kept.
    """
    # Fetch the user's acmicpc.net profile and select the solved-problem links.
    res = requests.get('https://www.acmicpc.net/user/' + id)
    soup = BeautifulSoup(res.content, 'html.parser')
    num = soup.select(
        "body > div.wrapper > div.container.content > div.row > div:nth-child(2) > div:nth-child(3) > div.col-md-9 > div:nth-child(1) > div.panel-body > span > a")
    # probDB sheet: one column per user, one row per problem number.
    worksheet = wks.get_worksheet(0)
    cell_list = worksheet.range(excel_style(1000, int(idx)) + ':' + excel_style(26000, int(idx)))
    pos = 1000  # problem numbers start at 1000 on BOJ
    problist = []
    # Every second <a> element holds a problem number.
    for n in range(0, len(num), 2):
        probNum = int(num[n].get_text())
        # Pad unsolved problem numbers with 0 until we reach probNum.
        while probNum > pos:
            problist.append(0)
            pos = pos + 1
        if probNum == pos:
            problist.append(1)
            pos = pos + 1
    for i, val in enumerate(problist):
        cell_list[i].value = val
    return cell_list
def read(obj):
    """Return all values of a worksheet.

    `obj` may be one of the names "prob", "user", "prac" (mapped to sheet
    indices 0/1/2) or anything int() accepts as a sheet index.
    """
    sheet_index = {"prob": 0, "user": 1, "prac": 2}.get(obj, obj)
    worksheet = wks.get_worksheet(int(sheet_index))
    return worksheet.get_all_values()
def wirteid(size):
    """Write the user id/name header rows for `size` users.

    Fills two 2-row header ranges: sheet 3 rows 2-3 (starting column 4)
    and sheet 0 (probDB) rows 997-998 (starting column 1). The first half
    of each cell range receives column 0 of userIDlist (the id), the
    second half column 1 (the display name).

    NOTE(review): the function name's typo is kept — callers use it.
    """
    # pracres sheet header.
    worksheet = wks.get_worksheet(3)
    cell_list = worksheet.range(excel_style(2, 4) + ':' + excel_style(3, 4 + int(size) - 1))
    half = len(cell_list) // 2  # range spans exactly two rows
    for i, cell in enumerate(cell_list):
        if i >= half:
            cell.value = userIDlist[i - half][1]
        else:
            cell.value = userIDlist[i][0]
    print(cell_list)
    worksheet.update_cells(cell_list)
    # probDB sheet header.
    worksheet = wks.get_worksheet(0)
    cell_list = worksheet.range(excel_style(997, 1) + ':' + excel_style(998, 1 + int(size) - 1))
    half = len(cell_list) // 2
    for i, cell in enumerate(cell_list):
        if i >= half:
            cell.value = userIDlist[i - half][1]
        else:
            cell.value = userIDlist[i][0]
    print(cell_list)
    # Removed stray `cell_list()` call: a list is not callable, so the
    # original raised TypeError here and never reached update_cells().
    worksheet.update_cells(cell_list)
def grading():
    """Grade every assignment in praclist against each user's probDB column.

    Uses module-level globals: praclist (assignment rows), problist
    (probDB sheet values), userIDlist (user rows) and wks. Writes one
    result row per assignment into worksheet 3.
    """
    # select pracres
    worksheet = wks.get_worksheet(3);
    # Pick each assignment to grade.
    for i in range(0, len(praclist)):
        print(praclist[i])
        pracres = []
        # temp[k] counts how many of this assignment's problems user k solved.
        temp = []
        for t in range(0, len(userIDlist)):
            temp.append(0)
        # For each problem number of this assignment, look it up in probDB.
        # Columns 7+ of a praclist row hold the problem numbers; an empty
        # cell terminates the list.
        for j in range(7, len(praclist[i])):
            print(praclist[i][j])
            if (praclist[i][j] == ""):
                break
            else:
                # Row (problem number - 1) of problist holds per-user '0'/'1' flags.
                prob = problist[int(praclist[i][j]) - 1]
                print(prob)
                for k in range(0, len(userIDlist)):
                    if (prob[k] == '1'):
                        temp[k] += 1
        print(temp)
        # Result row: assignment metadata (name, target class, required count)...
        pracres.append(praclist[i][0])
        pracres.append(praclist[i][1])
        pracres.append(praclist[i][2])
        # ...then one status cell per user in the assignment's target class.
        for j in range(0, len(userIDlist)):
            if (pracres[1] == userIDlist[j][3]):
                # Solved at least the required number of problems?
                if (int(pracres[2]) <= int(temp[j])):
                    pracres.append("완료")  # "done"
                else:
                    pracres.append("안함")  # "not done"
            else:
                pracres.append("-")  # user not in this assignment's class
        print(pracres)
        cell_list = worksheet.range(excel_style(int(i) + int(4), int(1)) + ':' + excel_style(int(i) + int(4), int(40)))
        for j in range(0, len(pracres)):
            cell_list[j].value = pracres[j]
        worksheet.update_cells(cell_list)
        # Throttle to stay under the Sheets API write-rate limit.
        time.sleep(2)
# getUserIDlist
userIDlist = read("user")
# writeIDlist
# Refresh probDB: one solved-problem column per user.
for i in range(0, len(userIDlist)):
    id = userIDlist[i][0]   # BOJ user id
    idx = userIDlist[i][2]  # probDB column index for this user
    writeProbdb(id, idx)
    print(userIDlist[i][1])
# getPracList
praclist = read("prac")
# getProbList
problist = read("prob")
grading()
wirteid(len(userIDlist))
|
import re
# One "name value;" configuration directive.
CONFIG = re.compile(r"(?P<name>[a-z0-9\_]*)\s*(?P<value>.*);")
# An `include "file";` directive on a non-comment line.
INCLUDE = re.compile(r"[^\#]include\s*\"(?P<file>[^\0]+)\";")
# A trailing asterisk (wildcard suffix).
ASTERISK = re.compile(r"\*$")
# Two consecutive whitespace characters — presumably used to collapse runs;
# confirm at the call sites.
WHITESPACE = re.compile(r"\s\s")
# A `server { ... }` block on a non-comment line.
VHOST = re.compile(r"[^\#]server\s\{([^\#].*)\}")
# HTTP request line: method, URI and protocol dialect (HTTP/1.0 or 1.1).
HTTP_BASE = re.compile(r"(?P<type>GET|POST|HEAD)\s(?P<uri>.*)\sHTTP\/(?P<dialect>1\.(0|1))")
# "Host: example.com[:port]" request header.
HTTP_HOST = re.compile(r"Host\:\s(?P<host>[a-z0-9\.\-\:]*)")
# "If-Modified-Since: Tue, 01 Jan 2020 00:00:00" style date header.
MODIFIED_SINCE = re.compile(r"If-Modified-Since:\s(?P<date>[a-zA-Z]{3}\,\s[0-9]{1,2}\s[a-zA-Z]{3}\s[0-9]{4}\s[0-9]{2}\:[0-9]{2}\:[0-9]{2})")
# "Accept-Encoding" header offering gzip and/or deflate.
COMPRESS_HEADER = re.compile(r"Accept-Encoding:\s(gzip|deflate),?(gzip|deflate)?")
# A line consisting solely of CRLF (end of HTTP headers).
CRNL = re.compile(r"^\r\n$")
# A bare newline.
NL = re.compile(r"\n")
|
class Development:
    """Elasticsearch connection settings for the development environment.

    The no-op `__init__` (just `pass`) was removed — the implicit default
    constructor behaves identically.
    """
    ES_HOST = 'search-challenge-tzmrlhv2uxu3u27atrvo5mxf2u.us-east-1.es.amazonaws.com'
    ES_PORT = 80
    ES_TIMEOUT = 10
class Test:
    """Elasticsearch connection settings for the test environment.

    The no-op `__init__` (just `pass`) was removed — the implicit default
    constructor behaves identically.
    """
    ES_HOST = 'test_env'
    ES_PORT = 0
    ES_TIMEOUT = 10
class Production:
    """Elasticsearch connection settings for the production environment.

    The no-op `__init__` (just `pass`) was removed — the implicit default
    constructor behaves identically.
    """
    ES_HOST = 'prod_env'
    ES_PORT = 0
    ES_TIMEOUT = 10
|
#!/usr/bin/env python3
# Mara Huldra & Kvaciral 2021
# SPDX-License-Identifier: MIT
import argparse
import math
import sys
import psutil
import time
from ckbpipe import CKBPipe
class Handler:
    """Drives the keyboard's top lightbar through a CKBPipe connection."""

    def __init__(self, ckb):
        self.ckb = ckb

    def set_lightbar(self, color, memory_usage=100, threshold=0):
        """Light bar segments outward from the centre, scaled by urgency.

        Urgency maps how far `memory_usage` sits from `threshold` onto up
        to 10 steps; unlit segments are switched off ("00000000").
        """
        step = (100 - threshold) / 10
        urgency = math.ceil(abs(threshold - memory_usage) / step)
        # Start with every top bar off, then light mirrored pairs around bar 10.
        scope = {"topbar%d" % n: "00000000" for n in range(1, 16)}
        for offset in range(urgency):
            scope["topbar%d" % (10 - offset)] = color
            scope["topbar%d" % (10 + offset)] = color
        self.ckb.set(scope)
def parse_args():
    '''Build and run the command-line argument parser.'''
    parser = argparse.ArgumentParser(description="Use K95's lightbar to warn user of system's memory usage if equal or greater than given threshold")
    # The ckb pipe socket path is mandatory; so is the warning colour.
    parser.add_argument('--ckb-pipe', '-c', required=True, help='The ckb-pipe-socket (/tmp/ckbpipeNNN')
    parser.add_argument('--set-color', '-col', required=True, help='The warning-color')
    parser.add_argument('--set-threshold', '-t', type=int, default=75, help='The memory-usage threshold in percentage')
    return parser.parse_args()
def main():
    """Poll memory usage every 5 seconds and warn via the lightbar when high."""
    args = parse_args()
    ckb = CKBPipe(args.ckb_pipe)
    handler = Handler(ckb)
    # Append full-opacity alpha to the user-supplied RGB value.
    color = args.set_color + "ff"
    threshold = args.set_threshold
    while True:
        # psutil.virtual_memory()[2] is the used-memory percentage.
        memory_usage = psutil.virtual_memory()[2]
        if memory_usage >= threshold:
            handler.set_lightbar(color, memory_usage, threshold)
        else:
            # Below threshold: switch the whole lightbar off.
            handler.set_lightbar("00000000")
        time.sleep(5)
if __name__ == '__main__':
    main()
|
from django import forms
from signup.fields import ListTextWidget
class SignupForm(forms.Form):
    """Signup form whose 'county' field is backed by a datalist widget."""

    county = forms.CharField(required=True, label='')

    def __init__(self, *args, **kwargs):
        # The caller supplies the suggestion list via the `data_list` kwarg;
        # pop it before the base Form sees the kwargs.
        suggestions = kwargs.pop('data_list', None)
        super().__init__(*args, **kwargs)
        self.fields['county'].widget = ListTextWidget(data_list=suggestions, name='country-list')
from django.urls import path
from django.views.generic.base import TemplateView
from . import views
from django.contrib.auth import views as auth_views # for login and logout
from .forms import (UserLoginForm, PwdResetForm, PwdResetConfirmForm)
app_name ='account'
# Routes for the account app: registration/activation, dashboard, auth,
# profile management and the password-reset flow.
urlpatterns = [
    path('register/', views.account_register, name = 'register'),
    # Email activation link: uidb64 identifies the user, token verifies it.
    path('activate/<slug:uidb64>/<slug:token>/', views.account_activate, name = 'activate'),
    #user dashboard
    path('dashboard/', views.dashboard, name = 'dashboard'),
    # Built-in auth views with app-specific templates/forms.
    path('login/', auth_views.LoginView.as_view(template_name='account/registration/login.html',
                                                form_class=UserLoginForm), name='login'),
    path('logout/', auth_views.LogoutView.as_view(next_page='/account/login/'), name='logout'),
    path('profile/edit/', views.edit_details, name ='edit_details'),
    path('profile/delete_user/', views.delete_user, name ='delete_user'),
    path('profile/delete_confirm/', TemplateView.as_view(template_name="account/user/delete_confirm.html"), name='delete_confirmation'),
    # password reset
    path('password_reset/', auth_views.PasswordResetView.as_view(template_name="account/user/password_reset_form.html",
                                                                 success_url='password_reset_email_confirm',
                                                                 email_template_name='account/user/password_reset_email.html',
                                                                 form_class=PwdResetForm), name='pwdreset'),
    path('password_reset_confirm/<uidb64>/<token>', auth_views.PasswordResetConfirmView.as_view(template_name='account/user/password_reset_confirm.html',
                                                                                                success_url='/account/password_reset_complete/',
                                                                                                form_class=PwdResetConfirmForm),name="password_reset_confirm"),
    # Both confirmation pages reuse the same status template.
    path('password_reset/password_reset_email_confirm/',TemplateView.as_view(template_name="account/user/reset_status.html"), name='password_reset_done'),
    path('password_reset_complete/',TemplateView.as_view(template_name="account/user/reset_status.html"), name='password_reset_complete'),
]
import streamlit as st
import numpy as np
import utils
import pandas as pd
from sklearn.svm import SVC
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import precision_score, recall_score
from SessionState import get
st.set_option('deprecation.showPyplotGlobalUse', False)
def main():
    """Streamlit entry point: data exploration plus interactive classifier training."""
    st.title("Heart Disease Prediction by Xiupeng")
    st.sidebar.title("Heart Disease Prediction ")
    st.markdown("Do you have heart disease? ❤️")
    st.sidebar.markdown("Do you have heart disease? ❤️")
    df = utils.load_data()
    x_train, x_test, y_train, y_test = utils.split(df)
    # Data analysis
    class_names = ["Low Risk", "High Risk"]
    numeric_features = ['cigsPerDay', 'totChol', 'sysBP', 'diaBP', 'BMI', 'glucose']
    st.sidebar.subheader("Data Visualization and Analytics")
    if st.sidebar.checkbox("Show raw data", False):
        st.subheader("Heart Disease Data Set (Prediction)")
        st.write(df)
        filename = 'heart_data.xlsx'
        download_button_str = utils.download_button(df, filename, f'Click here to download (unknown)', pickle_it=False)
        st.markdown(download_button_str, unsafe_allow_html=True)
    if st.sidebar.checkbox("Show distribution plot", False):
        st.subheader("Heart Disease Data Set Distribution plot")
        utils.plot_displot(df, numeric_features)
    if st.sidebar.checkbox("Show box plot", False):
        st.subheader("Blood Pressure by Age Group")
        utils.plot_barplot(df)
    if st.sidebar.checkbox("Show scatter plot", False):
        st.subheader("Scatterplot Matrix")
        utils.plot_scatterplot(df)
    # Machine learning
    st.sidebar.subheader("Machine Learning Prediction")
    st.sidebar.subheader("Choose Classifier")
    classifier = st.sidebar.selectbox("Classifier", ("Support Vector Machine (SVM)", "Logistic Regression",
                                                     "Random Forest Classification"))
    if classifier == 'Support Vector Machine (SVM)':
        st.sidebar.subheader("Model Hyperparameters")
        C = st.sidebar.number_input("C (Regularization parameter)", 0.01, 10.0, step=0.01, key='C')
        kernel = st.sidebar.radio("Kernel", ("rbf", "linear"), key='kernel')
        gamma = st.sidebar.radio("Gamma (Kernel Coefficient)", ("scale", "auto"), key='gamma')
        metrics = st.sidebar.multiselect("What matrix to plot?", ("Confusion Matrix", "ROC Curve",
                                                                  "Precision-Recall Curve"))
        if st.sidebar.button("Classify", key="classify"):
            st.subheader("Support Vector Machine (SVM) Results")
            model = SVC(C=C, kernel=kernel, gamma=gamma)
            model.fit(x_train, y_train)
            accuracy = model.score(x_test, y_test)
            y_pred = model.predict(x_test)
            st.write("Accuracy: ", accuracy.round(2))
            # NOTE(review): labels=class_names passes display strings where
            # the label values of y are expected — confirm against utils.split.
            st.write("Precision: ", precision_score(y_test, y_pred, labels=class_names).round(2))
            st.write("Recall: ", recall_score(y_test, y_pred, labels=class_names).round(2))
            utils.plot_metrics(metrics, model, x_test, y_test, class_names)
    if classifier == 'Logistic Regression':
        st.sidebar.subheader("Model Hyperparameters")
        C = st.sidebar.number_input("C (Regularization parameter)", 0.01, 10.0, step=0.01, key='Lr')
        max_iter = st.sidebar.slider("Maximum no. of iterations", 100, 500, key='max_iter')
        metrics = st.sidebar.multiselect("What matrix to plot?", ("Confusion Matrix", "ROC Curve",
                                                                  "Precision-Recall Curve"))
        if st.sidebar.button("Classify", key="classify"):
            st.subheader("Logistic Regression Results")
            model = LogisticRegression(C=C, max_iter=max_iter)
            model.fit(x_train, y_train)
            accuracy = model.score(x_test, y_test)
            y_pred = model.predict(x_test)
            st.write("Accuracy: ", accuracy.round(2))
            st.write("Precision: ", precision_score(y_test, y_pred, labels=class_names).round(2))
            st.write("Recall: ", recall_score(y_test, y_pred, labels=class_names).round(2))
            utils.plot_metrics(metrics, model, x_test, y_test, class_names)
    if classifier == 'Random Forest Classification':
        st.sidebar.subheader("Model Hyperparameters")
        n_estimators = st.sidebar.number_input("This is the number of trees in the forest", 100, 5000, step=10,
                                               key='n_estimators')
        max_depth = st.sidebar.number_input("The maximum depth of the tree", 1, 100, step=2, key='max_depth')
        bootstrap_choice = st.sidebar.radio("Bootstrap samples when building trees", ("True", "False"), key='bootstrap')
        # BUG FIX: st.sidebar.radio returns the *string* "True"/"False".
        # Passing that straight to RandomForestClassifier(bootstrap=...)
        # made every run bootstrap, because any non-empty string is truthy.
        bootstrap = bootstrap_choice == "True"
        metrics = st.sidebar.multiselect("What matrix to plot?", ("Confusion Matrix", "ROC Curve",
                                                                  "Precision-Recall Curve"))
        if st.sidebar.button("Classify", key="classify"):
            st.subheader("Random Forest Results")
            model = RandomForestClassifier(n_estimators=n_estimators, max_depth=max_depth, bootstrap=bootstrap, n_jobs=-1)
            model.fit(x_train, y_train)
            accuracy = model.score(x_test, y_test)
            y_pred = model.predict(x_test)
            st.write("Accuracy: ", accuracy.round(2))
            st.write("Precision: ", precision_score(y_test, y_pred, labels=class_names).round(2))
            st.write("Recall: ", recall_score(y_test, y_pred, labels=class_names).round(2))
            utils.plot_metrics(metrics, model, x_test, y_test, class_names)
if __name__ == '__main__':
    # Minimal shared-password gate kept in SessionState across reruns.
    # NOTE(review): the password is hard-coded in source; it should live in
    # an environment variable or Streamlit secrets.
    session_state = get(password='')
    if session_state.password != 'dss123':
        pwd_placeholder = st.sidebar.empty()
        pwd = pwd_placeholder.text_input("Password:", value="", type="password")
        session_state.password = pwd
        if session_state.password == 'dss123':
            # Correct password entered this run: hide the prompt and proceed.
            pwd_placeholder.empty()
            main()
        else:
            st.error("Please type the correct password")
    else:
        main()
from django.shortcuts import render,get_object_or_404
from .models import Course ,Subject,Teacher
from .forms import ContactForm,SnippetForm
# Create your views here.
def index(request):
    """Render the home page with every course."""
    all_courses = Course.objects.all()
    return render(request, "index.html", {'course': all_courses})
def courses(request):
    """Render the course listing page."""
    all_courses = Course.objects.all()
    return render(request, "courses.html", {'course': all_courses})
def coursedetail(request, course_id):
    """Render the detail page for one course, or 404 if it doesn't exist."""
    selected = get_object_or_404(Course, pk=course_id)
    return render(request, "coursedetail.html", {'course': selected})
def subjects(request):
    """Render the subject listing page."""
    all_subjects = Subject.objects.all()
    return render(request, "subjects.html", {'subject': all_subjects})
def about(request):
    """Render the about page with every teacher."""
    all_teachers = Teacher.objects.all()
    return render(request, "about.html", {'teacher': all_teachers})
def contact(request):
    """Contact page: process the form on POST, otherwise show a blank one.

    BUG FIX: the original rebuilt a blank ContactForm on *every* request,
    which discarded the bound form (and its error messages) after an
    invalid POST. Now a fresh form is only created for GET requests and
    after a successful submission.
    """
    if request.method == 'POST':
        form = ContactForm(request.POST)
        if form.is_valid():
            name = form.cleaned_data['name']
            Email = form.cleaned_data['Email']
            print(name,Email)
            # Success: present an empty form again.
            form = ContactForm()
    else:
        form = ContactForm()
    return render(request,"contact.html",{'form':form})
def snippet_detail(request):
    """Save a submitted snippet; re-render with errors when invalid.

    BUG FIX: like `contact`, the original replaced the bound form with a
    blank one on every request, hiding validation errors. The blank form
    is now only created for GET requests and after a successful save.
    """
    if request.method == 'POST':
        form = SnippetForm(request.POST)
        if form.is_valid():
            form.save()
            form = SnippetForm()
    else:
        form = SnippetForm()
    return render(request,"contact.html",{'form':form})
|
from random import choice, choices, randrange
from django.contrib.auth.models import Group, User
from django.core.management.base import BaseCommand
class Command(BaseCommand):
    """
    Create a bunch of randomly named users with random group affiliations for
    testing purposes.
    """
    help = 'Create test users and assign them to groups'

    def handle(self, *args, **options):
        """Create `num_users` random users and attach 1..max_num_groups groups.

        Fixes over the original:
        - `randrange(1, max_num_groups)` excluded the upper bound, so the
          maximum group count was never assigned and a single-group install
          raised ValueError (randrange(1, 1)); now randrange(1, max+1).
        - When no groups exist, group assignment is skipped instead of crashing.
        - Removed the unused `sow` alias.
        NOTE(review): first_names contains a duplicated run of words, which
        skews `choice` toward those names; left as-is since the distribution
        may be intentional.
        """
        first_names = [
            "about", "above", "abuzz", "acute", "adult", "adust", "agile",
            "alien", "alike", "alive", "alone", "aloof", "amino", "amiss",
            "amuck", "anile", "apian", "apish", "avian", "axial", "axile",
            "azoic", "beige", "blind", "blond", "bloop", "blown", "blowy",
            "bluff", "blunt", "boomy", "bound", "brief", "briny", "brisk",
            "broad", "broke", "brood", "brown", "brute", "built", "chief",
            "chill", "choky", "close", "cloze", "couth", "crisp", "crook",
            "above", "abuzz", "acute", "adult", "adust", "agile", "alien",
            "alike", "alive", "alone", "aloof", "amino", "amiss", "amuck",
            "anile", "apian", "apish", "avian", "axial", "axile", "azoic",
            "beige", "blind", "blond", "bloop", "blown", "blowy", "bluff",
            "blunt", "boomy", "bound", "brief", "briny", "brisk", "broad",
            "broke", "brood", "brown", "brute", "built", "cross", "crude",
            "cruel", "daily", "dairy", "drier", "droll", "drunk", "dying",
            "ebony", "equal", "erose", "faint", "flown", "fluid", "fluky",
            "flush", "fried", "front", "frore", "gaudy", "gaunt", "gauzy",
            "going", "gooey", "goofy", "goosy", "grimy", "gross", "group",
            "grown", "gruff", "hairy", "haute", "hyoid", "iliac", "imido",
            "imino", "ivied", "ivory", "joint", "juicy", "known", "kooky",
            "loony", "loopy", "loose", "loury", "lousy", "loyal", "lying",
            "moire", "moist", "moody", "moony", "mousy", "naive", "noisy",
            "ovine", "ovoid", "owing", "phony", "pious", "plumb", "plump",
            "plumy", "plush", "pricy", "prime", "primo", "print", "prior",
            "privy", "prize", "prone", "proof", "prosy", "proud", "pyoid",
            "quick", "quiet", "quits", "rainy", "roily", "rooky", "roomy",
            "rooty", "rough", "round", "royal", "saucy", "shier", "shiny",
            "shoal", "short", "showy", "skimp", "skyey", "slick", "slier",
            "slimy", "slink", "smoky", "snide", "snowy", "sooth", "sooty",
            "sound", "soupy", "south", "spicy", "spiky", "spiny", "sport",
            "squab", "squat", "stiff", "still", "stock", "stone", "stony",
            "stoss", "stout", "swift", "swing", "swish", "sworn", "taunt",
            "teiid", "thick", "thine", "think", "tough", "trial", "trick",
            "tried", "trine", "trite", "union", "usual", "utile", "veiny",
            "weird", "which", "white", "whole", "whose", "woody", "wooly",
            "woozy", "wrier", "wrong", "wroth", "young",
        ]
        last_names = [
            "abohm", "abysm", "abyss", "acorn", "adobe", "adobo", "aging",
            "agism", "agita", "agony", "agora", "ahold", "aioli", "aloin",
            "alula", "amice", "amide", "amigo", "amine", "amity", "amole",
            "amour", "anima", "anime", "anion", "anode", "anole", "aquae",
            "aroid", "aroma", "atoll", "atomy", "atony", "axiom", "axion",
            "azide", "azine", "azole", "azote", "azoth", "azure", "bairn",
            "baisa", "baize", "bayou", "biome", "biota", "blini", "bliss",
            "bloke", "coign", "cooky", "coupe", "coypu", "crier", "crime",
            "croci", "croft", "crone", "crony", "croup", "croze", "cruet",
            "crura", "cruse", "crypt", "cuish", "daisy", "deism", "deity",
            "dhole", "dhoti", "diode", "doily", "doing", "doozy", "dough",
            "doula", "doura", "doyen", "droit", "dross", "druid", "drupe",
            "druse", "dryad", "dryer", "duomo", "edict", "elite", "enoki",
            "epoch", "epode", "etude", "etyma", "exine", "exurb", "fairy",
            "faith", "fauna", "haick", "haiku", "haint", "haole", "haugh",
            "haulm", "hooch", "hooey", "hooky", "houri", "icing", "idiom",
            "idiot", "idyll", "ilium", "imide", "imine", "inion", "irony",
            "joual", "joule", "kaiak", "kauri", "khoum", "kiosk", "klick",
            "kloof", "klutz", "knish", "krill", "krona", "krone", "kroon",
            "laird", "laity", "leone", "loofa", "lough", "loupe", "maize",
            "maund", "mauve", "mayor", "mbira", "moola", "moose", "mould",
            "myoma", "myope", "naiad", "quoit", "quota", "raita", "rayah",
            "rayon", "reins", "rhino", "rhumb", "riyal", "saiga", "sault",
            "sauna", "saury", "scion", "scone", "scuba", "scudo", "scurf",
            "scuta", "scute", "seism", "shire", "shirt", "shiva", "shoat",
            "shogi", "shoji", "shoon", "shote", "shott", "shoyu", "skiff",
            "skill", "skink", "skort", "skosh", "skull", "sloka", "sloop",
            "sloth", "sloyd", "slype", "smith", "smolt", "snoek", "snook",
            "snout", "spica", "spick", "spine",
        ]
        num_users = 10
        groups = Group.objects.all()
        max_num_groups_wanted = 3
        # Never try to assign more groups than actually exist.
        max_num_groups = min(groups.count(), max_num_groups_wanted)
        password = 'qweqweqwe'
        for _ in range(num_users):
            first_name = choice(first_names)
            last_name = choice(last_names)
            username = f'{first_name}_{last_name}'
            if not User.objects.filter(username=username).exists():
                # Create user
                user = User.objects.create_user(
                    username=username, password=password)
                user.first_name = first_name.title()
                user.last_name = last_name.title()
                user.save()
                # Add user to groups (skip entirely when no groups exist).
                if max_num_groups:
                    # Inclusive upper bound: 1..max_num_groups.
                    num_groups = randrange(1, max_num_groups + 1)
                    # choices() samples with replacement; duplicate picks are
                    # harmless because M2M .add() is idempotent.
                    chosen_groups = choices(groups, k=num_groups)
                    for g in chosen_groups:
                        user.groups.add(g)
|
"""
These integration tests exist solely to test the interaction between pyggybank and GPG on the CLI.
All attempts should be made to avoid extending these tests in preference for unit tests of the functions
themselves (where necessary, mocking out the GPG interactions).
TODO: It would be great to bring these tests into the pyggybank.test module, and marking them as
full-blown integration tests.
"""
import pexpect
import sys
import os
import shutil
from pathlib import Path
# GPG major version detected by test_gpg_new_key_prompt (assumed modern).
gpg_vn = 2
def test_gpg_new_key_prompt():
    """Check that `pyggybank wizard` drops into GPG key generation.

    Runs against a fresh --gpg-home so no keys exist yet. Older GPG (v1)
    asks for key type/size interactively; hitting that path records
    gpg_vn = 1 for test_gpg_no_agent. We only verify the keygen wizard
    starts — the key details are unimportant.

    FIX: expect() patterns are regexes; the escaped ones are now raw
    strings so '\\(' etc. are no longer invalid string escapes
    (SyntaxWarning on modern Python). The bytes passed to pexpect are
    unchanged.
    """
    global gpg_vn
    tmp = Path('tmp')
    if tmp.exists():
        shutil.rmtree(tmp)
    tmp.mkdir()
    child = pexpect.spawnu('pyggybank wizard --gpg-home={}'.format(tmp))
    # child.logfile = os.fdopen(sys.stdout.fileno(), 'w')
    # We just want to check that we have initiated the gpg wizard correctly. The details aren't important.
    newer_gpg = True
    try:
        # These prompts only appear on older (v1) GPG.
        child.expect('Your selection?', timeout=1)
        child.sendline('1')
        child.expect('What keysize do you want?', timeout=1)
        child.sendline('2048')
        newer_gpg = False
        gpg_vn = 1
        child.expect('key expires in n years', timeout=1)
        child.sendline('0')
    except pexpect.exceptions.TIMEOUT:
        pass
    if newer_gpg:
        child.expect('Real name:')
        child.sendline('Testing Real Me')
        child.expect('Email address:')
        child.sendline('test@example.com')
        child.expect(r'\(O\)kay\/\(Q\)uit\?')
    child.close()
    # Let's get a newline afterwards.
    assert True
    print()
def test_gpg_no_agent():
    """Check pyggybank behaviour when the GPG key hasn't been unlocked
    (i.e. the gpg-agent is fresh): the wizard writes an encrypted accounts
    file, then reading it back prompts for the passphrase.

    FIX: regex expect() patterns are now raw strings (same runtime bytes,
    no invalid-escape SyntaxWarning).
    """
    gpghome = Path(__file__).parent/'gpg'
    accounts_file = Path('accounts.encrypted.{}.yml'.format(gpg_vn))
    if gpg_vn < 2:
        raise RuntimeError('Cant yet handle older gpg.')
    # Start from a clean slate.
    if accounts_file.exists():
        accounts_file.unlink()
    child = pexpect.spawnu('pyggybank wizard --gpg-home={} --accounts-file={}'.format(gpghome, accounts_file))
    # child.logfile = os.fdopen(sys.stdout.fileno(), 'w')
    child.expect(r'GPG identity would you like to encrypt with\?', timeout=5)
    child.sendline('Testing Name <test@example.com>')
    child.expect('Provider:')
    child.sendline('Test provider')
    child.expect('User ID')
    child.sendline('abcdef')
    child.expect('password')
    child.sendline('123456')
    child.expect('Wrote config')
    # --------
    child = pexpect.spawnu('pyggybank accounts --accounts-file={} --gpg-home={}'.format(accounts_file, gpghome))
    # child.logfile = os.fdopen(sys.stdout.fileno(), 'w')
    # Will only be called if gpg-agent isn't running.
    child.expect(r'GPG passphrase\:')
    child.sendline('Th15154T35t')
    child.expect('Test provider')
    # Let's get a newline afterwards.
    assert True
    print()
if __name__ == '__main__':
    test_gpg_new_key_prompt()
    test_gpg_no_agent()
|
"""
Strat:
Iterate through arr, from right to left, while keeping track
of the largest element we've seen. max_so_far starts out at -1,
because that's what the last elem's value will be. As we iterate,
we compare the current num's value to max_so_far and update it as appropriate.
Stats: O(n) time
Runtime: 112 ms, faster than 76.23% of Python online submissions for Replace Elements with Greatest Element on Right Side.
Memory Usage: 14.3 MB, less than 21.28% of Python online submissions for Replace Elements with Greatest Element on Right Side.
"""
class Solution(object):
    def replaceElements(self, arr):
        """
        :type arr: List[int]
        :rtype: List[int]

        Replace each element with the greatest element to its right;
        the last element becomes -1.
        """
        # Walk right-to-left, carrying the max of everything already seen.
        best = -1
        out = [0] * len(arr)
        for idx in reversed(range(len(arr))):
            out[idx] = best
            if arr[idx] > best:
                best = arr[idx]
        return out
|
from flask import redirect, session
from functools import wraps
from twilio.rest import Client
import sqlite3
import arrow
from flask import Flask, render_template, request, session
from tempfile import mkdtemp
from flask_mail import Mail, Message
import os
app = Flask(__name__)
app.config["TEMPLATES_AUTO_RELOAD"] = True
# Ensure responses aren't cached
@app.after_request
def after_request(response):
    """Attach no-cache headers to every outgoing response."""
    headers = response.headers
    headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
    headers["Expires"] = 0
    headers["Pragma"] = "no-cache"
    return response
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_FILE_DIR"] = mkdtemp()
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
# SMTP settings for Flask-Mail (Amazon SES, SSL on port 465).
# Credentials come from EMAIL_USER / EMAIL_PASSWORD; a KeyError at import
# time means those environment variables are not set.
mail_settings = {
    "MAIL_SERVER": 'email-smtp.us-east-1.amazonaws.com',
    "MAIL_PORT": 465,
    "MAIL_USE_TLS": False,
    "MAIL_USE_SSL": True,
    "MAIL_USERNAME": os.environ['EMAIL_USER'],
    "MAIL_PASSWORD": os.environ['EMAIL_PASSWORD']
}
app.config.update(mail_settings)
mail = Mail(app)
def sendEmail(to_email, msg):
    """Send `msg` as the body of a "Hello" email to `to_email` via Flask-Mail."""
    with app.app_context():
        # `msg` is rebound from the body text to the Message object.
        msg = Message(subject="Hello",
                      sender="noreply@netprophet.tech",
                      recipients=[to_email],
                      body=msg)
        mail.send(msg)
def convertLat(lat):
    """Map a latitude (with a 15-degree offset) onto a 0-100 vertical percentage."""
    shifted = lat - 15 + 90
    return (1 - shifted / 180) * 100
def convertLong(long):
    """Map a longitude (with a 14.4-degree offset) onto a 0-100 horizontal percentage."""
    shifted = long - 14.4 + 180
    return shifted / 360 * 100
def login_required(f):
    """
    Decorate routes to require login.
    http://flask.pocoo.org/docs/1.0/patterns/viewdecorators/
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        # Anonymous sessions are bounced to the login page.
        if session.get("user_id") is None:
            return redirect("/login")
        return f(*args, **kwargs)
    return wrapper
def test_message(to_phone, msg):
    """Send `msg` as an SMS to the US number `to_phone` via Twilio; return the SID."""
    account_sid = os.environ['TWILIO_ACCOUNT_SID']
    auth_token = os.environ['TWILIO_ACCOUNT_AUTH']
    client = Client(account_sid, auth_token)
    # Strip common phone-number punctuation before dialling.
    for separator in '- ()_':
        to_phone = to_phone.replace(separator, '')
    message = client.messages.create(
        body=msg,
        from_='+16182074142',
        to='+1%s' % to_phone,
    )
    return message.sid
def send_reminder_by_id(id):
    """Send the reminder with primary key `id` if it is due and unsent.

    Looks up the reminder joined with its friend and user rows, marks it
    sent, then delivers via email or SMS depending on contact_method.
    Returns None when the reminder is absent, already sent, or not yet due.
    """
    try:
        sqliteConnection = sqlite3.connect('friendzone.db', timeout=20)
        cursor = sqliteConnection.cursor()
        print("Connected to SQLite")
        # Query database
        # Only matches when the stored GMT time has passed and sent != 1.
        QUERY = """
            SELECT users.phone, friends.name, friends.timezone, reminders.contact_method, users.email
            FROM reminders
            JOIN friends on friends.id = reminders.friend_id
            JOIN users on users.id = reminders.user_id
            WHERE datetime('now') > time_gmt AND sent IS NOT 1 AND reminders.id = ?;
        """
        cursor.execute(QUERY, [id])
        reminder = cursor.fetchone()
        print("all reminders to send: ", reminder)
        # return none if sent=true or not due
        if not reminder:
            return None
        # update sent=true
        # Marked BEFORE delivery, so a failed send is not retried.
        cursor.execute("UPDATE reminders SET sent = 1 WHERE id = ? ", [id])
        sqliteConnection.commit()
        cursor.close()
        # reminder columns: 0=phone, 1=friend name, 2=timezone, 3=method, 4=email.
        MSG = "It's now %s in %s, don't forget to call %s!" % (
            arrow.now(reminder[2]).format(
                'ddd HH:mm'), reminder[2], reminder[1]
        )
        # send message
        print('Notify', reminder[3])
        if reminder[3] == 'Email':
            return sendEmail(reminder[4], MSG)
        else:
            return test_message(reminder[0], MSG)
    except sqlite3.Error as error:
        print("Error while connecting to sqlite", error)
    finally:
        # NOTE(review): if connect() itself raised, sqliteConnection is
        # unbound here and this raises NameError.
        if (sqliteConnection):
            sqliteConnection.close()
            print("The SQLite connection is closed")
def get_due_reminders():
    """Return a list of dicts describing reminders that are due and unsent.

    A reminder is due when the current UTC time has passed its `time_gmt`
    and its `sent` flag is not 1. Returns [] when nothing is due or on any
    database error.

    FIX: the original never closed the sqlite connection; it is now closed
    in a `finally` block on every path.
    """
    sqliteConnection = None
    try:
        sqliteConnection = sqlite3.connect('friendzone.db', timeout=20)
        cursor = sqliteConnection.cursor()
        print("Connected to SQLite")
        # Query database
        QUERY = """
            SELECT reminders.id, friends.timezone, reminders.contact_method, friends.name
            FROM reminders
            JOIN friends on friends.id = reminders.friend_id
            JOIN users on users.id = reminders.user_id
            WHERE datetime('now') > time_gmt AND sent IS NOT 1;
        """
        cursor.execute(QUERY)
        reminders = cursor.fetchall()
        cursor.close()
        # return none if sent=true or not due
        if not reminders:
            return []
        return [
            {
                'reminder_id': r[0],
                'friend_timezone': r[1],
                'contact_method': r[2],
                'friend_name': r[3]
            }
            for r in reminders
        ]
    except Exception as e:
        # Deliberately best-effort: log and report "nothing due".
        print(e)
        return []
    finally:
        if sqliteConnection:
            sqliteConnection.close()
def send_all_due_reminders():
    """Dispatch every due reminder, printing each one and its send result."""
    due = get_due_reminders()
    for reminder in due:
        print('due', reminder)
        print(send_reminder_by_id(reminder['reminder_id']))
|
# -*- coding: utf-8 -*-
# Build two integer ranges, append each list's own sum, sort descending,
# then print the product of the smallest element of each (popped from the
# sorted tail).
first = list(range(5, 20))
second = list(range(9, 24))
first.append(sum(first))
second.append(sum(second))
first.sort(reverse=True)
second.sort(reverse=True)
print(first.pop() * second.pop())
import datetime
import sqlalchemy
from flask_login import UserMixin
from sqlalchemy import orm
# from sqlalchemy.dialect.postgresql import JSON
from .db_session import SqlAlchemyBase
class Orders(SqlAlchemyBase, UserMixin):
    """A customer order: contact details, contents, payment and status."""
    __tablename__ = 'orders'
    # Auto-incrementing primary key.
    id = sqlalchemy.Column(
        sqlalchemy.Integer, primary_key=True, autoincrement=True)
    # Customer contact details. NOTE(review): 'adress' is misspelled, but
    # renaming the attribute/column would break existing callers and data.
    name_klient = sqlalchemy.Column(sqlalchemy.String, nullable=True)
    adress = sqlalchemy.Column(sqlalchemy.String, nullable=True)
    email = sqlalchemy.Column(sqlalchemy.String, nullable=True)
    phone_number = sqlalchemy.Column(sqlalchemy.String, nullable=True)
    # Creation timestamp (server-local time at insert).
    times_orders = sqlalchemy.Column(
        sqlalchemy.DateTime, default=datetime.datetime.now)
    applied_coupons = sqlalchemy.Column(sqlalchemy.String, nullable=True)
    order_note = sqlalchemy.Column(sqlalchemy.String, nullable=True)
    execution_speed = sqlalchemy.Column(sqlalchemy.String, nullable=True)
    payment_method = sqlalchemy.Column(sqlalchemy.String, nullable=True)
    # Order contents as JSON — presumably item ids/quantities; confirm
    # against the code that writes it.
    structure = sqlalchemy.Column(sqlalchemy.JSON, nullable=True)
    # NOTE(review): semantics of back_id and scores are not visible here.
    back_id = sqlalchemy.Column(sqlalchemy.Integer, nullable=True)
    summ = sqlalchemy.Column(sqlalchemy.Integer, nullable=True)
    status = sqlalchemy.Column(sqlalchemy.String, nullable=True)
    scores = sqlalchemy.Column(sqlalchemy.Integer, nullable=True)
|
import boto3
# List every object key under the input/ prefix of the bioinfobiobureau
# S3 bucket (requires AWS credentials in the environment).
s3 = boto3.resource('s3')
bucket = s3.Bucket('bioinfobiobureau')
print(bucket)
# files = s3.Bucket('bioinfobiobureau').objects.filter(Prefix='input')
# # print(bucket)
# for file in files:
#     print(file)
# for bucket in s3.buckets.all():
#     print(bucket.name)
files = []
for key in bucket.objects.filter(Prefix='input/'):
    # print(key.key)
    files.append(key.key)
# Skip the first entry — presumably the 'input/' folder placeholder object;
# confirm against the bucket layout.
print(files[1:])
# files = bucket.objects.filter(Prefix='input/')
# files = bucket.objects.filter(Prefix='input/')
|
from django.shortcuts import get_object_or_404
from rest_framework import viewsets, status, filters
from rest_framework.decorators import (
action,
api_view,
)
from rest_framework.generics import ListAPIView
from rest_framework.response import Response
from .filters import FilterByDistance, FilterByDate, FilterByParticipation
from .models import Address, Event, Game
from .serializers import (
AddressSerializer,
EventSerializer,
EventCreateSerializer,
AddressCreateSerializer,
GameSerializer,
)
from ..chats.models import Chat
class AddressViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for addresses; deliberately open (no auth/permissions)."""
    serializer_class = AddressSerializer
    authentication_classes = ()
    permission_classes = ()
    queryset = Address.objects.all()

    def get_serializer_class(self):
        # Write operations use the dedicated create serializer.
        write_actions = ("update", "partial_update", "create")
        if self.action in write_actions:
            return AddressCreateSerializer
        return super().get_serializer_class()
class EventViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for events; creating one also creates its chat."""
    serializer_class = EventSerializer
    queryset = Event.objects.all()

    def get_serializer_class(self):
        # Write operations use the dedicated create serializer.
        write_actions = ("update", "partial_update", "create")
        if self.action in write_actions:
            return EventCreateSerializer
        return super().get_serializer_class()

    def perform_create(self, serializer):
        creator = self.request.user
        members = [
            creator,
        ]
        # Every new event gets its own chat, seeded with the creator.
        chat = Chat()
        chat.save()
        chat.participants.set(members)
        serializer.save(
            creator=creator,
            participants=members,
            chat=chat,
        )
class CustomEventAPIView(ListAPIView):
    """Event list endpoint with search, participation/date/distance filters
    and ordering."""
    serializer_class = EventSerializer
    model = serializer_class.Meta.model
    filter_backends = [
        FilterByParticipation,
        filters.SearchFilter,
        FilterByDate,
        FilterByDistance,
        filters.OrderingFilter,
    ]
    search_fields = [
        "name", "games__name"
    ]  # describe here which fields want to use for searching, then we use search=*
    ordering_fields = [
        "date",
    ]  # describe here which fields want to use for ordering, then we use order=(-)field

    def get_queryset(self):
        # All events; the filter backends narrow this per request.
        queryset = Event.objects.all()
        return queryset

    # NOTE(review): @action is a ViewSet decorator; on a ListAPIView it
    # presumably has no routing effect — confirm it is needed here.
    @action(detail=True)
    def list(self, *args, **kwargs):
        """Filter, serialize and paginate the event list."""
        queryset = self.filter_queryset(self.get_queryset())
        # if queryset.count() == 0:
        #     return Response(None, status=status.HTTP_204_NO_CONTENT)
        # else:
        context = {"request": self.request}
        serializer = self.serializer_class(queryset, context=context, many=True)
        # Pagination is applied to the already-serialized data.
        page = self.paginate_queryset(queryset=serializer.data)
        return self.get_paginated_response(page)
class GameViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only, unauthenticated game endpoints, searchable by name."""
    queryset = Game.objects.all()
    serializer_class = GameSerializer
    filter_backends = [filters.SearchFilter]
    # NOTE(review): filter_class normally expects a FilterSet, not the model
    # itself, and SearchFilter ignores it -- likely dead/incorrect; confirm.
    filter_class = Game
    search_fields = ["name"]
    authentication_classes = ()
    permission_classes = ()
@api_view(["PATCH"])
def join_leave_event(request, pk):
    """Toggle the requesting user's membership in an event and its chat.

    The event creator may not leave their own event (405). Any other user
    is removed if already a participant, otherwise added. Returns 200.
    """
    event = get_object_or_404(Event, pk=pk)
    if request.user == event.creator:
        # Creators must stay in their own event.
        # (Removed a leftover debug print of the comparison result.)
        return Response(None, status.HTTP_405_METHOD_NOT_ALLOWED)
    if request.user in event.participants.all():
        event.participants.remove(request.user)
        event.chat.participants.remove(request.user)
    else:
        event.participants.add(request.user)
        event.chat.participants.add(request.user)
    event.save()
    return Response(None, status.HTTP_200_OK)
|
# Generated by Django 3.2.5 on 2021-07-16 06:31
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Adds the Brand and Product models; makes Profile.address optional."""

    dependencies = [
        ('mainapp', '0002_auto_20210716_0321'),
    ]

    operations = [
        migrations.CreateModel(
            name='Brand',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=51)),
            ],
        ),
        migrations.AlterField(
            model_name='profile',
            name='address',
            field=models.ManyToManyField(blank=True, to='mainapp.ShippingAddress'),
        ),
        # Product references Brand, so Brand is created first above.
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=51)),
                ('qty', models.IntegerField()),
                ('img', models.ImageField(upload_to='products/')),
                ('description', models.TextField()),
                ('brand', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mainapp.brand')),
            ],
        ),
    ]
|
# Generated by Django 3.0.7 on 2020-07-05 14:56
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds a category_id code to Category.

    preserve_default=False: 'Somikoron' only back-fills existing rows and
    is not kept as the field's default afterwards.
    """

    dependencies = [
        ('shop', '0017_auto_20200705_2044'),
    ]

    operations = [
        migrations.AddField(
            model_name='category',
            name='category_id',
            field=models.CharField(default='Somikoron', max_length=10),
            preserve_default=False,
        ),
    ]
|
# 31. 문자열을 정수로 바꾸기
def solution(s):
    """Convert a decimal numeral string (optionally signed) to an int.

    Args:
        s: a string such as "1234" or "-1234".

    Returns:
        The integer value of *s*.

    Raises:
        ValueError: if *s* is not a valid integer literal.
    """
    # int() already handles an optional leading sign; the original's
    # string-accumulator round-trip was redundant.
    return int(s)
# Demonstrate the conversion on a positive and a negative numeral.
for sample in ('1234', '-1234'):
    print(solution(sample))
def average_grade(roster=[]):
total = 0
for i in range(len(roster)):
total = total + roster[i].student_grade
average = total/len(roster)
return average
|
# 辺の長さが {a,b,c} と整数の3つ組である直角三角形を考え,
# その周囲の長さを p とする. p = 120のときには3つの解が存在する:
# {20,48,52}, {24,45,51}, {30,40,50}
# p ≤ 1000 のとき解の数が最大になる p はいくつか?
import time
def get_unique_list(seq):
    """Return *seq* without duplicates, preserving first-seen order.

    Works for unhashable elements (e.g. lists) because membership is
    tested with ==; a set/dict based dedup is deliberately not used.
    """
    # Replaces the original's `not seen.append(x)` side-effect-in-condition
    # hack with an explicit, readable loop. Same output, same O(n^2) bound.
    seen = []
    for item in seq:
        if item not in seen:
            seen.append(item)
    return seen
# 答えの組を返す
def get_ressult_list(num):
res = []
for a in range(1, int(num*0.29) +1):
for b in range(a + 1, int(num*0.48) + 1 ):
c = num - a - b
if(c ** 2 == a ** 2 + b ** 2):
res.append([a, b, c])
return get_unique_list(res)
# Scan every perimeter p in [1, 1000] and print each new maximum
# solution count as it is found (Project Euler 39).
res = []  # NOTE(review): unused except by the commented-out line below
max_len = 0
start = time.time()
for i in range(1, 1000+1):
    # print(get_ressult_list(i))
    # res.append([i, len(get_ressult_list(i))])
    if(max_len < len(get_ressult_list(i))):
        max_len = len(get_ressult_list(i))
        print(i,max_len)
print("実行時間:" + str(time.time()-start)[:5] + "s")
### 参考 1 ~ 300 回した時の実行時間
## 40秒ほど
# for a in range(1, num):
# for b in range(1, num):
# for c in range(1, num):
# if(a + b + c == p and a^2 + b ^2 == c^2)
#
## 25秒
# for a in range(1, num//2 + 1):
# for b in range(1, num//2 + 1):
# for c in range(1, num//2 + 1):
#
## 3秒
# for b in range(1, num//2 + 1):
# c = p -a -b
# if(a^2 + b ^2 == c^2)
#
## 1秒
# for b in range(a, num//2 + 1):
#
## 番外1
#
# for b in range(a + 1, num + 1 -2*a):
#
## 番外2
# for a in range(1, int(num*0.29) +1):
# for b in range(a + 1, int(num*0.48) + 1 ):
|
import os
# Project root directory, used to anchor the SQLite database files.
basedir = os.path.abspath(os.path.dirname(__file__))


class Config(object):
    """
    common configuration settings
    """
    DEBUG = False
    CSRF_ENABLED = True
    # Secret key comes from the environment; None when unset.
    SECRET = os.getenv('SECRET')
    # Prefer DATABASE_URL (e.g. on Heroku); fall back to a local SQLite file.
    SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
        'sqlite:///' + os.path.join(basedir, 'bucketlist.sqlite')

    @staticmethod
    def init_app(app):
        # Hook for app-specific initialisation; subclasses may override.
        pass
class DevelopmentConfig(Config):
    """
    Configuration settings for development
    """
    # Enables the debugger/reloader; inherits everything else from Config.
    DEBUG = True
class TestingConfig(Config):
    """
    Configurations for Testing
    """
    TESTING = True
    SQLALCHEMY_TRACK_MODIFICATIONS = True
    # Tests always use a local SQLite file, ignoring DATABASE_URL.
    SQLALCHEMY_DATABASE_URI = ('sqlite:///' + os.path.join(basedir,
                                                           'bucketlist.db'))
    DEBUG = True
class StagingConfig(Config):
    """
    Configurations for Staging.
    """
    DEBUG = True
class ProductionConfig(Config):
    """
    Configurations for Production.
    """
    # Both flags explicitly off in production.
    DEBUG = False
    TESTING = False
# Map configuration names (e.g. from an APP_SETTINGS env var) to classes.
config = {
    'development': DevelopmentConfig,
    'testing': TestingConfig,
    # Was missing: StagingConfig existed but was unreachable by name.
    'staging': StagingConfig,
    'production': ProductionConfig,
    'default': DevelopmentConfig
}
|
# Aa Aa 0 - Aa a Aa a Aa a AaN
N
'''N
Aa a a a a a-a a a a a a a a a aN
a aN
N
Aa a:N
N
- Aa a a a a a a.N
- Aa a a a a a a a.N
- Aa a a a a a a a a a a a a.N
- Aa a a a a a a.N
N
'''N
N
a a a aN
a a a aN
N
N
a a('a_a_0.a', a='a-0') a a_a:N
a_a = a(a(a_a))N
a_a_a = a_a[0]N
a_a = a_a[0:]N
N
a('Aa a')N
a(a_a_a)N
N
a(*' ', a='\a')N
N
a('Aa a')N
a(*a_a[:0], a='\a')N
N
# Aa'a a a a a a a a a a a a a a a aN
N
'''N
a: Aa a a a a.N
a_a_a: Aa a a a a a a a a.N
a_a_a: Aa a a a a a a a a a a.N
a_a_a: Aa a a a a a a a a a a.N
a_a: Aa a a a a a (a a a a a a a).N
a_a: Aa a a a a a.N
a_a: Aa a a a a a a a.N
a: Aa a a a a A a.N
'''N
N
# Aa a a a a a a a a a a a a a aN
# (a_a_a)N
# Aa a a a a a a aN
# Aa a a a a a a a a a a a aN
N
a_a_a = a()N
a_a_a = a()N
# Aa a a a a a a_a_a a a a a a a a aN
a_a_a_a = a.a(0, 0, 0, 0, 0).a()N
a_a_a_a = a.a(0, 0, 0, 0, 0).a()N
N
N
a a a a_a:N
# Aa a a a a (a) a a a N
a_a_a_a = a.a.a(a[0], '%A-%a-%aA%A:%A:%A')N
a_a_a_a = a.a.a(a[0], '%a/%a/%a %A:%A')N
a_a_a_a = a.a.a(a[0], '%a/%a/%a %A:%A')N
N
# a a a a a a a a aN
a[0] = a_a_a_aN
a[0] = a_a_a_aN
a[0] = a_a_a_aN
N
a_a_a = a_a_a_a.a()N
N
a a_a_a > a_a_a_a:N
a_a_a_a = a_a_aN
N
a a_a_a < a_a_a_a:N
a_a_a_a = a_a_aN
N
N
# Aa a a a a a a aN
a_a_a_a_a = a_a_a_a.a('%A, %A')N
N
a a_a_a_a_a a a a_a_a:N
a_a_a[a_a_a_a_a] = 0N
a:N
a_a_a[a_a_a_a_a] += 0N
N
# Aa a a a a a a a,aN
a_a = a_a_a_a - a_a_a_aN
N
a a_a a a a_a_a:N
a_a_a[a_a] = 0N
N
a:N
a_a_a[a_a] += 0N
N
N
N
a(*' ', a='\a')N
a('Aa a a a a a a a')N
a(*a_a[:0], a='\a')N
a(*' ', a='\a')N
a('Aa a - a a a')N
a(a_a_a)N
a(*' ', a='\a')N
a('Aa a a a a a a: ', a_a_a_a)N
a('Aa a a a a a a: ', a_a_a_a)N
a(*' ', a='\a')N
a('Aa a - Aa a - Aa 0 a a a')N
N
a a, a a a(a_a_a.a(), a=a a: a[0], a=Aa)[:0]:N
a('Aa: {} - Aa: {}'.a(a, a))N
N
a(*' ', a='\a')N
a('Aa a a: ', a(a_a_a))N
a('Aa a a: ', a(a_a_a))N
N
|
# coding: utf-8
"""
vault/kernel/core_api/proto/v1/accounts/core_api_account_schedule_tags.proto
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: version not set
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class SchedulerJob(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'id': 'str',
        'status': 'SchedulerJobStatus',
        'schedule_id': 'str',
        'schedule_timestamp': 'datetime',
        'publish_timestamp': 'datetime',
        'completed_timestamp': 'datetime'
    }

    attribute_map = {
        'id': 'id',
        'status': 'status',
        'schedule_id': 'schedule_id',
        'schedule_timestamp': 'schedule_timestamp',
        'publish_timestamp': 'publish_timestamp',
        'completed_timestamp': 'completed_timestamp'
    }

    def __init__(self, id=None, status=None, schedule_id=None, schedule_timestamp=None, publish_timestamp=None, completed_timestamp=None):  # noqa: E501
        """SchedulerJob - a model defined in Swagger"""  # noqa: E501
        self._id = None
        self._status = None
        self._schedule_id = None
        self._schedule_timestamp = None
        self._publish_timestamp = None
        self._completed_timestamp = None
        # No polymorphism for this model (standard generated attribute).
        self.discriminator = None
        # Only assign attributes that were actually provided, so the
        # underscored fields stay None for omitted values.
        if id is not None:
            self.id = id
        if status is not None:
            self.status = status
        if schedule_id is not None:
            self.schedule_id = schedule_id
        if schedule_timestamp is not None:
            self.schedule_timestamp = schedule_timestamp
        if publish_timestamp is not None:
            self.publish_timestamp = publish_timestamp
        if completed_timestamp is not None:
            self.completed_timestamp = completed_timestamp

    @property
    def id(self):
        """Gets the id of this SchedulerJob.  # noqa: E501

        A Unique identifier for a Job.  # noqa: E501

        :return: The id of this SchedulerJob.  # noqa: E501
        :rtype: str
        """
        return self._id

    @id.setter
    def id(self, id):
        """Sets the id of this SchedulerJob.

        A Unique identifier for a Job.  # noqa: E501

        :param id: The id of this SchedulerJob.  # noqa: E501
        :type: str
        """
        self._id = id

    @property
    def status(self):
        """Gets the status of this SchedulerJob.  # noqa: E501

        :return: The status of this SchedulerJob.  # noqa: E501
        :rtype: SchedulerJobStatus
        """
        return self._status

    @status.setter
    def status(self, status):
        """Sets the status of this SchedulerJob.

        :param status: The status of this SchedulerJob.  # noqa: E501
        :type: SchedulerJobStatus
        """
        self._status = status

    @property
    def schedule_id(self):
        """Gets the schedule_id of this SchedulerJob.  # noqa: E501

        The Schedule ID references the Schedule that the Job belongs to.  # noqa: E501

        :return: The schedule_id of this SchedulerJob.  # noqa: E501
        :rtype: str
        """
        return self._schedule_id

    @schedule_id.setter
    def schedule_id(self, schedule_id):
        """Sets the schedule_id of this SchedulerJob.

        The Schedule ID references the Schedule that the Job belongs to.  # noqa: E501

        :param schedule_id: The schedule_id of this SchedulerJob.  # noqa: E501
        :type: str
        """
        self._schedule_id = schedule_id

    @property
    def schedule_timestamp(self):
        """Gets the schedule_timestamp of this SchedulerJob.  # noqa: E501

        The time the job was scheduled to be triggered.  # noqa: E501

        :return: The schedule_timestamp of this SchedulerJob.  # noqa: E501
        :rtype: datetime
        """
        return self._schedule_timestamp

    @schedule_timestamp.setter
    def schedule_timestamp(self, schedule_timestamp):
        """Sets the schedule_timestamp of this SchedulerJob.

        The time the job was scheduled to be triggered.  # noqa: E501

        :param schedule_timestamp: The schedule_timestamp of this SchedulerJob.  # noqa: E501
        :type: datetime
        """
        self._schedule_timestamp = schedule_timestamp

    @property
    def publish_timestamp(self):
        """Gets the publish_timestamp of this SchedulerJob.  # noqa: E501

        The time the job was actually published by the Scheduler.  # noqa: E501

        :return: The publish_timestamp of this SchedulerJob.  # noqa: E501
        :rtype: datetime
        """
        return self._publish_timestamp

    @publish_timestamp.setter
    def publish_timestamp(self, publish_timestamp):
        """Sets the publish_timestamp of this SchedulerJob.

        The time the job was actually published by the Scheduler.  # noqa: E501

        :param publish_timestamp: The publish_timestamp of this SchedulerJob.  # noqa: E501
        :type: datetime
        """
        self._publish_timestamp = publish_timestamp

    @property
    def completed_timestamp(self):
        """Gets the completed_timestamp of this SchedulerJob.  # noqa: E501

        Indicates the time that a successful job outcome was received. This timestamp is UTC.  # noqa: E501

        :return: The completed_timestamp of this SchedulerJob.  # noqa: E501
        :rtype: datetime
        """
        return self._completed_timestamp

    @completed_timestamp.setter
    def completed_timestamp(self, completed_timestamp):
        """Sets the completed_timestamp of this SchedulerJob.

        Indicates the time that a successful job outcome was received. This timestamp is UTC.  # noqa: E501

        :param completed_timestamp: The completed_timestamp of this SchedulerJob.  # noqa: E501
        :type: datetime
        """
        self._completed_timestamp = completed_timestamp

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively serialise nested models, lists and dicts of models.
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        # Generated guard: if the model ever subclasses dict, merge its items.
        if issubclass(SchedulerJob, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, SchedulerJob):
            return False

        # Compares all private fields at once via the instance dicts.
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
|
from flask import *
import tmdbAPI
tmdb_app = Flask(__name__)
# === Start of Main Routes ===
@tmdb_app.route("/")
def index():
    """Render the home page."""
    return render_template("index.html")
@tmdb_app.route("/search")
def search():
    """Handle /search: validate the query parameters and render results."""
    # Only these categories are accepted; anything else falls back to the
    # defaults below.
    allowed_categories = ("movie", "tv", "person")
    params = {
        "category": request.args.get("category", default="movie", type=str),
        "page": request.args.get("page", default=1, type=int),
        "q": request.args.get("q", default="", type=str),
    }
    if params["category"] not in allowed_categories:
        params["category"] = "movie"
    if not params["q"]:
        # No query supplied. response=0 signals search.html not to build
        # any results HTML.
        return render_template("search.html", params=params, response=0)
    # A non-empty query was supplied, so run the search and fetch genres.
    response_data = tmdbAPI.search(params)
    genres = tmdbAPI.get_genres(params["category"])
    return render_template("search.html", params=params, response=response_data, genres=genres)
@tmdb_app.route("/discover")
def discover():
    """Render the discover page (results are loaded via AJAX)."""
    return render_template("discover.html")
# === End of Main Routes ===
# === Start of API Abstraction Routes ===
# These are accessed by AJAX requests initiated under tmdb.js and discover.js
@tmdb_app.route("/api/discover/top-rated")
def rated():
    """AJAX endpoint: top-rated titles."""
    # call the finder function, which returns the JSON object from The Movie DB API
    return tmdbAPI.find_rated()
@tmdb_app.route("/api/discover/top-trending")
def trending():
    """AJAX endpoint: top-trending titles."""
    # call the finder function, which returns the JSON object from The Movie DB API
    return tmdbAPI.find_trending()
@tmdb_app.route("/api/detail")
def detail():
    """AJAX endpoint: proxy a detail lookup, sanitising the category."""
    category = request.args.get("category", default="movie", type=str)
    title_id = request.args.get("id", default=0, type=int)
    # Fall back to "movie" for any unrecognised category.
    if category not in ("movie", "tv"):
        category = "movie"
    # call the finder function, which returns the JSON object from The Movie DB API
    return tmdbAPI.get_detail(category, title_id)
# === End of API Abstraction Routes ===
# Run the development server on all interfaces when executed directly.
if __name__ == "__main__":
    tmdb_app.run(host='0.0.0.0')
|
import random
def gen_random_word(num):
    """Return a random lowercase ASCII string of length *num*.

    NOTE(review): uses the non-cryptographic ``random`` module. If the
    result is used as a security salt/token, switch to ``secrets.choice``.
    """
    seed = "abcdefghijklmnopqrstuvwxyz"
    # join over a generator replaces the manual append-then-join loop.
    return ''.join(random.choice(seed) for _ in range(num))
|
#!/usr/bin/python3.5
from PIL import Image
import random
import math
# image/canvas size
image_x_size = 1920
image_y_size = 1080

# blob properties
blobs = 10                # number of blobs to draw
max_blob_size = 1080      # blob diameter bounds, in pixels
min_blob_size = 10
max_alpha = 100           # peak opacity added per blob (0-255 scale)

# create background and foreground canvas and set 'pixels' to foreground.
# background is black and fully opaque
background = Image.new ('RGBA', (image_x_size,image_y_size), (0,0,0,255))
# foreground is black but fully transparent
foreground = Image.new ('RGBA', (image_x_size,image_y_size), (0,0,0,0))
pixels = foreground.load()
# do for every blob
for blob in range(blobs):
    # randomize the blob size
    blob_size = random.randint(min_blob_size, max_blob_size)
    blob_area = blob_size * blob_size  # NOTE(review): unused below
    blob_origo = blob_radius = int(blob_size / 2)
    # width of the soft edge at the blob's rim
    blob_fade = int(blob_size * 0.2)
    # randomize the color. Strong shades of red, green, blue, magenta, cyan, yellow
    rgb_rand = random.randint(0,5)
    if rgb_rand == 0:
        red = random.randint(150,255)
        green = random.randint(0,55)
        blue = random.randint(0,55)
    elif rgb_rand == 1:
        red = random.randint(0,55)
        green = random.randint(150,255)
        blue = random.randint(0,55)
    elif rgb_rand == 2:
        red = random.randint(0,55)
        green = random.randint(0,55)
        blue = random.randint(150,255)
    elif rgb_rand == 3:
        red = random.randint(150,255)
        green = random.randint(0,55)
        blue = random.randint(150,255)
    elif rgb_rand == 4:
        red = random.randint(0,55)
        green = random.randint(150,255)
        blue = random.randint(150,255)
    elif rgb_rand == 5:
        red = random.randint(150,255)
        green = random.randint(150,255)
        blue = random.randint(0,55)
    # randomize the position for the upper left corner of the blob
    # the range is such that up to half the blob can end up outside the image, true for both x and y
    blob_x_pos = random.randrange(0 - blob_radius, image_x_size - blob_radius, 1)
    blob_y_pos = random.randrange(0 - blob_radius, image_y_size - blob_radius, 1)
    blob_origo_x = blob_x_pos + blob_origo
    blob_origo_y = blob_y_pos + blob_origo
    # if parts of the blob end up outside of the visible area, set visible start and end positions
    if blob_x_pos < 0:
        blob_visible_x_start = 0
    else:
        blob_visible_x_start = blob_x_pos
    if blob_x_pos + blob_size > image_x_size:
        blob_visible_x_end = image_x_size
    else:
        blob_visible_x_end = blob_x_pos + blob_size
    if blob_y_pos < 0:
        blob_visible_y_start = 0
    else:
        blob_visible_y_start = blob_y_pos
    if blob_y_pos + blob_size > image_y_size:
        blob_visible_y_end = image_y_size
    else:
        blob_visible_y_end = blob_y_pos + blob_size
    # paint every visible pixel of the blob's bounding square
    for y in range(blob_visible_y_start, blob_visible_y_end, 1):
        for x in range(blob_visible_x_start, blob_visible_x_end, 1):
            # get the values for the current pixel
            current_pixel = foreground.getpixel((x, y))
            # calculate pixel distance from blob (hypotenuse)
            x_pos = abs(blob_origo_x - x)
            y_pos = abs(blob_origo_y - y)
            distance = math.sqrt(x_pos * x_pos + y_pos * y_pos)
            if distance > blob_radius:
                # outside of blob, no change to pixel
                r = current_pixel[0]
                g = current_pixel[1]
                b = current_pixel[2]
                a = current_pixel[3]
            elif distance < (blob_radius - blob_fade):
                # inside of area for max blob opaqueness, add blob color and alpha to pixel
                r = current_pixel[0] + red
                g = current_pixel[1] + green
                b = current_pixel[2] + blue
                a = current_pixel[3] + max_alpha
            else:
                # in fade area, calculate which amount of color and alpha to add
                # (smooth ramp: 6.28 ~ 2*pi, a cycloid-like easing curve)
                fade_level = (blob_radius - distance) / blob_fade
                fade_pixel = (6.28 * fade_level - math.sin(6.28 * fade_level)) / 6.28
                alpha = int(max_alpha * fade_pixel)
                r = int(current_pixel[0] + red * fade_pixel)
                g = int(current_pixel[1] + green * fade_pixel)
                b = int(current_pixel[2] + blue * fade_pixel)
                a = current_pixel[3] + alpha
            # if color or alpha is out of bounds, get it back in line
            if r > 255:
                r = 255
            if g > 255:
                g = 255
            if b > 255:
                b = 255
            if a > 255:
                a = 255
            # put pixel
            pixels[x, y] = (r, g, b, a)
# merge background and foreground and save the result as a PNG.
merged_image = Image.alpha_composite (background, foreground)
# Bug fix: the original did `fp.close` (missing parentheses), so the file
# handle was never explicitly closed; the context manager guarantees it.
with open("/tmp/blob.png", "wb") as fp:
    merged_image.save(fp, "PNG")
# show the image
merged_image.show()
|
# get current age.
# max age = 90
# count the number of days left
# assume 365 days, 52 weeks and 12 months in a year
age = int(input("Please enter your age "))  # raises ValueError on non-numeric input
years_left = 90 - age
days_left = years_left * 365
weeks_left = years_left * 52
months_left = years_left * 12
print(f"you have {days_left} days, {months_left} months, {weeks_left} weeks left till you reach age of 90")
|
from seis_grados import *
import sys
KB = 'Bacon Kevin'
def camino_hasta_KB(dataset, actor):
    """
    Print the shortest path from the given actor to Kevin Bacon, one
    "X acted with Y in Z" line per edge. Prints an error message when
    the actor is not in the dataset.
    """
    if not pertenece_actor(dataset,actor):
        print("No se pudo encontrar un camino desde el actor ingresado hasta KB.")
        return
    for (actor_1, actor_2, pelicula) in camino(dataset, KB, actor):
        print("'{}' actuó con '{}' en '{}'.".format(actor_1,actor_2,pelicula))
def bacon_number(dataset, actor):
    """
    Print the Kevin Bacon Number of the given actor (-1 when no path
    to Kevin Bacon exists). Prints an error message when the actor is
    not in the dataset.
    """
    if not pertenece_actor(dataset, actor):
        print("El actor ingresado o es incorrecto o no se encuentra en la base de datos.")
        return
    path = camino(dataset, KB, actor)
    kbn = len(path) if path else -1
    print("'{}' tiene un Kevin Bacon Number igual a {}.".format(actor, kbn))
def bacon_number_mayor_a_6(dataset):
    """
    Print how many actors sit more than 6 steps away from Kevin Bacon,
    grouped by exact distance. Actors with an infinite KBN are not
    counted. Prints a message when no such actor exists.
    """
    actores = actores_a_mayor_distancia(dataset, KB, 6)
    if not actores:
        print("No existen actores cuyo KBN sea mayor a 6")
        return
    conteo = {}
    for actor, cercania in actores:
        # Bug fix: the first actor at each distance used to be initialised
        # to 0, undercounting every group by one and hiding groups of
        # size one entirely.
        conteo[cercania] = conteo.get(cercania, 0) + 1
    for cercania, cantidad in conteo.items():
        if cercania > 6:
            print("Con KBN igual a {}: {}".format(cercania, cantidad))
def bacon_number_infinito(dataset):
    """
    Print the number of actors whose Kevin Bacon Number is infinite
    (reported as a negative value by bacon_number_actores).
    """
    cont = sum(1 for kbn in bacon_number_actores(dataset) if kbn < 0)
    print("Los actores con un Bacon Number infinito son {}".format(cont))
def KBN_promedio(dataset):
    """
    Print the average Kevin Bacon Number over every actor in the
    database.
    PRE: receives the dataset.
    """
    promedio = promedio_kbn(dataset)
    # NOTE(review): the "%" in this message looks like a formatting leftover.
    print("El Kevin Bacon Number promedio es %", promedio)
def similares_a_KB(dataset, n):
    """Print the n actors most similar to Kevin Bacon."""
    print("Los {} actores más similares KB son {}".format(n, similares(dataset,KB, int(n))))
def popularidad_contra_KB(dataset, actor):
    """Print the actor's popularity as a percentage of Kevin Bacon's."""
    ac=popularidad(dataset, actor)  # popularity score of the queried actor
    kb=popularidad(dataset,KB)      # Kevin Bacon's score (denominator)
    # NOTE(review): the message likely intends "...es un {}% ..." -- confirm.
    print("{} es un {} de lo popular que es Kevin Bacon".format(actor, (ac*100)/kb))
def cantidad_actores(dataset):
    """Print how many actors the dataset contains (estadisticas()[1])."""
    print("El dataset contiene {} actores".format(estadisticas(dataset)[1]))
def cantidad_peliculas(dataset):
    """Print how many movies the dataset contains (estadisticas()[0])."""
    print("El dataset contiene {} peliculas".format(estadisticas(dataset)[0]))
def main():
    """Entry point: load the dataset named on the command line and run an
    interactive command loop until EOF.

    Fix: removed two leftover debug prints of popularidad() results that
    ran on every start-up.
    """
    file = sys.argv[1]
    dataset = grafo_crear(file)
    print("Cargado.\n")
    funciones = ['camino_hasta_KB', 'bacon_number', 'bacon_number_mayor_a_6',
                 'KBN_promedio', 'cantidad_actores', 'cantidad_peliculas', 'popularidad_contra_KB',
                 'similares_a_KB', 'bacon_number_infinito']
    try:
        entrada = input().split(" ")
        while entrada:
            # NOTE(review): `entrada == ""` can never be true for a list;
            # the `not in funciones` check is what actually rejects input.
            while entrada == "" or entrada[0] not in funciones:
                entrada = input("No valido.Vuelva a ingresar.\n").split(" ")
            comando = entrada[0]
            # Everything after the command is treated as the actor's name.
            actor = " ".join(entrada[1:])
            if comando == 'camino_hasta_KB':
                camino_hasta_KB(dataset, actor)
            if comando == 'bacon_number':
                bacon_number(dataset, actor)
            if comando == 'bacon_number_mayor_a_6':
                bacon_number_mayor_a_6(dataset)
            if comando == 'similares_a_KB':
                if entrada[1].isdigit():
                    similares_a_KB(dataset, entrada[1])
            if comando == 'popularidad_contra_KB':
                popularidad_contra_KB(dataset, actor)
            if comando == 'bacon_number_infinito':
                bacon_number_infinito(dataset)
            if comando == 'KBN_promedio':
                KBN_promedio(dataset)
            if comando == 'cantidad_actores':
                cantidad_actores(dataset)
            if comando == 'cantidad_peliculas':
                cantidad_peliculas(dataset)
            entrada = input().split(" ")
    except EOFError:
        sys.exit()


main()
import json
import csv
from result import Result
import requests
import time
import re
import io
from extract_entities import entities
# Evaluate the WeLink entity-linking web service against the QALD-7 gold
# standard, appending one CSV row per question.
writer = csv.writer(open("welink_results_qald7.csv", 'a', newline=''))
url = 'http://127.0.0.1:8000/api/'
headers = {'Content-type': 'application/json'}
with open('qald-7.json', encoding='UTF-8') as data_file:
    data = json.loads(data_file.read())
nb=0
for distro in data['questions']:
    # print(distro['query']['sparql'])
    # Gold entities are extracted from the question's SPARQL query.
    entities_dataset=entities(distro['query']['sparql'])
    print(entities_dataset)
    entity_mentions=0
    correctly_linked=0
    n=1
    system_result=0
    result=[]
    tmp=time.time()
    # Pick the English wording of the question.
    # NOTE(review): question_en stays unbound if a question has no 'en'
    # entry -- the next line would then raise NameError; confirm the data.
    for d in distro['question']:
        if d["language"]=='en':
            question_en=d["string"]
    print(question_en)
    query = {'query': str(question_en)}
    data_json = json.dumps(query)
    response = requests.post(url, data=data_json, headers=headers)
    execution_time=time.time()-tmp
    print(execution_time)
    if response:
        response_json=response.json()
        if 'mentions' in response_json:
            detected_entity= len(response_json['mentions'])
            system_result=detected_entity
            if 'results' in response_json:
                # system_result=len(response_json['results'])
                entity_mentions=len(entities_dataset)
                # Count gold entities that appear among the top-ranked
                # candidate of each detected mention.
                for em in entities_dataset:
                    for i in range(len(response_json["mentions"])):
                        j=response_json["results"][str(i)][0][1]
                        if j==em:
                            if j not in result:
                                # system_result=system_result+n
                                correctly_linked=correctly_linked+1
                                result.append(j)
                        n=n+1
                #print(correctly_linked, system_result, entity_mentions)
                res= Result(correctly_linked, system_result, entity_mentions)
                fmeasure=0
                if system_result!=0:
                    entity_precision=res.precision()
                else:
                    entity_precision=0
                if entity_mentions!=0:
                    entity_recall=res.recall()
                else:
                    entity_recall=0
                if entity_recall!=0 and entity_precision!=0:
                    fmeasure= (2*entity_precision*entity_recall)/(entity_precision + entity_recall)
                for i in result:
                    print("id question: ", distro['id'], "result n: ", system_result, detected_entity, result)
                print("Precision:", entity_precision," Recall:", entity_recall )
                print("____________________________________")
                myData=[[distro['id'],question_en,entity_mentions,detected_entity,system_result,correctly_linked, entity_precision,entity_recall, fmeasure, "0", "0", execution_time] ]
                myFile = open('welink_results_qald7.csv', 'a', encoding='utf-8')
                with myFile:
                    writer = csv.writer(myFile, delimiter =";", lineterminator='\r')
                    writer.writerows(myData)
            else:
                #No string match
                nsm=0
                system_result=0
                entity_precision=0
                entity_recall=0
                nsm=nsm+1
                # NOTE(review): detected_entity here is the value set above
                # from the 'mentions' key of this same response.
                myData=[[distro['id'],question_en,entity_mentions,detected_entity,system_result,correctly_linked, entity_precision,entity_recall, "0", "0",nsm, execution_time] ]
                print("____________________________________No string match")
                myFile = open('welink_results_qald7.csv', 'a', encoding='utf-8')
                with myFile:
                    writer = csv.writer(myFile, delimiter =";", lineterminator='\r')
                    writer.writerows(myData)
        else:
            #No detected named entity:
            if entities_dataset:
                nbem=0
                system_result=0
                entity_precision=0
                entity_recall=0
                correctly_linked=0
                detected_entity=0
                if 'entity mapping' in distro:
                    for em in distro["entity mapping"]:
                        nbem=nbem+1
                myData=[[distro['id'],question_en,nbem,detected_entity,system_result,correctly_linked, entity_precision,entity_recall,"0", "1", "0", execution_time] ]
                print("____________________________________No detected named entity")
            else:
                # Gold standard has no entities either: counted as correct.
                nbem=0
                system_result=1
                entity_precision=1
                entity_recall=1
                correctly_linked=1
                detected_entity=0
                fmeasure=1
                if 'entity mapping' in distro:
                    for em in distro["entity mapping"]:
                        nbem=nbem+1
                myData=[[distro['id'],question_en,nbem,detected_entity,system_result,correctly_linked, entity_precision,entity_recall,fmeasure, "3", "3", execution_time] ]
                print("____________________________________No mention + No results")
            myFile = open('welink_results_qald7.csv', 'a', encoding='utf-8')
            with myFile:
                writer = csv.writer(myFile, delimiter =";", lineterminator='\r')
                writer.writerows(myData)
    else:
        #Unknown error from the web service
        execution_time=time.time()-tmp
        system_result=0
        entity_precision=0
        entity_recall=0
        fmeasure= 0
        entity_mentions=0
        detected_entity=0
        correctly_linked=0
        print("____________________________________Unknown error from the web service")
        myData=[[distro['id'],question_en,entity_mentions,detected_entity,system_result,correctly_linked, entity_precision,entity_recall, fmeasure, "2", "2", execution_time] ]
        myFile = open('welink_results_qald7.csv', 'a', encoding='utf-8')
        with myFile:
            writer = csv.writer(myFile, delimiter =";", lineterminator='\r')
            writer.writerows(myData)
#resultats= Results(best_candidate)
#resultats_classified=resultats.message()
#print(resultats_classified)
print("process completed")
# import json
# import csv
# from result import Result
# import requests
# import time
# import re
# import io
#
# def extract_entities(query):
# pattern="http://dbpedia.org/resource/[^>]+"
# return re.findall(pattern,query)
# def extract_entities_QALD7(query):
# firstModified=[]
# #print (query)
# if query=="OUT OF SCOPE":
# return firstModified
# whereString = query[query.index('{')+1:query.rfind('}')-1]
# if "no_query" in whereString:
# return firstModified
# whereString=whereString.replace("\n","")
# whereString=whereString.replace("\t"," ")
# query=whereString
# pattern="res:[^\s]+"
# first=re.findall(pattern,query)
#
# for entity in first:
# firstModified.append(entity.replace("res:","http://dbpedia.org/resource/"))
#
# pattern="http://dbpedia.org/resource/[^>]+"
# second=re.findall(pattern,query)
# #print(firstModified+second)
# return firstModified+second
#
# writer = csv.writer(open("final_results_qald8_tt.csv", 'a', newline=''))
# url = 'http://127.0.0.1:8000/api/'
# headers = {'Content-type': 'application/json'}
# with open('qald-8-train-multilingual.json', encoding='UTF-8') as data_file:
# data = json.loads(data_file.read())
# nb=0
# for distro in data['questions']:
# print(distro['query']['sparql'])
# entities_dataset=extract_entities_QALD7(distro['query']['sparql'])
# print(entities_dataset)
# entity_mentions=0
# correctly_linked=0
# n=1
# system_result=0
# result=[]
# tmp=time.time()
# if distro['question'][nb]['language']=='en':
# question_en=distro['question'][nb]['string']
# query = {'query': str(question_en)}
# data_json = json.dumps(query)
# response = requests.post(url, data=data_json, headers=headers)
# if response:
# execution_time=time.time()-tmp
# response_json=response.json()
# if 'mentions' in response_json:
# detected_entity= len(response_json['mentions'])
# if response_json['results']:
# # system_result=len(response_json['results'])
# if entities_dataset:
# for em in entities_dataset:
# entity_mentions=entity_mentions+1
# for b in response_json['results']:
# n=1
# for j in response_json['results'][str(b)]:
# if j[1]==em:
# if j[1] not in result:
# system_result=system_result+n
# correctly_linked=correctly_linked+1
# result.append(j[1])
# n=n+1
# else:
# system_result=1
# correctly_linked=1
# entity_mentions=1
# #print(correctly_linked, system_result, entity_mentions)
# res= Result(correctly_linked, system_result, entity_mentions)
# fmeasure=0
# if system_result!=0:
# entity_precision=res.precision()
# else:
# entity_precision=0
# if entity_mentions!=0:
# entity_recall=res.recall()
# else:
# entity_recall=0
# if entity_recall!=0 and entity_precision!=0:
# fmeasure= (2*entity_precision*entity_recall)/(entity_precision + entity_recall)
#
# for i in result:
# print("id question: ", distro['id'], "result n: ", system_result, detected_entity, result)
# print("Precision:", entity_precision," Recall:", entity_recall )
# print("____________________________________")
# myData=[[distro['id'],question_en,entity_mentions,detected_entity,system_result,correctly_linked, entity_precision,entity_recall, fmeasure, "0", "0", execution_time] ]
# myFile = open('final_results_qald8_tt.csv', 'a', encoding='utf-8')
# with myFile:
# writer = csv.writer(myFile, delimiter =";", lineterminator='\r')
# writer.writerows(myData)
#
# else:
# #No string match
# system_result=0
# entity_precision=0
# entity_recall=0
# nsm=nsm+1
# myData=[[distro['id'],question_en,entity_mentions,detected_entity,system_result,correctly_linked, entity_precision,entity_recall, "0", "0",nsm, execution_time] ]
# print("____________________________________No string match")
# myFile = open('final_results_qald8_tt.csv', 'a', encoding='utf-8')
# with myFile:
# writer = csv.writer(myFile, delimiter =";", lineterminator='\r')
# writer.writerows(myData)
# else:
# #No detected named entity:
# nbem=0
# system_result=0
# entity_precision=0
# entity_recall=0
# correctly_linked=0
# detected_entity=0
# if 'entity mapping' in distro:
# for em in distro["entity mapping"]:
# nbem=nbem+1
# myData=[[distro['id'],question_en,nbem,detected_entity,system_result,correctly_linked, entity_precision,entity_recall,"0", "1", "0", execution_time] ]
# print("____________________________________No detected named entity")
# myFile = open('final_results_qald8_tt.csv', 'a', encoding='utf-8')
# with myFile:
# writer = csv.writer(myFile, delimiter =";", lineterminator='\r')
# writer.writerows(myData)
# else:
# #Unknown error from the web service
# execution_time=time.time()-tmp
# system_result=0
# entity_precision=0
# entity_recall=0
# fmeasure= 0
# entity_mentions=0
# detected_entity=0
# correctly_linked=0
# print("____________________________________Unknown error from the web service")
# myData=[[distro['id'],question_en,entity_mentions,detected_entity,system_result,correctly_linked, entity_precision,entity_recall, fmeasure, "2", "2", execution_time] ]
# myFile = open('final_results_qald8_tt.csv', 'a', encoding='utf-8')
# with myFile:
# writer = csv.writer(myFile, delimiter =";", lineterminator='\r')
# writer.writerows(myData)
#
#
# #resultats= Results(best_candidate)
# #resultats_classified=resultats.message()
# #print(resultats_classified)
# print("process completed")
|
import socket
def TcpClient(target_host, target_port):
    """Open a TCP connection to (target_host, target_port).

    Returns the connected socket object.
    """
    # AF_INET: IPv4 addressing; SOCK_STREAM: TCP.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((target_host, target_port))
    return sock
def TcpSenderReceiver(client, target_host, target_port):
    """Send a minimal HTTP GET over *client* and display the response.

    Fixes vs. original: the original opened (and leaked) a second,
    unused connection via TcpClient, and passed a str to send(), which
    raises TypeError on Python 3 (sockets require bytes).
    target_host/target_port are kept for interface compatibility.
    """
    # Sockets carry bytes, not text.
    client.send(b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
    # Receive up to 4096 bytes of the reply.
    response = client.recv(4096)
    showData(response)
def showData(data):
    """Display *data*; replace this with custom display logic as needed.

    Fix vs. original: `print data` is Python-2 statement syntax and a
    SyntaxError on Python 3; the rest of this script targets Python 3.
    """
    print(data)
def main():
    """Connect to www.example.com:80 and dump the HTTP response."""
    host = "www.example.com"
    port = 80
    conn = TcpClient(host, port)
    TcpSenderReceiver(conn, host, port)
if __name__ == "__main__":
    # Execute only when run as a script, not on import.
    main()
"""
Module to build and train neural networks to predict protein properties.
"""
import os
import yaml
__version__ = "0.7.2"
CONFIG_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "config")
BASE_CONF_FILE = os.path.join(CONFIG_DIR, "base_config.yml")
with open(BASE_CONF_FILE, 'r') as base_conf_file:
BASE_CONFIG = yaml.load(base_conf_file)
# Directory where summaries and checkpoints are written.
WORK_DIR = BASE_CONFIG.pop('work_dir')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import codecs
from xml.dom.minidom import parse, parseString
def getText(nodelist):
    """Concatenate the text content of every TEXT_NODE in *nodelist*."""
    return ''.join(node.data for node in nodelist
                   if node.nodeType == node.TEXT_NODE)
# Parse data.xml and emit LaTeX-like markup for each domain to the file
# named on the command line.
dom = parse('data.xml')
domains = dom.getElementsByTagName('d')

# [ (domain title, [ungrouped cases], [(group name, [cases])]) ]
parsedDomains = []
for d in domains:
    # BUG FIX: str.strip() returns a new string; the original called it
    # and discarded the result, leaving the title unstripped.
    title = d.getElementsByTagName('t')[0].childNodes[0].data.strip()
    # Cases that do not belong to any group.
    parsedAloneCases = [a.childNodes[0].data
                        for a in d.getElementsByTagName('c')
                        if a.parentNode.nodeName != "gr"]
    # BUG FIX: the original queried `dom.getElementsByTagName('gr')`,
    # attaching every group in the whole document to every domain; only
    # this domain's groups belong here (cf. the structure comment below).
    parsedGroups = []
    for g in d.getElementsByTagName('gr'):
        name = g.getElementsByTagName('n')[0].childNodes[0].data
        parsedCases = [c.childNodes[0].data for c in g.getElementsByTagName('c')]
        parsedGroups.append((name, parsedCases))
    parsedDomains.append((title, parsedAloneCases, parsedGroups))

domainString = ""
# [ (Nom du domaine, [Cas sans groupe], (Titre, [Cas])) ]
for d in parsedDomains:
    # Backslashes escaped explicitly ("\d" relied on Python keeping
    # unknown escapes literal, which raises DeprecationWarning).
    domainString += "\\domainNeeds{" + d[0] + "}"
    domainString += "\n{\n"
    for c in d[1]:
        domainString += "\t\\paragraph{ " + c + "}\n"
    domainString += "}\n"
    domainString += "{\n"
    for g in d[2]:
        domainString += "\\paragraph{" + g[0] + "}\n"
        domainString += "\\begin{itemize}\n"
        for c in g[1]:
            domainString += "\t\\item " + c + "\n"
        domainString += "\\end{itemize}\n\n"
    domainString += "}\n\n"

# Write the result as UTF-8; `with` guarantees the handle is closed.
with codecs.open(sys.argv[1], encoding="utf-8", mode="w") as f:
    f.write(domainString)
|
from brownie import Impl, accounts, reverts
ERR_MSG = "Number not greater than 10"
def test_shouldThrow():
    # Deploy a fresh Impl contract and check that shouldThrow(1) reverts
    # with the expected message (1 is not greater than 10).
    impl = Impl.deploy({ 'from': accounts[0] })
    with reverts(ERR_MSG):
        impl.shouldThrow(1)
def test_shouldThrowWithoutWithStatement():
    # Same call but without the reverts() guard: the revert propagates
    # and this test fails.  NOTE(review): presumably kept on purpose to
    # demonstrate the difference — confirm it is intentional.
    impl = Impl.deploy({ 'from': accounts[0] })
    impl.shouldThrow(1)
|
# crawl "snkrs pass" data from nike. It will notify you by message as long as it detects "snkrs pass" from nike.com
#But there is a delay between "pass" and message. Range:[0,50s]
from selenium import webdriver
from time import sleep
import time
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
#from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver import ChromeOptions
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
#import schedule
from twilio.rest import Client
import logging
snkrs=[]  # aria-labels already reported, so each pass is texted only once
phone_receiver=''  # destination phone number (fill in, E.164 format)
phone_sender='+16307964576'  # Twilio-provided sender number
frequency=50 #how often does the program run
account_sid=' ' #from twilio account
auth_token=' ' #from twilio account
# Log to snkr.log, truncating on each start (filemode='w').
logging.basicConfig(level=logging.INFO,
                    filename='snkr.log',
                    filemode='w',
                    format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S')
def monitor():
    """Scan nike.com/launch for "SNKRS Pass" entries and text new ones.

    Walks three navigation tabs, collects anchors whose aria-label
    contains 'Pass', and sends one Twilio SMS per pass not seen before
    (deduplicated via the module-level `snkrs` list).
    """
    option = ChromeOptions()
    # Reuse the local Chrome profile so the session keeps its cookies.
    option.add_argument('--user-data-dir=C:/Users/chris/AppData/Local/Google/Chrome/User Data') #based on location of you cookie
    #option.add_argument('--headless')
    option.add_experimental_option('excludeSwitches', ['enable-automation'])
    browser = webdriver.Chrome(options=option)
    browser.get('https://www.nike.com/launch/')
    browser.implicitly_wait(5)
    browser.find_element_by_css_selector("li.nav-items:nth-child(2) > a:nth-child(1)").click()
    browser.implicitly_wait(5)
    time.sleep(1)
    browser.find_element_by_css_selector("li.nav-items:nth-child(3) > a:nth-child(1)").click()
    browser.implicitly_wait(5)
    time.sleep(1)
    browser.find_element_by_css_selector("li.nav-items:nth-child(1) > a:nth-child(1)").click()
    browser.implicitly_wait(5)
    global snkrs
    for i in range(2):
        time.sleep(2)
        try:
            shoes = browser.find_elements_by_xpath("//a[contains(@aria-label,'Pass')]")
            snkrs = list(set(snkrs))
            for shoe in shoes:
                Pass = shoe.get_attribute('aria-label')
                print(Pass)
                if Pass in snkrs:
                    print('have already detected')
                else:
                    # BUG FIX: the original re-assigned account_sid and
                    # auth_token to themselves here, which made them function
                    # locals and raised UnboundLocalError on every new pass —
                    # silently swallowed by the bare `except:` below, so no
                    # SMS was ever sent.  The module-level credentials are
                    # readable without any assignment.
                    client = Client(account_sid, auth_token)
                    client.messages.create(
                        from_=phone_sender,
                        body=Pass,
                        to=phone_receiver
                    )
                    snkrs.append(Pass)
        except Exception:
            # Narrowed from a bare `except:`; keep the user-visible message
            # but also record the traceback instead of hiding it.
            print("can't find the Snkrs Pass")
            logging.exception("scan iteration failed")
        browser.refresh()
    browser.quit()
    print(time.strftime("%m.%d-%H:%M:%S", time.localtime()))
while True:
    # Poll forever, `frequency` seconds between scans.
    monitor()
    logging.info('working')
    time.sleep(frequency)
|
import pygame, math, sys
from map_engine import *
from entities import *
from Rendering import *
from Script_Engine import *

pygame.init()

# Declaring the entities used in the game.
player1 = Player("Alex", [0, 0], "/home/euler/Desktop/dungeons_v2/Assets/Player/blank_south.png", "Nerd", [384, 256], [])
Player.Pos_Calc(player1, Globals.Camera_xy)

# Creating an instance of the 'Map' class and precomputing its data.
Terrain = Map("/home/euler/Desktop/dungeons_v2/Maps/barrier_test.txt", "World")
Map.Load_Map(Terrain)
Map.Blocked_Calc(Terrain)

# Setting up the variables needed for the display and the game loop.
size = [800, 600]
display = pygame.display.set_mode(size, pygame.HWSURFACE|pygame.DOUBLEBUF|pygame.RESIZABLE)
pygame.display.set_caption("The Dungeons of Plasmawr")
# Chunk extends 64px beyond every screen edge so tiles scroll in smoothly.
Chunk = pygame.Rect(-64, -64, size[0] + 128, size[1] + 128)
clock = pygame.time.Clock()
running = True

while running:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False
        if event.type == pygame.VIDEORESIZE:
            display = pygame.display.set_mode(event.dict["size"], pygame.HWSURFACE|pygame.DOUBLEBUF|pygame.RESIZABLE)
            resize = Render.resize(event, player1, [], display)
            size = resize[0]
            Chunk = resize[1]
        if event.type == pygame.KEYDOWN:
            # WASD steers the camera: 1=up, 2=down, 3=right, 4=left.
            Key_Pressed = event.key
            if Key_Pressed == pygame.K_w:
                Globals.Camera_move = 1
            if Key_Pressed == pygame.K_s:
                Globals.Camera_move = 2
            if Key_Pressed == pygame.K_d:
                Globals.Camera_move = 3
            if Key_Pressed == pygame.K_a:
                Globals.Camera_move = 4
        if event.type == pygame.KEYUP:
            Globals.Camera_move = 0
    Player.Environment_Collide(player1, Terrain.Blocked_Tiles)
    Player.Player_Move(player1)
    Player.Pos_Calc(player1, Globals.Camera_xy)
    display.fill((175, 175, 255))
    Render.Render(display, [], player1, Chunk, Terrain)
    pygame.display.update()
    clock.tick(30)

pygame.quit()
# BUG FIX: `sys.exit` without parentheses is a bare reference and never
# exits the interpreter; it must be called.
sys.exit()
|
# Weighted average of four exam grades: weights 1..4 over a total of 10.
ordinais = ("primeira", "segunda", "terceira", "quarta")
notas = [float(input("{} nota: ".format(o))) for o in ordinais]
media = sum(nota * peso for nota, peso in zip(notas, (1, 2, 3, 4))) / 10
print(round(media, 2))
|
import requests
import json
import random

# Candidate usernames and their e-mail addresses.
a = ["abs", "xyz", "vrt", "alpha", "beta"]
b = {"abs":"as01hu@gmail.com","xyz":"xyz@outlook.com","vrt": "alpha@yahoomail.com", "alpha":"xwe@yahoo.in", "beta":"ade@gmail.com"}

for i in range(5):
    # Pick a not-yet-used username; it is removed from `a` at loop end,
    # so each of the 5 iterations registers a distinct user.
    username = random.choice(a)
    name = random.choice(["Ashu", "pp"])
    email = b[username]
    #print(username, name, email)
    message_send = {
        "username" : username,
        "name" : name,
        "email" : email
    }
    # Register the user; the service is expected to answer with JSON
    # containing a "userid" field.
    resp = requests.post("http://localhost:7000/createuserid", data = json.dumps(message_send))
    body = resp.text
    print(body)
    body = json.loads(body)
    userid = body["userid"]
    uid_send ={
        "userid" : userid
    }
    # NOTE(review): a GET with a request body is unusual (many servers
    # ignore it), and the local name `re` shadows the stdlib regex module
    # name — confirm the endpoint really reads the body.
    re = requests.get("http://localhost:7000/api/verifyemail", data = json.dumps(uid_send))
    validate = re.text
    print(validate)
    a.remove(username)
#https://programmers.co.kr/learn/courses/30/lessons/42862
def solution(n, lost, reserve):
    """Return how many of the n students can attend gym class.

    Students listed in `lost` (1-based) lost their gym clothes; students
    in `reserve` own a spare and may lend it to an immediate neighbour.
    Net state per student: -1 needs clothes, +1 has a spare, 0 otherwise
    (being in both lists cancels out).
    """
    state = [0] * n
    for idx in range(n):
        if idx + 1 in lost:
            state[idx] -= 1
        if idx + 1 in reserve:
            state[idx] += 1
    # Greedy pass: each lender helps the left neighbour first, then the
    # right one.
    for lender in [i for i in range(n) if state[i] == 1]:
        if lender > 0 and state[lender - 1] == -1:
            state[lender - 1] = state[lender] = 0
            continue
        if lender < n - 1 and state[lender + 1] == -1:
            state[lender + 1] = state[lender] = 0
    # Everyone except the still-unclothed (-1) attends.
    return n - state.count(-1)
'''
#TRABALHO 1 DE GRAFOS
#EXERCICIO: STORMSOFSWORDS
#PYTHON V 2.7.13
#UBUNTU 17.04
#ALUNOS: LEONARDO LARANIAGA ra94985
# WELLINGTON TATSUNORI ra94595
# THIAGO KIRA ra78750
#ARQUIVO DE ENTRADA:
# stormofswords.csv
#FUNCOES:
# BFS
# CAMINHO BFS
# BIPARTIDO
# DFS (corrigido)
# STACK DFS
# C_COMPONENTS
# ARTICULATION_POINT (VERSAO SLIDES)
# ARTICULATION_POINT VERSAO ALTERNATIVA
# PONTE
# DIJKSTRA
# CAMINHO_DIJKSTRA
'''
import Queue
import csv
import types
import sys
class Vertice:
    """Graph vertex with the bookkeeping fields used by the algorithms.

    Fields: nome (name), visitado (visited flag), dist (distance, also
    DFS discovery time), pred (predecessor), adj (adjacent vertex
    names), cor (colour for the bipartiteness check: 0 white, 1 blue,
    2 red), time (DFS finish time), cc (connected-component id), low
    (low-link value for articulation points / bridges).
    """

    def __init__(self, n):
        """Build a vertex named *n* with every field reset."""
        self.cc = 0
        self.nome = n
        self.visitado = False
        self.dist = 0
        self.pred = None
        self.adj = []
        self.cor = 0
        self.time = 0
        self.low = 0

    def get_nome(self):
        """Return the vertex name."""
        return self.nome

    def get_visitado(self):
        """Return True when the vertex was already visited."""
        return self.visitado

    def set_visitado(self, b):
        """Set the visited flag to *b* (True or False)."""
        self.visitado = b

    def get_dist(self):
        """Return the vertex distance."""
        return self.dist

    def set_dist(self, d):
        """Set the vertex distance to *d*."""
        self.dist = d

    def set_pred(self, v):
        """Set the predecessor to *v*."""
        self.pred = v

    def get_pred(self):
        """Return the predecessor."""
        return self.pred

    def add_adj(self, v):
        """Append vertex *v* to the adjacency list."""
        self.adj.append(v)

    def get_adj(self):
        """Return the list of adjacent vertex names."""
        return self.adj

    def get_cor(self):
        """Return the colour (0 white, 1 blue, 2 red)."""
        return self.cor

    def set_cor(self, c):
        """Set the colour (0 white, 1 blue, 2 red)."""
        self.cor = c

    def set_low(self, l):
        """Set the low-link value to *l*."""
        self.low = l

    def get_low(self):
        """Return the low-link value."""
        return self.low

    def set_time(self, t):
        """Set the DFS finish time to *t*."""
        self.time = t

    def get_time(self):
        """Return the DFS finish time."""
        return self.time

    def get_cc(self):
        """Return the connected-component id."""
        return self.cc

    def set_cc(self, c):
        """Set the connected-component id to *c*."""
        self.cc = c

    def print_vertice(self):
        """Print the vertex as '-> name:'."""
        print('-> {}:'.format(self.nome))

    def print_bfs(self):
        """Print name, distance and predecessor name after a BFS."""
        print('V -> {} | D -> {} | P -> {}'.format(self.get_nome(),
                                                   self.get_dist(),
                                                   self.get_pred().get_nome()))
class Grafo:
    ''' GRAPH CLASS
    Represents the graph.  Vertices are stored in a dictionary in the
    form 'NAME' : Vertice (object).
    ATTRIBUTES:
        vertex: dictionary holding the graph's vertices
        cc: counter used to label connected components
        mystack: a list used as a STACK
        tempo: shared time counter used by several of the algorithms
        myset: collects articulation points / bridges without repetition
    NOTE: this file targets Python 2.7 (Queue module, sys.maxint).
    '''
    def __init__(self):
        ''' Constructor: builds the undirected graph from
        stormofswords.csv — each row (a, b) adds edge a <-> b.
        '''
        self.cc = 0
        self.mystack = []
        self.tempo = 0
        self.vertex = {}
        self.myset = set({})
        # 'rU' (universal newlines) is a Python 2 open mode.
        with open ('stormofswords.csv', 'rU') as input:
            spamreader = csv.reader(input)
            for row in spamreader:
                vaux1 = Vertice(row[0])
                vaux2 = Vertice(row[1])
                if (vaux1.get_nome() not in self.vertex.keys()):
                    self.vertex[vaux1.get_nome()] = vaux1
                if (vaux2.get_nome() not in self.vertex.keys()):
                    self.vertex[vaux2.get_nome()] = vaux2
                # Adjacency stores names (not objects), both directions.
                if (vaux2.get_nome() not in self.vertex[vaux1.get_nome()].get_adj()):
                    self.vertex[vaux1.get_nome()].add_adj(vaux2.get_nome())
                if (vaux1.get_nome() not in self.vertex[vaux2.get_nome()].get_adj()):
                    self.vertex[vaux2.get_nome()].add_adj(vaux1.get_nome())
    def print_grafo(self):
        ''' Print every vertex followed by its adjacency list. '''
        for k in self.vertex.keys():
            print('VERTICE ->')
            self.vertex[k].print_vertice()
            print('ADJACENTES ->')
            for i in self.vertex[k].get_adj():
                self.vertex[i].print_vertice()
    def init_grafo(self):
        ''' Reset all traversal bookkeeping on the graph and on every
        vertex (visited, pred, dist, colour, time, low).
        '''
        self.cc = 0
        self.mystack[:] = []
        self.tempo = 0
        self.myset.clear()
        for k in self.vertex.values():
            k.set_visitado(False)
            k.set_pred(None)
            k.set_dist(0)
            k.set_cor(0)
            k.set_time(0)
            k.set_low(0)
    def bfs(self, s):
        ''' Breadth-first search from root *s*; prints every reached
        vertex with its distance and predecessor.
        ARGS:
            s: name of the root vertex
        '''
        self.init_grafo()
        myq = Queue.Queue()
        myq.put((s))
        while (not myq.empty()):
            v = myq.get()
            vaux1 = self.vertex[str(v)]
            for vaux2 in vaux1.get_adj():
                if (not self.vertex[vaux2].get_visitado()):
                    self.vertex[vaux2].set_visitado(True)
                    self.vertex[vaux2].set_dist(vaux1.get_dist()+1)
                    self.vertex[vaux2].set_pred(vaux1)
                    myq.put(str(vaux2))
            vaux1.set_visitado(True)
        for k in self.vertex.values():
            if (k.get_dist() > 0) :
                k.print_bfs()
    def caminho_bfs(self,s,v):
        ''' Print the BFS path from origin *s* to destination *v*,
        following predecessor links recursively (run bfs(s) first;
        here predecessors are Vertice objects).
        '''
        if (s == v):
            print(self.vertex[s].get_nome())
        else:
            self.caminho_bfs(s, self.vertex[v].get_pred().get_nome())
            print(self.vertex[v].get_nome())
    def caminho_djikstra(self,s,v):
        ''' Print the shortest path from *s* to *v* computed by
        djikstra() (predecessors are stored as names here).
        '''
        if (s == v):
            print(self.vertex[s].get_nome())
        else:
            self.caminho_djikstra(s, self.vertex[v].get_pred())
            print(self.vertex[v].get_nome())
    def bipartido(self, s):
        ''' Check whether the graph is bipartite via 2-colouring BFS
        (a graph is bipartite iff it contains no odd cycle).
        ARGS:
            s: root vertex name
        RETURNS:
            True or False
        '''
        self.init_grafo()
        self.vertex[s].set_visitado(True)
        myq = Queue.Queue()
        myq.put(s)
        while(not myq.empty()):
            v = myq.get()
            vaux1 = self.vertex[str(v)]
            for vaux2 in vaux1.get_adj():
                if (self.vertex[vaux2].get_cor() == 0):
                    # Uncoloured: give it the colour opposite to vaux1.
                    if (vaux1.get_cor() == 1):
                        self.vertex[vaux2].set_cor(2)
                    else:
                        self.vertex[vaux2].set_cor(1)
                    myq.put(vaux2)
                elif (vaux1.get_cor() == self.vertex[vaux2].get_cor()):
                    return False
        return True
    def dfs(self, s):
        ''' Depth-first search from *s* using the implicit call stack;
        prints name, discovery time (dist), finish time and predecessor
        of every reached vertex.
        '''
        self.init_grafo()
        self.dfs_visit(self.vertex[s])
        # Authors' change proposal: just fixes the t.d / t.f sequence so
        # the root vertex gets t.d = 1.
        # Old code:
        #self.vertex[s].set_visitado(True)
        #for k in self.vertex[s].get_adj():
        #    if (not self.vertex[k].get_visitado()):
        #        self.dfs_visit(self.vertex[k])
        # end
        for j in self.vertex.values():
            if (j.get_visitado()):
                print('N: {} | T.d: {} | T.f: {} | P: {}'.format(j.get_nome(), j.get_dist(), j.get_time(), j.get_pred()))
    def dfs_visit(self,u):
        ''' Recursive DFS visit.
        Uses u.dist as discovery time (u.d) and u.time as finish time
        (u.f).
        ARGS:
            u: the Vertice object being visited
        '''
        self.tempo += 1
        u.set_visitado(True)
        u.set_dist(self.tempo)
        for k in u.get_adj():
            if (not self.vertex[k].get_visitado()):
                (self.vertex[k]).set_pred(u.get_nome())
                self.dfs_visit(self.vertex[k])
        self.tempo += 1
        u.set_visitado(True)
        u.set_time(self.tempo)
    def stack_dfs(self, v):
        ''' Iterative DFS using an explicit stack.
        (Originally written to double-check the recursive DFS; the
        authors later confirmed both work.)
        '''
        self.init_grafo()
        self.mystack.append(v)
        while (self.mystack):
            u = self.mystack.pop()
            if (not self.vertex[u].get_visitado()):
                print(u)
                self.vertex[u].set_visitado(True)
            for k in self.vertex[u].get_adj():
                if (not self.vertex[k].get_visitado()):
                    self.mystack.append(k)
        for j in self.vertex.values():
            if (j.get_visitado()):
                print('N: {} | T.d: {} | T.f: {} | P: {}'.format(j.get_nome(), j.get_dist(), j.get_time(), j.get_pred()))
    def c_components(self):
        ''' Count the connected components of the graph.
        RETURNS:
            cc, the number of connected components
        '''
        self.init_grafo()
        for k in self.vertex.values():
            if (not k.get_visitado()):
                self.cc += 1
                self.c_visit(k)
        return self.cc
    def c_visit(self, c):
        ''' DFS visit used by c_components; labels *c* with the current
        component id.
        ARGS:
            c: the Vertice being visited
        '''
        c.set_visitado(True)
        c.set_cc(self.cc)
        for k in c.get_adj():
            if (not self.vertex[k].get_visitado()):
                self.c_visit(self.vertex[k])
    def pontos_articulacao(self):
        ''' Articulation points, textbook/slides version.
        NOTE (original authors): this version prints the wrong answer;
        see articulation_alternativo for the working variant.
        '''
        self.init_grafo()
        for i in self.vertex.keys():
            if (not self.vertex[i].get_visitado()):
                self.articulation_point(i)
        print ('PONTOS DE ARTICULACAO: VERSAO ORIGINAL #### ESTA DANDO A RESPOSTA ERRADA')
        for j in enumerate(self.myset):
            print(j)
    def articulation_point(self, u):
        ''' Recursive helper for pontos_articulacao.
        Uses u.dist as discovery time and u.time as finish time.
        (Flagged as broken by the original authors.)
        ARGS:
            u: name of the vertex being visited
        '''
        self.tempo += 1
        self.vertex[u].set_visitado(True)
        self.vertex[u].set_low(self.tempo)
        self.vertex[u].set_dist(self.tempo)
        for v in self.vertex[u].get_adj():
            if (not self.vertex[v].get_visitado()):
                self.vertex[v].set_pred(u)
                self.articulation_point(v)
                self.vertex[u].set_low(min(self.vertex[u].get_low(),
                    self.vertex[v].get_low()))
                if (self.vertex[u].get_pred() == None):
                    # Root: articulation point when it has 2+ DFS children.
                    if (self.vertex[v].get_nome() != self.vertex[self.vertex[u].get_adj()[0]].get_nome()):
                        self.myset.add(u)
                else:
                    if (self.vertex[v].get_low() >= self.vertex[u].get_dist()):
                        self.myset.add(u)
            else:
                if (v != self.vertex[u].get_pred() and
                    self.vertex[v].get_dist() < self.vertex[u].get_dist()):
                    # NOTE(review): `get_low` below is missing its call
                    # parentheses, so min() compares the bound method
                    # object instead of the low value — very likely the
                    # cause of the wrong answers noted above.  Confirm
                    # and add the () to fix.
                    self.vertex[u].set_low(min(self.vertex[u].get_low,
                        self.vertex[v].get_dist()))
        self.tempo += 1
        self.vertex[u].set_time(self.tempo)
    def articulation_alternativo(self):
        ''' Articulation points, alternative (working) version. '''
        self.init_grafo()
        for i in self.vertex.keys():
            if (not self.vertex[i].get_visitado()):
                self.ap(i)
        print ('PONTOS DE ARTICULACAO: ')
        for j in enumerate(self.myset):
            print(j)
    def ap(self, u):
        ''' Recursive helper for articulation_alternativo.
        Root is an articulation point when it has 2+ DFS children;
        a non-root when no back edge climbs above it.
        '''
        self.vertex[u].set_visitado(True)
        self.tempo += 1
        self.vertex[u].set_dist(self.tempo)
        self.vertex[u].set_low(self.tempo)
        for v in self.vertex[u].get_adj():
            if (not self.vertex[v].get_visitado()):
                self.vertex[v].set_pred(u)
                self.ap(v)
                self.vertex[u].set_low(min(self.vertex[u].get_low(), self.vertex[v].get_low()))
                if (self.vertex[u].pred == None and self.vertex[v].get_nome() != self.vertex[self.vertex[u].get_adj()[0]].get_nome()):
                    self.myset.add(u)
                if (self.vertex[u].pred != None and
                    self.vertex[v].get_low() >= self.vertex[u].get_dist()):
                    self.myset.add(u)
            elif (v != self.vertex[u].get_pred()):
                self.vertex[u].set_low(min(self.vertex[u].get_low(),
                    self.vertex[v].get_dist()))
    def ponte(self):
        ''' Find and print the bridges of the graph. '''
        self.init_grafo()
        for i in self.vertex.keys():
            if (not self.vertex[i].get_visitado()):
                self.ponte_visit(i)
        print ('PONTES: ')
        for j in enumerate(self.myset):
            print(j)
    def ponte_visit(self, u):
        ''' Recursive helper for ponte: edge (u, v) is a bridge when
        low(v) > disc(u).
        ARGS:
            u: root vertex name
        '''
        self.tempo += 1
        self.vertex[u].set_visitado(True)
        self.vertex[u].set_low(self.tempo)
        self.vertex[u].set_dist(self.tempo)
        for v in self.vertex[u].get_adj():
            if (not self.vertex[v].get_visitado()):
                self.vertex[v].set_pred(u)
                self.ponte_visit(v)
                self.vertex[u].set_low(min(self.vertex[u].get_low(),
                    self.vertex[v].get_low()))
                if (self.vertex[v].get_low() > self.vertex[u].get_dist() ):
                    resp = u + '->' + v
                    self.myset.add(str(resp))
            elif (v != self.vertex[u].get_pred() and
                self.vertex[v].get_dist() < self.vertex[u].get_dist()):
                self.vertex[u].set_low(min(self.vertex[u].get_low(),
                    self.vertex[v].get_dist()))
        self.tempo += 1
        self.vertex[u].set_time(self.tempo)
    def init_ssource(self, s):
        ''' Initialize-single-source for Dijkstra: every distance set to
        infinity (sys.maxint — Python 2), predecessors cleared, source
        distance 0.
        '''
        for v in self.vertex.keys():
            self.vertex[v].set_dist(sys.maxint)
            self.vertex[v].set_pred(None)
        self.vertex[s].set_dist(0)
    def relax(self, u,v):
        ''' Relax edge (u, v); every edge has implicit weight 1. '''
        if (self.vertex[v].get_dist() > (self.vertex[u].get_dist() + 1) ):
            self.vertex[v].set_dist(self.vertex[u].get_dist() + 1)
            self.vertex[v].set_pred(u)
    def extract_min(self, q):
        ''' Extract (and remove) from *q* the name of the vertex with
        minimum distance — the greedy step.  O(n) linear scan; the
        authors deliberately skipped the heap.
        RETURNS:
            the name of the minimum-distance vertex in the queue
        '''
        menor = self.vertex[q[0]].get_dist()
        nmenor = q[0]
        for k in q:
            if (self.vertex[k].get_dist() < menor):
                menor = self.vertex[k].get_dist()
                nmenor = self.vertex[k].get_nome()
        q.remove(nmenor)
        return (nmenor)
    def djikstra(self, s, w):
        ''' Single-source shortest path (Dijkstra, unit edge weights).
        ARGS:
            s: origin vertex name
            w: destination vertex name
        RETURNS:
            the distance from s to w
        '''
        slist = []
        self.init_grafo()
        self.init_ssource(s)
        for k in self.vertex.keys():
            self.mystack.append(k)
        while (len(self.mystack) > 0):
            u = self.extract_min(self.mystack)
            slist.append(self.vertex[u].get_nome())
            for v in self.vertex[u].get_adj():
                self.relax(u,v)
        return (self.vertex[w].get_dist())
    def teste(self, w):
        ''' Debug helper: print the name of w's first neighbour. '''
        print(self.vertex[self.vertex[w].get_adj()[0]].get_nome())
#INICIO:
# Entry point (Python 2): build the graph from stormofswords.csv, print
# the module docstring, then run the analyses.
goftrhones = Grafo()
print __doc__
goftrhones.articulation_alternativo()
goftrhones.pontos_articulacao()
goftrhones.ponte()
print('CAMINHO DJIKSTRA')
print('DIST: {}'.format(goftrhones.djikstra('Elia', 'Lothar')))
print('CAMINHO: ')
goftrhones.caminho_djikstra('Elia', 'Lothar')
|
#This is meant to take two files and compare them
def main():
    """Compare the unique word sets of two user-named files."""
    first = readFile('Provide the name of the first file')
    second = readFile('Provide the name of the second file')
    # Extract and normalize the words of each file.
    firstWords = getWords(first)
    secondWords = getWords(second)
    # Deduplicate by casting each word list to a set.
    firstUnique = getUnique(firstWords)
    secondUnique = getUnique(secondWords)
    # Show both sets, then their various set comparisons.
    printSet('Here is the list of words for the first file', firstUnique)
    print('\n\n')
    printSet('Here is the list of words for the second file', secondUnique)
    compareSets(firstUnique, secondUnique)
#this simple takes an array and casts/returns it as a set
def readFile(prompt):
    """Prompt the user for a filename and return the file's lines.

    Improvement: the with-statement guarantees the handle is closed
    even if readlines() raises (the original closed it manually).
    """
    filename = input('{}: \n'.format(prompt))
    with open(filename, 'r') as f:
        return f.readlines()
#Gets the individual words and normalizes them (lowercase, no trailing or preceding punctuation)
def getWords(original):
#Iterate through each line
newlist = []
for i in original:
#Split the lines by spaces (a typical delimeter in English between words)
line = i.split(' ')
#Add the words in the line to the list.
newlist += line
#Clean up each word in the list, getting rid of . \n "" and ?
cleanlist = []
for i in newlist:
i = i.replace('\n','').replace('.','').replace('!','').replace('?','').replace('"','').replace(',','').replace('\'','')
#ensures than all words are lower case to ensure set is properly unique
i = i.lower()
cleanlist.append(i)
return cleanlist
#Casts any list to a set and returns result to main
def getUnique(array):
uniqueItems = set(array)
return uniqueItems
#Printing sets
def printSet(prompt,theSet):
print('{}:\n------------'.format(prompt))
array = list(theSet)
for i in sorted(array):
print(i)
#Function for comparing sets (intersection, difference, xor )
def compareSets(set1,set2):
print('The words common to both files are:')
displaySet = set1.intersection(set2)
for i in displaySet:
print(i)
print()
print('Uncommon words only in the first file:')
displaySet = set1.difference(set2)
for i in displaySet:
print(i)
print()
print('Uncommon words only in the second file:')
displaySet = set2.difference(set1)
for i in displaySet:
print(i)
print()
print('Uncommon in either file:')
displaySet = set1.symmetric_difference(set2)
for i in displaySet:
print(i)
print()
main() |
# package com.gwittit.client.facebook.entities
import java
from java import *
from com.google.gwt.core.client.JavaScriptObject import JavaScriptObject
class StreamFilter(JavaScriptObject):
    """
    Facebook Stream Filter. Use this to filter stream
    @see <a href="http://wiki.developers.facebook.com/index.php/Stream_filter_%28FQL%29"> Stream Filter </a>
    @author olamar72
    """
    # NOTE(review): this is a pyjamas/GWT-style transliteration of a Java
    # class; the @java.* decorators and @java.native method stubs (empty
    # bodies backed by JavaScript) come from that framework, not plain
    # Python.
    @java.init
    def __init__(self, *a, **kw):
        pass
    @java.protected
    @__init__.register
    @java.typed()
    def __init__(self, ):
        self.__init__._super()
    # Closed set of filter types, mirroring the Java enum.
    @java.enum
    class Type(java.Enum):
        init = ["application", "newsfeed", "friendlist", "network", "publicprofile"]
    @java.final
    @java.native
    def getUid(self):
        """
        The ID of the user whose filters you are querying.
        """
    @java.final
    @java.native
    def getFilterKey(self):
        """
        A key identifying a particular filter for a user's stream. This filter is
        useful to retrieve relevant items from the stream table.
        """
    @java.final
    @java.native
    def getName(self):
        """
        The name of the filter as it appears on the home page.
        """
    @java.final
    @java.native
    def getRank(self):
        """
        A 32-bit int that indicates where the filter appears in the sort.
        """
    @java.final
    @java.native
    def getIconUrl(self):
        """
        The URL to the filter icon. For applications, this is the same as your
        application icon.
        """
    @java.final
    @java.native
    def getIsVisible(self):
        """
        If true, indicates that the filter is visible on the home page. If false,
        the filter is hidden in the More link.
        """
    @java.final
    @java.native
    def getType(self):
        """
        The type of filter. One of application, newsfeed, friendlist, network, or
        publicprofile.
        """
    @java.final
    def getTypeEnum(self):
        """
        Get type as enum
        """
        return Type.valueOf(self.getType())
    # A 64-bit ID for the filter type.
    @java.final
    @java.native
    def getValue(self):
        pass
|
# Plot a polyline whose segments get progressively thinner (Python 2
# script — note the bare `print` statement below).
from pylab import *
x = [1, 2, 3, 4, 5]
y = [1.6, 1.6,1 , 1, 1.5]
width = [5, 2.5, 1.5, .75, .75]
for i in range(len(x)-1):
    #plot(x[i:i+2], y[i:i+2], linewidth=width[i])
    print x[i:i+3]
    # NOTE(review): x[i:i+5] slices from i to the end of the list every
    # iteration (slices clamp at the end), so later segments are drawn
    # repeatedly; the commented line above suggests per-segment slices
    # x[i:i+2] were intended — confirm.
    plot(x[i:i+5], y[i:i+5], linewidth=width[i])
show()
# Program for single-user movie recommendation.
import numpy as np
import pandas as pd
#import math
from contextlib import redirect_stdout

ratings = pd.read_csv('ratings1.csv')
#print(ratings.info)

# rm[a][b] is the rating matrix: a -> movie_id, b -> user_id.
# BUG FIX: np.ndarray(...) returns UNINITIALIZED memory; cells never
# assigned below held garbage that the `!= 0` test later treated as real
# ratings.  np.zeros guarantees unrated cells are 0.
rm = np.zeros(shape=(100000, 944), dtype=float)
data = np.genfromtxt('ratings1.csv', delimiter=',')
print("rating matrix starts here")
for i in range(1, 100000):
    b = int(data[i][0])  # movie id column
    a = int(data[i][1])  # user id column
    rm[a][b] = data[i][2]
print("rating matrix ends here")

similarity = pd.read_csv('similarity1.csv')
# sm[s][t] is the item-item similarity matrix (same zero-init fix).
sm = np.zeros(shape=(84099, 1682), dtype=float)
data1 = np.genfromtxt('similarity1.csv', delimiter=',')
print("similarity matrix starts here")
for n in range(1, 84099):
    s = int(data1[n][0])
    t = int(data1[n][1])
    sm[s][t] = data1[n][2]
print("Similarity matrix has been read and system is ready to predict now....")

# pred is the prediction vector; only 944 users and 50 movies considered.
# Zero-init also fixes pred[0], which was uninitialized yet still fed to
# argsort below.
pred = np.zeros(shape=(51), dtype=float)
th = 0.85
# threshold value is assumed to be 0.85
u1 = input('Enter the user id: ')
u = int(u1)  # hoisted: the user id does not change inside the loops
for i in range(1, 51):
    nr = 0
    dr = 0
    for j in range(1, 51):
        if rm[j][u] != 0:
            # Weighted sum of the user's ratings by item similarity.
            nr = nr + sm[i][j] * rm[j][u]
            dr = dr + sm[i][j]
    # Guard: the original divided unconditionally; with no rated
    # neighbour (dr == 0) that produced a ZeroDivisionError / nan.
    pred[i] = nr / dr if dr != 0 else 0
print("THE RECOMMENDED MOVIE LIST ACC. TO SINGLE LEVEL FILTERING IS : ")
# Indices of the ten highest predictions.
fin = np.argsort(pred)[-10:]
#print(fin)
s = set(fin)
print(s)
|
# kind of an interface
class Plugin_Prototype():
    """Plugin base class — the interface concrete plugins override."""

    def __init__(self):
        # Identification reported by _info().
        self.module_name = "Plugin_Prototype"
        self.module_version = "1.0"

    def set_client(self, client):
        """Attach the client that performs the plugin's actions."""
        self.client = client

    def register_actions(self):
        """Register the actions the client should react to."""
        pass

    def setup(self):
        """Prepare the plugin before running."""
        self._info()

    def load(self):
        """Load previously stored data, if any."""
        pass

    def save(self):
        """Persist the plugin's data, if any."""
        pass

    async def clock(self):
        """Hook for clocked events; triggered by the server ping."""
        pass

    def _info(self):
        print(f"Module Name: {self.module_name}\nModule Version {self.module_version}")
# Print a tongue-in-cheek description for an entered GPA (0.0 - 4.5).
num = float(input("학점을 입력하세요."))
# BUG FIX: the original used closed upper bounds (e.g. 4.2 <= num <= 4.49),
# so values inside the gaps (4.495, 4.195, 2.795, ...) matched no branch
# and fell through to the final else.  Strict upper bounds in a
# descending chain cover the whole range without gaps.
if num == 4.5:
    print("당신의 학점은 {}이며, 설명은 '신' 입니다.".format(num))
elif 4.2 <= num < 4.5:
    print("당신의 학점은 {}이며, 설명은 교수님의 사랑 입니다.".format(num))
elif 3.5 <= num < 4.2:
    print("당신의 학점은 {}이며, 설명은 현 체제의 수호자 입니다.".format(num))
elif 2.8 <= num < 3.5:
    print("당신의 학점은 {}이며, 설명은 일반인 입니다.".format(num))
elif 2.3 <= num < 2.8:
    print("당신의 학점은 {}이며, 설명은 일탈을 꿈꾸는 소시민 입니다.".format(num))
elif 1.75 <= num < 2.3:
    print("당신의 학점은 {}이며, 설명은 오락문화의 선구자 입니다.".format(num))
elif 1.0 <= num < 1.75:
    print("당신의 학점은 {}이며, 설명은 불가촉천민 입니다.".format(num))
elif 0.5 <= num < 1.0:
    print("당신의 학점은 {}이며, 설명은 자벌레 입니다.".format(num))
elif 0 <= num < 0.5:
    print("당신의 학점은 {}이며, 설명은 플랑크톤 입니다.".format(num))
else:
    # Out-of-range input: negative or above 4.5.
    print("당신의 학점은 {}이며, 설명은 시대를 잘못 타고난 사람 입니다.".format(num))
class ZeroError(Exception):
    """Raised when the user enters 0 as a divisor.

    Fix: the original never called Exception.__init__, so e.args was
    empty and str(e) / print(e) produced an empty string.
    """
    def __init__(self, txt):
        # Forward to Exception so str(e) yields the message.
        super().__init__(txt)
        self.txt = txt
x = 100

# Keep asking for a divisor until a non-zero one is given.
while True:
    try:
        y = int(input(f'введите делитель для {x} : '))
        if y == 0:
            # BUG FIX: the message claimed division by a NEGATIVE number is
            # forbidden, but the check is for zero.
            raise ZeroError("На ноль делить нельзя!")
        print(f"{x} разделить на {y} = {x / y}")
        break
    except ZeroError as e:
        print(e)
|
from rest_framework.response import Response
from rest_framework import viewsets, status
from django.core.mail import send_mail
from main.settings import EMAIL_HOST_USER, EMAIL_LIST
from .models import Contact
from .serializers import ContactSerializer
class ContactViewSet(viewsets.ViewSet):
    """
    A simple ViewSet for send emails
    """

    def create(self, request):
        """Persist the contact and e-mail both the client and the staff list."""
        serializer = ContactSerializer(data=request.data)
        # Guard clause: reject malformed payloads up front.
        if not serializer.is_valid():
            return Response({
                'status': 'Bad request',
                'message': 'email could not be created with received data.'
            }, status=status.HTTP_400_BAD_REQUEST)
        contact = Contact(**serializer.validated_data)
        contact.save()
        mail_message = (
            f"Name: {contact.name} \nEmail: {contact.email}\n"
            f"Subject: {contact.subject}\nMessage: {contact.message}"
        )
        # Acknowledge receipt to the client...
        send_mail(
            subject='Confirmation mail',
            message='We received your message, we will contact you as soon as possible',
            from_email=EMAIL_HOST_USER,
            recipient_list=[contact.email],
            fail_silently=False
        )
        # ...and notify the internal distribution list.
        send_mail(
            subject='New Client',
            message=mail_message,
            from_email=EMAIL_HOST_USER,
            recipient_list=EMAIL_LIST,
            fail_silently=False
        )
        return Response(
            serializer.validated_data, status=status.HTTP_201_CREATED
        )
|
# Import the necessary modules for development
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.expected_conditions import presence_of_element_located
import time
import sys
import pandas as pd
import csv
from selenium.webdriver.support import expected_conditions as EC
import selenium.webdriver.support.ui as UI
import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver import ActionChains
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import StaleElementReferenceException
from selenium.webdriver.support import expected_conditions
# Invoke a new Chrome Instance
ff_driver = webdriver.Chrome()
# Implicit wait of up to 30 seconds when locating elements
ff_driver.implicitly_wait(30)
ff_driver.maximize_window()
# Open the QS World University Rankings page
ff_driver.get("https://www.topuniversities.com/university-rankings/world-university-rankings/2021")
# Accumulate one row per criteria line across all universities and write the
# CSV once at the end. (Bug fix: the original overwrote the file with
# df.to_csv inside the loop AND appended whole lists as a single garbled
# DictWriter row, duplicating and corrupting the output.)
rows = []
for i in range(1, 26):
    buttons = ff_driver.find_elements_by_css_selector(
        f"#qs-rankings > tbody > tr:nth-child({i}) > td.uni > div > a.more")
    for button in buttons:
        # Click via JS to avoid overlay/interception issues.
        ff_driver.execute_script("arguments[0].click();", button)
        results = ff_driver.find_elements_by_class_name('criteria')
        header = ff_driver.find_element_by_class_name('title')
        # Bug fix: store the university name TEXT, not the WebElement object
        # (the original wrote element reprs into the CSV).
        uni_name = header.text
        for quote in results:
            for line in quote.text.split('\n'):
                # split('\n') never yields None; test for empty string instead.
                if line:
                    # Split on the first ':' only; partition tolerates a
                    # missing ':' (the original split(':')[1] raised).
                    key, _, value = line.partition(':')
                    rows.append({'name': uni_name, 'key': key, 'value': value})
                else:
                    rows.append({'name': uni_name, 'key': '', 'value': ''})
        # Back to the rankings table for the next university.
        ff_driver.execute_script("window.history.go(-1)")
pd.DataFrame(rows, columns=['name', 'key', 'value']).to_csv('test_output_new_3.csv', index=False)
ff_driver.close()
|
import math
class CandyFactory:
    """Simulation helper for the 'minimum passes' candy problem.

    m -- number of machines, w -- number of workers,
    p -- price (in candies) of one extra machine or worker,
    n -- target number of candies to accumulate.
    Production per pass is m * w.
    """

    def __init__(self, m, w, p, n):
        self.balance = 0                 # candies currently banked
        self.m = m
        self.w = w
        self.p = p
        self.n = n
        self.passes = 0                  # passes elapsed so far
        self.last_prod = self.m * self.w
        self.curr_prod = self.m * self.w

    def make(self, passes=1):
        """Produce candies for `passes` passes at the current m*w rate."""
        self.last_prod = self.m * self.w
        self.curr_prod = self.m * self.w
        self.balance += self.curr_prod * passes
        print('making +', self.curr_prod, ' $:', self.balance, 'in passes of:', passes)

    def max_purchase_items(self, init_balanace=None):
        """Return how many machines/workers the given balance can buy."""
        # Bug fix: explicit None test so a legitimate balance of 0 is not
        # silently replaced by self.balance (0 is falsy).
        if init_balanace is None:
            init_balanace = self.balance
        return math.floor(init_balanace / self.p)

    def buy_projection(self, init_balanace=None, limit=math.inf):
        """Project (purchases, new_m, new_w) if we bought now, splitting the
        total resource count as evenly as possible."""
        if init_balanace is None:
            init_balanace = self.balance
        new_m = 0
        new_w = 0
        mppi = min(limit, self.max_purchase_items(init_balanace))
        resources = self.m + self.w + mppi
        half_resource = math.ceil(resources / 2)
        # NOTE(review): both branches compute the same new_w and only the
        # max() operand differs — the asymmetry looks suspicious; confirm.
        if self.m > self.w:
            new_m = max(self.m, half_resource)
            new_w = resources - half_resource
        else:
            new_m = max(self.w, half_resource)
            new_w = resources - half_resource
        return mppi, new_m, new_w

    def buy(self):
        """Spend the balance on new machines/workers per buy_projection."""
        mppi, new_m, new_w = self.buy_projection()
        self.balance -= mppi * self.p
        print('buying m:', new_m - self.m)
        print('buying w:', new_w - self.w)
        self.m = new_m
        self.w = new_w
        self.curr_prod = self.m * self.w

    def target_reached(self):
        """True once the banked candies meet the target n."""
        return (self.balance >= self.n)

    def jump(self, passes):
        """Fast-forward `passes` passes of pure production."""
        print('jumping of', passes)
        print('from $', self.balance)
        self.make(passes)
        self.pass_(passes)
        print('to $', self.balance)

    def strategy(self):
        """Return True when buying at the next opportunity beats saving."""
        self.curr_prod = self.m * self.w
        days_to_next_buy = math.ceil(max(self.p - self.balance, 0) / self.curr_prod)
        if days_to_next_buy == 0:
            return True
        if days_to_next_buy > 1:
            self.jump(days_to_next_buy)
            return True
        new_balanace = self.balance + days_to_next_buy * self.curr_prod
        _, new_m, new_w = self.buy_projection(new_balanace)
        next_prod = new_m * new_w
        # Bug fix: parenthesize the numerator. The original computed
        # n - (balance / prod), wildly overestimating the remaining days,
        # instead of (n - balance) / prod as in the parallel line below.
        days_at_current_rate = math.ceil((self.n - self.balance) / self.curr_prod)
        days_at_next_rate = math.ceil((self.n - self.balance - next_prod) / next_prod) + days_to_next_buy
        if days_at_current_rate > days_at_next_rate:
            return False
        else:
            return True

    def pass_(self, passes=1):
        """Advance the pass counter."""
        self.passes += passes

    def getPasses(self):
        """Total passes consumed so far."""
        return self.passes

    def __str__(self):
        return f'candies:{self.balance}, m:{self.m}, w:{self.w}, lp:{self.last_prod}, cp:{self.curr_prod}, pass:{self.passes}'
def minimumPasses(m, w, p, n):
    """Run the CandyFactory simulation until the target n is met and
    return the number of passes it consumed."""
    factory = CandyFactory(m, w, p, n)
    while not factory.target_reached():
        print(factory)
        factory.make()
        print(factory.target_reached())
        if factory.target_reached():
            # The pass that crossed the target still counts.
            factory.pass_()
            break
        if factory.strategy():
            factory.buy()
        factory.pass_()
        print(factory)
        print()
    return factory.getPasses()
'''
#m: number of machines
#w: number of workers
#p: purchase or hire cost in candy
#n: candies to accumulate
days = 0
candies = 0
answer = math.ceil(n / (m * w))
run = math.inf
while days < n:
day = 0 if (m > math.inf / w) else (p - candies) / (m*w)
if step < 0:
mw = candy / p
if m >= w + mw:
w += mw
elif w >= m + mw:
m += mw
else:
total = m + w + mw
m = total /2
w = total -m
candy %= p
day = 1
days += day
if day * m > math.inf / w:
candies = math.inf
else:
candy += step * m * w
run = min(run, passes + math.ceil(n - candies) / (m * w))
return min(days, run)
'''
'''
# let's jump coupe of days to the point we have money to invest
if p > candies:
daysNeeded = math.ceil((p - candies) / (m * w))
candies += daysNeeded * m * w
days += daysNeeded
# diff between no of machines and workers
diff = abs(m - w)
# how many we could buy
available = candies // p
# how many we actually buy
purchased = min(diff, available)
if m < w:
# buy machines
m += purchased
else:
# buy workers
w += purchased
# how much is left
rest = available - purchased
m += rest // 2
w += rest - rest // 2
candies -= available * p
# generation
candies += m * w
days += 1
remainingCandies = max(n - candies, 0)
answer = min(answer, days + math.ceil(remainingCandies / (m * w)))
return answer
'''
#print(minimumPasses(3,1,2,12), ' expected: 3')
print(minimumPasses(1,1,6,45), ' expected: 16')
|
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth import get_user_model
from django.db.models import Q
User = get_user_model()
class CustomBackend(ModelBackend):
    """Authentication backend that accepts either username or mobile number."""

    def authenticate(self, request, username=None, password=None, **kwargs):
        """Return the matching user on success, otherwise None."""
        try:
            # Look the user up by username OR mobile number.
            user = User.objects.get(
                Q(username=username) |
                Q(mobile=username)
            )
        except (User.DoesNotExist, User.MultipleObjectsReturned):
            # Bug fix: narrowed from a bare `except Exception`, which also
            # swallowed programming errors (e.g. a bad field name).
            return None
        # Verify the password; return None (implicitly rejected) on mismatch.
        if user.check_password(password):
            return user
        return None
# Read five integers from the user and print their average.
total = 0          # renamed from `sum` to avoid shadowing the builtin
count = 5          # how many numbers are read
a = count
while a > 0:
    num = int(raw_input("enter a integer number"))
    total = total + num
    a = a - 1
# Bug fix: the original divided by 6.0 although only 5 numbers are read.
print "average number of the user input", total / float(count)
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'D:\Pankaj\ABXpulse\REPO\SOURCE\SCRIPTS\PYTHON\LightNPray\lib\MayaAddon\inp_render_setting_ui.ui'
#
# Created: Mon Nov 01 19:26:27 2010
# by: PyQt4 UI code generator 4.4.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_Form(object):
    """Auto-generated PyQt4 UI class for the render-settings dialog.

    Generated by pyuic4 from inp_render_setting_ui.ui — do not hand-edit
    widget construction here; regenerate from the .ui file instead.
    """

    def setupUi(self, Form):
        """Build the widget tree: a two-tab settings view plus a button row."""
        Form.setObjectName("Form")
        Form.resize(447, 504)
        self.gridLayout = QtGui.QGridLayout(Form)
        self.gridLayout.setObjectName("gridLayout")
        self.settingsTab = QtGui.QTabWidget(Form)
        self.settingsTab.setObjectName("settingsTab")
        # --- Tab 1: "Project Globals" ---
        self.projectGlobalsTab = QtGui.QWidget()
        self.projectGlobalsTab.setObjectName("projectGlobalsTab")
        self.gridLayout_21 = QtGui.QGridLayout(self.projectGlobalsTab)
        self.gridLayout_21.setMargin(1)
        self.gridLayout_21.setHorizontalSpacing(1)
        self.gridLayout_21.setObjectName("gridLayout_21")
        self.scrollArea = QtGui.QScrollArea(self.projectGlobalsTab)
        self.scrollArea.setWidgetResizable(True)
        self.scrollArea.setObjectName("scrollArea")
        self.scrollAreaWidgetContents = QtGui.QWidget(self.scrollArea)
        self.scrollAreaWidgetContents.setGeometry(QtCore.QRect(0, 0, 419, 425))
        self.scrollAreaWidgetContents.setObjectName("scrollAreaWidgetContents")
        self.gridLayout_24 = QtGui.QGridLayout(self.scrollAreaWidgetContents)
        self.gridLayout_24.setSpacing(2)
        self.gridLayout_24.setObjectName("gridLayout_24")
        # "Image" group: resolution / format / depth combo boxes
        self.groupBox_5 = QtGui.QGroupBox(self.scrollAreaWidgetContents)
        self.groupBox_5.setObjectName("groupBox_5")
        self.formLayout_6 = QtGui.QFormLayout(self.groupBox_5)
        self.formLayout_6.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
        self.formLayout_6.setContentsMargins(9, 9, -1, -1)
        self.formLayout_6.setObjectName("formLayout_6")
        self.label_15 = QtGui.QLabel(self.groupBox_5)
        self.label_15.setMinimumSize(QtCore.QSize(75, 0))
        self.label_15.setObjectName("label_15")
        self.formLayout_6.setWidget(0, QtGui.QFormLayout.LabelRole, self.label_15)
        self.label_17 = QtGui.QLabel(self.groupBox_5)
        self.label_17.setMinimumSize(QtCore.QSize(75, 0))
        self.label_17.setObjectName("label_17")
        self.formLayout_6.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_17)
        self.label_18 = QtGui.QLabel(self.groupBox_5)
        self.label_18.setMinimumSize(QtCore.QSize(75, 0))
        self.label_18.setObjectName("label_18")
        self.formLayout_6.setWidget(2, QtGui.QFormLayout.LabelRole, self.label_18)
        self.resolutionPrjCB = QtGui.QComboBox(self.groupBox_5)
        self.resolutionPrjCB.setObjectName("resolutionPrjCB")
        self.formLayout_6.setWidget(0, QtGui.QFormLayout.FieldRole, self.resolutionPrjCB)
        self.imageFormatCB = QtGui.QComboBox(self.groupBox_5)
        self.imageFormatCB.setObjectName("imageFormatCB")
        self.formLayout_6.setWidget(1, QtGui.QFormLayout.FieldRole, self.imageFormatCB)
        self.imageDepthCB = QtGui.QComboBox(self.groupBox_5)
        self.imageDepthCB.setObjectName("imageDepthCB")
        self.formLayout_6.setWidget(2, QtGui.QFormLayout.FieldRole, self.imageDepthCB)
        self.gridLayout_24.addWidget(self.groupBox_5, 2, 0, 1, 1)
        # "Render" group: renderer / engine / stereo toggle
        self.groupBox_4 = QtGui.QGroupBox(self.scrollAreaWidgetContents)
        self.groupBox_4.setObjectName("groupBox_4")
        self.formLayout_4 = QtGui.QFormLayout(self.groupBox_4)
        self.formLayout_4.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
        self.formLayout_4.setObjectName("formLayout_4")
        self.label_7 = QtGui.QLabel(self.groupBox_4)
        self.label_7.setMinimumSize(QtCore.QSize(75, 0))
        self.label_7.setObjectName("label_7")
        self.formLayout_4.setWidget(0, QtGui.QFormLayout.LabelRole, self.label_7)
        self.isStereoPrjChkB = QtGui.QCheckBox(self.groupBox_4)
        self.isStereoPrjChkB.setObjectName("isStereoPrjChkB")
        self.formLayout_4.setWidget(2, QtGui.QFormLayout.FieldRole, self.isStereoPrjChkB)
        self.label_19 = QtGui.QLabel(self.groupBox_4)
        self.label_19.setMinimumSize(QtCore.QSize(75, 0))
        self.label_19.setObjectName("label_19")
        self.formLayout_4.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_19)
        self.rendererCB = QtGui.QComboBox(self.groupBox_4)
        self.rendererCB.setObjectName("rendererCB")
        self.formLayout_4.setWidget(0, QtGui.QFormLayout.FieldRole, self.rendererCB)
        self.renderEngineCB = QtGui.QComboBox(self.groupBox_4)
        self.renderEngineCB.setObjectName("renderEngineCB")
        self.formLayout_4.setWidget(1, QtGui.QFormLayout.FieldRole, self.renderEngineCB)
        self.gridLayout_24.addWidget(self.groupBox_4, 1, 0, 1, 1)
        # "Aspect Ratio" group: device / pixel ratio spin boxes
        self.groupBox_6 = QtGui.QGroupBox(self.scrollAreaWidgetContents)
        self.groupBox_6.setObjectName("groupBox_6")
        self.formLayout_5 = QtGui.QFormLayout(self.groupBox_6)
        self.formLayout_5.setObjectName("formLayout_5")
        self.label_20 = QtGui.QLabel(self.groupBox_6)
        self.label_20.setMinimumSize(QtCore.QSize(75, 0))
        self.label_20.setObjectName("label_20")
        self.formLayout_5.setWidget(0, QtGui.QFormLayout.LabelRole, self.label_20)
        self.label_21 = QtGui.QLabel(self.groupBox_6)
        self.label_21.setMinimumSize(QtCore.QSize(75, 0))
        self.label_21.setObjectName("label_21")
        self.formLayout_5.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_21)
        self.deviceDSB = QtGui.QDoubleSpinBox(self.groupBox_6)
        self.deviceDSB.setProperty("value", QtCore.QVariant(1.77))
        self.deviceDSB.setObjectName("deviceDSB")
        self.formLayout_5.setWidget(0, QtGui.QFormLayout.FieldRole, self.deviceDSB)
        self.pixelDSB = QtGui.QDoubleSpinBox(self.groupBox_6)
        self.pixelDSB.setProperty("value", QtCore.QVariant(1.0))
        self.pixelDSB.setObjectName("pixelDSB")
        self.formLayout_5.setWidget(1, QtGui.QFormLayout.FieldRole, self.pixelDSB)
        self.gridLayout_24.addWidget(self.groupBox_6, 3, 0, 1, 1)
        # "Maya" group: read-only version display
        self.groupBox_7 = QtGui.QGroupBox(self.scrollAreaWidgetContents)
        self.groupBox_7.setObjectName("groupBox_7")
        self.formLayout_7 = QtGui.QFormLayout(self.groupBox_7)
        self.formLayout_7.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
        self.formLayout_7.setObjectName("formLayout_7")
        self.label_9 = QtGui.QLabel(self.groupBox_7)
        self.label_9.setMinimumSize(QtCore.QSize(75, 0))
        self.label_9.setObjectName("label_9")
        self.formLayout_7.setWidget(0, QtGui.QFormLayout.LabelRole, self.label_9)
        self.mayaVersionLE = QtGui.QLineEdit(self.groupBox_7)
        self.mayaVersionLE.setEnabled(False)
        self.mayaVersionLE.setObjectName("mayaVersionLE")
        self.formLayout_7.setWidget(0, QtGui.QFormLayout.FieldRole, self.mayaVersionLE)
        self.gridLayout_24.addWidget(self.groupBox_7, 0, 0, 1, 1)
        spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.gridLayout_24.addItem(spacerItem, 4, 0, 1, 1)
        self.scrollArea.setWidget(self.scrollAreaWidgetContents)
        self.gridLayout_21.addWidget(self.scrollArea, 0, 0, 1, 1)
        self.settingsTab.addTab(self.projectGlobalsTab, "")
        # --- Tab 2: "Scene Settings" ---
        self.sceneSettingsTab = QtGui.QWidget()
        self.sceneSettingsTab.setObjectName("sceneSettingsTab")
        self.gridLayout_26 = QtGui.QGridLayout(self.sceneSettingsTab)
        self.gridLayout_26.setMargin(1)
        self.gridLayout_26.setHorizontalSpacing(1)
        self.gridLayout_26.setObjectName("gridLayout_26")
        self.scrollArea_2 = QtGui.QScrollArea(self.sceneSettingsTab)
        self.scrollArea_2.setWidgetResizable(True)
        self.scrollArea_2.setObjectName("scrollArea_2")
        self.scrollAreaWidgetContents_2 = QtGui.QWidget(self.scrollArea_2)
        self.scrollAreaWidgetContents_2.setGeometry(QtCore.QRect(0, 0, 403, 456))
        self.scrollAreaWidgetContents_2.setObjectName("scrollAreaWidgetContents_2")
        self.gridLayout_25 = QtGui.QGridLayout(self.scrollAreaWidgetContents_2)
        self.gridLayout_25.setSpacing(2)
        self.gridLayout_25.setObjectName("gridLayout_25")
        # "Files" group: work file / render type / render-as
        self.groupBox_2 = QtGui.QGroupBox(self.scrollAreaWidgetContents_2)
        self.groupBox_2.setObjectName("groupBox_2")
        self.formLayout_2 = QtGui.QFormLayout(self.groupBox_2)
        self.formLayout_2.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
        self.formLayout_2.setObjectName("formLayout_2")
        self.workFileLabel = QtGui.QLabel(self.groupBox_2)
        self.workFileLabel.setMinimumSize(QtCore.QSize(75, 0))
        self.workFileLabel.setObjectName("workFileLabel")
        self.formLayout_2.setWidget(0, QtGui.QFormLayout.LabelRole, self.workFileLabel)
        self.workFileLE = QtGui.QLineEdit(self.groupBox_2)
        self.workFileLE.setEnabled(False)
        self.workFileLE.setObjectName("workFileLE")
        self.formLayout_2.setWidget(0, QtGui.QFormLayout.FieldRole, self.workFileLE)
        self.renderTypeLabel = QtGui.QLabel(self.groupBox_2)
        self.renderTypeLabel.setObjectName("renderTypeLabel")
        self.formLayout_2.setWidget(1, QtGui.QFormLayout.LabelRole, self.renderTypeLabel)
        self.renderTypeCB = QtGui.QComboBox(self.groupBox_2)
        self.renderTypeCB.setObjectName("renderTypeCB")
        self.formLayout_2.setWidget(1, QtGui.QFormLayout.FieldRole, self.renderTypeCB)
        self.renderAsLabel = QtGui.QLabel(self.groupBox_2)
        self.renderAsLabel.setObjectName("renderAsLabel")
        self.formLayout_2.setWidget(2, QtGui.QFormLayout.LabelRole, self.renderAsLabel)
        self.charBgCB = QtGui.QComboBox(self.groupBox_2)
        self.charBgCB.setObjectName("charBgCB")
        self.formLayout_2.setWidget(2, QtGui.QFormLayout.FieldRole, self.charBgCB)
        self.gridLayout_25.addWidget(self.groupBox_2, 0, 0, 1, 1)
        # "Camera" group: camera picker / stereo toggle
        self.groupBox_3 = QtGui.QGroupBox(self.scrollAreaWidgetContents_2)
        self.groupBox_3.setObjectName("groupBox_3")
        self.formLayout_3 = QtGui.QFormLayout(self.groupBox_3)
        self.formLayout_3.setObjectName("formLayout_3")
        self.label_23 = QtGui.QLabel(self.groupBox_3)
        self.label_23.setMinimumSize(QtCore.QSize(75, 0))
        self.label_23.setObjectName("label_23")
        self.formLayout_3.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_23)
        self.cameraCB = QtGui.QComboBox(self.groupBox_3)
        self.cameraCB.setObjectName("cameraCB")
        self.formLayout_3.setWidget(1, QtGui.QFormLayout.FieldRole, self.cameraCB)
        self.isStereoChkB = QtGui.QCheckBox(self.groupBox_3)
        self.isStereoChkB.setObjectName("isStereoChkB")
        self.formLayout_3.setWidget(0, QtGui.QFormLayout.FieldRole, self.isStereoChkB)
        self.gridLayout_25.addWidget(self.groupBox_3, 7, 0, 1, 1)
        spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.gridLayout_25.addItem(spacerItem1, 9, 0, 1, 1)
        # "Frames" group: frame-range toggle / start / end / explicit range
        self.groupBox = QtGui.QGroupBox(self.scrollAreaWidgetContents_2)
        self.groupBox.setObjectName("groupBox")
        self.formLayout = QtGui.QFormLayout(self.groupBox)
        self.formLayout.setObjectName("formLayout")
        self.isFrameRangeChkB = QtGui.QCheckBox(self.groupBox)
        self.isFrameRangeChkB.setObjectName("isFrameRangeChkB")
        self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.isFrameRangeChkB)
        self.startFrameSB = QtGui.QSpinBox(self.groupBox)
        self.startFrameSB.setMaximumSize(QtCore.QSize(80, 16777215))
        self.startFrameSB.setMaximum(999999999)
        self.startFrameSB.setProperty("value", QtCore.QVariant(1001))
        self.startFrameSB.setObjectName("startFrameSB")
        self.formLayout.setWidget(3, QtGui.QFormLayout.FieldRole, self.startFrameSB)
        self.label_5 = QtGui.QLabel(self.groupBox)
        self.label_5.setObjectName("label_5")
        self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.label_5)
        self.endFrameSB = QtGui.QSpinBox(self.groupBox)
        self.endFrameSB.setMaximumSize(QtCore.QSize(80, 16777215))
        self.endFrameSB.setMaximum(999999999)
        self.endFrameSB.setProperty("value", QtCore.QVariant(1101))
        self.endFrameSB.setObjectName("endFrameSB")
        self.formLayout.setWidget(4, QtGui.QFormLayout.FieldRole, self.endFrameSB)
        self.label_22 = QtGui.QLabel(self.groupBox)
        self.label_22.setObjectName("label_22")
        self.formLayout.setWidget(4, QtGui.QFormLayout.LabelRole, self.label_22)
        self.frameRangeLE = QtGui.QLineEdit(self.groupBox)
        self.frameRangeLE.setEnabled(False)
        self.frameRangeLE.setObjectName("frameRangeLE")
        self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.frameRangeLE)
        self.label_6 = QtGui.QLabel(self.groupBox)
        self.label_6.setMinimumSize(QtCore.QSize(75, 0))
        self.label_6.setObjectName("label_6")
        self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.label_6)
        self.gridLayout_25.addWidget(self.groupBox, 6, 0, 1, 1)
        # "Image" group (scene tab): resolution / render percentage
        self.groupBox_8 = QtGui.QGroupBox(self.scrollAreaWidgetContents_2)
        self.groupBox_8.setObjectName("groupBox_8")
        self.formLayout_8 = QtGui.QFormLayout(self.groupBox_8)
        self.formLayout_8.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
        self.formLayout_8.setContentsMargins(9, 9, -1, -1)
        self.formLayout_8.setObjectName("formLayout_8")
        self.label_24 = QtGui.QLabel(self.groupBox_8)
        self.label_24.setMinimumSize(QtCore.QSize(75, 0))
        self.label_24.setObjectName("label_24")
        self.formLayout_8.setWidget(0, QtGui.QFormLayout.LabelRole, self.label_24)
        self.label_25 = QtGui.QLabel(self.groupBox_8)
        self.label_25.setMinimumSize(QtCore.QSize(75, 0))
        self.label_25.setObjectName("label_25")
        self.formLayout_8.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_25)
        self.sceneResCB = QtGui.QComboBox(self.groupBox_8)
        self.sceneResCB.setObjectName("sceneResCB")
        self.formLayout_8.setWidget(0, QtGui.QFormLayout.FieldRole, self.sceneResCB)
        self.percentRenderSB = QtGui.QSpinBox(self.groupBox_8)
        self.percentRenderSB.setMinimum(10)
        self.percentRenderSB.setMaximum(100)
        self.percentRenderSB.setProperty("value", QtCore.QVariant(100))
        self.percentRenderSB.setObjectName("percentRenderSB")
        self.formLayout_8.setWidget(1, QtGui.QFormLayout.FieldRole, self.percentRenderSB)
        self.curntRezLabel = QtGui.QLabel(self.groupBox_8)
        self.curntRezLabel.setObjectName("curntRezLabel")
        self.formLayout_8.setWidget(2, QtGui.QFormLayout.FieldRole, self.curntRezLabel)
        self.gridLayout_25.addWidget(self.groupBox_8, 1, 0, 1, 1)
        self.scrollArea_2.setWidget(self.scrollAreaWidgetContents_2)
        self.gridLayout_26.addWidget(self.scrollArea_2, 0, 0, 1, 1)
        self.settingsTab.addTab(self.sceneSettingsTab, "")
        self.gridLayout.addWidget(self.settingsTab, 0, 0, 1, 3)
        # --- Bottom button row: update / spacer / launch ---
        spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout.addItem(spacerItem2, 1, 1, 1, 1)
        self.renderButton = QtGui.QToolButton(Form)
        self.renderButton.setMinimumSize(QtCore.QSize(70, 25))
        self.renderButton.setMaximumSize(QtCore.QSize(16777215, 40))
        self.renderButton.setObjectName("renderButton")
        self.gridLayout.addWidget(self.renderButton, 1, 2, 1, 1)
        self.updateFileButton = QtGui.QToolButton(Form)
        self.updateFileButton.setMinimumSize(QtCore.QSize(70, 25))
        self.updateFileButton.setMaximumSize(QtCore.QSize(16777215, 40))
        self.updateFileButton.setObjectName("updateFileButton")
        self.gridLayout.addWidget(self.updateFileButton, 1, 0, 1, 1)
        self.retranslateUi(Form)
        self.settingsTab.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        """Apply all user-visible (translatable) strings to the widgets."""
        Form.setWindowTitle(QtGui.QApplication.translate("Form", "Render Settings", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox_5.setTitle(QtGui.QApplication.translate("Form", "Image", None, QtGui.QApplication.UnicodeUTF8))
        self.label_15.setText(QtGui.QApplication.translate("Form", "Resolution", None, QtGui.QApplication.UnicodeUTF8))
        self.label_17.setText(QtGui.QApplication.translate("Form", "Image Format", None, QtGui.QApplication.UnicodeUTF8))
        self.label_18.setText(QtGui.QApplication.translate("Form", "Image Depth", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox_4.setTitle(QtGui.QApplication.translate("Form", "Render", None, QtGui.QApplication.UnicodeUTF8))
        self.label_7.setText(QtGui.QApplication.translate("Form", "Renderer", None, QtGui.QApplication.UnicodeUTF8))
        self.isStereoPrjChkB.setText(QtGui.QApplication.translate("Form", "Stereo Render", None, QtGui.QApplication.UnicodeUTF8))
        self.label_19.setText(QtGui.QApplication.translate("Form", "Render Engine", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox_6.setTitle(QtGui.QApplication.translate("Form", "Aspect Ratio", None, QtGui.QApplication.UnicodeUTF8))
        self.label_20.setText(QtGui.QApplication.translate("Form", "Device", None, QtGui.QApplication.UnicodeUTF8))
        self.label_21.setText(QtGui.QApplication.translate("Form", "Pixel", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox_7.setTitle(QtGui.QApplication.translate("Form", "Maya", None, QtGui.QApplication.UnicodeUTF8))
        self.label_9.setText(QtGui.QApplication.translate("Form", "Version", None, QtGui.QApplication.UnicodeUTF8))
        self.settingsTab.setTabText(self.settingsTab.indexOf(self.projectGlobalsTab), QtGui.QApplication.translate("Form", "Project Globals", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox_2.setTitle(QtGui.QApplication.translate("Form", "Files", None, QtGui.QApplication.UnicodeUTF8))
        self.workFileLabel.setText(QtGui.QApplication.translate("Form", "Work File", None, QtGui.QApplication.UnicodeUTF8))
        self.renderTypeLabel.setText(QtGui.QApplication.translate("Form", "Render Type", None, QtGui.QApplication.UnicodeUTF8))
        self.renderAsLabel.setText(QtGui.QApplication.translate("Form", "Render As", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox_3.setTitle(QtGui.QApplication.translate("Form", "Camera", None, QtGui.QApplication.UnicodeUTF8))
        self.label_23.setText(QtGui.QApplication.translate("Form", "Camera", None, QtGui.QApplication.UnicodeUTF8))
        self.isStereoChkB.setText(QtGui.QApplication.translate("Form", "Stereo Render", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox.setTitle(QtGui.QApplication.translate("Form", "Frames", None, QtGui.QApplication.UnicodeUTF8))
        self.isFrameRangeChkB.setText(QtGui.QApplication.translate("Form", "Frame Range (eg:- 1-10,15, 20-25)", None, QtGui.QApplication.UnicodeUTF8))
        self.label_5.setText(QtGui.QApplication.translate("Form", "Start Frame", None, QtGui.QApplication.UnicodeUTF8))
        self.label_22.setText(QtGui.QApplication.translate("Form", "End Frame", None, QtGui.QApplication.UnicodeUTF8))
        self.label_6.setText(QtGui.QApplication.translate("Form", "Frames", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox_8.setTitle(QtGui.QApplication.translate("Form", "Image", None, QtGui.QApplication.UnicodeUTF8))
        self.label_24.setText(QtGui.QApplication.translate("Form", "Resolution", None, QtGui.QApplication.UnicodeUTF8))
        self.label_25.setText(QtGui.QApplication.translate("Form", "Percentage render", None, QtGui.QApplication.UnicodeUTF8))
        self.settingsTab.setTabText(self.settingsTab.indexOf(self.sceneSettingsTab), QtGui.QApplication.translate("Form", "Scene Settings", None, QtGui.QApplication.UnicodeUTF8))
        self.renderButton.setText(QtGui.QApplication.translate("Form", "Launch", None, QtGui.QApplication.UnicodeUTF8))
        self.updateFileButton.setText(QtGui.QApplication.translate("Form", "Update Scene File", None, QtGui.QApplication.UnicodeUTF8))
|
# Generated by Django 2.1.7 on 2019-03-23 13:29
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration altering the `Detail` field of the `data`
    model (TextField, optional, max_length=1000). Do not hand-edit."""

    dependencies = [
        ('homedetail', '0012_auto_20190320_1438'),
    ]

    operations = [
        migrations.AlterField(
            model_name='data',
            name='Detail',
            field=models.TextField(blank=True, help_text='Enter a brief description of the data', max_length=1000, null=True),
        ),
    ]
|
import sys
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
from . import session
class Test_Ipwd(session.make_sessions_mixin([('otherrods', 'rods')], []), unittest.TestCase):
    """iRODS icommand tests for `ipwd`, run under a single admin session."""

    def setUp(self):
        super(Test_Ipwd, self).setUp()
        # The mixin creates one admin session for ('otherrods', 'rods').
        self.admin = self.admin_sessions[0]

    def tearDown(self):
        super(Test_Ipwd, self).tearDown()

    def test_collection_with_backslash_in_name_does_not_cause_problems__issue_4060(self):
        # Collection name contains a literal backslash at runtime.
        col = 'issue_4060_test\\folder'
        self.admin.assert_icommand(['imkdir', col])
        self.admin.assert_icommand('ils', 'STDOUT', col)
        self.admin.assert_icommand(['icd', col])
        # ipwd must echo the backslash-containing collection correctly.
        self.admin.assert_icommand('ipwd', 'STDOUT', col)
        self.admin.assert_icommand('ils', 'STDOUT', col)
|
import urllib.error
import urllib.request
import scrapy
class DailyHighLowPerChanceOfRain(scrapy.Spider):
    """Spider that resolves the 'latest' NDFD THREDDS catalog entry and
    downloads the matching GRIB2 dataset via the fileServer endpoint."""
    name = "DailyHighLowPerChanceOfRain"

    def start_requests(self):
        # Catalog page whose first link points at the latest dataset.
        start_url = 'https://tds.scigw.unidata.ucar.edu/thredds/catalog/grib/NCEP/NDFD/NWS/CONUS/NOAAPORT/latest.html'
        yield scrapy.Request(start_url, callback=self.parse, meta={'start_url': start_url})

    def parse(self, response):
        """Turn the 'latest.html' URL into the concrete dataset page URL."""
        start_url = response.meta['start_url']
        # Assumes the first table link is the latest dataset — TODO confirm.
        link = response.css('tr td a::attr(href)').get()
        url = start_url.replace('latest.html', str(link))
        yield scrapy.Request(url, callback=self.download_data)

    def download_data(self, response):
        """Fetch every fileServer link found on the dataset page.

        NOTE(review): every matching link is written to the SAME output
        path, so later downloads overwrite earlier ones — confirm intended.
        """
        base_url = 'https://tds.scigw.unidata.ucar.edu'
        dl_links = response.css('html body ol li a::attr(href)').getall()
        for link in dl_links:
            if 'fileServer' in link:
                download_link = base_url + link
                try:
                    urllib.request.urlretrieve(download_link, '../../../output/DHLPCoR_data.grb2')
                except urllib.error.URLError:
                    print('Dataset download link not found.')
|
# /ticket/<id>/vote/<user> --> request payload carries the vote
# /ticket/<id>/close --> no further voting possible after closing
# /ticket/<id>
# @app.route('/ticket')
# def user():
#     return '{"status":"user"}'
def heapify(arr, size, index):
    """Sift arr[index] down so its subtree satisfies the max-heap property.

    arr   -- list being heapified (mutated in place)
    size  -- number of valid elements in arr
    index -- root of the subtree to fix
    Returns arr for caller convenience.
    """
    largest = index
    left = 2 * index + 1
    right = 2 * index + 2
    if left < size and arr[left] > arr[largest]:
        largest = left
    # Bug fix: compare against the CURRENT largest, not arr[index];
    # otherwise the smaller of two larger children could be promoted,
    # leaving a child greater than its parent.
    if right < size and arr[right] > arr[largest]:
        largest = right
    if largest != index:
        arr[index], arr[largest] = arr[largest], arr[index]
        # Removed stray debug print("Swapped").
        heapify(arr, size, largest)
    return arr
def insert(arr, el):
    """Append el to the heap and restore the max-heap property."""
    # The original branched on len(arr) == 0, but an append followed by an
    # (empty) heapify sweep behaves identically for a one-element list.
    arr.append(el)
    # Re-heapify every internal node, deepest first.
    for root in range(len(arr) // 2 - 1, -1, -1):
        heapify(arr, len(arr), root)
def search(arr, el):
    """Return the index of the first occurrence of el in arr, or -1."""
    for i, value in enumerate(arr):
        if value == el:
            return i
    # Bug fix: the original fell through and returned None (not -1)
    # when the list was empty.
    return -1
def delete(arr, el):
    """Remove one occurrence of el from the max-heap arr and re-heapify.

    Returns True when an element was removed, False when el is absent.
    """
    index = search(arr, el)
    if index == -1:
        return False
    # Swap the doomed element to the end and drop it.  Bug fix: the original
    # called arr.remove(el) AFTER the swap, which rescans the list and, with
    # duplicates, removes a different (earlier) occurrence than the swapped one.
    arr[index], arr[-1] = arr[-1], arr[index]
    arr.pop()
    for root in range(len(arr) // 2 - 1, -1, -1):
        heapify(arr, len(arr), root)
    return True
# arr1 = [3, 9, 2, 1, 4, 5]
# for i in range(len(arr1), -1, -1):
#     arr1 = heapify(arr1, len(arr1), i)
# Demo: build a max-heap one element at a time, then exercise delete().
arr1 = []
insert(arr1, 3)
insert(arr1, 4)
insert(arr1, 9)
insert(arr1, 5)
insert(arr1, 2)
print(arr1)
delete(arr1, 4)
print(arr1)
|
import os
import django.core.handlers.wsgi
# Point Django at the client project's settings before building the handler.
os.environ['DJANGO_SETTINGS_MODULE'] = 'example_client.settings'
# WSGI entry point picked up by the web server (mod_wsgi, gunicorn, ...).
application = django.core.handlers.wsgi.WSGIHandler()
|
from heapq import heapify, heappush, heappop
class Solution:
    """Running K-th largest: for every prefix of A, report the K-th largest
    element seen so far, or -1 while fewer than K elements exist."""

    # @param A : integer (rank K)
    # @param B : list of integers
    # @return a list of integers
    def solve(self, K, A):
        ans = []
        # Min-heap of the K largest elements seen so far; its root
        # (minheap[0]) is therefore the K-th largest.
        minheap = []
        for i, num in enumerate(A):
            if len(minheap) < K:
                heappush(minheap, num)
            elif num > minheap[0]:
                # num belongs in the top-K: evict the current smallest.
                # (Replaces the original pop-then-conditionally-push-back.)
                heappop(minheap)
                heappush(minheap, num)
            if i < K - 1:
                ans.append(-1)
            else:
                # Peek at the root instead of the original pop/push round trip.
                ans.append(minheap[0])
        return ans
# Manual smoke test: run this module directly to print the rolling output.
if __name__ == '__main__':
    print(Solution().solve(4, [1,2,3,4,5,6]))
import os
import sys
# Launch MS Paint; os.system blocks until the spawned process exits.
os.system('mspaint')
print('结束进程才能执行')
print(os.path)
print(sys.path)
# Open a specific image in MS Paint (Windows-only absolute path).
os.system('mspaint C:\\Users\\GaoAolei\\Desktop\\1.png')
# List a directory through the shell.
os.system('dir d:\\pycharmtest')
print('%x' %512) # print 512 in hexadecimal
|
import numpy as np
IMG_PX_SIZE = 80   # width/height each image slice is resized to
HM_SLICES = 16     # number of slices per scan (per the dataset filename)
LR = 1e-3          # learning rate
MODEL_NAME = 'boldvst1w-{}-{}.model.tflearn'.format(LR, '2conv')
# Preprocessed dataset of (image, label) pairs; assumed 80x80x16 per the
# filename — TODO confirm against the preprocessing script.
train_data = np.load('muchdata-80-80-16.npy')
import tflearn
from tflearn.layers.conv import conv_2d, max_pool_2d
from tflearn.layers.core import input_data, dropout, fully_connected
from tflearn.layers.estimator import regression
# Four conv/max-pool stages followed by a fully-connected head.
convnet = input_data(shape=[None,IMG_PX_SIZE,IMG_PX_SIZE,1], name='input')
convnet = conv_2d(convnet, 32, 2, activation='relu')
convnet = max_pool_2d(convnet,2)
convnet = conv_2d(convnet, 64, 2, activation='relu')
convnet = max_pool_2d(convnet,2)
convnet = conv_2d(convnet, 32, 2, activation='relu')
convnet = max_pool_2d(convnet,2)
convnet = conv_2d(convnet, 64, 2, activation='relu')
convnet = max_pool_2d(convnet,2)
convnet = fully_connected(convnet, 1024, activation='relu')
convnet = dropout(convnet, 0.8)
# Two-way softmax output layer.
convnet = fully_connected(convnet, 2, activation='softmax')
convnet = regression(convnet, optimizer='adam', learning_rate=LR,loss='categorical_crossentropy', name='targets')
model = tflearn.DNN(convnet, tensorboard_dir='log')
# Hold out the last 400 samples for validation.
train = train_data[:-400]
test = train_data[-400:]
X = np.array([i[0] for i in train]).reshape(-1, IMG_PX_SIZE,IMG_PX_SIZE, 1)
Y = [i[1] for i in train]
test_x = np.array([i[0] for i in test]).reshape(-1, IMG_PX_SIZE,IMG_PX_SIZE, 1)
test_y = [i[1] for i in test]
model.fit({'input':X},{'targets': Y}, n_epoch=3, validation_set=({'input':test_x},{'targets':test_y}),
          snapshot_step=500, show_metric=True, run_id=MODEL_NAME)
model.save('model.tflearn')
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 4 16:41:40 2019
@author: z5075710, The Minh Tran
"""
#Need to validate using isinstance(x, int)/(y, str)
from Order import Order
class Staff:
    """A staff member who manages customer orders and marks them ready."""

    def __init__(self, username=None, password=None):
        self._order = []       # pending orders
        self._orderReady = []  # orders already marked ready
        self._username = username
        self._password = password

    # Functions involving username
    @property
    def username(self):
        print('Getting username')  # For testing purposes, comment this line of code out once done
        return self._username

    @username.setter
    def username(self, username):
        print('Setting username')  # For testing purposes, comment this line of code out once done
        self._username = username

    # Functions involving password
    @property
    def password(self):
        print('Getting password')  # For testing purposes, comment this line of code out once done
        return self._password

    @password.setter
    def password(self, password):
        print('Setting password')  # For testing purposes, comment this line of code out once done
        self._password = password

    # Functions involving order
    @property
    def order(self):
        print('Getting order list')  # For testing purposes, comment this line of code out once done
        return self._order

    def addOrder(self, order):
        print('Adding order ... \n')  # For testing purposes, comment this line of code out once done
        self._order.append(order)

    def deleteOrder(self, order):
        # Bug fix: this was decorated with @order.deleter, which rebinds the
        # name `deleteOrder` to a property; the extra `order` argument could
        # then never be passed and deletion always raised. Plain method now.
        if order in self._order:
            print('Deleting order')  # For testing purposes, comment this line of code out once done
            self._order.remove(order)
        else:
            print('No order exists')

    @property
    def viewOrder(self):
        # Shows only the first pending order (IndexError if the list is empty).
        print(self._order[0])

    def viewAllOrder(self):
        for o in self._order:
            print(o)

    def setStatus(self, orderIndex, status='Ready'):
        """Mark the order at orderIndex ready and move it to the ready list."""
        print('Setting status ...\n')
        self._order[orderIndex].setStatus(status)
        self._orderReady.append(self._order[orderIndex])
        self._order.remove(self._order[orderIndex])

    def checkStatus(self, orderID):
        """Print the status of the ready order matching orderID, if any."""
        print('Checking status ... \n')
        # Bug fixes: compare IDs with == (the original used `is`, which fails
        # for equal-but-distinct objects) and print 'Not ready' once instead
        # of once per non-matching order.
        for o in self._orderReady:
            if orderID == o.orderID:
                print(f'{o.status}')
                return
        print('Not ready')
|
# -*- coding: utf-8 -*-
import sys
from PyQt4 import QtCore, QtGui, uic
import pwd, grp
import pickle, operator
from os.path import expanduser
import os
# Minimum UID treated as a regular (managed) account; entries from
# pwd.getpwall() with uid below this are skipped throughout the app.
idstart = 1000
form_class = uic.loadUiType("ui/main.ui")[0]  # Load the main UI
dial1 = uic.loadUiType("ui/dial1.ui")[0]  # Load the UI for profile managing
assign = uic.loadUiType("ui/assign.ui")[0]  # Load the UI for profile assign
stats = uic.loadUiType("ui/stats.ui")[0]  # Load the UI for the statistics view
class StatsForm(QtGui.QDialog, stats):
    """Dialog that shows per-user time-tracking statistics.

    Scans every managed account (uid >= idstart), loads the pickled
    stats file from that user's home directory and renders a plain-text
    report into the dialog's text edit.
    """

    def __init__(self, parent=None):
        global idstart
        QtGui.QWidget.__init__(self, parent)
        self.setupUi(self)
        self.data = parent.data
        self.parent = parent
        stats_text = ""
        for p in pwd.getpwall():
            if p[2] >= idstart:  # skip system accounts
                home = p[5]
                try:
                    # Stats layout (from the code below): a mapping whose
                    # values may carry a 'date' string and a 'time' value
                    # (minutes).  ``with`` closes the file handle, which the
                    # original bare pickle.load(open(...)) leaked.
                    with open(home + "/.tk_stats.p", "rb") as fh:
                        user_stats = pickle.load(fh)
                    stats_text = stats_text + p[0] + "\n"
                    for a in user_stats:
                        if ('date' in user_stats[a]):
                            stats_text = stats_text + user_stats[a]['date'] + " - " + str(user_stats[a]['time']) + " minutes" + "\n"
                    stats_text = stats_text + "\n"
                except Exception:
                    # Missing or corrupt stats file: skip this user.
                    # (Was a bare ``except:`` that also swallowed
                    # KeyboardInterrupt/SystemExit.)
                    pass
        self.textEdit.setText(stats_text)
class AssignForm(QtGui.QDialog, assign):
    """Dialog for assigning an existing profile to a user or a group."""

    def __init__(self, parent=None, assign_type = None, uuid = 0):
        QtGui.QWidget.__init__(self, parent)
        self.setupUi(self)
        self.data = parent.data
        self.assign_type = assign_type
        self.uuid = int(uuid)
        self.parent = parent
        # Offer every known profile as "<id>|<name>".
        for key in self.data['profiles']:
            profile = self.data['profiles'][key]
            label = profile['id'] + "|" + profile['name']
            self.comboBox.insertItem(int(profile['id']), label)

    ######################################################################################
    # Accept assigning profile
    def accept(self):
        """Store the chosen profile id for the target user/group, push the
        updated per-user config files and dismiss the dialog."""
        chosen_id = self.comboBox.currentText().split("|")[0]
        profile_id = int(chosen_id)
        if self.assign_type == "user":
            self.data['user_profiles'][self.uuid] = profile_id
        # For groups
        if self.assign_type == "group":
            self.data['group_profiles'][self.uuid] = profile_id
        self.parent.distribute_configs()
        self.close()
class MyForm(QtGui.QDialog, dial1):
    """Profile editing dialog: creates a new filter profile or edits an
    existing one (content filters, per-day banned hours and time limits)."""

    def __init__(self, parent=None, id_profile = None):
        # parent: main window, supplies the shared ``data`` dict.
        # id_profile: key into data['profiles'] to edit, or None to create.
        QtGui.QWidget.__init__(self, parent)
        self.setupUi(self)
        self.data = parent.data
        #print parent.data['profiles']
        self.id_profile = id_profile
        #print self.id_profile
        #QtCore.QObject.connect(self.ui.pushButton, QtCore.SIGNAL('clicked()'), self.popup)
        if (id_profile):
            self.load_data()

    ######################################################################################
    # Combobox settings in dialog
    def set_combobox(self,combo,val):
        # Select the combobox entry whose text equals str(val);
        # silently does nothing when the value is not in the list.
        val = str(val)
        index = combo.findText(val)
        if(index != -1):
            combo.setCurrentIndex(index)

    ######################################################################################
    # Load profile data in dialog
    def load_data(self):
        # Copy the stored profile fields into the dialog widgets.
        self.name.setText(self.data['profiles'][self.id_profile]['name'])
        #self.set_combobox(self.no_type,self.data['profiles'][self.id_profile]['not'])
        #self.set_combobox(self.no_interval,self.data['profiles'][self.id_profile]['noi'])
        #self.set_combobox(self.logout_type,self.data['profiles'][self.id_profile]['lot'])
        self.set_combobox(self.SocialNetworks,self.data['profiles'][self.id_profile]['SN'])
        self.set_combobox(self.HideProfanity,self.data['profiles'][self.id_profile]['HP'])
        self.set_combobox(self.AdultContent,self.data['profiles'][self.id_profile]['AC'])
        self.set_combobox(self.SafeSearch,self.data['profiles'][self.id_profile]['SS'])
        self.BlackList.setText(self.data['profiles'][self.id_profile]['BL'])
        self.WhiteList.setText(self.data['profiles'][self.id_profile]['WL'])
        # Per-day settings: keys 1..7 (Sunday..Saturday, per the widget
        # prefixes su/mo/tu/we/th/fr/sa below).
        sett = self.data['profiles'][self.id_profile]['settings']
        self.su_banned.setText(sett[1]['banned'])
        self.mo_banned.setText(sett[2]['banned'])
        self.tu_banned.setText(sett[3]['banned'])
        self.we_banned.setText(sett[4]['banned'])
        self.th_banned.setText(sett[5]['banned'])
        self.fr_banned.setText(sett[6]['banned'])
        self.sa_banned.setText(sett[7]['banned'])
        # Split each day's time limit (stored as total minutes) into the
        # hours/minutes combobox pair for that day.
        for a in range(7):
            index = a + 1
            if (index == 1):
                comboH = self.su_hours
                comboM = self.su_mins
            elif (index == 2):
                comboH = self.mo_hours
                comboM = self.mo_mins
            elif (index == 3):
                comboH = self.tu_hours
                comboM = self.tu_mins
            elif (index == 4):
                comboH = self.we_hours
                comboM = self.we_mins
            elif (index == 5):
                comboH = self.th_hours
                comboM = self.th_mins
            elif (index == 6):
                comboH = self.fr_hours
                comboM = self.fr_mins
            elif (index == 7):
                comboH = self.sa_hours
                comboM = self.sa_mins
            if(sett[index]['time'] != "None"):
                self.set_combobox(comboH, int(sett[index]['time']) / 60)
                self.set_combobox(comboM, int(sett[index]['time']) % 60)

    ######################################################################################
    # Create new profile with data model
    def new_profile(self,new_id):
        # Read every widget back into plain strings and store the profile
        # under ``new_id`` (overwrites when editing an existing id).
        name = self.name.text()
        SN = str(self.SocialNetworks.currentText())
        HP = str(self.HideProfanity.currentText())
        AC = str(self.AdultContent.currentText())
        SS = str(self.SafeSearch.currentText())
        BL = str(self.BlackList.toPlainText())
        WL = str(self.WhiteList.toPlainText())
        su_banned = self.su_banned.text()
        mo_banned = self.mo_banned.text()
        tu_banned = self.tu_banned.text()
        we_banned = self.we_banned.text()
        th_banned = self.th_banned.text()
        fr_banned = self.fr_banned.text()
        sa_banned = self.sa_banned.text()
        # Time limits are persisted as total minutes (hours*60 + minutes).
        su_time = str(int(self.su_hours.currentText())*60+int(self.su_mins.currentText()))
        mo_time = str(int(self.mo_hours.currentText())*60+int(self.mo_mins.currentText()))
        tu_time = str(int(self.tu_hours.currentText())*60+int(self.tu_mins.currentText()))
        we_time = str(int(self.we_hours.currentText())*60+int(self.we_mins.currentText()))
        th_time = str(int(self.th_hours.currentText())*60+int(self.th_mins.currentText()))
        fr_time = str(int(self.fr_hours.currentText())*60+int(self.fr_mins.currentText()))
        sa_time = str(int(self.sa_hours.currentText())*60+int(self.sa_mins.currentText()))
        print SN  # NOTE(review): leftover debug output
        # Profile data model
        self.data['profiles'][new_id] = {"id" : str(new_id), "name" : name, "SN" : SN, "HP" : HP, "AC" : AC, "SS" : SS, "WL" : WL, "BL" : BL ,"settings" : {
        1 : {"banned" : su_banned, "time" : su_time},
        2 : {"banned" : mo_banned, "time" : mo_time},
        3 : {"banned" : tu_banned, "time" : tu_time},
        4 : {"banned" : we_banned, "time" : we_time},
        5 : {"banned" : th_banned, "time" : th_time},
        6 : {"banned" : fr_banned, "time" : fr_time},
        7 : {"banned" : sa_banned, "time" : sa_time}
        }}

    ######################################################################################
    # Save settings for existing profile or create new one
    def accept(self):
        if not (self.id_profile):
            # New profile: next id after the current maximum key.
            new_id = max(self.data['profiles'].iteritems(), key=operator.itemgetter(0))[0]+1
            # print "Adding new profile with ID:",new_id
            self.new_profile(new_id)
        else:
            # print "Editing profile with ID:",self.id_profile
            self.new_profile(self.id_profile)
        self.close()
class MyWindowClass(QtGui.QMainWindow, form_class):
    """Main window: manages filter profiles and their assignment to
    users and groups, persisting everything in ``config.p`` and pushing
    per-user settings files into home directories."""

    def __init__(self, parent=None):
        self.data = self.reload_data()
        QtGui.QMainWindow.__init__(self, parent)
        self.setupUi(self)
        # Wire menu actions and buttons to their handlers.
        self.actionQuit.triggered.connect(self.quit)
        #self.actionStatistics.triggered.connect(self.show_stats)
        self.btn_profiles_edit.clicked.connect(self.edit_profiles)
        self.btn_profiles_add.clicked.connect(self.add_profiles)
        self.btn_profiles_delete.clicked.connect(self.delete_profiles)
        self.btn_uprofiles_assign.clicked.connect(self.edit_user_profiles)
        self.btn_uprofiles_delete.clicked.connect(self.delete_user_profiles)
        self.btn_gprofiles_assign.clicked.connect(self.edit_group_profiles)
        self.btn_gprofiles_delete.clicked.connect(self.delete_group_profiles)
        self._dialog = None
        # Populate all three tables from the loaded config.
        self.fill_profiles()
        self.fill_user_profiles()
        self.fill_group_profiles()

    ######################################################################################
    # Reload settings with 2 test profiles
    def reload_data(self):
        # ``all_data`` is an example/seed config (only dumped when the
        # commented line below is enabled); the actual return value is
        # whatever is currently stored in config.p.
        all_data = {'profiles' :
        {5 :
        {"id" : "5", "name" : "Employee", "SN" : "Allowed", "HP" : "Yes", "AC" : "Allowed", "SS" : "Allowed", "WL" : "", "BL" : "", "settings" : {
        1 : {"banned" : "1230-1630", "time" : "60"},
        2 : {"banned" : "1330-1730", "time" : "50"},
        3 : {"banned" : "1430-2355", "time" : "40"},
        4 : {"banned" : "1530-2355", "time" : "30"},
        5 : {"banned" : "1630-2030", "time" : "20"},
        6 : {"banned" : "1730-2130", "time" : "10"},
        7 : {"banned" : "1830-2230", "time" : "10"}
        }},
        6 :
        {"id" : "6", "name" : "Childrens", "SN" : "Allowed", "HP" : "Yes", "AC" : "Allowed", "SS" : "Allowed", "WL" : "", "BL" : "", "settings" : {
        1 : {"banned" : "1230-1630", "time" : "80"},
        2 : {"banned" : "1330-1730", "time" : "70"},
        3 : {"banned" : "1430-1830", "time" : "60"},
        4 : {"banned" : "1530-1930", "time" : "50"},
        5 : {"banned" : "1630-2030", "time" : "40"},
        6 : {"banned" : "1730-2130", "time" : "30"},
        7 : {"banned" : "1830-2230", "time" : "20"}
        }}
        },
        'user_profiles' :
        {},
        'group_profiles' :
        {}
        }
        # Only for testing
        #pickle.dump(all_data, open("config.p", "wb"))
        return pickle.load(open("config.p", "rb"))

    ######################################################################################
    # Redistribute all config files
    def distribute_configs(self):
        # Write each managed user's effective profile settings to
        # ~/.btw_settings.p.  Precedence: user assignment (keyed by uid
        # p[2]) wins over group assignment (keyed by gid p[3]); users with
        # neither get an empty dict.
        global idstart
        for p in pwd.getpwall():
            if p[2] >= idstart:
                home = p[5]
                # If user has set profile id, then copy settings
                if (int(p[2]) in self.data['user_profiles']):
                    profile_id = self.data['user_profiles'][int(p[2])]
                    profile_settings = self.data['profiles'][profile_id]
                    try:
                        pickle.dump(profile_settings, open(home+"/.btw_settings.p", "wb"))
                    except:
                        pass
                elif (int(p[3]) in self.data['group_profiles']):
                    profile_id = self.data['group_profiles'][int(p[3])]
                    profile_settings = self.data['profiles'][profile_id]
                    try:
                        pickle.dump(profile_settings, open(home+"/.btw_settings.p", "wb"))
                    except:
                        pass
                else:
                    try:
                        pickle.dump({}, open(home+"/.btw_settings.p", "wb"))
                    except:
                        pass
                try:
                    # Make the settings file owned by the user it belongs to.
                    os.chown(home+"/.btw_settings.p", p[2], p[3])
                except:
                    pass

    ######################################################################################
    # Saves main app pickle file
    def save_data(self):
        pickle.dump( self.data, open( "config.p", "wb" ) )
        #return pickle.load( open( "config.p", "rb" ) )

    ######################################################################################
    # Filling profile table with data
    def fill_profiles(self):
        # Read all filters from PICKEL and show
        self.table_profiles.setRowCount(0)
        entries = []
        for a in self.data['profiles']:
            entries.append((self.data['profiles'][a]['id'],self.data['profiles'][a]['name']))
        self.table_profiles.setRowCount(len(entries))
        self.table_profiles.setColumnCount(len(entries[0]))
        for i, row in enumerate(entries):
            for j, col in enumerate(row):
                item = QtGui.QTableWidgetItem(col)
                self.table_profiles.setItem(i, j, item)

    ######################################################################################
    # Edit existing profile for time managemant
    def edit_profiles(self):
        prof_row = self.table_profiles.currentIndex().row()
        # If profile is selected in table
        if (prof_row >= 0):
            pass
            # print "Editing profile ID:",self.table_profiles.item(prof_row,0).text()
        else:
            return
        # Column 0 of the table holds the profile id.
        id_prof = int(self.table_profiles.item(prof_row,0).text())
        myapp= MyForm(self,id_prof)
        myapp.exec_()
        self.save_data()
        self.fill_profiles()
        self.distribute_configs()

    ######################################################################################
    # Delete profiles - recursively with all settings
    def delete_profiles(self):
        prof_row = self.table_profiles.currentIndex().row()
        # If profile is selected in table
        if (prof_row >= 0):
            pass
            # print "Delete profile ID:",self.table_profiles.item(prof_row,0).text()
        else:
            return
        pid = int(self.table_profiles.item(prof_row,0).text())
        del self.data['profiles'][pid]
        self.save_data()
        self.fill_profiles()
        self.distribute_configs()

    ######################################################################################
    # Add new profile
    def add_profiles(self):
        # Open the profile dialog in "create" mode (id_profile=None).
        myapp= MyForm(self,None)
        myapp.exec_()
        self.save_data()
        self.fill_profiles()

    ######################################################################################
    # Filling profile table with data
    def fill_user_profiles(self):
        global idstart
        # Find all users and by user ID read pickle and assign
        self.table_user_profiles.setRowCount(0)
        entries = []
        for p in pwd.getpwall():
            if p[2] >= idstart:
                # If user has set profile id, then find name
                user_profile_name = ""
                group_profile_name = ""
                effective_profile_name = ""
                if (int(p[2]) in self.data['user_profiles']):
                    user_profile_id = self.data['user_profiles'][int(p[2])]
                    user_profile_name = self.data['profiles'][user_profile_id]['name']
                    effective_profile_name = user_profile_name
                if (int(p[3]) in self.data['group_profiles']):
                    group_profile_id = self.data['group_profiles'][int(p[3])]
                    group_profile_name = self.data['profiles'][group_profile_id]['name']
                    # Group assignment applies only when no user-level one exists.
                    if (effective_profile_name == ""):
                        effective_profile_name = group_profile_name
                entries.append((str(p[2]),p[0],user_profile_name, group_profile_name, effective_profile_name))
        # Read all filters from PICKEL and show
        self.table_user_profiles.setRowCount(len(entries))
        self.table_user_profiles.setColumnCount(len(entries[0]))
        for i, row in enumerate(entries):
            for j, col in enumerate(row):
                item = QtGui.QTableWidgetItem(col)
                self.table_user_profiles.setItem(i, j, item)

    #####################################################################################
    # Edit existin profile for time managemant
    def edit_user_profiles(self):
        prof_row = self.table_user_profiles.currentIndex().row()
        # If profile is selected in table
        if (prof_row >= 0):
            pass
            # print "Editing user filter settings:",self.table_user_profiles.item(prof_row,0).text()
        else:
            return
        myapp= AssignForm(self,"user",self.table_user_profiles.item(prof_row,0).text())
        myapp.exec_()
        self.save_data()
        self.fill_user_profiles()
        self.distribute_configs()

    #####################################################################################
    # Delete profiles - recursively with all settings
    def delete_user_profiles(self):
        prof_row = self.table_user_profiles.currentIndex().row()
        # If profile is selected in table
        if (prof_row >= 0):
            pass
            # print "Delete user filter settings:",self.table_user_profiles.item(prof_row,0).text()
        else:
            return
        pid = int(self.table_user_profiles.item(prof_row,0).text())
        del self.data['user_profiles'][pid]
        self.save_data()
        self.fill_user_profiles()
        self.distribute_configs()

    #####################################################################################
    # Filling profile table with data
    def fill_group_profiles(self):
        global idstart
        # Find all groups and by group ID read pickle and assign
        # NOTE(review): despite the comment, this iterates pwd.getpwall()
        # (user accounts) and keys group_profiles by p[2] (uid), while
        # distribute_configs looks groups up by gid p[3].  ``grp`` is
        # imported at file top but never used — this likely should use
        # grp.getgrall(); confirm before changing.
        self.table_group_profiles.setRowCount(0)
        entries = []
        for p in pwd.getpwall():
            if p[2] >= idstart:
                # If group has set profile id, then find name
                group_profile_name = ""
                if (int(p[2]) in self.data['group_profiles']):
                    group_profile_id = self.data['group_profiles'][int(p[2])]
                    group_profile_name = self.data['profiles'][group_profile_id]['name']
                entries.append((str(p[2]),p[0],group_profile_name))
        # Read all filters from PICKEL and show
        self.table_group_profiles.setRowCount(len(entries))
        self.table_group_profiles.setColumnCount(len(entries[0]))
        for i, row in enumerate(entries):
            for j, col in enumerate(row):
                item = QtGui.QTableWidgetItem(col)
                self.table_group_profiles.setItem(i, j, item)

    #####################################################################################
    # Edit existing profile for time managemant
    def edit_group_profiles(self):
        prof_row = self.table_group_profiles.currentIndex().row()
        # If profile is selected in table
        if (prof_row >= 0):
            pass
            # print "Editing group filter settings:",self.table_group_profiles.item(prof_row,0).text()
        else:
            return
        myapp= AssignForm(self,"group",self.table_group_profiles.item(prof_row,0).text())
        myapp.exec_()
        self.save_data()
        self.fill_group_profiles()
        self.fill_user_profiles()
        self.distribute_configs()

    #####################################################################################
    # Delete profiles - recursively with all settings
    def delete_group_profiles(self):
        prof_row = self.table_group_profiles.currentIndex().row()
        # If profile is selected in table
        if (prof_row >= 0):
            pass
            # print "Delete group filter settings:",self.table_group_profiles.item(prof_row,0).text()
        else:
            return
        pid = int(self.table_group_profiles.item(prof_row,0).text())
        del self.data['group_profiles'][pid]
        self.save_data()
        self.fill_group_profiles()
        self.fill_user_profiles()
        self.distribute_configs()

    #####################################################################################
    def quit(self):
        print "exit"
        sys.exit()

    def show_stats(self):
        # Open the (currently unwired) statistics dialog.
        myapp= StatsForm(self)
        myapp.exec_()

    def show_dial1(self):
        myapp= MyForm(self)
        myapp.show()
# Application entry point: create the Qt application, show the main
# window and enter the event loop.
app = QtGui.QApplication(sys.argv)
myWindow = MyWindowClass(None)
myWindow.show()
app.exec_()
|
#!/usr/bin/env python3
""" represents a gated recurrent unit:"""
import numpy as np
class GRUCell:
    """A single gated recurrent unit (GRU) cell."""

    def __init__(self, i, h, o):
        """Initialize weights (standard-normal) and biases (zeros).

        i: dimensionality of the input data
        h: dimensionality of the hidden state
        o: dimensionality of the outputs

        Wz/bz drive the update gate, Wr/br the reset gate,
        Wh/bh the candidate hidden state, Wy/by the output layer.
        Gate weights act on the concatenation [h_prev, x], hence (i+h, h).
        """
        concat_dim = i + h
        self.Wz = np.random.normal(size=(concat_dim, h))
        self.Wr = np.random.normal(size=(concat_dim, h))
        self.Wh = np.random.normal(size=(concat_dim, h))
        self.Wy = np.random.normal(size=(h, o))
        self.bz = np.zeros((1, h))
        self.br = np.zeros((1, h))
        self.bh = np.zeros((1, h))
        self.by = np.zeros((1, o))

    def softmax(self, z):
        """Row-wise softmax (shifted by the global max for stability)."""
        shifted = np.exp(z - np.max(z))
        return shifted / shifted.sum(axis=1, keepdims=True)

    def sigmoid(self, x_t):
        """Elementwise logistic sigmoid."""
        return 1 / (1 + np.exp(-x_t))

    def forward(self, h_prev, x_t):
        """Run one forward step of the GRU.

        x_t: (m, i) batch of inputs; h_prev: (m, h) previous hidden state.
        Returns (h_next, y): the next hidden state and the softmax output.
        """
        # Gates see the concatenated [h_prev, x_t].
        cat = np.concatenate((h_prev, x_t), axis=1)
        z_gate = self.sigmoid(cat @ self.Wz + self.bz)   # update gate
        r_gate = self.sigmoid(cat @ self.Wr + self.br)   # reset gate
        # Candidate state uses the reset-scaled previous hidden state.
        cand_in = np.concatenate((r_gate * h_prev, x_t), axis=1)
        h_cand = np.tanh(cand_in @ self.Wh + self.bh)
        # Interpolate between carry-over and candidate by the update gate.
        h_next = (1 - z_gate) * h_prev + z_gate * h_cand
        y = self.softmax(h_next @ self.Wy + self.by)
        return h_next, y
|
# Copyright 2023 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from unittest import TestCase
from coursedashboards.models import Term, StudentMajor
class TestStudentMajors(TestCase):
    """Tests that StudentMajor records order chronologically by term."""

    def setUp(self):
        # create terms: winter 2016 precedes autumn 2017
        self.winter2016 = Term()
        self.winter2016.quarter = 'winter'
        self.winter2016.year = 2016

        self.autumn2017 = Term()
        self.autumn2017.quarter = 'autumn'
        self.autumn2017.year = 2017

        self.first_major = StudentMajor()
        self.first_major.term = self.winter2016

        self.second_major = StudentMajor()
        self.second_major.term = self.autumn2017

    def test_term_sort(self):
        """Sorting puts the earlier-term major first.

        BUG FIX: the original test sorted the list but made no assertion,
        so it could never fail.
        """
        sorted_majors = sorted([self.second_major, self.first_major])
        self.assertEqual(sorted_majors,
                         [self.first_major, self.second_major])
|
'''
tests for geojson outputter
'''
import os
from glob import glob
from datetime import timedelta
import numpy as np
import pytest
from gnome.outputters import TrajectoryGeoJsonOutput
from gnome.spill import SpatialRelease, Spill, point_line_release_spill
from gnome.basic_types import oil_status
from gnome.environment import constant_wind, Water
from gnome.weatherers import Evaporation
from gnome.spill.elements import floating
from ..conftest import sample_model, sample_model_weathering, test_oil
@pytest.fixture(scope='function')
def model(sample_model, output_dir):
    # Build a weathering-enabled sample model with evaporation, an extra
    # point/line release spill and a TrajectoryGeoJsonOutput attached;
    # shared by the tests below.
    model = sample_model_weathering(sample_model, test_oil)
    rel_start_pos = sample_model['release_start_pos']
    rel_end_pos = sample_model['release_end_pos']
    model.cache_enabled = True
    model.uncertain = True
    water, wind = Water(), constant_wind(1., 0)
    model.environment += [water, wind]
    model.weatherers += Evaporation(water, wind)
    # Reuse the element type of the existing spill for the new one.
    et = model.spills[0].element_type
    N = 10  # a line of ten points
    line_pos = np.zeros((N, 3), dtype=np.float64)
    line_pos[:, 0] = np.linspace(rel_start_pos[0], rel_end_pos[0], N)
    line_pos[:, 1] = np.linspace(rel_start_pos[1], rel_end_pos[1], N)
    # print start_points
    model.spills += point_line_release_spill(1,
                                             start_position=rel_start_pos,
                                             release_time=model.start_time,
                                             end_position=rel_end_pos,
                                             element_type=et,
                                             amount=100,
                                             units='tons')
    model.outputters += TrajectoryGeoJsonOutput(output_dir=output_dir)
    model.rewind()
    return model
def test_init():
    """A default-constructed outputter exposes the documented defaults."""
    outputter = TrajectoryGeoJsonOutput()
    assert outputter.output_dir is None
    assert outputter.round_to == 4
    assert outputter.round_data
def test_clean_output_files(model, output_dir):
    'test geojson outputter with a model since simplest to do that'
    # A full run writes one .geojson file per time step; clean_output_files
    # must remove them all.
    model.rewind()
    model.full_run()
    files = glob(os.path.join(output_dir, '*.geojson'))
    print files
    assert len(files) == model.num_time_steps
    model.outputters[-1].clean_output_files()
    files = glob(os.path.join(output_dir, '*.geojson'))
    print files
    assert len(files) == 0
@pytest.mark.slow
@pytest.mark.parametrize("output_ts_factor", [1, 2, 2.4])
def test_write_output_post_run(model, output_ts_factor, output_dir):
    # Remove the outputter before the run, then replay the cached results
    # through write_output_post_run and check the expected file count for
    # each output-timestep scaling factor.
    model.rewind()
    o_geojson = model.outputters[-1]
    o_geojson.output_timestep = timedelta(seconds=model.time_step *
                                          output_ts_factor)
    del model.outputters[-1]

    model.full_run()

    # purge the output
    # note: there are two outputter on the model -- not sure why
    #       so still one after removing this one so need to clear output dir
    o_geojson.clean_output_files()
    files = glob(os.path.join(output_dir, '*.geojson'))
    assert len(files) == 0

    o_geojson.write_output_post_run(model.start_time,
                                    model.num_time_steps,
                                    cache=model._cache,
                                    spills=model.spills)
    # Interior steps are thinned by output_ts_factor; first and last
    # steps are always written (+ 2).
    files = glob(os.path.join(output_dir, '*.geojson'))
    assert len(files) == int((model.num_time_steps-2)/output_ts_factor) + 2
    # Restore the outputter so later tests see the original model state.
    o_geojson.output_timestep = None
    model.outputters += o_geojson
def test_geojson_multipoint_output(model):
    'test geojson outputter with a model since simplest to do that'
    # default is to round data; with output_dir=None the features are
    # returned in each step dict instead of written to disk.
    odir = model.outputters[-1].output_dir
    model.outputters[-1].output_dir = None
    model.rewind()
    round_to = model.outputters[0].round_to

    for step in model:
        uncertain_fc = step['TrajectoryGeoJsonOutput']['uncertain']['features']
        certain_fc = step['TrajectoryGeoJsonOutput']['certain']['features']
        fc = uncertain_fc + certain_fc
        assert 'output_filename' not in step['TrajectoryGeoJsonOutput']
        for feature in fc:
            if feature['properties']['sc_type'] == 'uncertain':
                uncertain = True
            else:
                uncertain = False

            assert 'mass' in feature['properties']
            assert feature['properties']['mass'] > 0
            assert 'spill_num' in feature['properties']
            assert type(feature['properties']['spill_num']) is int

            mask = np.where(model.spills.LE('status_codes', uncertain) ==
                            feature['properties']['status_code'])
            # BUG FIX: the np.allclose result was previously discarded,
            # so the coordinate check could never fail.
            assert np.allclose(model.spills.LE('positions', uncertain)[mask, :2],
                               feature['geometry']['coordinates'],
                               atol=10 ** -round_to)
    # Restore the output dir for subsequent tests.
    model.outputters[-1].output_dir = odir
|
import os
def count(dir='.', counter=0):
    """Return a message stating how many files live under *dir*,
    including all subdirectories.

    ``counter`` is the starting tally that the file count is added to.
    """
    total = counter + sum(len(files) for _, _, files in os.walk(dir))
    return "No of files in '" + dir + "' : " + str(total) + " files"
#print(count(".")) |
from __future__ import absolute_import
from django.db import IntegrityError, transaction
from django.utils import timezone
from rest_framework import serializers
from rest_framework.response import Response
from uuid import uuid4
from sentry.api.base import DocSection
from sentry.api.bases.project import ProjectEndpoint
from sentry.api.serializers import serialize, ProjectUserReportSerializer
from sentry.api.paginator import DateTimePaginator
from sentry.models import (Event, EventMapping, EventUser, Group, GroupStatus, UserReport)
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('CreateUserFeedback')
def create_user_feedback_scenario(runner):
    # API-docs scenario: POST a sample feedback payload against a
    # throwaway project so the docs show a real request/response.
    with runner.isolated_project('Plain Proxy') as project:
        runner.request(
            method='POST',
            path='/projects/{}/{}/user-feedback/'.format(runner.org.slug, project.slug),
            data={
                'name': 'Jane Smith',
                'email': 'jane@example.com',
                'comments': 'It broke!',
                'event_id': uuid4().hex,
            }
        )
class UserReportSerializer(serializers.ModelSerializer):
    """Validates the writable fields of a submitted user feedback report."""

    class Meta:
        model = UserReport
        fields = ('name', 'email', 'comments', 'event_id')
class ProjectUserReportsEndpoint(ProjectEndpoint):
    """List and submit user feedback reports for a project."""

    doc_section = DocSection.PROJECTS

    def get(self, request, project):
        """
        List a Project's User Feedback
        ``````````````````````````````

        Return a list of user feedback items within this project.

        :pparam string organization_slug: the slug of the organization.
        :pparam string project_slug: the slug of the project.
        :auth: required
        """
        # Only reports that were linked to an issue are listable.
        queryset = UserReport.objects.filter(
            project=project,
            group__isnull=False,
        ).select_related('group')

        status = request.GET.get('status', 'unresolved')
        if status == 'unresolved':
            queryset = queryset.filter(
                group__status=GroupStatus.UNRESOLVED,
            )
        elif status:
            # Any other non-empty value is rejected; an empty ``status``
            # means "no status filter".
            return Response({'status': 'Invalid status choice'}, status=400)

        return self.paginate(
            request=request,
            queryset=queryset,
            order_by='-date_added',
            on_results=lambda x: serialize(x, request.user, ProjectUserReportSerializer()),
            paginator_cls=DateTimePaginator,
        )

    @attach_scenarios([create_user_feedback_scenario])
    def post(self, request, project):
        """
        Submit User Feedback
        ````````````````````

        Submit and associate user feedback with an issue.

        :pparam string organization_slug: the slug of the organization.
        :pparam string project_slug: the slug of the project.
        :auth: required
        :param string event_id: the event ID
        :param string name: user's name
        :param string email: user's email address
        :param string comments: comments supplied by user
        """
        serializer = UserReportSerializer(data=request.DATA)
        if not serializer.is_valid():
            return Response(serializer.errors, status=400)

        # NOTE: ``serializer.object`` is the pre-3.0 DRF API for the
        # deserialized-but-unsaved instance.
        report = serializer.object
        report.project = project
        # TODO(dcramer): we should probably create the user if they dont
        # exist, and ideally we'd also associate that with the event
        euser = self.find_event_user(report)
        # Backfill the event user's name from the report when missing.
        if euser and not euser.name and report.name:
            euser.update(name=report.name)
        if euser:
            report.event_user_id = euser.id

        try:
            mapping = EventMapping.objects.get(
                event_id=report.event_id,
                project_id=project.id,
            )
        except EventMapping.DoesNotExist:
            # XXX(dcramer): the system should fill this in later
            pass
        else:
            report.group = Group.objects.get(id=mapping.group_id)

        try:
            with transaction.atomic():
                report.save()
        except IntegrityError:
            # There was a duplicate, so just overwrite the existing
            # row with the new one. The only way this ever happens is
            # if someone is messing around with the API, or doing
            # something wrong with the SDK, but this behavior is
            # more reasonable than just hard erroring and is more
            # expected.
            existing_report = UserReport.objects.get(
                project=report.project,
                event_id=report.event_id,
            )
            existing_report.update(
                name=report.name,
                email=report.email,
                comments=report.comments,
                date_added=timezone.now(),
                event_user_id=euser.id if euser else None,
            )
            report = existing_report

        return Response(serialize(report, request.user, ProjectUserReportSerializer()))

    def find_event_user(self, report):
        """Best-effort lookup of the EventUser behind a report: via the
        event's ``sentry:user`` tag when the event exists, otherwise by
        matching the report's email within the project."""
        try:
            event = Event.objects.get(
                group_id=report.group_id,
                event_id=report.event_id,
            )
        except Event.DoesNotExist:
            if not report.email:
                return None
            try:
                return EventUser.objects.filter(
                    project_id=report.project_id,
                    email=report.email,
                )[0]
            except IndexError:
                return None

        tag = event.get_tag('sentry:user')
        if not tag:
            return None

        try:
            return EventUser.for_tags(
                project_id=report.project_id,
                values=[tag],
            )[tag]
        except KeyError:
            pass
|
import numpy
from src.SphereHandModel.utils import xyz_uvd
from scipy.spatial.distance import cdist
# from src.SphereHandModel.ShowSamples import *
def cost_function(setname,DepthImg, inSpheres, Center, SilhouetteDistImg,SubPixelNum):
    # Evaluate how well a sphere-based hand model fits a depth image.
    # Returns (Cost, term1, term2, term3):
    #   term1 - mean squared point-to-nearest-sphere-surface distance over
    #           a random subsample of hand pixels,
    #   term2 - penalty for spheres projecting in front of the observed
    #           depth (or outside the silhouette),
    #   term3 - penalty for inter-finger sphere interpenetration.
    #
    # NOTE(review): Spheres rows appear to be [id?, x, y, z, radius]
    # (rows 1:4 are xyz, row 4 radius); Center looks like the image
    # center (u, v) so Center*2 is the image size — confirm against
    # callers.  ``numShpere`` [sic] is spheres per part.
    # if setname=='mega':
    #     Center = [320,240]
    # if setname=='icvl':
    #     Center = [160,120]
    numShpere=inSpheres.shape[1]
    totalSphere = inSpheres.shape[0]*inSpheres.shape[1]
    # Flatten (parts, perPart, 5) -> (5, totalSphere).
    Spheres=inSpheres.reshape(inSpheres.shape[0]*inSpheres.shape[1],5).T
    uvd = xyz_uvd.convert_depth_to_uvd(DepthImg)
    xyz = xyz_uvd.uvd2xyz(setname=setname,uvd=uvd)
    # points = xyz.reshape(xyz.shape[0],xyz.shape[1], 3)
    Gx = xyz[:,:,0]
    Gy = xyz[:,:,1]
    Gz = xyz[:,:,2]
    # Valid (non-zero-depth) pixels are treated as hand pixels.
    PixelInd = numpy.where( DepthImg > 0 )
    # print 'num of hand points', PixelInd[0].shape[0]
    if PixelInd[0].shape[0]<SubPixelNum:
        SubPixelNum = PixelInd[0].shape[0]-10
    # Random subsample of hand pixels to keep the cost cheap.
    tmp = numpy.random.randint(0,PixelInd[0].shape[0],SubPixelNum)
    SubPixelInd =(PixelInd[0][tmp],PixelInd[1][tmp])
    # SubD = numpy.zeros(1, SubPixelNum)
    # DepthDiscrepancyThreshold = 10
    Locates = numpy.empty((SubPixelNum,3))
    Locates[:,0]=Gx[SubPixelInd]
    Locates[:,1]=Gy[SubPixelInd]
    Locates[:,2]=Gz[SubPixelInd]
    # ShowPointCloud(points0=Locates,hand_points=Spheres[1:4,SLICE_IDX_SKE_FROM_SPHERE].T,Sphere=Spheres.T)
    # Distance from each sampled point to each sphere SURFACE
    # (center distance minus radius).
    Dists = numpy.abs(cdist(Locates, Spheres[1:4, :].T) -Spheres[4,:].reshape(1,totalSphere))
    # Dists = numpy.abs(cdist(Locates, Spheres[1:4, :].T) - numpy.ones((SubPixelNum, Spheres.shape[-1]))*Spheres[4,:].T)
    # SubD = Dists.T
    # Nearest sphere surface per sampled point.
    SubD = numpy.min(Dists.T,axis=0)
    # print SubD.shape
    B = numpy.zeros((1, totalSphere))
    # Project sphere centers back into the image to compare depths.
    sphereUVD = xyz_uvd.xyz2uvd(setname=setname,xyz=Spheres[1:4,:].T)
    u = numpy.asarray(numpy.round(sphereUVD[:,0]),dtype='int16')
    v = numpy.asarray(numpy.round(sphereUVD[:,1]),dtype='int16')
    # % check whether u or v is out of the range
    if (max(u) >= Center[0]*2) or (min(u) <= 0) or (max(v) >= Center[1]*2) or (min(v) <= 0):
        # Any sphere projecting outside the image: large constant penalty.
        B = 1000 * numpy.ones((1,totalSphere))
    else:
        DepthProj = DepthImg[(v,u)]
        DepthSphere = sphereUVD[:,2]
        # % Find the valid projected point
        ValidSpheresProjInd = numpy.where(DepthProj>0)
        InValidSpheresProjInd = numpy.where(0 == DepthProj)
        # templength = ValidSpheresProjInd[0].shape[0]
        # Penalize spheres lying in front of the observed surface only
        # (negative differences are clamped to zero).
        temp1 = DepthProj[ValidSpheresProjInd] - DepthSphere[ValidSpheresProjInd]
        temp1[numpy.where(temp1<0)]=0
        # temp2 = numpy.max([numpy.zeros((templength,)), temp1],axis=0)
        # %B(ValidSpheresProjInd) = min([DepthDiscrepancyThreshold*ones(1,templength) temp2])
        B[:,ValidSpheresProjInd]= temp1
        # Spheres projecting onto background pixels are penalized by the
        # distance-to-silhouette image at that location.
        invalidVU = (v[InValidSpheresProjInd],u[InValidSpheresProjInd])
        B[:,InValidSpheresProjInd] = SilhouetteDistImg[invalidVU]
        # print 'InValidSpheresProjInd',InValidSpheresProjInd, SilhouetteDistImg[invalidVU]
        # B[InValidSpheresProjInd] = SilhouetteDistImg(indices(InValidSpheresProjInd))
    # Interpenetration term: for 7 finger pairs, overlap = sum of radii
    # minus center distance, clamped at zero below.
    # NOTE(review): the hard-coded index ranges select per-finger sphere
    # columns — confirm against the sphere layout used by callers.
    L_Concise = numpy.zeros((7,3*numShpere,3*numShpere))
    THUMB_SHPERE=range(2*numShpere,5*numShpere,1)
    INDEX_SPHERE=range(6*numShpere,9*numShpere,1)
    MIDDLE_SPHERE=range(10*numShpere,13*numShpere,1)
    RING_SPHERE=range(14*numShpere,17*numShpere,1)
    PINKY_SPHERE=range(18*numShpere,21*numShpere,1)
    Thumb_Shperes = Spheres[:,THUMB_SHPERE]
    Index_Spheres = Spheres[:,INDEX_SPHERE]
    Middle_Spheres = Spheres[:,MIDDLE_SPHERE]
    Ring_Spheres = Spheres[:,RING_SPHERE]
    Small_Spheres = Spheres[:,PINKY_SPHERE]
    L_Concise[0] = cdist(Index_Spheres[-1,:].T.reshape(3*numShpere,1), - Middle_Spheres[-1,:].T.reshape(3*numShpere,1)) \
                   - cdist(Index_Spheres[1:4,:].T, Middle_Spheres[1:4,:].T)
    L_Concise[1] = cdist(Middle_Spheres[-1,:].T.reshape(3*numShpere,1), - Ring_Spheres[-1,:].T.reshape(3*numShpere,1)) \
                   - cdist(Middle_Spheres[1:4,:].T, Ring_Spheres[1:4,:].T)
    L_Concise[2] = cdist(Ring_Spheres[-1,:].T.reshape(3*numShpere,1), - Small_Spheres[-1,:].T.reshape(3*numShpere,1)) \
                   - cdist(Ring_Spheres[1:4,:].T, Small_Spheres[1:4,:].T)
    L_Concise[3] = cdist(Thumb_Shperes[-1,:].T.reshape(3*numShpere,1), - Middle_Spheres[-1,:].T.reshape(3*numShpere,1)) \
                   - cdist(Index_Spheres[1:4,:].T, Middle_Spheres[1:4,:].T)
    L_Concise[4] = cdist(Thumb_Shperes[-1,:].T.reshape(3*numShpere,1), - Ring_Spheres[-1,:].T.reshape(3*numShpere,1)) \
                   - cdist(Middle_Spheres[1:4,:].T, Ring_Spheres[1:4,:].T)
    L_Concise[5] = cdist(Thumb_Shperes[-1,:].T.reshape(3*numShpere,1), - Small_Spheres[-1,:].T.reshape(3*numShpere,1)) \
                   - cdist(Ring_Spheres[1:4,:].T, Small_Spheres[1:4,:].T)
    L_Concise[6] = cdist(Thumb_Shperes[-1,:].T.reshape(3*numShpere,1), - Index_Spheres[-1,:].T.reshape(3*numShpere,1)) \
                   - cdist(Ring_Spheres[1:4,:].T, Small_Spheres[1:4,:].T)
    # Keep only actual overlaps.
    L_Concise[numpy.where(L_Concise<0)]=0
    # print 'conflict', numpy.where(L_Concise>0)
    # L_Concise = numpy.max(L_Concise, numpy.zeros_like(L_Concise))
    # Combine the three normalized squared terms.
    Lambda = 1.0 / SubPixelNum
    term1 = Lambda*numpy.sum(SubD**2)
    term2 = numpy.sum(B**2)/totalSphere
    term3=numpy.sum(L_Concise**2)/7/numShpere/3
    Cost = term1 +term2 + term3
    # Cost = Lambda * numpy.sum(SubD**2) + numpy.sum(B**2) + numpy.sum(L_Concise**2)
    # print 'cost',Cost,'term 1, 2, 3',term1,term2,term3
    return Cost,term1,term2 ,term3
|
import FWCore.ParameterSet.Config as cms
# Producer building reco::Muon objects for displaced (non-prompt) muons from
# the displacedMuons1stStep collection, with PF, timing and isolation info.
displacedMuons = cms.EDProducer("MuonProducer",
    ActivateDebug = cms.untracked.bool(False),
    InputMuons = cms.InputTag("displacedMuons1stStep"),

    # Particle-flow momentum assignment and association.
    FillPFMomentumAndAssociation = cms.bool(True),
    PFCandidates = cms.InputTag("particleFlowTmp"),

    FillTimingInfo = cms.bool(True),

    # Detector-based isolation deposits (calorimeter towers, tracker, jets).
    FillDetectorBasedIsolation = cms.bool(True),
    EcalIsoDeposits = cms.InputTag("muIsoDepositCalByAssociatorTowersDisplaced","ecal"),
    HcalIsoDeposits = cms.InputTag("muIsoDepositCalByAssociatorTowersDisplaced","hcal"),
    HoIsoDeposits = cms.InputTag("muIsoDepositCalByAssociatorTowersDisplaced","ho"),
    TrackIsoDeposits = cms.InputTag("muIsoDepositTkDisplaced"),
    JetIsoDeposits = cms.InputTag("muIsoDepositJetsDisplaced"),

    # PF isolation value maps for cones of dR = 0.3 and 0.4, plus the
    # mean-dR and sum-dR profiles of the same cones.
    FillPFIsolation = cms.bool(True),
    PFIsolation = cms.PSet(
        pfIsolationR03 = cms.PSet(chargedParticle = cms.InputTag("dispMuPFIsoValueChargedAll03"),
                                  chargedHadron = cms.InputTag("dispMuPFIsoValueCharged03"),
                                  neutralHadron = cms.InputTag("dispMuPFIsoValueNeutral03"),
                                  neutralHadronHighThreshold = cms.InputTag("dispMuPFIsoValueNeutralHighThreshold03"),
                                  photon = cms.InputTag("dispMuPFIsoValueGamma03"),
                                  photonHighThreshold = cms.InputTag("dispMuPFIsoValueGammaHighThreshold03"),
                                  pu = cms.InputTag("dispMuPFIsoValuePU03")),
        pfIsolationR04 = cms.PSet(chargedParticle = cms.InputTag("dispMuPFIsoValueChargedAll04"),
                                  chargedHadron = cms.InputTag("dispMuPFIsoValueCharged04"),
                                  neutralHadron = cms.InputTag("dispMuPFIsoValueNeutral04"),
                                  neutralHadronHighThreshold = cms.InputTag("dispMuPFIsoValueNeutralHighThreshold04"),
                                  photon = cms.InputTag("dispMuPFIsoValueGamma04"),
                                  photonHighThreshold = cms.InputTag("dispMuPFIsoValueGammaHighThreshold04"),
                                  pu = cms.InputTag("dispMuPFIsoValuePU04")),
        pfIsoMeanDRProfileR03 = cms.PSet(chargedParticle = cms.InputTag("dispMuPFMeanDRIsoValueChargedAll03"),
                                         chargedHadron = cms.InputTag("dispMuPFMeanDRIsoValueCharged03"),
                                         neutralHadron = cms.InputTag("dispMuPFMeanDRIsoValueNeutral03"),
                                         neutralHadronHighThreshold = cms.InputTag("dispMuPFMeanDRIsoValueNeutralHighThreshold03"),
                                         photon = cms.InputTag("dispMuPFMeanDRIsoValueGamma03"),
                                         photonHighThreshold = cms.InputTag("dispMuPFMeanDRIsoValueGammaHighThreshold03"),
                                         pu = cms.InputTag("dispMuPFMeanDRIsoValuePU03")),
        pfIsoMeanDRProfileR04 = cms.PSet(chargedParticle = cms.InputTag("dispMuPFMeanDRIsoValueChargedAll04"),
                                         chargedHadron = cms.InputTag("dispMuPFMeanDRIsoValueCharged04"),
                                         neutralHadron = cms.InputTag("dispMuPFMeanDRIsoValueNeutral04"),
                                         neutralHadronHighThreshold = cms.InputTag("dispMuPFMeanDRIsoValueNeutralHighThreshold04"),
                                         photon = cms.InputTag("dispMuPFMeanDRIsoValueGamma04"),
                                         photonHighThreshold = cms.InputTag("dispMuPFMeanDRIsoValueGammaHighThreshold04"),
                                         pu = cms.InputTag("dispMuPFMeanDRIsoValuePU04")),
        pfIsoSumDRProfileR03 = cms.PSet(chargedParticle = cms.InputTag("dispMuPFSumDRIsoValueChargedAll03"),
                                        chargedHadron = cms.InputTag("dispMuPFSumDRIsoValueCharged03"),
                                        neutralHadron = cms.InputTag("dispMuPFSumDRIsoValueNeutral03"),
                                        neutralHadronHighThreshold = cms.InputTag("dispMuPFSumDRIsoValueNeutralHighThreshold03"),
                                        photon = cms.InputTag("dispMuPFSumDRIsoValueGamma03"),
                                        photonHighThreshold = cms.InputTag("dispMuPFSumDRIsoValueGammaHighThreshold03"),
                                        pu = cms.InputTag("dispMuPFSumDRIsoValuePU03")),
        pfIsoSumDRProfileR04 = cms.PSet(chargedParticle = cms.InputTag("dispMuPFSumDRIsoValueChargedAll04"),
                                        chargedHadron = cms.InputTag("dispMuPFSumDRIsoValueCharged04"),
                                        neutralHadron = cms.InputTag("dispMuPFSumDRIsoValueNeutral04"),
                                        neutralHadronHighThreshold = cms.InputTag("dispMuPFSumDRIsoValueNeutralHighThreshold04"),
                                        photon = cms.InputTag("dispMuPFSumDRIsoValueGamma04"),
                                        photonHighThreshold = cms.InputTag("dispMuPFSumDRIsoValueGammaHighThreshold04"),
                                        pu = cms.InputTag("dispMuPFSumDRIsoValuePU04"))
    ),

    # Muon-id selector value maps (disabled for displaced muons).
    FillSelectorMaps = cms.bool(False),
    SelectorMaps = cms.VInputTag(cms.InputTag("muidTMLastStationOptimizedLowPtLoose"),
                                 cms.InputTag("muidTMLastStationOptimizedLowPtTight"),
                                 cms.InputTag("muidTM2DCompatibilityLoose"),
                                 cms.InputTag("muidTM2DCompatibilityTight"),
                                 cms.InputTag("muidTrackerMuonArbitrated"),
                                 cms.InputTag("muidTMLastStationAngLoose"),
                                 cms.InputTag("muidGlobalMuonPromptTight"),
                                 cms.InputTag("muidGMStaChiCompatibility"),
                                 cms.InputTag("muidTMLastStationAngTight"),
                                 cms.InputTag("muidGMTkChiCompatibility"),
                                 cms.InputTag("muidTMOneStationAngTight"),
                                 cms.InputTag("muidTMOneStationAngLoose"),
                                 cms.InputTag("muidTMLastStationLoose"),
                                 cms.InputTag("muidTMLastStationTight"),
                                 cms.InputTag("muidTMOneStationTight"),
                                 cms.InputTag("muidTMOneStationLoose"),
                                 cms.InputTag("muidAllArbitrated"),
                                 cms.InputTag("muidGMTkKinkTight"),
                                 cms.InputTag("muidRPCMuLoose")
                                 ),

    FillShoweringInfo = cms.bool(False),
    ShowerInfoMap = cms.InputTag("muonShowerInformation"),

    FillCosmicsIdMap = cms.bool(False),
    CosmicIdMap = cms.InputTag("cosmicsVeto"),

    # Standard muon selectors computed w.r.t. the offline primary vertices.
    ComputeStandardSelectors = cms.bool(True),
    vertices = cms.InputTag("offlinePrimaryVertices")
)
# Not commissioned and not relevant in FastSim (?): disable cosmics-id and
# selector maps when the fastSim era modifier is active.
from Configuration.Eras.Modifier_fastSim_cff import fastSim
fastSim.toModify(displacedMuons, FillCosmicsIdMap = False, FillSelectorMaps = False)
|
import graphene
from graphene_django import DjangoObjectType
from .models import Room
class RoomType(DjangoObjectType):
    """GraphQL type for Room with a per-request dynamic `is_fav` field."""

    # Dynamic fields
    # https://docs.graphene-python.org/en/latest/types/objecttypes/#resolver-parameters
    is_fav = graphene.Boolean()

    class Meta:
        model = Room

    # Per the official docs, a dynamic field needs a matching resolver.
    def resolve_is_fav(parent, info):
        # parent -> the Room instance being resolved
        # info   -> graphql ResolveInfo carrying the request context (user)
        user = info.context.user
        if user.is_authenticated:
            # True when the authenticated user has favourited this room.
            return parent in user.favs.all()
        return False
class RoomListResponse(graphene.ObjectType):
    """Paginated room listing: the rooms plus the total match count."""
    arr = graphene.List(RoomType)
    total = graphene.Int()
from __future__ import annotations
from typing import Literal
from prettyqt import statemachine
from prettyqt.utils import bidict
# Bidirectional maps between human-readable strings and QState enum values.
ChildModeStr = Literal["exclusive", "parallel"]

CHILD_MODE: bidict[ChildModeStr, statemachine.QState.ChildMode] = bidict(
    exclusive=statemachine.QState.ChildMode.ExclusiveStates,
    parallel=statemachine.QState.ChildMode.ParallelStates,
)

RestorePolicyStr = Literal["dont_restore", "restore"]

RESTORE_POLICY: bidict[RestorePolicyStr, statemachine.QState.RestorePolicy] = bidict(
    dont_restore=statemachine.QState.RestorePolicy.DontRestoreProperties,
    restore=statemachine.QState.RestorePolicy.RestoreProperties,
)
class StateMixin(statemachine.AbstractStateMixin):
    """Mixin adding string-friendly accessors for QState child-mode."""

    def set_child_mode(self, mode: ChildModeStr | statemachine.QState.ChildMode):
        """Set child mode to use.

        Args:
            mode: child mode to use (string alias or enum value)
        """
        self.setChildMode(CHILD_MODE.get_enum_value(mode))

    def get_child_mode(self) -> ChildModeStr:
        """Return current child mode.

        Returns:
            child mode as its string alias
        """
        return CHILD_MODE.inverse[self.childMode()]
class State(StateMixin, statemachine.QState):
    """Concrete QState exposing the string-based helper API."""
    pass


if __name__ == "__main__":
    # Smoke test: construct a State (requires a Qt environment).
    reg = State()
|
from flask import Flask, request, jsonify, make_response, json
from flask_jwt_extended import (
JWTManager, create_access_token, create_refresh_token, jwt_required,
get_jwt_identity, jwt_refresh_token_required, get_raw_jwt)
from app.api.v2.models.user import UserClass as user
from app.api.errorHandler.user_validation import ValidateUser as validate
from app.database.userQuery import UserQueries as userquery
class Users():
    """User registration (post) and login (signin) endpoints."""

    def _auth_payload(self, restp):
        # Build the JWT-bearing response body shared by post() and signin().
        access_token = create_access_token(identity=restp['id'], fresh=True)
        refresh_token = create_refresh_token(identity=restp['id'])
        return {
            "firstname": restp['firstname'],
            "lastname": restp["lastname"],
            "othername": restp["othername"],
            "email": restp["email"],
            "phoneNumber": restp["phoneNumber"],
            "passportUrl": restp["passportUrl"],
            "isAdmin": restp["isAdmin"],
            "access-token": access_token,
            "refresh-token": refresh_token
        }

    def post(self):
        """Create a new user from the JSON request body.

        Returns a 201 response with the created user and fresh JWT tokens,
        or the validator's error response on bad input.
        """
        if not request.json:
            return validate().validate_user_json_format()
        userjson = request.get_json(force=True)
        firstname = userjson["firstname"]
        lastname = userjson["lastname"]
        othername = userjson["othername"]
        email = userjson["email"]
        phoneNumber = userjson["phoneNumber"]
        passportUrl = userjson["passportUrl"]
        isAdmin = userjson["isAdmin"]
        # Validate once and reuse the result (previously validated twice,
        # with mismatched argument lists between the check and the return).
        validation_error = validate().validate_user_data(
            firstname, lastname, othername, email, phoneNumber,
            passportUrl, isAdmin)
        if validation_error:
            return validation_error
        # BUG FIX: persist the requested isAdmin flag instead of a
        # hard-coded True for every new user.
        restp = userquery().create_user(firstname, lastname, othername, email,
                                        phoneNumber, passportUrl, isAdmin)
        user_list = self._auth_payload(restp)
        return make_response(
            jsonify({"status": 201}, {"data": user_list}), 201)

    def signin(self):
        """Authenticate by firstname + phone number and issue JWT tokens.

        Returns 201 with the user payload on success, 401 on failure.
        """
        if not request.json:
            return validate().validate_user_json_format()
        userjson = request.get_json(force=True)
        firstname = userjson["firstname"]
        phonenumber = userjson["phoneNumber"]
        restp = userquery().user_login(firstname, phonenumber)
        if restp['msg'] == "Failed":
            # BUG FIX: body previously claimed status 201 while the HTTP
            # status code was 401.
            return make_response(
                jsonify({"status": 401}, {
                    "error":
                    "Login failed,Either firstname or phonenumber is wrong"
                }), 401)
        else:
            user_list = self._auth_payload(restp)
            return make_response(
                jsonify({"status": 201}, {"data": user_list}), 201)
|
from app import app, celery
from app.tasks import retrieve_page, boilerpipe_extract_and_populate
from flask import render_template, flash
@app.route('/')
def welcome():
    """Render the crawler control dashboard."""
    return render_template('crawler.html')
@app.route('/initiate_crawl')
def initiate_crawl():
    """Queue the first retrieve_page task with the configured seed URL."""
    retrieve_page.delay(app.config['SEED_URL'])
    flash("Crawl: Initiate... (seed: %s)"%(app.config['SEED_URL']))
    return render_template('crawler.html')
@app.route('/pause_crawl')
def pause_crawl():
    """Pause crawling by removing workers from the 'retrieve' queue."""
    celery.control.cancel_consumer("retrieve")
    flash("Crawl: Pause ...")
    return render_template('crawler.html')
@app.route('/resume_crawl')
def resume_crawl():
    """Resume crawling by re-attaching workers to the 'retrieve' queue."""
    celery.control.add_consumer("retrieve")
    flash("Crawl: Resume ...")
    return render_template('crawler.html')
@app.route('/shutdown_crawl')
def shutdown_crawl():
    """Shut down all Celery workers and discard any queued tasks."""
    celery.control.broadcast('shutdown')
    celery.control.purge()
    flash("Crawl: Shutdown ...")
    return render_template('crawler.html')
@app.route('/start_bp_extract')
def start_bp_extract():
    """Queue the Boilerpipe extraction task over the crawled pages."""
    boilerpipe_extract_and_populate.delay()
    flash("Boilerpipe: Extraction ...")
    return render_template('crawler.html')
@app.route('/start_jt_extract')
def start_jt_extract():
    """Placeholder for jusText extraction.

    NOTE(review): unlike start_bp_extract, no task is dispatched here --
    confirm whether a jusText task was intended.
    """
    flash("jusText: Extraction ...")
    return render_template('crawler.html')
import serial
import time
import sys
# Stolen from http://stackoverflow.com/questions/472977/binary-data-with-pyserialpython-serial-port
def a2s(arr):
    """Array of integer byte values --> binary string."""
    return ''.join(map(chr, arr))
# @param block string
def print_response(block, n=0):
    """Hex-dump the first n bytes of block (the whole block when n == 0).

    Removed the unused byte_array list and the redundant pre-loop i = 0.
    The parenthesized print form behaves identically on Python 2 and 3.
    """
    if n == 0:
        n = len(block)
    for i in range(0, n):
        print("Byte %d: %s" % (i, hex(ord(block[i]))))
class Camera(object):
    """Driver for a LinkSprite-style serial JPEG camera (Python 2)."""

    def __init__(self, port):
        # 57600 baud, 1 s read timeout; exits the process if the port
        # cannot be opened.
        self.ser = serial.Serial()
        self.ser.port = port
        self.ser.baudrate = 57600
        self.ser.timeout = 1
        self.DEBUG = True
        try:
            self.ser.open()
        except serial.SerialException, e:
            sys.stderr.write("Could not open serial port %s: %s\n" % (self.ser.portstr, e))
            sys.exit(1)

    def check_response(self, response, expected):
        # TODO: not implemented; callers compare raw responses inline.
        pass

    def reset(self):
        """Send RESET and block until the camera prints its 'Init end' banner."""
        cmd = [0x56, 0x00, 0x26, 0x00]
        data = a2s(cmd)
        self.ser.write(data)
        response = self.ser.read(4)
        sys.stderr.write('DEBUG: reset returned:\n')
        print_response(response)
        # Read serial data until we see 'Init end', see LinkSprite manual, pg 7
        # TODO timeout
        buffer = [''] * 8 # Pre-allocate to fit 'Init end'
        while ''.join(buffer) != 'Init end':
            # Slide the 8-char window left one byte, then read the next byte.
            for i in range(len(buffer)-1):
                buffer[i] = buffer[i+1]
            buffer[7] = self.ser.read()
        print 'DEBUG: %s seen' % ('Init end')
        # grab the CR and LF tacked on the end so that they don't interfere
        # with anything else. Could just have a flush function.
        self.ser.read(2)
        # Manual says we must sleep for 2-3 seconds now
        print "Sleeping for 3 seconds..."
        time.sleep(3)

    def start_image(self):
        """Freeze the current frame in the camera buffer; 0 on success, -1 otherwise."""
        cmd = [0x56, 0x00, 0x36, 0x01, 0x00]
        data = a2s(cmd)
        self.ser.write(data)
        # Read the response from camera.
        response = self.ser.read(5)
        expected = [0x76, 0x00, 0x36, 0x00, 0x00]
        return (0 if [ord(r) for r in response] == expected else -1)

    def stop_image(self):
        """Resume the frame buffer after a capture; 0 on success, -1 otherwise."""
        cmd = [0x56, 0x00, 0x36, 0x01, 0x03]
        data = a2s(cmd)
        self.ser.write(data)
        # Read the response from camera.
        response = self.ser.read(5)
        expected = [0x76, 0x00, 0x36, 0x00, 0x00]
        return (0 if [ord(r) for r in response] == expected else -1)

    def get_filesize(self):
        """ Get filesize of image. Can only be called after start_image()
        @return filesize in bytes
        """
        cmd = [0x56, 0x00, 0x34, 0x01, 0x00]
        data = a2s(cmd)
        self.ser.write(data)
        # Skip the 7-byte response header, then read the 16-bit big-endian size.
        x = self.ser.read(7)
        msb = self.ser.read()
        msb = ord(msb)
        lsb = self.ser.read()
        lsb = ord(lsb)
        # Convert to filesize in bytes:
        filesize = (msb << 8) + lsb
        return filesize

    def set_baud(self):
        # TODO: not implemented; camera stays at the default 57600 baud.
        pass

    def get_image(self, filesize):
        """Read the whole JPEG from the camera in fixed-size chunks."""
        chunksize = 16
        address = 0
        image_buffer = ''
        while (address < filesize):
            image_buffer = self.get_block(address, chunksize, image_buffer)
            address += chunksize
        return image_buffer

    def get_block(self, address, chunksize, image_buffer):
        """Read one chunk at the given buffer address and append it to image_buffer."""
        interval = 0x0A # Put here out of reach intentionally.
        # Read-FBUF command; placeholder bytes are patched in below.
        cmd = [0x56, 0x00, 0x32, 0x0C, 0x00, 0x0A, 0x00, 0x00,
               0x00, # MH: start address high byte
               0x00, # ML: start address low byte
               0x00, 0x00,
               0x00, # KH: chunk length high byte
               0x00, # KL: chunk length low byte
               0x00, # XH: interval high byte
               0x00,]
        cmd[8] = (address >> 8) & 0xFF
        cmd[9] = (address >> 0) & 0xFF
        cmd[12] = (chunksize >> 8) & 0xFF
        cmd[13] = (chunksize >> 0) & 0xFF
        cmd[14] = (interval >> 8) & 0xFF
        cmd[15] = (interval >> 0) & 0xFF
        data = a2s(cmd)
        self.ser.write(data)
        # Get header
        header = self.ser.read(5)
        #if header != [0x76, 0x00, 0x32, 0x00, 0x00]:
        #    print 'incorrect header: '
        #    print header
        #print_response(header)
        #    quit()
        if self.DEBUG:
            print 'header: '
            print_response(header)
        # Get data
        data = self.ser.read(chunksize)
        if self.DEBUG:
            print 'data: '
            print_response(data)
        # Get footer
        footer = self.ser.read(5)
        if self.DEBUG:
            print 'footer: '
            print_response(footer)
        #if footer != [0x76, 0x00, 0x32, 0x00, 0x00]:
        #    print 'incorrect footer: '
        #    print_response(footer)
        #    quit()
        return image_buffer + data

    def close(self):
        """Release the serial port."""
        self.ser.close()
if __name__ == "__main__":
    # Library module only; there is no standalone entry point.
    print 'cannot run this standalone'
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from PyQt5.QtWidgets import QApplication
from models.window_main import MainWindow
if __name__ == "__main__":
    # Crash logging (disabled):
    # log_dir = os.path.join(os.getcwd(), 'crash')
    # if not os.path.exists(log_dir):
    #     os.mkdir(log_dir)
    # cgitb.enable(format='text', logdir=log_dir)
    app = QApplication(sys.argv)
    home = MainWindow()
    home.show()
    # Enter the Qt event loop and exit with its return code.
    sys.exit(app.exec_())
|
from django.conf.urls import url
from . import views
# Custom 404 handler for this URLconf.
# FIX: dropped the stray trailing semicolon (non-idiomatic in Python).
handler404 = views.e404

urlpatterns = [
    url(r'^$', views.home, name='home'),
    url(r'^news/$', views.NewsView.as_view(), name='news'),
    url(r'^contact/$', views.ContactView.as_view(), name='contact'),
    url(r'^comps/$', views.CompositionsView.as_view(), name='comps'),
    url(r'^comps/pay/(?P<pk>\d+)', views.CompositionsView.as_view()),
    url(r'^comp/(?P<pk>\d+)$', views.CompositionDetail.as_view(), name='detail'),
    url(r'^comp/(?P<pk>\d+)/pay$', views.CompositionDetail.as_view()),
    url(r'^search/$', views.search, name='search'),
]
|
from app.stories.models import User,Bot,Channel
# Seed the database with a default admin account.
# NOTE(review): plaintext credentials -- confirm hashing happens in User.save().
user1=User(userName='Admin',password="Admin")
user1.save()

#bot1=Bot(botName='SampleBot',botDescription="This is sample bot")
#bot1.save()

#channel1=Channel(channelName='Spark')
#channel1.save()
class Solution(object):
    def findMinArrowShots(self, points):
        """Minimum number of arrows needed to burst all balloons.

        Greedy: sort by right endpoint; an arrow at each chosen right
        endpoint bursts every balloon that starts at or before it.
        Endpoints are inclusive (touching balloons share an arrow).

        FIX: the previous merge-based version mutated the caller's inner
        interval lists; this version leaves `points` untouched.

        :type points: List[List[int]]
        :rtype: int
        """
        if not points:
            return 0
        # sorted() returns a new list, so the caller's ordering is preserved.
        ordered = sorted(points, key=lambda p: p[1])
        arrows = 1
        reach = ordered[0][1]  # x-coordinate of the last arrow fired
        for start, end in ordered[1:]:
            if start > reach:
                # This balloon starts beyond the last arrow; fire a new one.
                arrows += 1
                reach = end
        return arrows
|
import six.moves.urllib as urllib
import os
import sys
import tarfile
# Local cache directory and the pretrained model archive to fetch.
base_path = './pretrained_models'
filename ='faster_rcnn_inception_v2_coco_2018_01_28.tar.gz'

# Guard: urllib.request only exists on Python 3.
try:
    import urllib.request
except ImportError:
    raise ImportError('You should use Python 3.x')

if not os.path.exists(os.path.join(base_path, filename)):
    print('Downloading ' + filename)
    url_base = 'http://download.tensorflow.org/models/object_detection/faster_rcnn_inception_v2_coco_2018_01_28.tar.gz'
    urllib.request.urlretrieve(url_base, os.path.join(base_path, filename))
    print("Download " + filename + " Done")
else:
    print("You already have " + filename)

file_tar, file_tar_ext = os.path.splitext(filename) # split into file.tar and .gz
file_untar, file_untar_ext = os.path.splitext(file_tar) #split into file and .tar

# NOTE: changes the process working directory for the rest of the script.
os.chdir(base_path)
if not os.path.isdir(os.path.join(os.getcwd(), file_untar)):
    if file_tar_ext == '.gz' and file_untar_ext == '.tar': # check if file had format .tar.gz
        print('Unpacking ' + filename)
        tar = tarfile.open(filename)
        tar.extractall(path='./') # untar file into same directory
        tar.close()
        os.chdir(file_untar) # This fails if file.tar.gz has different name compared to the untarred folder e.g.. file1 instead of file
        print('Unpack ' + filename + " Done")
    else:
        raise Exception("File must have '.tar.gz' extension")
else:
    print(filename + " has already been unpacked")
|
#!/usr/bin/python3
import jinja2
import os
import logging as log
import sys
log.basicConfig(stream=sys.stderr, level=os.environ.get("LOG_LEVEL", "WARNING"))
def convert(src, dst):
    """Render the Jinja2 template at *src* with the environment and write *dst*.

    FIX: the original left both file handles open
    (`open(dst, "w").write(...)`); use context managers so they are
    closed and the output is flushed deterministically.
    """
    logger = log.getLogger("convert()")
    logger.debug("Source: %s, Destination: %s", src, dst)
    with open(src) as src_file:
        template = jinja2.Template(src_file.read())
    with open(dst, "w") as dst_file:
        dst_file.write(template.render(**os.environ))
convert("/unbound.conf", "/etc/unbound/unbound.conf")
# BUG FIX: execv's argv list starts at argv[0] (the program name). The
# original passed "-c /etc/unbound/unbound.conf" as a single argv[0]
# element, so unbound never received the -c flag or the config path.
os.execv("/usr/sbin/unbound", ["/usr/sbin/unbound", "-c", "/etc/unbound/unbound.conf"])
|
import unittest
from format_price import format_price
class FormatPriceTest(unittest.TestCase):
    """format_price: thousands separated by spaces, 2-decimal rounding,
    trailing zero decimals dropped, None for non-numeric input."""

    def test_integer_part_is_zero(self):
        self.assertEqual(format_price(0.10), '0.10')

    def test_decimal_part_is_zero(self):
        # Zero decimal part is dropped entirely.
        self.assertEqual(format_price(3245.000000), '3 245')

    def test_input_is_int(self):
        self.assertEqual(format_price(12345), '12 345')

    def test_input_is_float(self):
        # Decimals are rounded to two places.
        self.assertEqual(format_price(12345.6789), '12 345.68')

    def test_input_is_minus(self):
        self.assertEqual(format_price(-76543.21), '-76 543.21')

    def test_input_is_empty(self):
        # Whitespace-only input is treated as invalid.
        self.assertIsNone(format_price(' '))

    def test_input_without_decimal_part(self):
        self.assertEqual(format_price(0.), '0')

    def test_input_without_integer_part(self):
        self.assertEqual(format_price(.567), '0.57')

    def test_input_is_string(self):
        self.assertIsNone(format_price('string'))
if __name__ == '__main__':
    # Run the suite when executed directly.
    unittest.main()
|
# -*- coding: utf-8 -*-
# Python interview question: write code that removes duplicates from a list.
l = [1, 3, 2, 'a', 'z', 'd', 3, 'd', 'z']

# Via set (Python 2). NOTE(review): neither approach preserves the
# original element order on CPython 2.
print list(set(l))

# Via dict keys built from the list elements.
print {}.fromkeys(l).keys()
|
"""
Titanic Data - exploring our titanic data set
"""
import pandas as pd
import numpy as np
def main():
    """Load the Titanic training set and print a few exploratory views."""
    # import data
    df = pd.read_csv('../data/titanic-train.csv')
    print('------------COLUMNS------------')
    print(df.columns)

    # Passengers aged strictly between 70 and 75.
    btwn_70_and_75 = df[(df['Age'] > 70) & (df['Age'] < 75)]
    print('------------AGES BETWEEN 70 AND 75------------')
    print(btwn_70_and_75.head())

    embarked_vals = df['Embarked'].unique()
    print('------------EMBARKED VALUES------------')
    print(embarked_vals)

    sorted_by_age = df.sort_values('Age', ascending=False)
    print('------------AGES SORTED------------')
    print(sorted_by_age.head())
    print(sorted_by_age.tail())

    # inspect the correlations
    # BUG FIX: sort_values was referenced without calling it, so the bound
    # method object was printed instead of the sorted correlation series.
    # NOTE(review): on pandas >= 2.0 df.corr() raises for non-numeric
    # columns -- may need numeric_only=True; verify the pinned version.
    corr_survived = df.corr()['Survived'].sort_values()
    print('------------CORR SURVIVED-----------------')
    print(corr_survived)
if __name__ == '__main__':
    # Script entry point.
    main()
from django.contrib import admin
from webapp.models import News, Category
# Expose News and Category in the Django admin with default ModelAdmin.
admin.site.register(News)
admin.site.register(Category)
|
import fileinput
from functools import reduce
from itertools import permutations
def parse(l):
    """Split an input line into (signal_patterns, output_patterns) tuples."""
    signals, outputs = l.split(' | ')
    return tuple(signals.split()), tuple(outputs.split())
'''
Seven-segment layout used below:
 a
b c
 d
e f
 g
'''
# Maps each canonical (sorted) lit-segment string to its digit.
toi = {
    'abcefg': 0,
    'cf': 1,
    'acdeg': 2,
    'acdfg': 3,
    'bcdf': 4,
    'abdfg': 5,
    'abdefg': 6,
    'acf': 7,
    'abcdefg': 8,
    'abcdfg': 9
}

# The seven segment/wire labels, in canonical order.
chs = 'abcdefg'
def mapped(toc, pat):
    """Translate each wire of *pat* through mapping *toc*, sorted canonically."""
    return ''.join(sorted(toc[ch] for ch in pat))
def correct_mapping(toc, ins):
    """True when every signal pattern translates to a known digit pattern."""
    return all(mapped(toc, pat) in toi for pat in ins)
def get_sum(toc, out):
    """Decode the output patterns into a single base-10 integer."""
    value = 0
    for pat in out:
        value = value * 10 + toi[mapped(toc, pat)]
    return value
def solve():
    """Read puzzle lines from stdin/argv files, brute-force the wire
    permutation per line, and print the sum of decoded outputs."""
    pats = [parse(l) for l in fileinput.input()]
    tot = 0
    for pat, out in pats:
        # Try all 7! wire permutations until one maps every signal
        # pattern onto a valid digit.
        for p in permutations(chs):
            toc = {p[c]: chs[c] for c in range(7)}
            if correct_mapping(toc, pat):
                tot += get_sum(toc, out)
                break
    print(tot)


solve()
|
from main import app, get_db
from starlette.testclient import TestClient
import pytest
import asyncio
from databases import Database
# Test database; force_rollback discards all writes when the connection closes.
database = Database("sqlite:///./test_test.db", force_rollback=True)

# @app.on_event("startup")
# async def startup():
#     await database.connect()

# @app.on_event("shutdown")
# async def shutdown():
#     await database.disconnect()


def db():
    """Dependency override returning the test database instance."""
    return database
@pytest.fixture
def client():
    """TestClient with the app's DB dependency swapped for the test database.

    The TestClient context manager triggers the app's startup/shutdown
    events; overrides are cleared again after each test.
    """
    app.dependency_overrides[get_db] = db
    with TestClient(app) as client:
        yield client
    app.dependency_overrides = {}
def test_create_notes(client: TestClient):
    """POSTing a note returns the stored note with an assigned id."""
    r = client.post("/notes/", json={"text": "bullshit", "completed": True})
    assert r.status_code == 200
    # Parse the body once instead of re-parsing it on every assertion.
    body = r.json()
    assert body["id"] is not None
    assert body["text"] == "bullshit"
    assert body["completed"]
def test_list_notes(client: TestClient):
    """GET /notes/ returns an empty list on a fresh database.

    NOTE(review): relies on force_rollback discarding the note created in
    test_create_notes -- confirm rollback applies between test functions.
    """
    r = client.get("/notes/")
    assert r.status_code == 200
    assert len(r.json()) == 0
|
import streamlit as st
import pandas as pd
import numpy as np
import faiss
'''
Similar Movies Recommender
==========================
Search for a title and get recommendations for similar movies.
------
## Recommendations
'''
def load_data():
    """Load the movie index, embedding matrices and FAISS ANN indexes from ./output."""
    # Import movie indexes
    movie_index = pd.read_pickle('output/embedding_index.pkl')

    # Import embedding matrices (three arrays saved sequentially into one
    # .npy stream, read back in the same order).
    with open('output/embedding_matrix.npy', 'rb') as f:
        content_matrix = np.load(f)
        cb_matrix = np.load(f)
        hybrid_matrix = np.load(f)

    # Import ANN indexes
    content_ann = faiss.read_index("output/content_ann.bin")
    cb_ann = faiss.read_index("output/cb_ann.bin")
    hybrid_ann = faiss.read_index("output/hybrid_ann.bin")

    # NOTE(review): consider a Streamlit cache decorator so this is not
    # re-read from disk on every rerun -- verify desired freshness.
    data = dict(
        movie_index=movie_index,
        embedding_matrix=(content_matrix, cb_matrix, hybrid_matrix),
        ann_index=(content_ann, cb_ann, hybrid_ann)
    )
    return data
# Dataset
data = load_data()
movie_index = data['movie_index']

# Movie list: id -> title lookup used for the selectbox labels.
movie_ids = movie_index.index.to_list()
movie_names = movie_index['title'].values
movies = dict(zip(movie_ids, movie_names))

# Parameters: algorithm choice maps to an index into the matrix/ann tuples.
algs = {'Content Based': 0, 'Collaborative Filtering': 1, 'Hybrid': 2}
RECSYS = st.sidebar.radio('Recommender:', list(algs.keys()), key='recommender')
st.sidebar.markdown('------')
K = st.sidebar.slider('Num.Recommendations:', min_value=5, max_value=30, value=10, step=5, key='num_neighbors')
st.sidebar.markdown('------')
MOVIE_ID = st.sidebar.selectbox('Movie:', movie_ids, format_func=lambda x: movies[x], key='selected_movie')

# Selected options
row = movie_index.loc[MOVIE_ID].row
embedding_matrix = data['embedding_matrix'][algs[RECSYS]]
ann_index = data['ann_index'][algs[RECSYS]]

# Recommendations: K+1 neighbours are fetched because the query movie comes
# back as its own nearest neighbour and is dropped via .drop(0) below.
dist, idx = ann_index.search(embedding_matrix[[row], :], K+1)
neighbors = movie_index.iloc[idx.flatten()].copy()
neighbors['similarity'] = dist.flatten()
st.table(neighbors[['title','similarity']].reset_index(drop=True).drop(0))
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from torch.autograd import Variable
class TextRNN(nn.Module):
    """Bidirectional-LSTM text classifier:
    embedding -> BiLSTM -> dropout -> linear + softmax over 10 classes."""

    def __init__(self):
        super(TextRNN, self).__init__()
        # Vocabulary of 5000 tokens, 64-dim embeddings.
        self.embedding = nn.Embedding(5000, 64)
        self.rnn = nn.LSTM(input_size=64, hidden_size=128, bidirectional=True)
        #self.rnn = nn.GRU(input_size= 64, hidden_size= 128, num_layers= 2, bidirectional= True)
        # BUG FIX: original was nn.Sequential(..., F.softmax(input())) --
        # input() is the builtin that blocks reading stdin at construction
        # time, and F.softmax returns a tensor, not a Module. Use the
        # nn.Softmax module with an explicit class dimension.
        self.f1 = nn.Sequential(nn.Linear(256, 10),
                                nn.Softmax(dim=1))

    def forward(self, x):
        x = self.embedding(x)   # (batch, seq_len) ids -> (batch, seq_len, 64)
        x = x.permute(1, 0, 2)  # nn.LSTM default layout: (seq_len, batch, emb)
        # h_n: (num_directions, batch, 128) final hidden state per direction.
        x, (h_n, c_n) = self.rnn(x)
        # BUG FIX: pass training flag so dropout is disabled in eval mode
        # (F.dropout defaults to training=True unconditionally).
        final_feature_map = F.dropout(h_n, 0.8, training=self.training)
        # Concatenate both directions -> (batch, 256).
        feature_map = torch.cat([final_feature_map[i, :, :] for i in range(final_feature_map.shape[0])], dim = 1)
        final_out = self.f1(feature_map)
        return final_out
class TextCNN(nn.Module):
    """1-D CNN text classifier: embedding -> Conv1d/ReLU/MaxPool -> linear."""

    def __init__(self):
        super(TextCNN, self).__init__()
        self.embedding = nn.Embedding(5000, 64)
        # Kernel 5 over a length-600 sequence leaves 596 positions; the
        # max-pool then collapses them to one 256-dim feature vector, so
        # inputs are assumed to have seq_len == 600.
        self.conv = nn.Sequential(nn.Conv1d(in_channels= 64,
                                            out_channels= 256,
                                            kernel_size= 5),
                                  nn.ReLU(),
                                  nn.MaxPool1d(kernel_size= 596))
        self.f1 = nn.Linear(256, 10)

    def forward(self, x):
        x = self.embedding(x)   # (batch, seq_len, emb), e.g. 64*600*64
        x = x.permute(0, 2, 1)  # (batch, emb, seq_len) as Conv1d expects
        x = self.conv(x)        # (batch, 256, 1) after conv + ReLU + pool
        x = x.view(-1, x.size(1))  # (batch, 256)
        # NOTE(review): F.dropout defaults to training=True, so dropout also
        # fires in eval mode here -- confirm whether that is intended.
        x = F.dropout(x, 0.8)
        x = self.f1(x)          # (batch, num_classes)
        return x
if __name__ == '__main__':
    # Quick structural check of the model definition.
    net = TextRNN()
    print(net)
import tempfile
import os
from page_loader import engine
from page_loader.storage import url_normalization
# Fixture page URL and the local names its downloaded resources map to.
URL = 'https://gevhoo.github.io/python-project-lvl3/tests/fixtures/index'
HTML_FILE_NAME = 'gevhoo-github-io-python-project-lvl3-tests-fixtures-index.html' # noqa: E501
RESOURCE1 = 'gevhoo-github-io-python-project-lvl3-tests-fixtures-index_files/style-style.css' # noqa: E501
RESOURCE2 = 'gevhoo-github-io-python-project-lvl3-tests-fixtures-index_files/image-python.png' # noqa: E501
RESOURCE3 = 'gevhoo-github-io-python-project-lvl3-tests-fixtures-index_files/image-requests.jpg' # noqa: E501
RESOURCE4 = 'gevhoo-github-io-python-project-lvl3-tests-fixtures-index_files/image-bs.jpg' # noqa: E501
def test_run():
    """End-to-end check: the downloaded page references the local resource paths.

    NOTE(review): hits the live fixture URL over the network.
    """
    with tempfile.TemporaryDirectory() as tmpdir:
        engine.run(url_normalization(URL), str(tmpdir))
        html_file_path = os.path.join(str(tmpdir), HTML_FILE_NAME)
        with open(html_file_path) as f:
            html_file = f.read()
        # Each asset link must be rewritten to its downloaded local copy.
        assert os.path.join(str(tmpdir), RESOURCE1) in html_file
        assert os.path.join(str(tmpdir), RESOURCE2) in html_file
        assert os.path.join(str(tmpdir), RESOURCE3) in html_file
        assert os.path.join(str(tmpdir), RESOURCE4) in html_file
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from collections import OrderedDict
import fnmatch
import obspy
import os
import re
import shelve
import warnings
import uuid
import event_query as search
class EventShelveException(Exception):
    """Exception raised by this module."""
class EventShelveWarning(UserWarning):
    """Warning raised by this module."""
class EventShelve(object):
def __init__(self, shelve_path, root_folder, quakeml_glob_expr,
regex_expr=None):
"""
Initializes the EventShelve object.
:param shelve_path:
:param root_folder:
:param quakeml_glob_expr:
:param regex_expr:
"""
self._s = shelve.open(shelve_path)
# First step is to get all files.
quakeml_filenames = []
for root, _, filenames in os.walk(root_folder):
for filename in fnmatch.filter(filenames, quakeml_glob_expr):
quakeml_filenames.append(os.path.abspath(
os.path.join(root, filename)))
quakeml_filenames = set(quakeml_filenames)
filenames_in_shelve = set(self._s.keys())
# Delete all files no longer available.
to_be_removed = filenames_in_shelve - quakeml_filenames
for filename in to_be_removed:
del self._s[filename]
filenames_in_shelve = set(self._s.keys())
# Find files that need to be added.
to_be_added = quakeml_filenames - filenames_in_shelve
for _i, filename in enumerate(to_be_added):
print("Indexing file %i: %s ..." % (_i, filename))
cat = obspy.readEvents(filename)
if len(cat) == 0:
continue
elif len(cat) > 1:
msg = ("File '%s' contains %i events. Only one event per "
"file is supported. Will be skipped." %
(filename, len(cat)))
warnings.warn(msg)
continue
ev = cat[0]
# Get the event id used for that event.
event_id = None
if regex_expr is not None:
match = re.match(regex_expr, ev.resource_id.id)
if match:
try:
event_id = match.group(1)
except IndexError:
pass
if not event_id:
event_id = str(uuid.uuid4())
try:
origin = ev.preferred_origin() or ev.origins[0]
magnitude = ev.preferred_magnitude() or ev.magnitudes[0]
#remove any event file with missing origin or magnitude details
except:
os.remove(filename)
print(filename+" Removed!")
continue
event_info = {
"event_id": event_id,
"latitude": origin.latitude,
"longitude": origin.longitude,
"time": origin.time,
"depth_in_km": origin.depth / 1000.0,
"magnitude": magnitude.mag,
"magnitude_type": magnitude.magnitude_type
}
self._s[filename] = event_info
# Copy to in memory dictionary.
self.events = OrderedDict(self._s)
# Close shelve.
self._s.close()
def query(self, starttime=None, endtime=None, minlatitude=None,
maxlatitude=None, minlongitude=None, maxlongitude=None,
latitude=None, longitude=None, maxradius=None, minradius=None,
mindepth=None, maxdepth=None, minmagnitude=None,
maxmagnitude=None, limit=None, offset=1, orderby="time",
event_id=None, query_id=None, **kwargs):
found_events={}
if float(minlatitude) < -90 or float(maxlatitude) > 90 or float(minlongitude) < -180 or float(
maxlongitude) > 180:
# only process a search if min/max values does not exceed -360 or 360 for longitude and -180 or 180 for latitude
if float(minlatitude) >= -180 or float(maxlatitude) <= 180 or float(minlongitude) >= -360 or float(
maxlongitude) <= 360:
found_events = search.processEventsQuery(self,starttime, endtime, minlatitude, maxlatitude, minlongitude, maxlongitude,latitude, longitude, maxradius, minradius,
mindepth, maxdepth, minmagnitude,maxmagnitude, limit, offset, event_id)
else:
return None
else:
found_events = search.findEvents(self,None,starttime, endtime, minlatitude, maxlatitude, minlongitude, maxlongitude,latitude, longitude, maxradius, minradius,
mindepth, maxdepth, minmagnitude,maxmagnitude, limit, offset, event_id)
print "Found events:", len(found_events)
if not found_events:
return None
# Sort the events.
if orderby == "time":
found_events = OrderedDict(sorted(found_events.iteritems(),
key=lambda x: x[1]["time"]))
elif orderby == "time-asc":
found_events = OrderedDict(sorted(found_events.iteritems(),
key=lambda x: x[1]["time"])[::-1])
elif orderby == "magnitude":
found_events = OrderedDict(sorted(found_events.iteritems(),
key=lambda x: x[1]["time"]))
elif orderby == "magnitude-asc":
found_events = OrderedDict(sorted(
found_events.iteritems(),
key=lambda x: x[1]["magnitude"])[::-1])
else:
msg = ("orderby '%s' is not valid. Valid orderings: 'time', "
"'time-asc', 'magnitude', 'magnitude-asc'" % orderby)
raise ValueError(msg)
if query_id is None:
query_id = "smi:local/%s" % str(uuid.uuid4())
else:
query_id = "smi:" + query_id.replace("http://", "")
cat_str = ("<?xml version='1.0' encoding='utf-8'?>\n"
'<ns0:quakeml xmlns:ns0="http://quakeml.org/xmlns/quakeml/'
'1.2" xmlns="http://quakeml.org/xmlns/bed/1.2">\n'
' <eventParameters publicID="%s">\n'
" {events}\n"
" </eventParameters>\n"
"</ns0:quakeml>" % query_id)
pattern = re.compile(r"<event\s.*<\/event>", re.DOTALL)
event_strings = []
for filename in found_events.iterkeys():
with open(filename, "rt") as fh:
event_str = fh.read()
event_str = re.findall(pattern, event_str)[0]
if event_str is None:
msg = ("Could not extract event string from event '%'. "
"Will be skipped." % filename)
warnings.warn(EventShelveWarning)
continue
event_strings.append(event_str)
cat_str = cat_str.format(events="\n ".join(event_strings))
return cat_str
|
import pygame
from pygame.display import *
pygame.init()
# 800x600 game window shared module-wide; Buttons draw onto it directly.
screen = pygame.display.set_mode((800,600))
pygame.display.set_caption("Checkers")
class Button:
    """Clickable rectangle with hover highlight, drawn immediately on creation.

    status becomes True once the mouse button is pressed while hovering;
    chosen forces the highlighted rendering regardless of the cursor.
    """

    def __init__(self,text,x,y,width,height,color1,color2):
        # Maps board cell values / commands to their display characters.
        self.dct = {'2':'X','1':'x','0':'.','-1':'o','-2':'O','Quit':'Quit', 'Stay':'Stay'}
        self.text = text
        self.x = x
        self.y = y
        self.width = width
        self.height = height
        self.color1 = color1   # normal fill
        self.color2 = color2   # hover/selected fill
        self.status = False
        self.chosen = False
        self._render()

    def update(self):
        """Redraw the button, refreshing hover/pressed state."""
        self._render()

    def _render(self):
        # Shared by __init__ and update() (previously duplicated verbatim):
        # draw the rect, detect hover/click, and blit the label.
        mPos = pygame.mouse.get_pos()
        mPressed = pygame.mouse.get_pressed()
        # BUG FIX: the vertical bound previously used self.width instead of
        # self.height, so non-square buttons had a wrong hit box.
        if self.chosen or (self.x < mPos[0] < self.x+self.width and self.y < mPos[1] < self.y+self.height):
            pygame.draw.rect(screen, self.color2, (self.x,self.y,self.width,self.height))
            if mPressed[0] == 1:
                self.status = True
        else:
            pygame.draw.rect(screen, self.color1, (self.x,self.y,self.width,self.height))
        writing = pygame.font.Font("freesansbold.ttf",16)
        self.startSurface, self.startRect = TextBox(self.dct[self.text], writing)
        self.startRect = ((self.x+(self.width/4)),(self.y+(self.height/2)))
        screen.blit(self.startSurface, self.startRect)
def getpygame():
    """Expose the initialised pygame module to importing code."""
    module = pygame
    return module
def getScreen():
    """Return the shared display surface created at import time."""
    display = screen
    return display
def TextBox(string, font):
    """Render ``string`` in black with ``font``; return (surface, bounding rect)."""
    rendered = font.render(string, True, (0, 0, 0))
    return rendered, rendered.get_rect()
def createButton(text, x, y, width, height, color1, color2):
    """Factory helper: build (and immediately draw) a Button.

    Kept for backward compatibility with callers written before the Button
    class existed.  (The superseded inline implementation that used to sit
    here as commented-out code has been removed.)
    """
    return Button(text, x, y, width, height, color1, color2)
def createFrame(x, y, width, height, color):
    """Draw a plain rectangle (with an empty label blit, mirroring Button)."""
    pygame.draw.rect(screen, color, (x, y, width, height))
    font = pygame.font.Font("freesansbold.ttf", 16)
    surface, rect = TextBox("", font)
    # The rect is replaced by the same centre-offset tuple Button uses.
    rect = ((x + (width / 4)), (y + (height / 2)))
    screen.blit(surface, rect)
from django.conf.urls import url
from . import views
# URL routes for the vacancy / job-training views.
# FIX: every pattern is now anchored with a trailing '$'; the originals were
# unanchored, so e.g. 'crear-vacante/anything-else' also matched.
urlpatterns = [
    # vacantes
    url(r'^crear-vacante/$',
        views.VacanteCreateView.as_view(),
        name='crear_vacante'),
    url(r'^editar-vacante/(?P<pk>\d+)/$',
        views.VacanteUpdateView.as_view(),
        name='editar_vacante'),
    url(r'^lista-vacantes/$',
        views.VacanteListView.as_view(),
        name='lista_vacantes'),
    # formacion para el trabajo
    url(r'^crear-formacion-trabajo/$',
        views.FormacionTrabajoCreateView.as_view(),
        name='crear_formacion_trabajo'),
    url(r'^editar-formacion-trabajo/(?P<pk>\d+)/$',
        views.FormacionTrabajoUpdateView.as_view(),
        name='editar_formacion_trabajo'),
    url(r'^lista-formaciones-trabajo/$',
        views.FormacionTrabajoListView.as_view(),
        name='lista_formaciones_trabajo')
]
|
# Exhaustive search for the densest sugar water that fits the constraints:
# water comes in 100a / 100b gram units, sugar in c / d gram units, at most
# e grams of sugar dissolve per 100 grams of water, and the total mass may
# not exceed f grams.
a, b, c, d, e, f = map(int, input().split())
limit = e / (100 + e)  # concentration at saturation — upper bound on density
best_density = 0
best_sugar, best_water = 0, 0
for na in range(f // (100 * a) + 1):
    for nb in range((f - (100 * a * na)) // (100 * b) + 1):
        water = 100 * (na * a + nb * b)
        dissolvable = (na * a + nb * b) * e  # grams of sugar this water can hold
        for nc in range(dissolvable // c + 1):
            for nd in range((dissolvable - nc * c) // d + 1):
                sugar = nc * c + nd * d
                # Skip over-capacity mixes and the degenerate no-water case.
                if f < water + sugar or water == 0:
                    continue
                density = sugar / (water + sugar)
                if best_density <= density <= limit:
                    best_density = density
                    best_sugar = sugar
                    best_water = water
print(best_water + best_sugar, best_sugar)
|
from django.apps import AppConfig
class OneEndpointPracticeConfig(AppConfig):
    """Django AppConfig for the one_endpoint_practice application."""
    name = 'one_endpoint_practice'
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Post(models.Model):
    """A user-authored post with a title, body text and optional image."""

    user = models.ForeignKey(User, on_delete=models.CASCADE)  # author; posts are deleted with the user
    title = models.CharField(max_length = 50)
    description = models.TextField()
    # Optional illustration; uploaded files land under MEDIA_ROOT/pics
    image = models.ImageField(blank=True, upload_to='pics')
    date_posted = models.DateTimeField(auto_now_add = True)  # set once at creation
    is_public = models.BooleanField(default = True)  # visibility flag, public by default
    def __str__(self):
        # Human-readable representation used by the admin and the shell
        return self.title
class Comments(models.Model):
    """A comment left by a user on a Post."""

    post = models.ForeignKey(Post, on_delete = models.CASCADE)  # parent post; comments die with it
    commented_by = models.ForeignKey(User, on_delete=models.CASCADE)  # comment author
    date_commented = models.DateTimeField(auto_now_add=True)  # set once at creation
    comment = models.TextField()
    class Meta:
        # Newest comments first
        ordering = ['-date_commented']
|
# Generated by Django 2.2.1 on 2019-06-23 12:46
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: replace senseid.project with a FK to channelaudit.CaseProjectDevice."""

    dependencies = [
        ('channelaudit', '0002_auto_20190623_1246'),
        ('senseid', '0001_initial'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='senseid',
            name='project',
        ),
        migrations.AddField(
            model_name='senseid',
            name='project_device',
            # default=1 backfills existing rows; preserve_default=False drops
            # the default from the model state afterwards
            field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='channelaudit.CaseProjectDevice', verbose_name='项目'),
            preserve_default=False,
        ),
    ]
|
"""Training Script"""
import os
import shutil
import numpy as np
import pdb
import random
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim
from torch.utils.data import DataLoader
from torch.optim.lr_scheduler import MultiStepLR
import torch.backends.cudnn as cudnn
from torchvision.utils import make_grid
from tensorboardX import SummaryWriter
from config import ex
from util.utils import set_seed, CLASS_LABELS, date
from dataloaders_medical.prostate import *
# from models.fewshot import FewShotSeg
from settings import Settings
import few_shot_segmentor as fs
from torch.optim import lr_scheduler
from nn_common_modules import losses
def overlay_color(img, mask, label, scale=50):
    """
    Overlay a prediction mask and a ground-truth label onto a grayscale image.

    :param img: [1, H, W] image tensor
    :param mask: [1, H, W] prediction mask
    :param label: [1, H, W] ground-truth label
    :param scale: ignored — immediately replaced by the image mean (kept only
        for signature compatibility)
    :return: single-element list holding the [3, H, W] overlay tensor
    """
    # NOTE(review): the scale argument is discarded and replaced by the mean
    # image intensity, exactly as in the original.
    scale = np.mean(img.cpu().numpy())
    mask_2d = mask[0]
    label_2d = label[0]
    blank = torch.zeros_like(mask_2d)
    # Mask overlay occupies the red channel only.
    mask_rgb = torch.stack([mask_2d, blank, blank], dim=0)
    # NOTE(review): the original built this by mutating a shared list, so the
    # label overlay also carries the mask in its red channel — reproduced
    # deliberately to keep behavior identical.
    label_rgb = torch.stack([mask_2d, label_2d, blank], dim=0)
    img_rgb = torch.cat([img, img, img], dim=0)
    overlaid = img_rgb + mask_rgb.float() * scale + label_rgb.float() * scale
    return [overlaid]
@ex.automain
def main(_run, _config, _log):
    """Sacred entry point: train FewShotSegmentorDoubleSDnet on the configured dataset.

    :param _run: sacred Run object (observers, experiment sources)
    :param _config: sacred configuration dict (seed, gpu_id, dataset, batch_size, ...)
    :param _log: sacred logger
    """
    # Load the grouped settings used to build the network and the optimizer.
    settings = Settings()
    common_params, data_params, net_params, train_params, eval_params = settings['COMMON'], settings['DATA'], settings[
        'NETWORK'], settings['TRAINING'], settings['EVAL']
    if _run.observers:
        # Prepare the observer directory: snapshot folder plus a copy of every
        # experiment source file, then drop sacred's default _sources dir.
        os.makedirs(f'{_run.observers[0].dir}/snapshots', exist_ok=True)
        for source_file, _ in _run.experiment_info['sources']:
            os.makedirs(os.path.dirname(f'{_run.observers[0].dir}/source/{source_file}'),
                        exist_ok=True)
            _run.observers[0].save_file(source_file, f'source/{source_file}')
        shutil.rmtree(f'{_run.observers[0].basedir}/_sources')
    # Reproducibility and device setup.
    set_seed(_config['seed'])
    cudnn.enabled = True
    cudnn.benchmark = True
    torch.cuda.set_device(device=_config['gpu_id'])
    torch.set_num_threads(1)

    _log.info('###### Load data ######')
    data_name = _config['dataset']
    if data_name == 'BCV':
        make_data = meta_data
    else:
        print(f"data name : {data_name}")
        raise ValueError('Wrong config for dataset!')
    # Only the training split is consumed below; val/test are built but unused here.
    tr_dataset, val_dataset, ts_dataset = make_data(_config)
    trainloader = DataLoader(
        dataset=tr_dataset,
        batch_size=_config['batch_size'],
        shuffle=True,
        num_workers=_config['n_work'],
        pin_memory=False, #True load data while training gpu
        drop_last=True
    )

    _log.info('###### Create model ######')
    model = fs.FewShotSegmentorDoubleSDnet(net_params).cuda()
    model.train()

    _log.info('###### Set optimizer ######')
    # Separate Adam optimizers and step schedulers for the conditioner and
    # segmentor sub-networks.
    optim = torch.optim.Adam
    optim_args = {"lr": train_params['learning_rate'],
                  "weight_decay": train_params['optim_weight_decay'],}
                  # "momentum": train_params['momentum']}
    optim_c = optim(list(model.conditioner.parameters()), **optim_args)
    optim_s = optim(list(model.segmentor.parameters()), **optim_args)
    scheduler_s = lr_scheduler.StepLR(optim_s, step_size=100, gamma=0.1)
    scheduler_c = lr_scheduler.StepLR(optim_c, step_size=100, gamma=0.1)
    criterion = losses.DiceLoss()

    if _config['record']: ## tensorboard visualization
        _log.info('###### define tensorboard writer #####')
        _log.info(f'##### board/train_{_config["board"]}_{date()}')
        writer = SummaryWriter(f'board/train_{_config["board"]}_{date()}')

    iter_print = _config["iter_print"]
    iter_n_train = len(trainloader)

    _log.info('###### Training ######')
    for i_epoch in range(_config['n_steps']):
        epoch_loss = 0
        for i_iter, sample_batched in enumerate(trainloader):
            # Prepare input
            s_x = sample_batched['s_x'].cuda()  # [B, Support, slice_num=1, 1, 256, 256]
            X = s_x.squeeze(2)  # [B, Support, 1, 256, 256]
            s_y = sample_batched['s_y'].cuda()  # [B, Support, slice_num, 1, 256, 256]
            Y = s_y.squeeze(2)  # [B, Support, 1, 256, 256]
            Y = Y.squeeze(2)  # [B, Support, 256, 256]
            q_x = sample_batched['q_x'].cuda()  # [B, slice_num, 1, 256, 256]
            query_input = q_x.squeeze(1)  # [B, 1, 256, 256]
            q_y = sample_batched['q_y'].cuda()  # [B, slice_num, 1, 256, 256]
            y2 = q_y.squeeze(1)  # [B, 1, 256, 256]
            y2 = y2.squeeze(1)  # [B, 256, 256]
            y2 = y2.type(torch.LongTensor).cuda()

            # Run the conditioner once per support shot (image + its mask
            # stacked channel-wise) and collect the produced weights.
            entire_weights = []
            for shot_id in range(_config["n_shot"]):
                input1 = X[:, shot_id, ...]  # use 1 shot at first
                y1 = Y[:, shot_id, ...]  # use 1 shot at first
                condition_input = torch.cat((input1, y1.unsqueeze(1)), dim=1)
                weights = model.conditioner(condition_input)  # 2, 10, [B, channel=1, w, h]
                entire_weights.append(weights)
            # pdb.set_trace()
            # Average the per-shot conditioner weights channel-wise.
            # NOTE(review): 9 appears to be the number of weight tensors the
            # conditioner emits (plus a trailing None) — confirm against
            # few_shot_segmentor before changing.
            avg_weights=[[],[None, None, None, None]]
            for i in range(9):
                weight_cat = torch.cat([weights[0][i] for weights in entire_weights],dim=1)
                avg_weight = torch.mean(weight_cat,dim=1,keepdim=True)
                avg_weights[0].append(avg_weight)
            avg_weights[0].append(None)

            # Segment the query slice using the averaged conditioner weights.
            output = model.segmentor(query_input, avg_weights)
            loss = criterion(F.softmax(output, dim=1), y2)

            # Joint update of both sub-networks from the same loss.
            optim_s.zero_grad()
            optim_c.zero_grad()
            loss.backward()
            optim_s.step()
            optim_c.step()
            # NOTE(review): accumulates the loss *tensor*, not loss.item(),
            # which keeps autograd graphs alive until the epoch ends.
            epoch_loss += loss

            if iter_print:
                print(f"train, iter:{i_iter}/{iter_n_train}, iter_loss:{loss}", end='\r')

        scheduler_c.step()
        scheduler_s.step()
        print(f'step {i_epoch+1}: loss: {epoch_loss} ')

        if _config['record']:
            # Log an overlay of the last batch's query prediction to tensorboard.
            batch_i = 0
            frames = []
            query_pred = output.argmax(dim=1)
            query_pred = query_pred.unsqueeze(1)
            frames += overlay_color(q_x[batch_i,0], query_pred[batch_i].float(), q_y[batch_i,0])
            # frames += overlay_color(s_xi[batch_i], blank, s_yi[batch_i], scale=_config['scale'])
            visual = make_grid(frames, normalize=True, nrow=2)
            writer.add_image("train/visual", visual, i_epoch)

        # Overwrite the single rolling checkpoint each epoch.
        save_fname = f'{_run.observers[0].dir}/snapshots/last.pth'
        torch.save(model.state_dict(),save_fname)
|
from multiprocessing import Process
import os
import time
class MyNewProcess(Process):
    """Process subclass demo: run() is invoked automatically by p.start()."""

    def run(self):
        # Count to five, one tick per second.
        for tick in range(5):
            print("--- ", tick)
            time.sleep(1)
def run_pro(name):
    """Child-process entry point: report the given name and the child's PID."""
    pid = os.getpid()
    print("Run child process %s ( %s )" % (name, pid))
if __name__ == "__main__":
    print("Parent process %s." % os.getpid())
    # First way to create a process (cross-platform): target callable + args.
    worker = Process(target=run_pro, args=("test",))
    print("Child process will start")
    worker.start()  # launch the child
    # worker.terminate() would end the child immediately
    worker.join(1)  # wait for the child; give up after 1 second
    print("Child process end")
    # Second way to create a process: subclass Process and override run().
    second = MyNewProcess()
    print("--- new process ---")
    second.start()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.