text stringlengths 38 1.54M |
|---|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 18 10:51:44 2020
@author: kerstin
"""
from influxdb import InfluxDBClient
import datetime
import pandas as pd
#import numpy as np
from config import Config
#define storing time as timebase in influx and grafana
# One shared timestamp so every point written in this run lands on the same
# instant in InfluxDB/Grafana.
storingtime = datetime.datetime.utcnow()
#initialize list of datapoints that should be stored in influxdb
influxdata=[]  # NOTE(review): never used below — candidate for removal
dbname = Config.DATABASE
protocol = 'line'  # NOTE(review): unused; the writes below pass protocol='json'
port=Config.INFLUX_PORT
host='localhost'
#open influx client and select the configured database
influxclient = InfluxDBClient(host=host, port=port)
influxclient.switch_database(dbname)
def calc_energy_statistic(modul, client, storingtime):
    """Compute 24h statistics for one measurement and write them to InfluxDB.

    Queries the last day of `modul` through `client`, derives descriptive
    statistics over the 'value' column, writes them to the
    '<part>_statistik' measurement, and returns the raw DataFrame.
    """
    today = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
    yesterday = (datetime.datetime.utcnow() - datetime.timedelta(1)).strftime('%Y-%m-%dT%H:%M:%SZ')
    query = 'SELECT * FROM "{}" '.format(modul) + "WHERE time >= '{}' AND time <= '{}'".format(yesterday, today)
    energy = client.query(query)
    energy_value = list(energy.get_points(measurement='{}'.format(modul)))
    df = pd.DataFrame(energy_value)
    # Bug fix + perf: the original called df.describe() once per field and
    # stored a whole row (a Series) as each Influx field value instead of a
    # scalar. Compute the statistics once over the 'value' column only.
    stats = df['value'].describe()
    bodyDB = [{
        "measurement": '{}_statistik'.format(modul.rsplit("_")[1]),
        "time": storingtime,
        "fields":
            {
                "gespeicherteWerteProTag": stats.loc['count'],
                "LeistungMittelwert": stats.loc['mean'],
                "LeistungStandardabweichung": stats.loc['std'],
                "LeistungQuartile25": stats.loc['25%'],
                "LeistungQuartile50": stats.loc['50%'],
                "LeistungQuartile75": stats.loc['75%'],
                "LeistungMaxProTag": stats.loc['max'],
                "LeistungMinProTag": stats.loc['min'],
                "Energie_pos": df[df['value'] > 0]['value'].sum(),
                "Energie_neg": df[df['value'] < 0]['value'].sum(),
                "EnergiedurchsatzProTag": df['value'].abs().sum()
            }
    }]
    # print(bodyDB)
    # Bug fix: write through the injected `client` — the parameter was
    # previously ignored in favour of the module-global influxclient.
    client.write_points(bodyDB, database='iobroker', time_precision='s', batch_size=10000, protocol='json')
    return df
# Accumulate the positive-energy sum per configured measurement item.
autark_dict=dict()
for item in Config.ENERGIE_ITEMS:
    df = calc_energy_statistic(item,influxclient,storingtime)
    autark_dict[item] = df[df['value']>0]['value'].sum()
#
# Autarky: percentage of household consumption NOT drawn from the grid.
# NOTE(review): divides by the household-consumption sum — a zero sum would
# raise ZeroDivisionError; confirm that cannot occur in practice.
if autark_dict['modbus.0.holdingRegisters.40073_Netz_Leistung'] <= autark_dict['modbus.0.holdingRegisters.40071_Hausverbrauch_Leistung']:
    autarkiewert = int((1-(autark_dict['modbus.0.holdingRegisters.40073_Netz_Leistung']/autark_dict['modbus.0.holdingRegisters.40071_Hausverbrauch_Leistung']))*100)
else:
    autarkiewert = 0
bodyAutarkie = [{
    "measurement": 'autarkie',
    "time": storingtime,
    "fields":
        {
            "value_kb": autarkiewert
        }
}]
influxclient.write_points(bodyAutarkie, database='iobroker', time_precision='s', batch_size=10000, protocol='json')
|
import cv2 as cv
import pyautogui as win  # NOTE(review): imported but unused in this script

# Webcam face detection: draw rectangles around detected faces and overlay a
# live count; press 'd' to quit.
img = cv.VideoCapture(0)
# Perf fix: load the Haar cascade once, not once per frame.
haar_cascade = cv.CascadeClassifier('haar_face.xml')
while True:
    isTrue, frame = img.read()
    # Robustness fix: stop cleanly when the camera yields no frame instead of
    # crashing inside cvtColor on None.
    if not isTrue:
        break
    gray = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
    faces_rect = haar_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=4)
    print(f'{len(faces_rect)} face(s) detected')
    faces = f'{len(faces_rect)} face(s)'
    cv.putText(frame, faces, (0,25), cv.FONT_HERSHEY_TRIPLEX, 1.0, (0,0,255), thickness=2)
    for (x,y,w,h) in faces_rect:
        cv.rectangle(frame, (x,y), (x+w,y+h), (0,255,0), thickness=2)
    cv.imshow('Video',frame)
    if cv.waitKey(20) & 0xFF==ord('d') :
        break
img.release()
cv.destroyAllWindows()
|
import math
import emoji

# Sieve of Eratosthenes: read an upper bound n and print all primes < n.
n = int(input(emoji.emojize("Digite o valor de :heavy_multiplication_x: : ", use_aliases = True)))
primos = list(range(2,n))
# Cross out multiples of each i up to sqrt(n).
for i in range (2,int(math.sqrt(n) + 1)):
    if i in primos:
        # Start at i**2: smaller multiples were removed by smaller factors.
        for j in range(i**2, n , i):
            # NOTE(review): `in`/`remove` on a list are O(n) each — fine for
            # small bounds, slow for large n (a set/bool array would be O(1)).
            if j in primos: primos.remove(j)
if primos == []:
    print(emoji.emojize("não tem nenhum numero :speak_no_evil:", use_aliases = True))
else:
    print(emoji.emojize("Lista de primos:see_no_evil:\n" + str(primos) + "\ntotal de: " + str(len(primos)), use_aliases = True))
from flask_testing import TestCase
from app import app, db
from app.config import app_config
class BaseTestCase(TestCase):
    """flask_testing base case: 'testing'-config app plus a clean DB schema
    created before and dropped after every test."""
    def create_app(self):
        # Required by flask_testing.TestCase: return the app under test.
        config_name = 'testing'
        app.config.from_object(app_config[config_name])
        return app
    def setUp(self):
        # NOTE(review): the local name shadows the imported `app`; create_app
        # reconfigures and returns that same imported object.
        app = self.create_app()
        with app.app_context():
            db.create_all()
            db.session.commit()
    def tearDown(self):
        app = self.create_app()
        with app.app_context():
            db.session.remove()
            db.drop_all()
from django.db import models
from uuid import uuid4
from transBack.models.persona import Persona
class Proovedor(models.Model):
    """Supplier record with a UUID primary key, linked N:1 to a Persona."""
    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    # Bug fix: on_delete is a required positional-keyword argument since
    # Django 2.0; CASCADE matches the pre-2.0 implicit default.
    persona = models.ForeignKey(Persona, on_delete=models.CASCADE)

    class Meta:
        verbose_name = "Proovedor"
        verbose_name_plural = "Proovedors"

    def __str__(self):
        # Bug fix: Python 3 / Django use __str__ (the original __unicode__
        # was dead code and returned a UUID object, not a string).
        return str(self.id)
|
# return value of nCr when n and r are passed to function.
# --- --- ---
import math

m = math.factorial


def f(n, r):
    """Return the binomial coefficient C(n, r), or False when n < r.

    n, r: non-negative integers; n < r is treated as undefined.
    """
    if n < r:
        return False  # undefined value.
    # Bug fix: use integer floor division — the original float '/' followed
    # by int() loses precision once the result exceeds ~2**53.
    return m(n) // (m(n - r) * m(r))
# --- --- ---
|
# coding=utf-8
import yaml, os

# Parent of this file's directory (used as the data root below).
cur = os.path.dirname(os.path.dirname(__file__))


def read_token(yamlName="token.yaml"):
    """Load Data/<yamlName> and return its "Cookie" entry.

    Raises FileNotFoundError if the file is missing and KeyError if the
    "Cookie" key is absent.
    """
    p = os.path.join(cur, "Data", yamlName)
    with open(p, 'r') as f:
        # Security/compat fix: yaml.load without an explicit Loader is unsafe
        # and an error in PyYAML >= 6; safe_load builds plain objects only.
        t = yaml.safe_load(f.read())
    return t["Cookie"]


if __name__ == "__main__":
    print(read_token())
|
class InfluxDBQuery:
    """Assembles InfluxQL (SQL-like) SELECT statements from clause parts."""

    def append_clause(self, name, clause=None):
        """Return [name, clause] when clause is a non-empty value, else []."""
        additional_param = []
        if clause is not None and len(clause) != 0:
            additional_param.append(name)
            additional_param.append(clause)
        return additional_param

    def set_time_for_report_from_to(self,
                                    where_clause=None,
                                    time_from=None,
                                    time_to=None
                                    ):
        """Build the WHERE-clause body: time bounds plus extra AND conditions.

        where_clause: optional list of additional condition strings.
        """
        span_of_time = []
        span_of_time.append(
            ''.join(self.append_clause("time >= ", time_from))
        )
        span_of_time.append(
            ''.join(self.append_clause(" AND time <= ", time_to))
        )
        # Bug fix: the original iterated where_clause unconditionally and
        # raised TypeError whenever the default None was used.
        for item in (where_clause or []):
            span_of_time.append(
                ''.join(self.append_clause(" AND ", item))
            )
        return ''.join(span_of_time)

    def query_select(self,
                     function,
                     measurement,
                     tag_value='value',
                     where_clause=None,
                     time_from=None,
                     time_to=None,
                     group_by=None):
        """Return one "SELECT fn(tag) FROM measurement ...;" string per entry
        in `function`.

        Wrap values for `function` in a list -> ["max"], ["max", "min"];
        `where_clause` is likewise a list of condition strings.
        """
        if measurement is None:
            measurement = []
        if tag_value is None:
            tag_value = []
        if group_by is None:
            group_by = []
        query = []
        for function_item in function:
            query.append("SELECT {0}({1}) FROM {2}{3}{4};".format(
                function_item,
                tag_value,
                measurement,
                ' '.join(self.append_clause(" WHERE",
                                            self.set_time_for_report_from_to(time_from=time_from, time_to=time_to,
                                                                             where_clause=where_clause))),
                ' '.join(self.append_clause(" GROUP BY", group_by))
            ))
        return query
|
from django.shortcuts import render, redirect, HttpResponse
from .forms import Profilereg, Clgreg
from .models import profile
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.contrib import messages
# Create your views here.
def Profdet(request):
    """Render and process the profile-details form; redirect to /clgdet on
    successful save."""
    if request.method == 'POST':
        fm = Profilereg(request.POST)
        if fm.is_valid():
            name = fm.cleaned_data['name']
            email = fm.cleaned_data['email']
            phone = fm.cleaned_data['phone']
            address = fm.cleaned_data['address']
            reg = profile(name=name, email=email, phone=phone, address=address)
            # NOTE(review): Model.save() accepts no `instance` kwarg — this
            # call raises TypeError at runtime; presumably reg.save() was
            # intended. Confirm and fix.
            reg.save(instance=request.user.profile)
            # It refresh the form and show the form empty
            fm = Profilereg()
            return redirect('/clgdet')
    else:
        fm = Profilereg()
    # show = profile.objects.all()
    return render(request,'profile.html',{'form':fm})
def clgdet(request):
    """Render and process the college-details form."""
    if request.method == 'POST':
        om = Clgreg(request.POST)
        if om.is_valid():
            college = om.cleaned_data['college']
            school = om.cleaned_data['school']
            branch = om.cleaned_data['branch']
            degree = om.cleaned_data['degree']
            # NOTE(review): this calls the view function `clgdet` itself, not
            # a model class — almost certainly a bug (a model constructor was
            # intended); it will raise TypeError here.
            clgreg = clgdet(college=college, school=school, branch=branch, degree=degree)
            clgreg.save()
            # It refresh the form and show the form empty
            om = Clgreg()
    else:
        om = Clgreg()
    # show = profile.objects.all()
    # return render(request,'clgdet.html',{'form':om,'show':show})
    show = User.objects.get(id=request.user.id)  # NOTE(review): fetched but unused
    return render(request,'clgdet.html',{'form':om})
def index(request):
    """Render the landing page."""
    return render(request, 'index.html')
def update_data(request, id):
    """Edit an existing profile row identified by pk=id."""
    if request.method == 'POST':
        edit = profile.objects.get(pk=id)
        # NOTE(review): the bound form uses instance=request.user, not `edit`
        # (which is fetched and unused here) — confirm intended target.
        fm = Profilereg(request.POST, instance=request.user)
        if fm.is_valid():
            fm.save()
            return redirect('/clgdet')
    else:
        edit = profile.objects.get(pk=id)
        fm = Profilereg(instance=edit)
    return render(request, 'profupdate.html', {'form':fm})
def handleSignup(request):
    """Create a new User from the signup POST form.

    Verifies both password fields match, creates the account and redirects
    to 'home'; any non-POST request gets a plain 404 response.
    """
    if request.method == 'POST':
        username = request.POST['username']
        fname = request.POST['fname']
        lname = request.POST['lname']
        email = request.POST['email']
        pass1 = request.POST['pass1']
        pass2 = request.POST['pass2']
        if pass1 != pass2:
            messages.error(request,'Password do not match')
            # Bug fix: the original called render('home'), which is not a
            # valid render() signature (it needs the request and a template);
            # a redirect was clearly intended.
            return redirect('home')
        #Create the users
        myuser = User.objects.create_user(username, email, pass1)
        myuser.first_name = fname
        # Bug fix: the attribute is `last_name`; the original set a
        # nonexistent `Last_name`, so the last name was never persisted.
        myuser.last_name = lname
        myuser.save()
        messages.success(request, "Your Account has been Successfully Created")
        return redirect('home')
    else:
        return HttpResponse('404 - Not Found')
def handleLogin(request):
    """Authenticate the login form credentials and start a session.

    NOTE(review): reads request.POST without checking request.method — a GET
    hit would fail on the missing keys; confirm this URL is POST-only.
    """
    loginusername = request.POST['loginusername']
    loginpassword = request.POST['loginpassword']
    user = authenticate(username= loginusername, password= loginpassword)
    if user is not None:
        login(request, user)
        messages.success(request, "You are successfully logged in")
        return redirect('/profdet')
    else:
        messages.error(request, "Please Enter Correct Username & Password")
        return redirect('home')
def handleLogout(request):
    """Log the current user out and redirect to the home page."""
    logout(request)
    messages.success(request, "You are logged Out")
    return redirect('home')
import sys
from PyQt5.QtWidgets import QApplication,QMainWindow
#导入Ui文件
from Qt5_python_GUI.CalldemoCalendar.demoCalendar import *
#继承UI父类Ui_MainWindow
class MyMainWindow(QMainWindow,Ui_MainWindow):
    """Main window wiring the generated calendar UI to the date edit."""
    def __init__(self):
        super(MyMainWindow,self).__init__()  # initialize the QMainWindow base
        self.setupUi(self)
        # Mirror every calendar selection change into the date edit.
        self.calendarWidget.selectionChanged.connect(self.dispdate)
    def dispdate(self):
        # Copy the currently selected calendar date into the QDateEdit.
        self.dateEdit.setDate(self.calendarWidget.selectedDate())
if __name__ == '__main__':
    app = QApplication(sys.argv)  # create the application
    myWin = MyMainWindow()        # instantiate the main window
    myWin.show()                  # show it
    # Bug fix: use sys.exit (always available) instead of the site-provided
    # builtin exit(); also propagates the event-loop exit code properly.
    sys.exit(app.exec_())
# -*- coding: utf-8 -*-
"""
Created by Zhao Baoxin on 12/3/18
-------------------------------------------------
File Name: solution28
Description :
Author : zhaobx
date: 12/3/18
-------------------------------------------------
Change Activity:
12/3/18:
-------------------------------------------------
"""
class Solution:
    def strStr(self, haystack, needle):
        """Return the index of the first occurrence of needle in haystack,
        -1 if absent; an empty needle matches at index 0.

        :type haystack: str
        :type needle: str
        :rtype: int
        """
        # str.find implements exactly this contract (including the
        # empty-needle -> 0 and not-found -> -1 conventions), replacing the
        # hand-rolled O(n*m) scan.
        return haystack.find(needle)
# Demo: expected output is 0 ("a" occurs at index 0 of "a").
s = Solution()
print(s.strStr("a", "a"))
#!/usr/bin/env python3
# gfm - Github Flavored Markdown to HTML
#
# gfm.py input.md
import sys
import requests
def get_html(f):
    """POST markdown text to GitHub's raw-markdown API and return the HTML.

    f: the markdown source (string/bytes used as the request body).
    NOTE(review): no timeout and no status check — r.text is returned even
    for API error responses; consider requests.post(..., timeout=...) and
    r.raise_for_status().
    """
    h = {
        'Content-Type': 'text/plain',
    }
    url = 'https://api.github.com/markdown/raw'
    r = requests.post(url, headers=h, data=f)
    return r.text
if __name__ == '__main__':
    if len(sys.argv) < 2:
        print('./gfm.py source')
    else:
        # Bug fix: the original open() leaked the file handle; 'with'
        # guarantees it is closed.
        with open(sys.argv[1]) as src:
            print(get_html(src.read()))
|
from experta import *
class Num_hours_per_day(Fact):
    """Fact carrying the user's available training time (field: hours)."""
    pass
def print_detail(sets, exc):
    """Scale the set count embedded in one exercise string.

    exc has the form "name%SETS%suffix".  Returns the 3-element split list
    with the middle element rescaled by `sets`, or "" when the exercise
    string is empty or the scaled count rounds to zero.
    """
    if exc == "":
        return ""
    parts = exc.split('%')
    scaled = round(int(parts[1]) * sets)
    if scaled == 0:
        return ""
    parts[1] = str(scaled)
    return parts
# Module-level state shared between the gym_train rules and
# create_fitness_plan(); the engine's rules communicate via these globals.
current_fitness = -1  # fitness score derived from BMI (set by det_bmi* rules)
bmi = -1              # body-mass index computed in create_fitness_plan()
goal_fitness = -1     # target score derived from the user's goal (goal* rules)
rep_multiplier = -1   # scales per-exercise set counts (set by rep* rules)
hr = -1               # training-time value from input_data['time'] — units unclear (TODO confirm)
beginner_data = {}
intermediate_data = {}
pro_data = {}
response = dict({
    'data': {},
    'workouts': []
})
workouts = []         # plan for one block of days, rebuilt by the level rules
class gym_train(KnowledgeEngine):
    """Experta rule engine deriving fitness scores and weekly workout lists.

    Results are communicated through module-level globals (current_fitness,
    goal_fitness, rep_multiplier, workouts) rather than return values; each
    derivation is one reset()/declare()/run() cycle driven by
    create_fitness_plan().
    """
    # all the rules and facts are here
    @DefFacts()
    def symptoms(self):
        # Seed the three action facts so each det_* rule group can match.
        yield Fact(action="det_current")
        yield Fact(action="det_goal")
        yield Fact(action="det_reps")

    # --- current fitness from the BMI band --------------------------------
    @Rule(Fact(action='det_current'),
          AS.bmi << Fact(symptom=L("BMI24") | L("BMI23") |
                         L("BMI22") | L("BMI21") | L("BMI20")))
    def det_bmi1(self):
        # Normal BMI (20-24): full baseline score.
        global current_fitness
        # print("MY bmi is :" + str(bmi))
        current_fitness = 15

    @Rule(Fact(action='det_current'),
          AS.bmi << Fact(symptom=L("BMI19") | L("BMI18") |
                         L("BMI17") | L("BMI16") | L("BMI15")))
    def det_bmi2(self):
        # Underweight (15-19): 0.7 penalty per BMI point below 20.
        global current_fitness
        # print("MY bmi is :" + str(bmi))
        current_fitness = 15 - (20 - bmi) * 0.7

    @Rule(Fact(action='det_current'),
          AS.bmi << Fact(symptom=L("BMI25") | L("BMI26") |
                         L("BMI27") | L("BMI28") | L("BMI29")))
    def det_bmi3(self):
        # Overweight (25-29): 0.7 penalty per BMI point above 24.
        global current_fitness
        # print("MY bmi is :" + str(bmi))
        current_fitness = 15 - (bmi - 24) * 0.7

    @Rule(Fact(action='det_current'),
          AS.bmi << Fact(symptom=L("BMI30") | L("BMI31") |
                         L("BMI32") | L("BMI33") | L("BMI34")))
    def det_bmi4(self):
        # Obese (30-34): full point of penalty per BMI point above 24.
        global current_fitness
        # print("MY bmi is :" + str(bmi))
        current_fitness = 15 - (bmi - 24)

    # --- goal fitness from the requested goal -----------------------------
    @Rule(Fact(action='det_goal'),
          Fact(symptom="slim"))
    def goal1(self):
        global goal_fitness
        goal_fitness = 18

    @Rule(Fact(action='det_goal'),
          Fact(symptom="ripped"))
    def goal2(self):
        global goal_fitness
        goal_fitness = 20

    @Rule(Fact(action='det_goal'),
          Fact(symptom="fit"))
    def goal3(self):
        global goal_fitness
        goal_fitness = 15

    # --- rep multiplier: piecewise-linear in the available time `hr` ------
    @Rule(Fact(action='det_reps'),
          AS.hr << Num_hours_per_day(hours=P(lambda x: x > 50) & P(lambda x: x <= 65)))
    def rep1(self):
        global rep_multiplier
        rep_multiplier = 0.75 + (hr - 50) / 60

    @Rule(Fact(action='det_reps'),
          AS.hr << Num_hours_per_day(hours=P(lambda x: x > 35) & P(lambda x: x <= 50)))
    def rep2(self):
        global rep_multiplier
        rep_multiplier = 0.5 + (hr - 35) / 60

    @Rule(Fact(action='det_reps'),
          AS.hr << Num_hours_per_day(hours=P(lambda x: x > 20) & P(lambda x: x <= 35)))
    def rep3(self):
        global rep_multiplier
        rep_multiplier = 0.25 + (hr - 20) / 60

    @Rule(Fact(action='det_reps'),
          AS.hr << Num_hours_per_day(hours=P(lambda x: x > 65) & P(lambda x: x <= 80)))
    def rep4(self):
        global rep_multiplier
        rep_multiplier = 1 + (hr - 65) / 60

    @Rule(Fact(action='det_reps'),
          AS.hr << Num_hours_per_day(hours=P(lambda x: x > 80) & P(lambda x: x <= 95)))
    def rep5(self):
        global rep_multiplier
        rep_multiplier = 1.25 + (hr - 80) / 60

    @Rule(Fact(action='det_reps'),
          AS.hr << Num_hours_per_day(hours=P(lambda x: x > 95) & P(lambda x: x <= 110)))
    def rep6(self):
        global rep_multiplier
        rep_multiplier = 1.5 + (hr - 95) / 60

    @Rule(Fact(action='det_reps'),
          AS.hr << Num_hours_per_day(hours=P(lambda x: x > 110) & P(lambda x: x <= 130)))
    def rep7(self):
        global rep_multiplier
        rep_multiplier = 1.75 + (hr - 110) / 60

    @Rule(Fact(action='det_reps'),
          AS.hr << Num_hours_per_day(hours=P(lambda x: x > 0) & P(lambda x: x <= 20)))
    def rep8(self):
        global rep_multiplier
        rep_multiplier = 0 + (hr - 0) / 80

    # --- weekly-plan builders (rebuild the global `workouts` list) --------
    @Rule(Fact(level='beginner'))
    def beginner(self):
        # 7-day split: rest on even days, one exercise per group otherwise.
        global beginner_data
        global workouts
        workouts = []
        count = 0
        for i in range(7):
            if i % 2 == 0:
                workouts.append(dict({"rest": 1}))
            else:
                for u in beginner_data:
                    if count == len(beginner_data[u]):
                        count = 0
                    workouts.append(
                        dict({"rest": 0, "workouts": print_detail(rep_multiplier, beginner_data[u][count])}))
                    count += 1

    @Rule(Fact(level='intermediate'))
    def intermediate(self):
        # 6 slots with rest every 4th day, plus a trailing rest day.
        global intermediate_data
        global workouts
        workouts = []
        count = 0
        for i in range(6):
            if i % 4 == 0:
                workouts.append(dict({"rest": 1}))
            else:
                for u in intermediate_data:
                    if count == len(intermediate_data[u]):
                        count = 0
                    # NOTE(review): iterates intermediate_data but reads
                    # beginner_data[u][count] — looks like a copy-paste bug;
                    # confirm the intended source table.
                    workouts.append(
                        dict({"rest": 0, "workouts": print_detail(rep_multiplier, beginner_data[u][count])}))
                    count += 1
        workouts.append(dict({"rest": 1}))

    @Rule(Fact(level='pro'))
    def pro(self):
        # 6 consecutive training days plus one rest day.
        global pro_data
        global workouts
        workouts = []
        count = 0
        for i in range(6):
            for u in pro_data:
                if count == len(pro_data[u]):
                    count = 0
                # NOTE(review): same beginner_data[u][count] suspicion as in
                # the intermediate rule above.
                workouts.append(dict({"rest": 0, "workouts": print_detail(rep_multiplier, beginner_data[u][count])}))
                count += 1
        workouts.append(dict({"rest": 1}))
def _queue_level_workouts(engine, level, days):
    """Run `days` engine cycles at the given difficulty level.

    Preserves the original append-before-run ordering: each iteration
    snapshots the module-global `workouts` list into response['workout']
    BEFORE engine.run() rebuilds it, i.e. each append records the list
    produced by the previous run.
    """
    for _ in range(days):
        engine.reset()
        engine.declare(Fact(level=level))
        response['workout'].append(workouts)
        engine.run()


def create_fitness_plan(input_data):
    """Build a fitness plan from the user's input via the gym_train engine.

    input_data keys: 'age', 'goal', 'weight' (kg), 'height' (cm), 'time',
    'choices' (muscle groups).  Returns the module-global `response` dict
    populated with the derived scores and the scheduled workout blocks.
    """
    global current_fitness
    global bmi
    global goal_fitness
    global rep_multiplier
    global hr
    global beginner_data
    global intermediate_data
    global pro_data
    global response
    global workouts
    print(input_data)
    engine = gym_train()
    age = int(input_data['age'])
    goal = input_data['goal']
    # Pass 1: goal fact -> goal_fitness.
    engine.reset()
    engine.declare(Fact(symptom=goal))
    engine.run()
    # Pass 2: BMI fact (e.g. "BMI22") -> current_fitness.
    bmi = round((int(input_data['weight']) * 100 * 100) / (int(input_data['height']) ** 2))
    engine.reset()
    check = "BMI" + str(bmi)
    response['data']['bmi'] = bmi
    engine.declare(Fact(symptom=check))
    engine.run()
    # Pass 3: available time -> rep_multiplier.
    engine.reset()
    hr = int(input_data['time'])
    engine.declare(Num_hours_per_day(hours=hr))
    engine.run()
    response['data']['current_fitness'] = current_fitness
    response['data']['goal_fitness'] = goal_fitness
    response['data']['rep_multiplier'] = rep_multiplier
    # Exercise catalogue; set counts are embedded as "%N%" and rescaled by
    # print_detail() with rep_multiplier.
    data = {
        'beginner': {'chest': ["beginner Barbell Bench Press – %4% sets of 8 reps",
                               "Machine Chest Press – %4% sets of 10 reps",
                               "Cable Fly – %4% sets of 10 reps"],
                     'back': ["Lat-pulldowns – %4% sets of 10 reps",
                              "T-Bar Row – %4% sets of 10 reps",
                              "Close-Grip Pulldowns – %4% sets of 12 reps"],
                     'shoulders': ["Seated Dumbbell Press – %4% sets of 10 reps",
                                   "Lateral Raises – %3% sets of 20 reps",
                                   "EZ Bar Upright Rows – %3% sets of 15 reps"],
                     'legs': ["Leg Extensions – %4% sets of 10 reps",
                              "Leg Press Machine – %4% sets of 8 reps",
                              "Lunges – %3% sets of 10 reps per leg"],
                     'biceps': ["Barbell Bbicep Curls – %3% sets of 10 reps",
                                "EZ Bar Curls – %4% sets of 10 reps",
                                "Hammer Curls – %3% sets of 12 reps"],
                     'tricep': ["Barbell Bench Press – %4% sets of 8 reps",
                                "Machine Chest Press – %4% sets of 10 reps",
                                "Cable Fly – %4% sets of 10 reps"],
                     'abdomen': ["Lat-pulldowns – %4% sets of 10 reps",
                                 "T-Bar Row – %4% sets of 10 reps",
                                 "Close-Grip Pulldowns – %4% sets of 12 reps"],
                     'forearms': ["Seated Dumbbell Press – %4% sets of 10 reps",
                                  "Lateral Raises – %3% sets of 20 reps",
                                  "EZ Bar Upright Rows – %3% sets of 15 reps"]},
        'intermediate': {'chest': ["intermediate Barbell Bench Press – %4% sets of 8 reps",
                                   "Machine Chest Press – %4% sets of 10 reps",
                                   "Cable Fly – %4% sets of 10 reps"],
                         'back': ["Lat-pulldowns – %4% sets of 10 reps",
                                  "T-Bar Row – %4% sets of 10 reps",
                                  "Close-Grip Pulldowns – %4% sets of 12 reps"],
                         'shoulders': ["Seated Dumbbell Press – %4% sets of 10 reps",
                                       "Lateral Raises – %3% sets of 20 reps",
                                       "EZ Bar Upright Rows – %3% sets of 15 reps"],
                         'legs': ["Leg Extensions – %4% sets of 10 reps",
                                  "Leg Press Machine – %4% sets of 8 reps",
                                  "Lunges – %3% sets of 10 reps per leg"],
                         'biceps': ["Barbell Bbicep Curls – %3% sets of 10 reps",
                                    "EZ Bar Curls – %4% sets of 10 reps",
                                    "Hammer Curls – %3% sets of 12 reps"],
                         'tricep': ["Barbell Bench Press – %4% sets of 8 reps",
                                    "Machine Chest Press – %4% sets of 10 reps",
                                    "Cable Fly – %4% sets of 10 reps"],
                         'abdomen': ["Lat-pulldowns – %4% sets of 10 reps",
                                     "T-Bar Row – %4% sets of 10 reps",
                                     "Close-Grip Pulldowns – %4% sets of 12 reps"],
                         'forearms': ["Seated Dumbbell Press – %4% sets of 10 reps",
                                      "Lateral Raises – %3% sets of 20 reps",
                                      "EZ Bar Upright Rows – %3% sets of 15 reps"]},
        'pro': {'chest': ["advanced Barbell Bench Press – %4% sets of 8 reps",
                          "Machine Chest Press – %4% sets of 10 reps",
                          "Cable Fly – %4% sets of 10 reps"],
                'back': ["Lat-pulldowns – %4% sets of 10 reps",
                         "T-Bar Row – %4% sets of 10 reps",
                         "Close-Grip Pulldowns – %4% sets of 12 reps"],
                'shoulders': ["Seated Dumbbell Press – %4% sets of 10 reps",
                              "Lateral Raises – %3% sets of 20 reps",
                              "EZ Bar Upright Rows – %3% sets of 15 reps"],
                'legs': ["Leg Extensions – %4% sets of 10 reps",
                         "Leg Press Machine – %4% sets of 8 reps",
                         "Lunges – %3% sets of 10 reps per leg"],
                'biceps': ["Barbell Bbicep Curls – %3% sets of 10 reps",
                           "EZ Bar Curls – %4% sets of 10 reps",
                           "Hammer Curls – %3% sets of 12 reps"],
                'tricep': ["Barbell Bench Press – %4% sets of 8 reps",
                           "Machine Chest Press – %4% sets of 10 reps",
                           "Cable Fly – %4% sets of 10 reps"],
                'abdomen': ["Lat-pulldowns – %4% sets of 10 reps",
                            "T-Bar Row – %4% sets of 10 reps",
                            "Close-Grip Pulldowns – %4% sets of 12 reps"],
                'forearms': ["Seated Dumbbell Press – %4% sets of 10 reps",
                             "Lateral Raises – %3% sets of 20 reps",
                             "EZ Bar Upright Rows – %3% sets of 15 reps"]}
    }
    # Restrict the catalogue to the user's chosen muscle groups.
    beginner_data = {}
    intermediate_data = {}
    pro_data = {}
    response['workout'] = []
    for choice in input_data['choices']:
        beginner_data[choice] = data['beginner'][choice]
        intermediate_data[choice] = data['intermediate'][choice]
        pro_data[choice] = data['pro'][choice]
    # Weekly schedule: mix of difficulty levels chosen from age and the gap
    # between goal and current fitness.  (The original repeated the
    # reset/declare/append/run loop inline for every branch; the helper
    # preserves that exact call sequence, including zero-repetition cases.)
    if age >= 50 or goal_fitness - current_fitness >= 10:
        _queue_level_workouts(engine, 'beginner', 6)
    elif goal_fitness == 20 and current_fitness >= 15 and age < 35:
        _queue_level_workouts(engine, 'intermediate', 2)
        _queue_level_workouts(engine, 'pro', 4)
    elif goal_fitness == 20 and current_fitness >= 15 and 35 <= age < 50:
        _queue_level_workouts(engine, 'beginner', 1)
        _queue_level_workouts(engine, 'intermediate', 3)
        _queue_level_workouts(engine, 'pro', 2)
    elif goal_fitness - current_fitness <= 5 and 35 <= age < 50:
        _queue_level_workouts(engine, 'intermediate', 3)
        _queue_level_workouts(engine, 'pro', 3)
    elif goal_fitness - current_fitness > 5 and 35 <= age < 50:
        _queue_level_workouts(engine, 'beginner', 4)
        _queue_level_workouts(engine, 'intermediate', 2)
    elif goal_fitness - current_fitness <= 5 and age < 35:
        _queue_level_workouts(engine, 'intermediate', 3)
        _queue_level_workouts(engine, 'pro', 3)
    elif goal_fitness - current_fitness > 5 and age < 35:
        _queue_level_workouts(engine, 'beginner', 2)
        _queue_level_workouts(engine, 'intermediate', 2)
        _queue_level_workouts(engine, 'pro', 2)
    return response
# Demo invocation with hard-coded sample input.
input_options = dict({
    "height": 155,
    "weight": 65,
    "age": 20,
    "goal": "ripped",
    "time": 120,
    "choices": ['chest', 'back', 'shoulders', 'legs', 'biceps']
})
print(create_fitness_plan(input_options))
|
# Generated by Django 2.2.10 on 2020-11-07 05:44
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration updating create_date field defaults.

    NOTE(review): the defaults are *fixed* datetimes captured when
    makemigrations ran — presumably a callable such as
    django.utils.timezone.now was intended on the models; confirm before
    relying on these defaults for new rows.
    """

    dependencies = [
        ('enquiry_order', '0004_auto_20201107_0521'),
    ]

    operations = [
        migrations.AlterField(
            model_name='orderupdate',
            name='create_date',
            field=models.DateTimeField(blank=True, default=datetime.datetime(2020, 11, 7, 5, 44, 29, 268858)),
        ),
        migrations.AlterField(
            model_name='statusoption',
            name='create_date',
            field=models.DateTimeField(blank=True, default=datetime.datetime(2020, 11, 7, 5, 44, 29, 271970)),
        ),
        migrations.AlterField(
            model_name='userqueries',
            name='create_date',
            field=models.DateTimeField(blank=True, default=datetime.datetime(2020, 11, 7, 5, 44, 29, 274055)),
        ),
    ]
|
import random, time, datetime
class InvalidMoveException(Exception):
    """Raised when a move cannot legally be applied to a Board."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return repr(self.value)
class Piece:
    """A named shape made of unit blocks.

    `blocks` is a list of (x, y) tuples giving each block's offset relative
    to the left-most block of the top row, which is (0, 0).  Example: a
    three-wide horizontal bar is [(0,0),(1,0),(2,0)].
    """

    def __init__(self, blocks, name, char_rep):
        self.blocks = blocks
        self.name = name
        self.char_rep = char_rep

    def __str__(self):
        """Render the piece as an ASCII grid, one 'X' per block."""
        xs = [bx for bx, _ in self.blocks]
        ys = [by for _, by in self.blocks]
        # Shift negative offsets into the grid's positive quadrant.
        shift_x = max([0] + [-bx for bx in xs])
        shift_y = max([0] + [-by for by in ys])
        cols = max([0] + [bx + 1 for bx in xs]) + shift_x
        rows = max([0] + [by + 1 for by in ys]) + shift_y
        cells = set(self.blocks)
        rendered = []
        for row in range(rows):
            line = ''.join(
                'X' if (col - shift_x, row - shift_y) in cells else ' '
                for col in range(cols))
            rendered.append(line + '\n')
        return ''.join(rendered)
class Move:
    """A placement of `piece` with its origin block at board cell (x, y)."""

    def __init__(self, piece, x, y):
        self.piece = piece
        self.x = x
        self.y = y

    def __str__(self):
        return "{} at {},{}".format(self.piece.name, self.x, self.y)

    def __repr__(self):
        return '\n' + str(self)

    def export_as_str(self):
        """Encode as two chars: printable-ASCII cell index + piece letter."""
        cell_code = 10 * self.y + self.x
        return chr(cell_code + 32) + self.piece.char_rep
def import_move_as_str(string):
    """Decode a two-char move string produced by Move.export_as_str()."""
    cell_code = ord(string[0]) - 32
    return Move(piece_dict[string[1]], cell_code % 10, cell_code // 10)
class Board:
    """A 10 x 10 grid of 0/1 cells representing a game state.

    matrix[x][y] is 1 when the cell is occupied; (0,0) is the top-left
    corner, the first index is x (column), the second y (row).  The board
    also carries the pieces currently in hand and `move_str`, a compact
    encoding of the moves played so far (consumed by undo_move()).
    """

    def __init__(self, matrix=None, current_pieces=None, move_str=''):
        """Construct a Board; default is an empty grid with no pieces."""
        if matrix is None:
            self.matrix = [[0] * 10 for x in range(10)]
        else:
            self.matrix = matrix
        # Defensive copy so the caller's list is never aliased (also avoids
        # the original mutable-default-argument pitfall).
        self.current_pieces = list(current_pieces) if current_pieces else []
        # Bug fix: the original hard-coded self.move_str = '' and ignored the
        # constructor argument, so Board.copy() silently lost the move
        # history that undo_move() depends on.
        self.move_str = move_str

    def copy(self):
        """Return a new Board identical to self (matrix deep-copied)."""
        new_matrix = []
        for col in self.matrix:
            new_matrix.append(col[:])
        return Board(new_matrix, self.current_pieces[:], self.move_str[:])

    def is_valid_move(self, move):
        """True when move.piece is in hand and fits entirely on free cells."""
        if move.piece not in self.current_pieces:
            return False
        for block in move.piece.blocks:
            currX, currY = block[0] + move.x, block[1] + move.y
            if currX not in range(10) or currY not in range(10) or self.matrix[currX][currY] == 1:
                return False
        return True

    def get_valid_moves(self):
        """List every valid Move for every piece currently in hand."""
        out = []
        for piece in self.current_pieces:
            for x in range(10):
                for y in range(10):
                    move = Move(piece, x, y)
                    if self.is_valid_move(move):
                        out.append(move)
        return out

    def has_valid_moves(self):
        """True when at least one held piece can be placed somewhere."""
        for piece in self.current_pieces:
            for x in range(10):
                for y in range(10):
                    move = Move(piece, x, y)
                    if self.is_valid_move(move):
                        return True
        return False

    def get_full_rows(self):
        """Return the y-indices of rows that are completely occupied."""
        output = []
        for y in range(10):
            found0 = False
            for x in range(10):
                if self.matrix[x][y] == 0:
                    found0 = True
                    break
            if not found0:
                output.append(y)
        return output

    def get_full_cols(self):
        """Return the x-indices of columns that are completely occupied."""
        output = []
        for x in range(10):
            if 0 not in self.matrix[x]:
                output.append(x)
        return output

    def make_move(self, move):
        """Place move.piece at (move.x, move.y) after validation.

        Clears any rows/columns completed by the placement, removes the
        piece from hand and records the move in move_str.  Returns the
        (full_rows, full_cols) that were cleared; raises
        InvalidMoveException for an illegal move.
        """
        if self.is_valid_move(move):
            move_str = move.export_as_str()
            # placing piece
            for block in move.piece.blocks:
                self.matrix[block[0] + move.x][block[1] + move.y] = 1
            # clearing appropriate rows/cols (each clear is appended to
            # move_str as "<n>y"/"<n>x" so undo_move can restore it)
            full_rows, full_cols = self.get_full_rows(), self.get_full_cols()
            for y in full_rows:
                move_str += "{}y".format(y)
                for x in range(10):
                    self.matrix[x][y] = 0
            for x in full_cols:
                move_str += "{}x".format(x)
                for y in range(10):
                    self.matrix[x][y] = 0
            self.current_pieces.remove(move.piece)
            self.move_str += move_str
            return full_rows, full_cols
        else:
            print("Here is a string representing the current board:")
            print(self.export_as_str())
            raise InvalidMoveException('{} is not a valid move'.format(move))

    def force_move(self, move):
        """Like make_move() but without validation; the piece stays in hand.

        Returns the (full_rows, full_cols) cleared by the placement.
        """
        # placing piece
        move_str = move.export_as_str()
        for block in move.piece.blocks:
            self.matrix[block[0] + move.x][block[1] + move.y] = 1
        # clearing appropriate rows/cols
        full_rows, full_cols = self.get_full_rows(), self.get_full_cols()
        for y in full_rows:
            move_str += "{}y".format(y)
            for x in range(10):
                self.matrix[x][y] = 0
        for x in full_cols:
            move_str += "{}x".format(x)
            for y in range(10):
                self.matrix[x][y] = 0
        # self.current_pieces.remove(move.piece)
        self.move_str += move_str
        return full_rows, full_cols

    def refresh_pieces(self):
        """Deal three random pieces into the hand."""
        for num in range(3):
            self.current_pieces.append(random.choice(piece_list))

    def export_as_str(self):
        """Serialize: 100 cell chars (x-major) + one char per held piece."""
        out = ''
        for x in range(10):
            for y in range(10):
                if self.matrix[x][y] == 1:
                    out += '1'
                else:
                    out += '0'
        for piece in self.current_pieces:
            out += piece.char_rep
        return out

    def import_as_str(self, input_str):
        """Inverse of export_as_str(); appends the encoded pieces to hand."""
        for x in range(10):
            for y in range(10):
                self.matrix[x][y] = int(input_str[x * 10 + y])
        for char in input_str[100:]:
            self.current_pieces.append(piece_dict[char])

    def undo_move(self):
        """Undo the last move recorded in move_str.

        Re-fills any rows/columns that move had cleared, removes the
        piece's blocks from the grid and returns the piece to the hand.
        """
        index = len(self.move_str) - 1
        last_char = self.move_str[index]
        # Walk backwards over the "<n>y"/"<n>x" clear records, restoring them.
        while last_char == 'x' or last_char == 'y':
            if last_char == 'x':
                for y in range(10):
                    self.matrix[int(self.move_str[index - 1])][y] = 1
            elif last_char == 'y':
                for x in range(10):
                    self.matrix[x][int(self.move_str[index - 1])] = 1
            index -= 2
            last_char = self.move_str[index]
        move = import_move_as_str(self.move_str[index - 1:index + 1])
        for block in move.piece.blocks:
            self.matrix[block[0] + move.x][block[1] + move.y] = 0
        self.current_pieces.append(move.piece)
        self.move_str = self.move_str[:index - 1]

    def __str__(self):
        """Human-readable board with axis labels plus the pieces in hand."""
        out = " "
        for i in range(10):
            out += " {} ".format(i)
        out += '\n'
        for y in range(10):
            out += ' ' + '-' * 41 + '\n'
            out += '{}| '.format(y)
            for x in range(10):
                if self.matrix[x][y] == 0:
                    out += ' '
                else:
                    out += 'X'
                out += ' | '
            out += '\n'
        out += ' ' + '-' * 41
        out += "\nPieces:"
        for i in range(len(self.current_pieces)):
            out += "\n{})\n".format(i + 1)
            out += str(self.current_pieces[i])
        return out
# All possible pieces
piece_list = [
    Piece([(0,0)],'single', 'a'), #singleton
    Piece([(0,0),(1,0)],'2 horizontal', 'b'), #2 horizontal
    Piece([(0,0),(1,0),(2,0)],'3 horizontal', 'c'), #
    Piece([(0,0),(1,0),(2,0),(3,0)],'4 horizontal', 'd'), #
    Piece([(0,0),(1,0),(2,0),(3,0),(4,0)], '5 horizontal', 'e'), #5 horizontal
    Piece([(0,0),(0,1)], '2 vertical', 'f'), #2 vertical
    Piece([(0,0),(0,1),(0,2)], '3 vertical', 'g'), #3 vertical
    Piece([(0,0),(0,1),(0,2),(0,3)], '4 vertical', 'h'), #4 vertical
    Piece([(0,0),(0,1),(0,2),(0,3),(0,4)], '5 vertical', 'i'), #5 vertical
    Piece([(0,0),(0,1),(1,1)], 'short L', 'j'), #short L
    Piece([(0,0),(0,1),(-1,1)], 'short L mirrored', 'k'), #short L mirrored
    Piece([(0,0),(0,1),(1,0)], 'short L flipped', 'l'), #short L flipped
    Piece([(0,0),(1,1),(1,0)], 'short L mirrored flipped', 'm'), #short L mirrored flipped
    Piece([(0,0),(0,1),(0,2),(1,2),(2,2)], 'Long L', 'n'), #Long L
    Piece([(0,0),(0,1),(0,2),(-1,2),(-2,2)], 'Long L mirrored', 'o'), #Long L mirrored
    Piece([(0,0),(0,1),(0,2),(1,0),(2,0)], 'Long L flipped', 'p'), #short L flipped
    # NOTE(review): these coordinates don't mirror 'Long L flipped' the way
    # the other pairs do — looks like a data-entry error; confirm the shape.
    Piece([(0,0),(2,1),(2,2),(1,0),(2,0)], 'Long L mirrored flipped', 'q'), #short L mirrored flipped
    Piece([(0,0),(0,1),(1,0),(1,1)], '2x2', 'r'), #2x2
    Piece([(0,0),(0,1),(0,2),(1,0),(1,1),(1,2),(2,0),(2,1),(2,2)], '3x3', 's') #3x3
]
import string
# Map each piece's char_rep letter to its Piece; zip truncates at 19 pieces
# so only 'a'..'s' are used.
piece_dict = dict(zip(string.ascii_lowercase, piece_list))
class Result:
    """Outcome of a single game run.

    Holds the final score, a timestamp, the run length (move count), the
    encoded move string, and free-form tag strings for aggregation.
    """

    def __init__(self, score, timestamp, length, move_str, tags):
        self.score = score
        self.timestamp = timestamp
        self.length = length
        self.move_str = move_str
        self.tags = tags
def play(get_move, verbose = True):
    """Run one full game until no valid moves remain.

    get_move: callable(Board) -> move; supplies each move (human or AI).
    verbose:  when True, print the board and score before every move.

    Returns (move_num, cleared_lines, score, board, move_str); move_str can
    be fed to replay() to re-watch the game.
    """
    move_num = 1
    cleared_lines = 0
    score = 0
    board = Board()
    board.refresh_pieces()
    while board.has_valid_moves():
        if verbose:
            print("##########################################")
            print("Move {}:".format(move_num))
            # BUG FIX: colon was misplaced inside the format ("Score {}:").
            print("Score: {}".format(score))
            print("Here is the current board: \n" + str(board))
            print("------------------------------------------")
        move = get_move(board)
        cleared_rows, cleared_cols = board.make_move(move)
        # Base score: one point per block of the placed piece.
        score += len(move.piece.blocks)
        if verbose:
            print("{}placed at {},{}".format(move.piece, move.x, move.y))
        for col in cleared_cols:
            print("Cleared Column {}!".format(col))
        for row in cleared_rows:
            print("Cleared Row {}!".format(row))
        cleared = len(cleared_rows) + len(cleared_cols)
        cleared_lines += cleared
        # Clearing k lines at once is worth 10 * (1 + 2 + ... + k) points.
        score += 10 * cleared * (cleared + 1) // 2
        move_num += 1
        # Deal a fresh hand once every current piece has been played.
        if board.current_pieces == []:
            board.refresh_pieces()
    if verbose:
        print("##########################################")
        print("Move {}:".format(move_num))
        print("Score: {}".format(score))
        print("Here is the current board: \n" + str(board))
        print("Import this string to see a replay of the game:\n{}".format(board.move_str))
    return move_num, cleared_lines, score, board, board.move_str
def replay(move_string, verbose = True):
    """Re-play a recorded game from its encoded move string, printing steps.

    move_string: concatenated 2-char move codes as produced by play() /
    Board.move_str; each chunk is decoded via import_move_as_str.
    Returns (move_num, cleared_lines, score, board, board.move_str).
    """
    move_string = move_string.strip("\n")
    move_num = 1
    cleared_lines = 0
    score = 0
    board = Board()
    # board.refresh_pieces()
    while move_string != "":
        if verbose:
            print("##########################################")
            print ("Move {}:".format(move_num))
            print ("Score {}:".format(score))
            print ("Here is the current board: \n"+str(board))
        if verbose:
            print("------------------------------------------")
        # Consume 2-char chunks while the previous chunk's second char is
        # 'x' or 'y'. NOTE(review): presumably an escape for multi-chunk
        # move encodings — confirm against import_move_as_str's format.
        next_move_chars = 'xx'
        while next_move_chars[1] == 'x' or next_move_chars[1] == 'y':
            next_move_chars = move_string[:2]
            move_string = move_string[2:]
            move = import_move_as_str(next_move_chars)
        # try:
        cleared_rows, cleared_cols = board.force_move(move)
        # One point per block of the placed piece.
        score += len(move.piece.blocks)
        if verbose:
            print("{}placed at {},{}".format(move.piece,move.x,move.y))
        if cleared_cols != []:
            for col in cleared_cols:
                print("Cleared Column {}!".format(col))
        if cleared_rows != []:
            for row in cleared_rows:
                print("Cleared Row {}!".format(row))
        cleared_lines += len(cleared_rows) + len(cleared_cols)
        # Bonus: 10 * (1 + 2 + ... + k) for clearing k lines at once.
        clear_bonus = 0
        for x in range(len(cleared_rows) + len(cleared_cols)):
            clear_bonus += x + 1
        score += clear_bonus * 10
        move_num += 1
    # except InvalidMoveException:
    #     print("That is an invalid move, please make sure you are placing the piece in a valid space")
    # except:
    #     print("Please enter a valid piece number, enter the correct format for a move: piece_num, x, y")
    # if board.current_pieces == []:
    #     board.refresh_pieces()
    if verbose:
        print("##########################################")
        print ("Move {}:".format(move_num))
        print ("Score: {}".format(score))
        print ("Here is the current board: \n"+str(board))
        print("Import this string to see a replay of the game:\n{}".format(board.move_str))
    return move_num, cleared_lines, score, board, board.move_str
|
"""
Plotting utility functions
Author: Arkar Min Aung
"""
import matplotlib.pyplot as plt
import numpy as np
import PIL.Image as Image
import os
def plot_grid(rows, cols, figsize, image_root_path, labels, data_shape):
    """Show a rows x cols grid with one random sample image per label.

    labels: table-like with 'Label' (directory name under image_root_path)
            and 'Common Name' columns — presumably a pandas DataFrame;
            TODO confirm with callers.
    data_shape: (width, height) passed to PIL's Image.resize.
    """
    f, axes = plt.subplots(rows, cols, sharex=True, sharey=True, figsize=figsize)
    for ax, label, name in zip(axes.ravel(), labels['Label'], labels['Common Name']):
        # Pick one random file from the label's directory.
        img = np.random.choice(os.listdir(os.path.join(image_root_path, label)))
        img = Image.open(os.path.join(image_root_path, label, img))
        img = img.resize(data_shape)
        ax.imshow(img)
        ax.axis('off')
        ax.set_title("{}: {}".format(label, name))
def visualize_probdist(count, pred_bayes_dist, images, labels, label_mapping, ood_mapping=None):
    """Plot, for `count` random test samples, per-class predictive
    probability histograms (left column) next to the sample image (right).

    pred_bayes_dist: per-sample array of MC samples x classes — the loop
    hard-codes 5 classes; assumes at least 5 columns, TODO confirm.
    """
    test_id = np.random.randint(0, high=len(labels), size=(count,))
    f, axes = plt.subplots(len(test_id), 2, figsize=(10, 4*count))
    f.tight_layout(h_pad=5, w_pad=0)
    axs = axes.ravel()
    ax_idx = 0
    for tid in test_id:
        current_ax = axs[ax_idx]
        for i in range(5):
            # One histogram per class over the MC probability samples,
            # with a dotted red line at that class's median probability.
            current_ax.hist(pred_bayes_dist[tid][:, i], alpha=0.3, label=label_mapping[i])
            current_ax.axvline(np.quantile(pred_bayes_dist[tid][:, i], 0.5), color='red', linestyle=':', alpha=0.4)
        current_ax.axvline(0.5, color='green', linestyle='--')
        current_ax.legend()
        current_ax.set_xlabel('Probability')
        current_ax.set_ylabel('Count')
        if ood_mapping is not None:
            current_ax.title.set_text("Correct Label: {}".format(ood_mapping[labels[tid]]))
        else:
            current_ax.title.set_text("Correct Label: {}".format(label_mapping[labels[tid]]))
        # NOTE(review): passes False as numpy's `precision` argument —
        # looks unintended; confirm and remove.
        np.set_printoptions(False)
        ax_idx += 1
        current_ax = axs[ax_idx]
        current_ax.axis('off')
        current_ax.title.set_text("For Test Image Index: {}".format(tid))
        current_ax.imshow(images[tid])
        ax_idx += 1
from lxml import etree
import os
from os.path import join
# Python 2 script: flags EAD finding aids tagged 'uarpacc' whose collection
# title does not mention the University of Michigan.
path = 'C:/Users/djpillen/GitHub/vandura/Real_Masters_all'
for filename in os.listdir(path):
    tree = etree.parse(join(path,filename))
    # First <unittitle> under <archdesc>/<did>; raises IndexError if absent.
    coll_title = tree.xpath('//archdesc/did/unittitle')[0]
    # Serialized element (bytes/str under Python 2) searched as plain text.
    coll_title = etree.tostring(coll_title)
    extptrs = tree.xpath('//extptr')
    for extptr in extptrs:
        if extptr.attrib['href'] == 'uarpacc':
            if 'University of Michigan' not in coll_title:
                print filename
|
class ManageObject:
    """Base object that holds a logger and can dump its own state."""
    def __init__(self, logger):
        # Keep the logger around for later diagnostics.
        self.lg = logger
    def dump(self):
        """Log this instance's attribute dictionary at INFO level."""
        snapshot = str(self.__dict__)
        self.lg.info("DUMP: " + snapshot)
|
import sqlite3
import time, requests
from pymodbus.client.sync import ModbusSerialClient as ModbusClient
import datetime, calendar
import Fault_Records
# Daily-rotated SQLite file name, e.g. database_files/database(07_05).db
DB_NAME = "database_files/database("+(datetime.datetime.now()).strftime("%d_%m")+").db"
# NOTE(security): hard-coded Ubidots API token committed to source —
# move to a config file or environment variable and rotate the token.
TOKEN = "BBFF-lvCLFYli6t042gOWnPST2SXp3sFsKS"
DEVICE_LABEL = "demo"
url = "http://industrial.api.ubidots.com/api/v1.6/devices/"
url = url + DEVICE_LABEL +"/"
header = {"X-Auth-Token":TOKEN,"Content-Type":"application/json"}
# Cloud variable names, in the order the inverter registers are read.
VAR_NAME = ["PPV","VPV1","PV1CURR","PV1WATT",
            "VPV2","PV2CURR","PV2WATT","PAC","FAC","ENERGY_TODAY","ENERGY_TOTAL_HIGH",
            "ENERGY_TOTAL_LOW","TEMPERATURE","IMP_TEMPERATURE"]
# Inverter status register value -> human-readable mode.
STATE = {"1":"Normal-Mode", "3":"Waiting-Mode", "0":"Fault-Mode"}
# Seconds to wait between Modbus retry attempts.
MODBUS_ERROR_WTIME = 15
def startCommunication(START, COUNT, retries = 0):
    """Read COUNT input registers starting at START over Modbus RTU.

    Retries up to `retries` more times on any failure, sleeping
    MODBUS_ERROR_WTIME seconds between attempts.
    Returns the list of register values, or -1 when all attempts fail.
    """
    client = ModbusClient(method='rtu', port='/dev/ttyUSB0',
                          baudrate=9600, stopbits=1,
                          parity='N', bytesize=8, timeout=5)
    r = retries
    try:
        client.connect()
        reg1 = client.read_input_registers(START, COUNT)
        client.close()
        return reg1.registers
    except Exception as e:
        # NOTE(review): the client is not closed on the error path; the
        # serial port is only released when the process exits.
        r -= 1
        if (r<0):
            return (-1)
        print("[ERROR](REP) MODBUS Error (%s)" %(str(e)))
        print("[INFO](REP) Retrying Communication... ")
        time.sleep(MODBUS_ERROR_WTIME)
        # Recurse with the decremented retry budget (removed the dead
        # `count = 0` assignment that followed in the original).
        return startCommunication(START, COUNT, r)
def postData(url, header, payload, retries = 2):
    """POST payload as JSON to url; retry on failure, -1 when exhausted."""
    remaining = retries
    try:
        response = requests.post(url = url, headers = header, json = payload, timeout = 10)
    except Exception:
        remaining -= 1
        print("[ERROR] Retry Number : %d" %(remaining))
        if remaining <= 0:
            return -1
        time.sleep(5)
        # Recurse with the reduced retry budget.
        return postData(url, header, payload, remaining)
    return response.status_code
def post_into_database(name, command, data=0):
    """Run a single SQL command against the SQLite database file `name`.

    command containing "INSERT": executed with `data` as parameters; returns 0.
    command containing "SELECT": returns the fetched rows, or -1 when empty.
    any other command: executed as-is; returns 0.
    Any error prints a message and returns -1.

    NOTE(review): `data` defaults to 0, so an INSERT without explicit
    parameters always errors (returns -1) — kept for compatibility.
    """
    conn = sqlite3.connect(name)
    cur = conn.cursor()
    try:
        if ("INSERT" in command):
            cur.execute(command, data)
            conn.commit()
            return 0
        elif ("SELECT" in command):
            cur.execute(command)
            rows = cur.fetchall()
            conn.commit()
            if (len(rows) == 0):
                return -1
            return rows
        else:
            cur.execute(command)
            conn.commit()
            return 0
    except Exception as e:
        print("[ERROR](REP) Posting/Getting data into/from database failed...\n Error: %s" %(str(e)))
        return -1
    finally:
        # Single cleanup path replaces the four duplicated close sequences.
        cur.close()
        conn.close()
def fault_switch():
    """Poll the Ubidots fault_switch flag; push fault records when it is set,
    then reset the flag in the cloud."""
    switch_url = "http://industrial.api.ubidots.com/api/v1.6/devices/Demo/fault_switch/lv?token={}".format(TOKEN)
    try:
        req = requests.get(url = switch_url, timeout = 10)
        if req.json() == 1.0:
            faultchk = Fault_Records.main()
            if faultchk == 0:
                print("[INFO](rep) Fault Record Sent...")
                # Records delivered: clear the cloud-side switch.
                postData(url = url, header = header, payload = {"fault_switch":0})
    except Exception as e:
        print("[ERROR](rep) Couldnt Update the fault records. (%s)"%(e))
def genTimeStamp(t):
    """Convert a naive UTC datetime to milliseconds since the Unix epoch."""
    epoch = datetime.datetime(1970, 1, 1)
    elapsed = (t - epoch).total_seconds()
    return int(elapsed * 1000)
|
'''command line program usage example'''
import math
from unittest import TestCase
import numpy as np
import matplotlib.pyplot as plt
from prettycolors import generate_colors, generate_color
class TestGenerate(TestCase):
    """Visual smoke tests that render generated palettes with matplotlib."""

    def test_generate_colors(self):
        """Render a near-square grid of 24 colors produced in one batch."""
        colors = 24
        # Largest divisor of `colors` that is <= sqrt(colors).
        # BUG FIX: compare integers with ==, not identity (`is`), which is
        # unreliable outside CPython's small-int cache.
        factor = next(i for i in range(int(math.sqrt(colors)), 0, -1) if colors % i == 0)
        img = np.reshape(generate_colors(colors), (factor, colors//factor, 3))
        plt.imshow(img, interpolation='none')
        plt.show()

    def test_generate_color(self):
        """Render the same grid, generating each color individually."""
        colors = 24
        factor = next(i for i in range(int(math.sqrt(colors)), 0, -1) if colors % i == 0)
        img = np.reshape([generate_color(i) for i in range(colors)], (factor, colors//factor, 3))
        plt.imshow(img, interpolation='none')
        plt.show()
|
#!/usr/bin/python
# Python 2 script: prints the input words as a zig-zag "staircase" —
# alternating rightward words and leftward (reversed) words, shifting the
# indent by len(word)-1 each step.
words = raw_input().split()
spaces = []
def go_right( word ):
    # Print at the current indent, then widen the indent by len(word)-1.
    print ''.join( spaces ) + word
    spaces.extend( [ " " for c in word[:-1] ] )
def go_left( word ):
    # Shrink the indent by len(word)-1, then print the word reversed.
    for c in word[:-1]:
        spaces.pop(-1)
    print ''.join( spaces ) + word[::-1]
for i, word in enumerate( words ):
    if i % 4 == 0:
        # Only go left when there is enough indent to pop.
        if len( word[:-1] ) > len( spaces ):
            go_right( word )
        else:
            go_left( word )
    elif i % 2 == 0:
        go_right( word )
    else:
        if i == len( words ) - 1:
            # Last word: print its remaining characters vertically.
            for c in word[1:]:
                print ''.join( spaces ) + c
        else:
            for c in word[1:-1]:
                print ''.join( spaces ) + c
|
import math
from model.battery_capacity_fade import CapacityFade
class EnergyStorage:
    """Battery model combining a charge/discharge state machine with
    capacity fade. Power convention: positive = charging, negative =
    discharging (MW); SoC is in [0, 1]."""

    def __init__(self, max_p_mw, max_e_mwh, initial_power=0.0, initial_soc=0.0, soc_history=None):
        self.max_p_mw = max_p_mw
        self.max_e_mwh = max_e_mwh
        # BUG FIX: soc_history defaulted to a mutable [] shared between
        # every instance created without an explicit history.
        self.capacity_fade = CapacityFade([] if soc_history is None else soc_history)
        if initial_power < 0:
            self.energyStorageState = DischargingState(self, initial_soc)
            self.energyStorageState.set_power(initial_power)
        elif initial_power > 0:
            self.energyStorageState = ChargingState(self, initial_soc)
            self.energyStorageState.set_power(initial_power)
        else:
            self.energyStorageState = IdleState(self, initial_soc)

    def capacity(self):
        """Usable capacity (MWh) after applying the current fade fraction."""
        return self.max_e_mwh - (self.max_e_mwh * self.capacity_fade.fade)

    def get_power(self):
        return self.energyStorageState.power

    def send_action(self, action):
        """Apply a power setpoint (MW).

        Returns (actual_action, can_execute): the realized power as a
        fraction of max_p_mw, and whether a nonzero request was executable.
        """
        clipped = self._get_action(action)  # hoisted: was recomputed three times
        if self._is_discharging(clipped):
            self.energyStorageState.discharge(clipped)
        elif self._is_charging(clipped):
            self.energyStorageState.charge(clipped)
        else:
            self.energyStorageState.turn_off()
        can_execute = True
        actual_action = self.energyStorageState.power / self.max_p_mw
        if self.energyStorageState.power == 0 and action != 0:
            can_execute = False
        self.energyStorageState.update_soc()
        return actual_action, can_execute

    # positive action is charging
    # with eps we check overcharging or overdischarging
    def _is_charging(self, action, eps=0.05):
        return action > 0 and (self.energyStorageState.soc + (action / self.capacity()) - eps) < 1

    # negative action is discharging
    def _is_discharging(self, action, eps=0.05):
        # BUG FIX: the projected SoC must ADD the (negative) action; the
        # original subtracted it, which made this overdischarge guard a
        # no-op (always true for any negative action).
        return action < 0 and (self.energyStorageState.soc + (action / self.capacity()) + eps) > 0

    def _get_action(self, action):
        """Clip the requested power to the rated max, preserving sign."""
        if abs(action) > self.max_p_mw:
            return math.copysign(self.max_p_mw, action)
        return action
class EnergyStorageState:
    """Base class for battery states (Idle/Charging/Discharging).

    Holds the shared SoC/power bookkeeping; subclasses override
    charge/discharge/turn_off to implement the transitions.
    """

    def __init__(self, energy_storage, initial_soc):
        self._energy_storage = energy_storage
        self.soc = initial_soc
        self.power = 0

    def charge(self, power):
        return

    def discharge(self, power):
        return

    def turn_off(self):
        return

    def update_capacity_fade(self):
        return

    def set_power(self, power):
        """Clamp the requested power to the storage's rating, keeping sign."""
        if self._energy_storage.max_p_mw < abs(power):
            self.power = self._energy_storage.max_p_mw * math.copysign(1, power)
        else:
            self.power = power

    # 1h elapsed
    def update_soc(self):
        """Advance SoC by one hour at `power`, clamping to [0, 1] and
        dropping to idle when a bound is hit.

        BUG FIX: the class originally defined update_soc twice; the first
        (empty) definition was dead code shadowed by this one — removed.
        """
        if self.soc + (self.power / self._energy_storage.capacity()) > 1:
            self.soc = 1
            self._energy_storage.energyStorageState = IdleState(self._energy_storage, self.soc)
        elif self.soc + (self.power / self._energy_storage.capacity()) < 0:
            self.soc = 0
            self._energy_storage.energyStorageState = IdleState(self._energy_storage, self.soc)
        else:
            self.soc = self.soc + (self.power / self._energy_storage.capacity())
        self._energy_storage.capacity_fade.update_capacity_fade(self.soc)
class IdleState(EnergyStorageState):
    """Zero-power state; leaves idle on a feasible charge/discharge command."""

    def charge(self, power):
        # Only leave idle when there is headroom to charge into.
        if self.soc < 1:
            self._energy_storage.energyStorageState = ChargingState(self._energy_storage, self.soc)
            self._energy_storage.energyStorageState.set_power(power)

    def discharge(self, power):
        # Only leave idle when there is stored energy to draw from.
        if self.soc > 0:
            self._energy_storage.energyStorageState = DischargingState(self._energy_storage, self.soc)
            self._energy_storage.energyStorageState.set_power(power)

    def turn_off(self):
        # Already idle; just force the setpoint to zero.
        self.set_power(0)
class ChargingState(EnergyStorageState):
    """Charging state (positive power)."""

    def charge(self, power):
        # Stay in this state; only refresh the (clamped) setpoint.
        self._energy_storage.energyStorageState.set_power(power)

    def discharge(self, power):
        # Flip to discharging when energy remains; otherwise go idle.
        if self.soc > 0:
            self._energy_storage.energyStorageState = DischargingState(self._energy_storage, self.soc)
            self._energy_storage.energyStorageState.set_power(power)
        else:
            self.turn_off()

    def turn_off(self):
        self._energy_storage.energyStorageState = IdleState(self._energy_storage, self.soc)
class DischargingState(EnergyStorageState):
    """Discharging state (negative power)."""

    def charge(self, power):
        # Flip to charging when headroom remains; otherwise go idle.
        if self.soc < 1:
            self._energy_storage.energyStorageState = ChargingState(self._energy_storage, self.soc)
            self._energy_storage.energyStorageState.set_power(power)
        else:
            self.turn_off()

    def discharge(self, power):
        # Stay in this state; only refresh the (clamped) setpoint.
        self._energy_storage.energyStorageState.set_power(power)

    def turn_off(self):
        self._energy_storage.energyStorageState = IdleState(self._energy_storage, self.soc)
def _test_energy_storage1():
    """Charge a 1 MW / 4 MWh storage to full, then drain it, checking the
    expected SoC, power, and state class after every step.

    Refactored from nine copy-pasted check blocks into a data-driven loop.
    """
    es = EnergyStorage(max_p_mw=1, max_e_mwh=4, initial_soc=0)
    # (action, expected soc, expected power, expected state type)
    steps = [
        (1, 0.25, 1, ChargingState),
        (1, 0.5, 1, ChargingState),
        (1, 0.75, 1, ChargingState),
        (1, 1, 1, ChargingState),
        (1, 1, 0, IdleState),
        (-1, 0.75, -1, DischargingState),
        (-1, 0.5, -1, DischargingState),
        (-1, 0.25, -1, DischargingState),
        (-1, 0, -1, DischargingState),
    ]
    check = 0
    for action, exp_soc, exp_power, exp_state in steps:
        es.send_action(action)
        state = es.energyStorageState
        if math.isclose(state.soc, exp_soc) and math.isclose(state.power, exp_power) and type(state) == exp_state:
            check += 1
    if check == len(steps):
        print("TEST 1: OK")
    else:
        print("TEST 1: FAIL")
def _test_energy_storage_2():
    """Overcharging near full SoC must clamp to 1 and fall back to idle."""
    storage = EnergyStorage(max_p_mw=1, max_e_mwh=5, initial_soc=0.83)
    storage.send_action(1.2)
    state = storage.energyStorageState
    ok = math.isclose(state.soc, 1) and math.isclose(state.power, 0) and type(state) == IdleState
    print("Test 2: OK" if ok else "Test 2: FAIL")
def _test_energy_storage_3():
    """Overdischarging near empty SoC must clamp to 0 and fall back to idle."""
    storage = EnergyStorage(max_p_mw=1, max_e_mwh=5, initial_soc=0.18)
    storage.send_action(-1.2)
    state = storage.energyStorageState
    ok = math.isclose(state.soc, 0) and math.isclose(state.power, 0) and type(state) == IdleState
    print("Test 3: OK" if ok else "Test 3: FAIL")
def _test_energy_storage_4():
    """A charge followed by an equal discharge returns SoC to its start."""
    storage = EnergyStorage(max_p_mw=1, max_e_mwh=5, initial_soc=0.5)
    storage.send_action(0.5)
    storage.send_action(-0.5)
    state = storage.energyStorageState
    ok = math.isclose(state.soc, 0.5) and math.isclose(state.power, -0.5) and type(state) == DischargingState
    print("Test 4: OK" if ok else "Test 4: FAIL")
|
from python_ottawa_transit import api
from python_ottawa_transit.api import OCTransportApi
__version__ = '0.2.0'  # package version
__all__ = ["api", "cli"]  # public submodules exposed to `import *`
|
#!/usr/bin/env python
# coding=utf-8
import feedparser
import json
import urllib
import urllib2
from flask import Flask
from flask import render_template
from flask import request
app = Flask(__name__)
# Packet Storm RSS feeds, keyed by topic slug.
RSS_FEEDS = {'hacking': 'https://rss.packetstormsecurity.com/news/tags/hacking',
             'dos': 'https://rss.packetstormsecurity.com/files/tags/denial_of_service/',
             'code_exec': 'https://rss.packetstormsecurity.com/files/tags/code_execution/',
             'encryption': 'https://rss.packetstormsecurity.com/files/tags/encryption/',
             'exploit': 'https://rss.packetstormsecurity.com/files/tags/exploit/'}
# Fallbacks used when the query string omits a publication/city.
DEFAULTS = {'publication':'hacking',
            'city':'Belize City,Belize'}
# NOTE(security): hard-coded OpenWeatherMap API key committed to source —
# move to configuration/environment and rotate the key.
WEATHER_URL = "http://api.openweathermap.org/data/2.5/weather?q={}&units=metric&appid=97931e3b253b78925314710b42497ae0"
@app.route("/")
def home():
    """Render the home page with headlines and weather for the requested
    publication and city, falling back to DEFAULTS for missing params."""
    publication = request.args.get('publication') or DEFAULTS['publication']
    articles = get_news(publication)
    city = request.args.get('city') or DEFAULTS['city']
    weather = get_weather(city)
    return render_template("home.html", articles=articles, weather=weather)
def get_news(query):
    """Return the RSS entries for `query`'s feed.

    Unknown or missing queries fall back to the default publication.
    """
    if not query or query.lower() not in RSS_FEEDS:
        # BUG FIX: the key was 'publications' (a KeyError at runtime);
        # DEFAULTS defines 'publication'.
        publication = DEFAULTS['publication']
    else:
        publication = query.lower()
    feed = feedparser.parse(RSS_FEEDS[publication])
    return feed['entries']
def get_weather(query):
    """Fetch current weather for `query` (city string) from OpenWeatherMap.

    Returns a dict with description/temperature/city, or None when the API
    response carries no "weather" key. (Python 2: urllib.quote / urllib2.)
    """
    query = urllib.quote(query)
    url = WEATHER_URL.format(query)
    data = urllib2.urlopen(url).read()
    parsed = json.loads(data)
    weather = None
    if parsed.get("weather"):
        weather = {"description":
                       parsed["weather"][0]["description"],
                   "temperature":parsed["main"]["temp"],
                   "city":parsed["name"]
                   }
    return weather
# Run the development server when executed directly (debug=True is not
# suitable for production).
if __name__ == '__main__':
    app.run(port=5000,debug=True)
|
# -*- coding: utf-8 -*-
# author : anthony
# version : python 3.6
'''
主程序处理模块,处理所有用户交互的东西
'''
import time
from core import auth
from core import accounts
from core import logger
from core import accounts
from core import transaction
from core.auth import login_required
# transaction logger: records account transactions
trans_logger = logger.logger('transaction')
# access logger: records logins / access events
access_logger = logger.logger('access')
# Transient account data kept only in memory; handlers that require an
# authenticated user read the login state from this dict.
user_data = {
    'account_id': None,  # account id
    'is_authenticated': False,  # whether the user has authenticated
    'account_data': None  # account record
}
def account_info(acc_data):
    """Print the account information passed in by the menu dispatcher.

    BUG FIX: the original printed the module-level `user_data` dict and
    silently ignored its `acc_data` argument; every sibling handler
    operates on `acc_data`.
    """
    print(acc_data)
@login_required
def repay(acc_data):
    '''
    Print the current balance and let the user repay the bill.
    :param acc_data: authenticated account data (needs 'account_id')
    :return: None
    '''
    account_data = accounts.load_current_balance(acc_data['account_id'])
    current_balance = ''' --------- BALANCE INFO --------
    Credit : %s
    Balance: %s''' % (account_data['credit'], account_data['balance'])
    print(current_balance)
    back_flag = False
    while not back_flag:
        repay_amount = input("\033[33;1mInput repay amount ('b' exit):\033[0m").strip()
        if len(repay_amount) > 0 and repay_amount.isdigit():
            # (removed a leftover debug print that was here)
            new_balance = transaction.make_transaction(trans_logger, account_data, 'repay', repay_amount)
            if new_balance:
                print('''\033[42;1mNew Balance:%s\033[0m''' % (new_balance['balance']))
        elif repay_amount == 'b':
            back_flag = True
        else:
            print('\033[31;1m[%s] is not a valid amount, only accept integer!\033[0m' % repay_amount)
@login_required
def withdraw(acc_data):
    """
    Print the current balance and let the user withdraw cash.

    NOTE(review): added @login_required to match the sibling handler
    `repay` — confirm this matches the intended auth policy.
    :param acc_data: authenticated account data (needs 'account_id')
    :return: None
    """
    account_data = accounts.load_current_balance(acc_data['account_id'])
    current_balance = '''
        --------- 余额信息 --------
        Credit : %s
        Balance: %s
    ''' %(account_data['credit'],account_data['balance'])
    print(current_balance)
    back_flag = False
    while not back_flag:
        withdraw_amount = input("\033[33;1m input withdraw amount(if want back use 'b'):\033[0m").strip()
        if len(withdraw_amount) > 0 and withdraw_amount.isdigit():
            new_balance = transaction.make_transaction(trans_logger, account_data, 'withdraw', withdraw_amount)
            if new_balance:
                print('''\033[42;1mNew Balance:%s\033[0m''' % (new_balance['balance']))
        elif withdraw_amount == 'b':
            back_flag = True
        else:
            # BUG FIX: output typo "Banlance" corrected to "Balance" above,
            # matching the repay handler.
            print('\033[31;1m[%s] is not a valid amount, only accept integer!\033[0m' % withdraw_amount)
def transfer(acc_data):
    # TODO: transfers are not implemented yet.
    pass
def pay_check(acc_data):
    # TODO: billing statement is not implemented yet.
    pass
def logout(acc_data):
    # Menu option 6: terminates the whole process.
    exit(0)
def interactive(acc_data):
    """
    Interact with the user: display the menu and dispatch the chosen action.
    :param acc_data: account data dict (shared in-memory login state)
    :return: None
    """
    # The u prefix keeps the literal unicode on Python 2.
    menu = u'''
    --------- test Bank ------
    \033[32;1m 1. 账号信息
    2. 还款
    3. 取款
    4. 转账
    5. 账单
    6. 退出
    \033[0m'''
    # Menu option -> handler; every handler takes the account data dict.
    menu_dic = {
        '1': account_info,
        '2': repay,
        '3': withdraw,
        '4': transfer,
        '5': pay_check,
        '6': logout,
    }
    exit_flag = False
    while not exit_flag:
        print(menu)
        user_option = input(">>>:").strip()
        if user_option in menu_dic:
            print('accdata', acc_data)
            menu_dic[user_option](acc_data)
        else:
            print("\033[31;1m选项不存在\033[0m")
def run():
    """
    Entry point called at program start; handles the user interaction.
    :return:
    """
    acc_data = auth.acc_login(user_data, access_logger)
    # acc_login is expected to set user_data['is_authenticated'] on success
    # — TODO confirm against core.auth.
    if user_data['is_authenticated']:
        user_data['account_data'] = acc_data
        interactive(user_data)  # hand off to the menu loop
|
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import KFold
import keras
from keras.models import Sequential
from keras.layers.core import Dense,Activation,Dropout
from keras.optimizers import SGD,Adam
from keras.utils import np_utils
# Experiment hyper-parameters.
n_classes=4
n_epoch=20
n_sample=10000
learning_rate=0.001
# Read data and true label
x_train=np.loadtxt('Xtrain_%i_3.txt' %n_sample,delimiter=' ')
l_train_tmp=np.loadtxt('Ltrain_%i_3.txt'%n_sample)
# Shift labels from 1-based to 0-based before one-hot encoding.
for i in range(n_sample):
    l_train_tmp[i]-=1
l_train=keras.utils.to_categorical(l_train_tmp, num_classes=n_classes)
x_input,y_input= x_train,l_train
k=-1
kf = KFold(n_splits=10)
# Accuracy[fold, n_perceptron-1] holds the converged test accuracy.
Accuracy=np.zeros((10,10))
#k-Fold cross-validation
for train_ind, test_ind in kf.split(x_input):
    k+=1
    x_train=x_input[train_ind]
    x_test=x_input[test_ind]
    y_train=y_input[train_ind]
    y_test=y_input[test_ind]
    # optimize the number of perceptron
    for n_perceptron in range(1,11):
        #Set Model: one hidden softplus layer, softmax output.
        model = Sequential([Dense(n_perceptron,input_dim=3,activation="softplus"),
                            Dense(n_classes,input_dim=n_perceptron,activation='softmax')
                            ])
        sgd = SGD(lr=learning_rate, momentum=0.8, decay=1e-5, nesterov=True)
        model.compile(optimizer=sgd,loss='categorical_crossentropy',metrics=['accuracy'])
        #Begin training: fit in n_epoch bursts until the test accuracy
        #changes by no more than `tolerance` between bursts.
        converged=0
        tolerance=0.001
        last_score=0
        while converged==0:
            model.fit(x_train, y_train, epochs = n_epoch, verbose = 0)
            score = model.evaluate(x_test,y_test,verbose = 0)
            if np.abs(score[1] - last_score)<=tolerance:
                converged=1
            last_score=score[1]
            print (n_perceptron,k)
        #save the result
        Accuracy[k,n_perceptron-1]=last_score
# Print the 10x10 (fold x perceptron-count) accuracy table.
# BUG FIX: "print Accuracy" is Python 2-only syntax; the parenthesized call
# prints the same single object on both Python 2 and 3.
print(Accuracy)
|
from django.contrib import admin
from django.urls import path
from . import views
# URL routes. NOTE(review): several route strings contain typos
# ('upadte_new_form', '*_retrival', 'retriving_data') that mirror the view
# function names — renaming must be coordinated with views.py and every
# client that calls these endpoints.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('insert_new_form', views.insert_new_form),
    path('delete_new_form', views.delete_new_form),
    path('upadte_new_form', views.upadte_new_form),
    path('enable_disable', views.enable_disable),
    path('badges_data_retrival', views.badges_data_retrival),
    path('retriving_data', views.retriving_data),
    path('log_data_retrival', views.log_data_retrival),
    path('approval_data_dropdown', views.approval_data_dropdown),
    path('Business_type_data_dropdown', views.Business_type_data_dropdown),
    path('SAP_data_dropdown', views.SAP_data_dropdown),
    path('PO_data_dropdown', views.PO_data_dropdown),
    path('Indent_data_dropdown', views.Indent_data_dropdown),
    path('Requirement_data_dropdown', views.Requirement_data_dropdown),
    path('plants_drop_down', views.plants_drop_down),
    path('C_P_Received_by_dropdown', views.C_P_Received_by_dropdown),
    path('yes_no_data_dropdown', views.yes_no_data_dropdown),
    path('dummy_insert', views.dummy_insert),
]
|
from pathlib import Path
from shutil import copy
from tempfile import TemporaryDirectory
from pyshould import should, should_not
from momos import cli
EXAMPLE_PATH = Path(__file__).parent / '..' / 'examples' / 'basic'
def test_cli_include(capsys):
    """`include gtest` exits with code 0 and prints a non-empty include path."""
    try:
        cli.include.main(['gtest'])
    except SystemExit as ex:
        # NOTE(review): if main() returns without exiting, the exit-code
        # check is silently skipped; only the output assertion runs.
        ex.code | should.be_equal(0)
    output = capsys.readouterr()
    output.out | should_not.be_empty()
def test_cli_include_invalid(capsys):
    """An unknown flavor exits with code 1 and reports the exact error."""
    try:
        cli.include.main(['invalid'])
    except SystemExit as ex:
        ex.code | should.be_equal(1)
    output = capsys.readouterr()
    output.err.strip() | should.be_equal('Error: Unknown flavor "invalid"')
def test_cli_graph():
    """`graph` renders the example state machine to a PNG file."""
    with TemporaryDirectory() as tmp:
        out_path = Path(tmp) / 'graph.png'
        try:
            cli.graph.main([
                '--input-file',
                str(EXAMPLE_PATH / 'src' / 'states.c'),
                '--output-file',
                str(out_path),
            ])
        except SystemExit as ex:
            ex.code | should.be_equal(0)
        # The output image must exist regardless of how main() terminated.
        out_path.exists() | should.be_true()
def test_cli_analyze(capsys):
    """`analyze` prints every expected summary line for the example input."""
    try:
        cli.analyze.main([
            '--input-file',
            str(EXAMPLE_PATH / 'src' / 'states.c'),
        ])
    except SystemExit as ex:
        ex.code | should.be_equal(0)
    output = capsys.readouterr()
    stdout = output.out.strip()
    # Each analysis section must appear in the report.
    stdout | should.contain_the_substring('Initial state: ')
    stdout | should.contain_the_substring('States: ')
    stdout | should.contain_the_substring('Transitions: ')
    stdout | should.contain_the_substring('Closed graph: ')
    stdout | should.contain_the_substring('Isolated states: ')
    stdout | should.contain_the_substring('Single degree states: ')
def test_cli_build():
    """`build` generates a test file from the example source + base file."""
    with TemporaryDirectory() as tmp:
        out_path = Path(tmp) / 'test_states.cpp'
        base_path = Path(tmp) / 'base_states.cpp'
        # Copy the base file into the temp dir so the example tree stays clean.
        copy(EXAMPLE_PATH / 'test' / 'base_states.cpp', base_path)
        try:
            cli.build.main([
                '--input-file',
                str(EXAMPLE_PATH / 'src' / 'states.c'),
                '--base-file',
                str(base_path),
                '--output-file',
                str(out_path),
            ])
        except SystemExit as ex:
            ex.code | should.be_equal(0)
        out_path.exists() | should.be_true()
|
"""
Uso básicos de: Casting, If, Inputs, Búcles
"""
menu = """
Bienvenido al conversor de divisas 💸
1: Dolar américano
2: Dolar canadiense
3: Peso colombiano
Elige una opción: """
opcion = int(input(menu))
def convertir(nombre_divisa, valor_divisa):
    """Read an MXN amount from stdin and print its value in the target
    currency at the given rate (MXN per unit)."""
    cantidad_pesos = float(input("Escribe la cantidad de pesos mexicanos: "))
    convertido = round(cantidad_pesos / valor_divisa, 2)
    print("Tienes $" + str(convertido) + " " + nombre_divisa + " con " + str(cantidad_pesos) + " pesos mexicanos")
# Dispatch on the chosen currency; rates are hard-coded MXN-per-unit values.
if opcion == 1:
    convertir("Dolares américanos", 21.5)
elif opcion == 2:
    convertir("Dolares canadienses", 16.34)
elif opcion == 3:
    convertir("Pesos colombianos", 172.44)
else:
    print("Opción inválida")
import time
from locust import HttpUser, task
class QuickstartUser(HttpUser):
    """Locust load profile: 1 part home-page GETs to 3 parts predictions."""
    @task
    def hello_world(self):
        # GET the root page.
        self.client.get("/")
    @task(3)
    def view_item(self):
        # POST one fixed Boston-housing feature row to /predict (3x weight).
        self.client.post("/predict", json={"CHAS":{"0":0},"RM":{"0":6.575},"TAX":{"0":296.0},"PTRATIO":{"0":15.3},"B":{"0":396.9},"LSTAT":{"0":4.98}})
|
"""
value_type_extractor.py
Alex Davis, January 2019
Chris Tordi, January 2019
Script for generating a list of all the enumerations/types from Siemens point description
reports
"""
import csv
import os
import re
POINT_DESCRIPTION_DIRECTORY = "/Volumes/Seven/Downloads/Siemens Point Descriptions/"
def try_cast_int(s):
    """Return int(s), or None when s cannot be parsed as an integer."""
    try:
        value = int(s)
    except ValueError:
        return None
    return value
def try_cast_float(s):
    """Return float(s), or None when s cannot be parsed as a float."""
    try:
        value = float(s)
    except ValueError:
        return None
    return value
def generate_enum_cases_map():
    """Scan every CSV point-description report and collect text-table enums.

    Returns a dict mapping enum (text table) name -> set of (value, label)
    tuples parsed from the rows that follow a "Text Table:" marker row.
    """
    # BUG FIX: raw string for the regex — "\s", "\d", "\w" are invalid
    # escape sequences in a plain literal on modern Python. Compiled once
    # instead of re-parsed for every row.
    case_pattern = re.compile(r"\s+(\d+)\s+-\s+(\w+)")
    enum_name_cases_map = {}
    for filename in os.listdir(POINT_DESCRIPTION_DIRECTORY):
        if ".csv" not in filename:
            continue
        print(filename)
        with open(os.path.join(POINT_DESCRIPTION_DIRECTORY, filename), "r") as csv_file:
            reader = csv.reader(csv_file)
            for row in reader:
                # Skip blank rows (no cells, or a single empty cell).
                if len(row) == 0 or (len(row) == 1 and len(row[0]) == 0):
                    continue
                if row[0] == "Text Table:":
                    enum_name = row[1]
                    enum_name_cases_map.setdefault(enum_name, set())
                    # Consume the indented "value - label" rows that follow,
                    # stopping at the first row that doesn't match.
                    for caseRow in reader:
                        match = case_pattern.match(caseRow[0])
                        if match is None:
                            break
                        enum_name_cases_map[enum_name].add((int(match[1]), match[2]))
    return enum_name_cases_map
def main():
    """Print every discovered enum name followed by its cases."""
    for enum_name, enum_cases in generate_enum_cases_map().items():
        print(enum_name)
        for case in enum_cases:
            print("\t", case)
if __name__ == "__main__":
main()
|
def compare(a, b, equal):
    """Return the longest common prefix of a and b that is not already
    listed in `equal`; returns the NUL sentinel when none exists."""
    sentinel = "\0"
    for length in range(len(a), 0, -1):
        prefix = a[:length]
        if prefix == b[:length] and prefix not in equal:
            return prefix
    return sentinel
def compare2(a, b, equal):
    """Return the longest common suffix of a and b that is not already
    listed in `equal`; returns the NUL sentinel when none exists.

    BUG FIX: the membership test read the module-level list `e` instead of
    the `equal` parameter, which was silently ignored. (The script happens
    to pass `e` as the argument, so behavior for that caller is unchanged.)
    """
    dummy = "\0"
    for i in range(len(a)):
        if a[i:] == b[i:] and (a[i:] not in equal):
            return a[i:]
    return dummy
temp = ""
output = ""
y = 0
# For each test case: read N words, then greedily pair adjacent words
# (after sorting) that share an unused common prefix/suffix. The answer is
# twice the larger pairing count (suffix pairs found via reversed words).
for test in range(int(input())):
    words = []
    equal = []
    w = []
    e = []
    y = 0
    for inp in range (int(input())):
        temp = input()
        w.append(temp)
        # Reversed copies turn common-suffix matching into prefix matching.
        words.append(temp[::-1])
    #print(words)
    words.sort()
    #w.sort
    if len(words)>1:
        i = 0
        while i<(len(words)-1):
            tempo = compare(words[i],words[i+1],equal)
            if tempo != "\0" :
                equal.append(tempo)
                i+=2
            else: i += 1
    i = 0
    while i<(len(w)-1):
        tempo = compare2(w[i],w[i+1],e)
        if tempo != "\0" :
            e.append(tempo)
            i+=2
        else: i += 1
    y = max(len(equal)*2,len(e)*2)
    output += "Case #"+str(test+1)+": "+str(y)+"\n"
    # NOTE(review): leftover debug print executed for every test case.
    print(equal,words,e,w)
print(output.rstrip('\n'))
import bagit
import os
import re
import subprocess
import shutil
import sys
from tqdm import tqdm
def repackage_aips(AIPRepackager):
    """Repackage every Archivematica AIP in the 'Doing' queue into
    sibling objects.zip + metadata.zip archives.

    For each UUID in the project metadata: extract the .7z AIP if needed,
    validate the bag (payload oxum only), move the payload out of
    data/objects, zip the payload and the remaining metadata separately,
    then delete the unzipped bag scaffolding.
    Exits the process when a bag fails validation.
    """
    doing_dir = os.path.join(AIPRepackager.aip_to_item_queue, "Doing")
    for uuid in tqdm(AIPRepackager.project_metadata["uuids"], desc="Repackaging AIPs"):
        name = AIPRepackager.project_metadata["uuids_to_aip_names"][uuid]
        aip_dir = os.path.join(doing_dir, name)
        if os.path.exists(aip_dir):
            if name.endswith(".7z"):
                # Extract the archive next to itself, then work on the folder.
                cmd = [
                    "7za", "x", aip_dir, "-o{}".format(doing_dir)
                ]
                subprocess.check_call(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
                name = re.sub(r"\.7z$", "", name).strip()
                aip_dir = os.path.join(doing_dir, name)
            # fast=True validates the payload oxum only, not every checksum.
            if not bagit.Bag(aip_dir).is_valid(fast=True):
                print("AIP BAG {} IS NOT VALID ACCORDING TO PAYLOAD OXUM".format(aip_dir))
                sys.exit()
            aip_objects = os.path.join(aip_dir, "data", "objects")
            repackaged_objects = os.path.join(aip_dir, "objects")
            zipped_objects = os.path.join(aip_dir, "objects.zip")
            zipped_metadata = os.path.join(aip_dir, "metadata.zip")
            os.mkdir(repackaged_objects)
            # Move payload files out of data/objects, leaving metadata dirs.
            for item in os.listdir(aip_objects):
                if item in ["metadata", "MetaData", "submissionDocumentation"]:
                    continue
                os.rename(
                    os.path.join(aip_objects, item),
                    os.path.join(repackaged_objects, item)
                )
            # zip objects
            cmd = [
                "7za", "a",
                "-bd",
                "-tzip",
                "-y",
                "-mtc=on",
                "-mmt=on",
                zipped_objects,
                repackaged_objects
            ]
            subprocess.check_call(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
            shutil.rmtree(repackaged_objects)
            # zip metadata (everything left in the bag except objects.zip)
            cmd = [
                "7za", "a",
                "-bd",
                "-tzip",
                "-y",
                "-mtc=on",
                "-mmt=on",
                "-x!" + os.path.join(name, "objects.zip"),
                zipped_metadata,
                aip_dir
            ]
            subprocess.check_call(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
            # Keep only the two zips; remove the bag scaffolding.
            for item in os.listdir(aip_dir):
                if item in ["objects.zip", "metadata.zip"]:
                    continue
                elif item == "data":
                    shutil.rmtree(os.path.join(aip_dir, item))
                elif item in ["bag-info.txt", "bagit.txt", "manifest-sha256.txt", "tagmanifest-md5.txt"]:
                    os.remove(os.path.join(aip_dir, item))
|
import pytest
from selenium.common.exceptions import TimeoutException
from pages.base_page import BasePage
from pages.locators import MainPageLocators, PortfolioLocators
class MainPage(BasePage):
    """Page object for the site's main page."""
    def open_portfolio(self):
        """Navigate from the main page into the portfolio section.

        Fails the running test (pytest.fail) when the portfolio link or
        the portfolio page does not appear within 30 seconds.
        """
        try:
            self.browser.find_element(*MainPageLocators.PARENT_ROLE).click()
            self.wait_until_element_displayed(*MainPageLocators.PORTFOLIO_BUTTON, 30)
        except TimeoutException:
            pytest.fail('Не найдена ссылка на портфолио')
        try:
            # Wait for clickability before clicking to avoid a race with
            # the menu animation.
            self.wait_until_element_is_clickable(*MainPageLocators.PORTFOLIO_BUTTON, 30)
            self.browser.find_element(*MainPageLocators.PORTFOLIO_BUTTON).click()
            self.wait_until_element_displayed(*PortfolioLocators.USER_INFO_BUTTON, 30)
        except TimeoutException:
            pytest.fail('Не удалось перейти в портфолио')
##########################################################################
#
# Copyright (c) 2020, Cinesite VFX Ltd. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of Cinesite VFX Ltd. nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import imath
import IECore
import IECoreScene
import GafferUITest
import GafferScene
# Needs to be imported to register the visualisers
import GafferSceneUI
class VisualiserTest( GafferUITest.TestCase ) :

	def testCameraVisualiserFramingBound( self ) :

		# Certain visualisations should be able to opt out of affecting
		# a location's bounds (generally to prevent 'large' visualisations
		# from breaking 'f' to fit in the viewer).

		renderer = GafferScene.Private.IECoreScenePreview.Renderer.create(
			"OpenGL",
			GafferScene.Private.IECoreScenePreview.Renderer.RenderType.Interactive
		)

		camera = IECoreScene.Camera()
		camera.setProjection( "perspective" )

		# The expected bound is the size of the green camera body visualisation.
		# We want to make sure the bound reported by the renderer doesn't contain
		# the frustum visualisation, which extends to the far clipping plane.
		expectedBodyBound = imath.Box3f( imath.V3f( -0.85, -0.85, -0.75 ), imath.V3f( 0.85, 0.85, 1.8 ) )

		# Make sure the far plane is bigger than the camera body visualisation,
		# so a frustum leaking into the bound would be detected below.
		clippingPlanes = camera.getClippingPlanes()
		self.assertTrue( clippingPlanes[1] > abs(expectedBodyBound.min()[2]) )

		_ = renderer.object(
			"/camera",
			camera,
			renderer.attributes( IECore.CompoundObject() )
		)

		cameraBound = renderer.command( "gl:queryBound", {} )
		self.assertEqual( cameraBound, expectedBodyBound )
if __name__ == "__main__":
	# Fix: `unittest` was referenced without ever being imported, so running
	# this file directly raised NameError. Import it locally before handing
	# control to the standard test runner.
	import unittest
	unittest.main()
|
from django import forms
from django.contrib.auth.models import User
from django.forms import ModelChoiceField
from .models import Profile, Project, Project_Avaliacao
class MyModelChoiceField(ModelChoiceField):
    """ModelChoiceField whose option labels show the related user's username."""

    def label_from_instance(self, obj):
        """Return the username of *obj*'s related user as the choice label."""
        username = obj.user.username
        return "{}".format(username)
class LoginForm(forms.Form):
    # Plain (non-model) login form; both fields use Bootstrap's
    # "form-control" styling.
    username = forms.CharField(
        label="Username",
        max_length=60,
        widget=forms.TextInput(
            attrs={
                'class': 'form-control',
                'placeholder': 'Username'
            })
    )
    password = forms.CharField(
        label="Password",
        widget=forms.PasswordInput(
            attrs={
                'class': 'form-control',
                'placeholder': 'Password'
            })
    )
class RegisterForm(forms.Form):
    # Registration form: user-type radio choice plus a password.
    # '1' = internal user ("Interno"), '0' = external user ("Externo").
    CHOICES = (('1', 'Interno',), ('0', 'Externo',))
    user_type = forms.ChoiceField(
        choices=CHOICES,
        widget=forms.RadioSelect(
            attrs={
                'class': 'form-check-input',
            })
    )
    password = forms.CharField(
        label="Password",
        widget=forms.PasswordInput(
            attrs={
                'class': 'form-control',
                'placeholder': 'Password'
            })
    )
class UserForm(forms.ModelForm):
    # Edits the basic django.contrib.auth User fields with Bootstrap widgets.
    class Meta:
        model = User
        fields = ['username', 'first_name', 'last_name', 'email']

    username = forms.CharField(
        label="Username",
        widget=forms.TextInput(
            attrs={
                'class': 'form-control mx-1',
                'placeholder': 'Username'
            })
    )
    first_name = forms.CharField(
        label="Nome",
        widget=forms.TextInput(
            attrs={
                'class': 'form-control mx-1',
                'placeholder': 'Nome'
            })
    )
    last_name = forms.CharField(
        label="Apelido",
        widget=forms.TextInput(
            attrs={
                'class': 'form-control mx-1',
                'placeholder': 'Apelido'
            })
    )
    # NOTE(review): plain CharField, so no e-mail format validation happens
    # here - confirm whether forms.EmailField was intended.
    email = forms.CharField(
        label="Email",
        widget=forms.TextInput(
            attrs={
                'class': 'form-control',
                'placeholder': 'Email'
            })
    )
class ProfileForm(forms.ModelForm):
    # Profile edit form.
    # NOTE(review): `morada` is declared below but not listed in Meta.fields;
    # Django still renders declared fields, so confirm the omission is intended.
    class Meta:
        model = Profile
        fields = ['age', 'area_de_interesse', 'descricao', 'profissao', 'empresa', 'gender', 'status']

    age = forms.IntegerField(
        label="Idade",
        widget=forms.NumberInput(
            attrs={
                'class': 'form-control mx-1',
                'placeholder': 'Idade'
            }),
        required=False
    )
    area_de_interesse = forms.CharField(
        label="Area de Interesse",
        widget=forms.Textarea(
            attrs={
                'class': 'form-control',
                'placeholder': 'Area de Interesse'
            }),
        required=False
    )
    descricao = forms.CharField(
        label="Descrição",
        widget=forms.Textarea(
            attrs={
                'class': 'form-control',
                'placeholder': 'Descrição'
            }),
        required=False
    )
    profissao = forms.CharField(
        label="Profissão",
        widget=forms.TextInput(
            attrs={
                'class': 'form-control',
                'placeholder': 'Profissão'
            }),
        required=False
    )
    empresa = forms.CharField(
        label="Empresa",
        widget=forms.TextInput(
            attrs={
                'class': 'form-control',
                'placeholder': 'Empresa'
            }),
        required=False
    )
    genderChoices = (('1', 'Male',), ('0', 'Female',))
    gender = forms.ChoiceField(
        choices=genderChoices,
        widget=forms.RadioSelect(
            attrs={
                'class': 'form-check-input',
                'placeholder': 'Gender'
            }),
        required=False
    )
    statusChoices = (('1', 'Interno',), ('0', 'Externo',))
    # NOTE(review): unlike every other field here, `status` is not
    # required=False - confirm whether it is meant to be mandatory.
    status = forms.ChoiceField(
        choices=statusChoices,
        widget=forms.RadioSelect(
            attrs={
                'class': 'form-check-input',
                'placeholder': 'Status'
            })
    )
    morada = forms.CharField(
        label="Morada",
        widget=forms.TextInput(
            attrs={
                'class': 'form-control',
                'placeholder': 'Rua da Bananas nº4 ...'
            })
    )
class ProjectForm(forms.ModelForm):
    # Project create/edit form.
    # NOTE(review): `users` is declared below but not listed in Meta.fields;
    # Django still renders declared fields, so confirm the omission is intended.
    class Meta:
        model = Project
        fields = ['name_text', 'empresa', 'descricao', 'responsavel']

    name_text = forms.CharField(
        label="Nome",
        widget=forms.TextInput(
            attrs={
                'class': 'form-control mx-1',
                'placeholder': 'Nome'
            })
    )
    empresa = forms.CharField(
        label="Empresa",
        widget=forms.TextInput(
            attrs={
                'class': 'form-control mx-1',
                'placeholder': 'Empresa'
            })
    )
    descricao = forms.CharField(
        label="Descricao",
        widget=forms.Textarea(
            attrs={
                'class': 'form-control mx-1',
                'placeholder': 'Descricao'
            })
    )
    users = forms.CharField(
        label="Users",
        widget=forms.Textarea(
            attrs={
                'class': 'form-control mx-1',
                'placeholder': 'Users Atribuidos'
            })
    )
    # Dropdown of profiles labelled by username (see MyModelChoiceField).
    responsavel = MyModelChoiceField(queryset=Profile.objects.all(), empty_label="(Choose field)", to_field_name="user")
|
from django.db import models
from users.models import User
# Create your models here.
class Notification(models.Model):
    """Per-user notification message (通知)."""
    content = models.CharField(max_length=200)
    # Whether the user has already acknowledged the notification.
    is_checked = models.BooleanField()
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return '[Alert] ' + self.content
class ProjectPhase(models.Model):
    """Phase of a project (案件のフェーズ)."""
    phase = models.CharField(max_length=20)

    def __str__(self):
        return self.phase
class Project(models.Model):
    """Project (案件) with a date range, phase and assigned members."""
    name = models.CharField(max_length=200)
    start_date = models.DateField()
    end_date = models.DateField()
    # start_date = models.CharField(max_length=10)
    # end_date = models.CharField(max_length=10)
    phase = models.ForeignKey(ProjectPhase, on_delete=models.PROTECT)
    # Members are attached through the ProjectMember join table.
    user = models.ManyToManyField(
        User,
        through="ProjectMember",
    )
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    def __str__(self):
        # strftime('%Y-%m-%d')[:-3] trims the '-DD' part, leaving 'YYYY-MM'.
        return self.name + ' [' + self.start_date.strftime('%Y-%m-%d')[:-3] + '-' + self.end_date.strftime('%Y-%m-%d')[:-3] + '] '
class ProjectMember(models.Model):
    """Join table between Project and User (案件×メンバー)."""
    project = models.ForeignKey(Project, on_delete=models.CASCADE)
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        # A user may appear at most once per project.
        constraints = [
            models.UniqueConstraint(
                fields=["project", "user"],
                name="project_member_unique"
            ),
        ]

    def __str__(self):
        return self.project.name + ' | ' + self.user.username
class MonthlyWorkingTime(models.Model):
    """Planned vs actual working time per member per month (月次の稼働時間の予定と実績)."""
    project_member = models.ForeignKey(ProjectMember, default=0, on_delete=models.CASCADE)
    # NOTE(review): a full DateField represents the month; the day component
    # is presumably a fixed placeholder - confirm with callers.
    target_month = models.DateField()
    planed_working_time = models.IntegerField()
    actual_working_time = models.IntegerField()
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    def __str__(self):
        # [:-3] trims '-DD', leaving 'YYYY-MM'.
        return self.project_member.project.name + ' | ' + self.project_member.user.username + ' | ' + self.target_month.strftime('%Y-%m-%d')[:-3]
class DailyWorkingTime(models.Model):
    """Actual working time for one member on one day (日次の稼働時間の実績)."""
    project_member = models.ForeignKey(ProjectMember, default=0, on_delete=models.CASCADE)
    target_day = models.DateField()
    target_month = models.DateField()
    working_time = models.IntegerField()
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    def __str__(self):
        # Fix: the original concatenated the ProjectMember instance directly
        # with a str, which raises TypeError. Convert explicitly and use the
        # same ' | ' separators as MonthlyWorkingTime.__str__.
        return str(self.project_member) + ' | ' + self.target_month.strftime('%Y-%m-%d')[:-3] + ' | ' + self.target_day.strftime('%Y-%m-%d')
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import time
import sys
import io
import re
import math
import itertools
#sys.stdin=file('input.txt')
#sys.stdout=file('output.txt','w')
#10**9+7
mod=1000000007
#start = time.clock()
# Python 2 script: read n and n values, then print the spread (max - min)
# and the number of (min, max) element pairs. When all values are equal
# the spread is 0 and the pair count is C(cnt, 2), at least 1.
n=int(raw_input())
l=map(int, raw_input().split())
# Count occurrences of each value.
d={}
for i in l:
    if i in d:
        d[i]+=1
    else:
        d[i]=1
l.sort()
if len(d)!=1:
    print l[-1]-l[0],d[l[-1]]*d[l[0]]
else:
    # All values equal; relies on Python 2 integer division for C(cnt, 2).
    print 0,max(1,d[l[0]]*(d[l[0]]-1)/2)
#end = time.clock()
#print end - start
|
import werkzeug
werkzeug.cached_property = werkzeug.utils.cached_property
# Import libraries
from keras.preprocessing.image import img_to_array
from keras.models import load_model
from flask_restplus import Api, Resource, fields
from flask import Flask, request, jsonify
import numpy as np
from werkzeug.datastructures import FileStorage
from PIL import Image
import tensorflow as tf
# Define app and API
app = Flask(__name__)
api = Api(app, version='1.0', title='MNIST Classification', description='CNN for Mnist')
ns = api.namespace('cnn', description='Methods')

# Define parser: a single uploaded file is required.
single_parser = api.parser()
single_parser.add_argument('file', location='files', type=FileStorage, required=True)

# Load the model once at startup and capture the default TensorFlow graph so
# request handlers can run predictions against the same graph.
model = load_model('my_model.h5')
graph = tf.get_default_graph()
@ns.route('/prediction')
class CNNPrediction(Resource):
    """Uploads your data to the CNN"""
    @api.doc(parser=single_parser, description='Upload an mnist image')
    def post(self):
        """Classify an uploaded digit image and return the predicted class as JSON."""
        # Parse args
        args = single_parser.parse_args()
        image_file = args.file
        # Save file and open it again
        image_file.save('mnist.png')
        img = Image.open('mnist.png')
        # Resize and convert to array
        image_red = img.resize((28, 28))
        image = img_to_array(image_red)
        print(f"Image shape: {image.shape}")
        # Reshape to NHWC with one channel and scale pixels to [0, 1].
        # NOTE(review): the reshape assumes a single-channel image; an RGB
        # upload would fail here - confirm expected input format.
        x = image.reshape(1, 28, 28, 1)
        x = x/255
        # Run the prediction inside the graph captured at startup.
        with graph.as_default():
            out = model.predict(x)
        # Round
        print(f"Prediction: {out[0]}")
        print(f"Argmax of prediction: {np.argmax(out[0])}")
        r = np.argmax(out[0])
        # Return prediction as json
        return {'prediction': str(r)}
if __name__ == '__main__':
    # Listen on all interfaces (container/LAN deployment) on port 8000.
    app.run(host='0.0.0.0', port=8000)
#!/usr/bin/python
# -*- coding: utf-8 -*-
from modules import TVResourceTemplate, research
import re, logging
logger = logging.getLogger(__name__)
class TVResource(TVResourceTemplate):
    """Scraper for tv-only.org: channel listing and per-channel stream URL (Python 2)."""
    baseurl = 'http://tv-only.org'

    def __init__(self, baseurl=baseurl):
        super(TVResource, self).__init__(baseurl)
        # NOTE(review): the meaning of `cost` is defined by TVResourceTemplate
        # (presumably a priority/weight among resources) - confirm.
        self.cost = 13

    def _get_channels(self):
        """Return a list of dicts (title/link/logo) for every channel tile."""
        channels = []
        html = self.get_url(self.baseurl)
        soup = self._get_soup(html.decode('utf8','ignore'))
        for index, l in enumerate(soup.findAll('li',{'class':'item_tv'})):
            try:
                # The img alt text carries the channel name plus a fixed suffix.
                title = l.find('img').get('alt').replace(u'смотреть онлайн','').strip()
                logo = self.baseurl + l.find('img').get('src')
                link = l.find('a').get('href')
                if link:
                    channels.append(dict(
                        title=unicode(title),
                        link=unicode(link),
                        logo=unicode(logo)
                    ))
                    logger.info('get channel %s', link)
            except AttributeError:
                # Tile without the expected img/a structure - skip silently.
                pass
            except Exception as e:
                logger.error('%s:%s - %s', self.baseurl, index, repr(e)[:50])
        return channels

    def _get_stream(self, channel):
        """Extract the stream URL from the channel page's `var src = "..."` script."""
        html = self.get_url(channel['link']).decode('utf8','ignore')
        stream = research("var src = \"(.*?)\"",html)
        return stream
|
#!/usr/bin/env python
"""
Print the GUANO metadata found in a file or files.
usage::
$> guano_dump.py [--strict] WAVFILE...
"""
from __future__ import print_function
import sys
import os
import os.path
from guano import GuanoFile
def dump(fname, strict=False):
    """Print a blank separator line, *fname*, then the file's GUANO metadata."""
    # One call produces the same bytes as print() followed by print(fname).
    print("\n%s" % fname)
    print(GuanoFile(fname, strict=strict).to_string())
if __name__ == '__main__':
    from glob import glob
    import logging

    logging.basicConfig(level=logging.DEBUG, format='%(asctime)s\t%(levelname)s\t%(message)s')

    if len(sys.argv) < 2:
        print('usage: %s [--strict] FILE...' % os.path.basename(sys.argv[0]), file=sys.stderr)
        sys.exit(2)

    # On Windows the shell does not expand wildcards, so glob manually.
    if os.name == 'nt' and '*' in sys.argv[1]:
        fnames = glob(sys.argv[1])
    else:
        fnames = sys.argv[1:]

    strict = False
    if '--strict' in fnames:
        fnames.remove('--strict')
        strict = True

    for fname in fnames:
        if os.path.isdir(fname):
            # Directories: dump every .wav file inside (case-insensitive extension).
            for subfname in glob(os.path.join(fname, '*.[Ww][Aa][Vv]')):
                dump(subfname, strict=strict)
        else:
            dump(fname, strict=strict)
|
'''
print("hello world!world!")
print("hello world! ")
print("------------------------------")
print(999*123456789)
print("------------------------------")
type(2.333)
print(type(2.333))
Valuetype =type(2333)
print(Valuetype)
print("------------------------------")
print(1==1)
print("------------------------------")
'''
#元组
'''
yuanzu = (2,3,5,"哈哈哈哈",False,None,2.333,"xixi")
print(yuanzu)
'''
# 统计值数量
'''
print(count(yuanzu))
'''
#下标(索引)
# print(type(yuanzu[6]))
'''
X = 1
Y = 2
print(X)
print(type(Y))
'''
# x = 'wahha'
# Y = "娃哈哈"
# print(type(x))
# print(type(Y))
# Visual separators (the surrounding scratch examples are commented out).
print("---------------------------------------------")
print("---------------------------------------------")
print("---------------------------------------------")
# 数组
# shuzu = [2,3,5,"哈哈哈哈",False,None,2.333,2333,2333,"xixi","xixi"]
'''
shuzu.remove("xixi")
shuzu.remove(2333)
print(shuzu)
'''
'''
shuzu = [5,2,"haha"]
shuzu.pop(1)
print(shuzu)
'''
'''
shuzu = [2,3,5,"哈哈哈哈",False,None,2.333,2333,2333,"xixi","xixi"]
y = tuple(yuanzu)
print(y)
print(type(y))
'''
'''
xx = input("请输入你的年龄:")
print(xx)
print(int(xx)+6)
'''
'''
name = input("请输入你的名字:")
age = input("请输入你的年龄:")
ai = input("请输入你的爱好:")
print("你好!{name1},我今年{age1},我的爱好是{aihao}。\
我今年{age1},我的爱好是{aihao}".format(name1=name,age1=age,aihao=ai))
'''
'''
a = int(input("请输入第一个数字:"))
b = int(input("请输入第二个数字:"))
if a == b:
print("第一个数字比第二个数字大!")
elif a == b:
print("第一个数字和第二个数字一样大!")
else:
print("第一个数字比第二个数字小!")
'''
'''
输入一个数字,如果大于60就放到hlist数组中,如果小于60就放到lowlist中。
'''
'''
a = range(20)
print(a)
'''
'''
# 生成20个值得数组
a = list(range(20))
print(a)
'''
'''
# 生成20个值得元组
a = tuple(range(20))
print(a)
'''
'''
hlist = "你好,现在是北京时间0:30,你会成功的!"
for i in hlist:
print(i)
'''
'''
for i in range(100):
print(i)
'''
'''
hlist = [1,2,3,4,5,6]
highnum = []
lownum = []
for i in hlist:
if i > 4:
highnum.append(i)
else:
lownum.append(i)
print('大于4的数字有{}个 ,小于4的数字有{}个'.format(len(highnum),len(lownum)))
'''
'''
hlist = [1,2,3,4,5,6]
highnum = []
lownum = []
for i in hlist:
if i > 4:
highnum.append(i)
else:
lownum.append(i)
print("大于4的数字有{}个,小于4的数字有{}个".format(len(highnum),len(lownum)))
'''
'''
for i in range(1,10):
for j in range(1,i+1): #1是遍历起始值;i+1是因为,i索取为i的前一位值(即成了i-1),故此i要+1
print('{}X{}={}'.format(i,j,i*j),end=' ')
print("")
'''
'''
for i in range(1,4):
for j in range(1,i+1):
print('{}X{}={}'.format(i,j,i*j))
print("---------------------------------------------------")
for i in range(1,4):
for j in range(i+1):
print('{}X{}={}'.format(i,j,i*j))
print("---------------------------------------------------")
for i in range(1,4):
for j in range(1,i+1): #1是遍历起始值;i+1是因为,i索取为i的前一位值(即i-1),故此i要+1
print('{}X{}={}'.format(i,j,i*j))
'''
# Lists are zero-indexed: prints the first element, 1.
a = [1,2,3]
print(a[0])
|
MOD = 1000000007
# Competitive-programming stdin helpers; each reads one input line.
ii = lambda : int(input())  # one int
si = lambda : input()  # raw string
dgl = lambda : list(map(int, input()))  # digits of a number as a list of ints
f = lambda : map(int, input().split())  # whitespace-separated ints (lazy map)
il = lambda : list(map(int, input().split()))  # whitespace-separated ints as a list
ls = lambda : list(input())  # characters of the line as a list
# In-place heap sort (descending result). The sift-up ("insert") and
# sift-down ("removeMin") routines of a binary min-heap are used, lightly
# modified to work in place.
def inplcehpsrt(pq, sz):
    """Sort `pq[:sz]` in place into DESCENDING order and print the result.

    Phase 1 turns the list into a min-heap by sifting each element up;
    phase 2 repeatedly swaps the minimum to the tail of the shrinking heap
    and restores the heap property by sifting down from the root.
    """
    # Phase 1: build a min-heap via repeated sift-up.
    for end in range(1, sz):
        child = end
        while child > 0:
            parent = (child - 1) // 2
            if pq[parent] > pq[child]:
                pq[child], pq[parent] = pq[parent], pq[child]
                child = parent
            else:
                break
    # Phase 2: extract the minimum to the back, sz times.
    heap_size = sz
    for _ in range(sz):
        pq[heap_size - 1], pq[0] = pq[0], pq[heap_size - 1]
        heap_size -= 1
        node = 0
        while 2 * node + 1 < heap_size:
            left, right = 2 * node + 1, 2 * node + 2
            smallest = node
            if pq[smallest] > pq[left]:
                smallest = left
            if right < heap_size and pq[right] < pq[smallest]:
                smallest = right
            if smallest == node:
                break
            pq[smallest], pq[node] = pq[node], pq[smallest]
            node = smallest
    print(*pq)


l = [1, 2, 3, 4, 5, 6]
inplcehpsrt(l, 6)
|
# Guess the secret number
# 4/20/2018
# CTI-110 P5HW2-Random Number Guessing Game
# Lafayette King
#
# use the random module
import random
def main():
    """Run the number-guessing game, replaying while the player answers yes.

    Fixes: the original read the "Play again?" answer but never used it
    (the game could not actually replay), duplicated the `guess = 1`
    initialisation, and started `guesses` at 1 so the reported count was
    one too many.
    """
    again = "y"
    while again.strip().lower() in ("y", "yes"):
        secret_number = random.randint(1, 100)
        guess = None
        guesses = 0  # incremented once per attempt below
        print("Guess the secret number! ")
        while guess != secret_number:
            guess = int(input("is it... "))
            guesses = guesses + 1
            if guess == secret_number:
                print("You are correct !! , Great Job!!")
                print("It only took you", guesses, "guesses!")
            elif guess > secret_number:
                print("Too high, try again.")
            elif guess < secret_number:
                print("You are too low , try again!!")
            if guesses > 10:
                print("Nice try, ")
        # Honour the replay answer (previously read and discarded).
        again = input("Play again? (y = yes):")


if __name__ == "__main__":
    main()
|
# Read N (cost, time) pairs and print the cheapest cost whose time fits
# within the limit T, or "TLE" if none qualifies.
n, limit = map(int, input().split())
pairs = [tuple(map(int, input().split())) for _ in range(n)]
affordable = [cost for cost, taken in pairs if taken <= limit]
print(min(affordable) if affordable else "TLE")
|
import os
from dataclasses import dataclass
from dotenv import load_dotenv
from fastapi.security import OAuth2PasswordBearer
from passlib.context import CryptContext
load_dotenv()
@dataclass
class Settings:
    # JWT signing configuration; SECRET_KEY falls back to "mysecret" when the
    # environment variable is missing.
    # NOTE(review): the attributes have no type annotations, so @dataclass
    # registers no fields here - they behave as plain class attributes.
    # Confirm whether that is intended.
    SECRET_KEY = os.getenv("SECRET_KEY", "mysecret")
    ALGORITHM = "HS256"
    ACCESS_TOKEN_EXPIRE_MINUTES = 30
# Shared bcrypt password-hashing context and OAuth2 bearer scheme.
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="token")

# Singleton settings instance imported by the rest of the app.
settings = Settings()
|
# Python 2 exercise script: dict creation, lookup, iteration and get().
# dict of states and their abbreviation
states = {
    'Oregon' : 'OR',
    'California' : 'CA',
    'Florida' : 'FL',
    'New York' : 'NY'}

# dict of states and their cities
cities = {
    'CA' : 'San Fran',
    'FL' : 'Orlando'}

# add more to cities
cities['NY'] = 'New York'
cities['OR'] = 'Portland'

# print some cities
print '-' *10
print 'NY State has: ', cities['NY']
print 'OR state has: ', cities['OR']

# print some states
print '-' *10
print 'California\'s abbreviation is: ', states['California']
print 'Oregon\'s abbreviation is: ', states['Oregon']

# print cities from the state name
print '-' *10
print 'California has: ', cities[states['California']]

# print every abbreviation
print '-' *10
for state,abbre in states.items():
    print '%s is abbreviated %s' %(state,abbre)

# print every city
print '-' *10
for abbre,city in cities.items():
    print '%s has %s' %(abbre,city)

print '-' *10
for state,abbre in states.items():
    print '%s is abbreviated %s and has %s' %(state,abbre,cities[abbre])

# get() returns None (or a supplied default) instead of raising KeyError
state = states.get('Texas')
if not state:
    print 'No Texas'

city = cities.get('TX', 'Does not exist')
print 'The city in \'TX\' is : %s' % city
# Shared state (Python 2 script): `hang` caches computed thresholds,
# `card` holds the most recently read value scaled by 1e6.
hang = [0]
card = [0]
def hang_card(hang,card):
    # Return the number of cards needed to reach overhang `card`, extending
    # the cached `hang` thresholds on demand (harmonic-series growth).
    # NOTE(review): relies on Python 2 integer division in 1000000/(n+1).
    hang_len = len(hang)
    #print hang
    #print hang_len
    if hang[-1] <= card:
        # Grow the threshold table until it covers `card`.
        while hang[-1] < card:
            hang.append(1000000/(hang_len + 1) + hang[-1] + 1)
            hang_len += 1
        return hang_len - 1
    else:
        # Already covered: first threshold that reaches `card` wins.
        for i in range(hang_len):
            if hang[i] >= card:
                return i
def getline(card):
    # Read one float from stdin, store it in card[0] scaled to an int
    # (x1e6 to avoid float comparisons). Returns False on EOF.
    try:
        card[0] = int(float(raw_input())*1000000)
        return True
    except EOFError:
        return False
# Process input values until EOF; a zero input is skipped.
while getline(card):
    if card[0] == 0:
        continue
    #print card[0]
    print hang_card(hang, card[0]), "card(s)"
from django.http import Http404
from django.shortcuts import render
from django.contrib.auth.decorators import login_required, permission_required
from .models import *
@permission_required('home.view_news', login_url="/login/")
def yantar(request):
    """Render the certificate list, optionally filtered by a POSTed fio value.

    Fix: the original only assigned `certres` inside the POST branch, so a
    plain GET request raised NameError when building the context. The default
    (unfiltered) queryset is now always assigned first.
    """
    admin = Adminyantar.objects.all()
    # Default queryset; also used when no recognised filter key is posted.
    certres = Certificatestore.objects.order_by("commentary")
    if request.method == 'POST':
        post = request.POST
        # First matching filter key wins, mirroring the original if/elif chain.
        for key in ('processing1', 'control1', 'processing2', 'control2'):
            if key in post:
                certres = Certificatestore.objects.filter(fio=post[key]).order_by("commentary")
                break
    all_date = dict()
    all_date['admin'] = admin
    all_date['certres'] = certres
    return render(request, 'yantar/cert.html', all_date)
def details(request, id):
    """Render the detail page for one certificate; 404 if `id` is unknown."""
    try:
        certres = Certificatestore.objects.get(pk=id)
    except Certificatestore.DoesNotExist:
        raise Http404("Certificate does not exist")
    return render(request, 'yantar/details.html', {'certres': certres})
# Interactive loop: count occurrences of a single character in a sentence,
# repeating until the user answers no.
while True:
    sentence = str(input('\nEnter any word or sentence:'))
    # Keep prompting until exactly one character is supplied.
    while True:
        character=str(input("Of which single charcter count you want?:"))
        character=character.lower()
        if len(character) !=1:
            print("\nPlease enter one letter only.")
        else:
            break
    # NOTE(review): `character` is lower-cased but `sentence` is not, so
    # uppercase occurrences in the sentence are missed - confirm intended.
    c_character=(sentence.count(character))
    print("\nThe character",'"' + character + '"',"appeared",c_character,"times in given word or sentence.")
    # Keep prompting until a recognised yes/no answer is given.
    while True:
        Repeat=input("\nDo you want to calculate again?\n\nYes or No:")
        Repeat=Repeat.lower()
        if Repeat not in ["yes","y","no","n"]:
            print("\nPlease select correct option")
        else:
            break
    if Repeat in ["yes","y"]:
        continue
    else:
        if Repeat in ["no","n"]:
            print("\n-----Thank you for using-----")
            input()
            break
from django import forms
from .models import *
import datetime
from django.core.exceptions import ValidationError
class CustomerForm(forms.ModelForm):
    """Customer ModelForm with custom error messages and an 18+ birthday rule.

    Fix: `email` was declared twice; the second declaration silently replaced
    the first, discarding its 'required' error message. Both messages are now
    merged into a single field declaration.
    """

    def __init__(self , *args , **kwargs):
        super ( CustomerForm , self ).__init__ ( *args , **kwargs )

    name = forms.CharField(error_messages={'required': 'Name is empty.'})
    email = forms.EmailField(error_messages={'required': 'Email is empty.', 'invalid': 'Invalid Email.'})
    salary = forms.DecimalField(error_messages={'required': 'Salary is empty.', 'invalid': 'Invalid Salary.'})
    gender = forms.ChoiceField(error_messages={'required': 'Gender is empty.'})
    imageCustomer = forms.FileField(error_messages={'required': 'Image is empty.'})
    # Default birthday: exactly 18 years ago, rendered read-only.
    # NOTE(review): `date` must come from the models star-import - confirm.
    birthday = forms.DateField(initial=date.today().replace(year=date.today().year - 18).strftime("%d/%m/%Y"), error_messages={'required': 'Birthday is empty.', 'invalid': 'Invalid Birthday.'})
    birthday.widget.attrs['readonly'] = True

    def clean_salary(self):
        """Reject negative salaries."""
        salary = self.cleaned_data['salary']
        if salary < 0:
            raise forms.ValidationError("Salary can not be less than zero.")
        return salary

    def clean_birthday(self):
        """Require a birthday at least 18 years in the past."""
        birthday = self.cleaned_data['birthday']
        if(birthday != ""):
            today_18 = datetime.date ( year=datetime.datetime.now ( ).year - 18 , month=datetime.datetime.now ( ).month ,
                                       day=datetime.datetime.now ( ).day )
            if not birthday <= today_18:
                raise forms.ValidationError("Customer must be 18 years or older.")
            return birthday
        else:
            raise forms.ValidationError("Birthday is empty.")

    class Meta:
        model = Customer
        fields = "__all__"
class CustomerCarForm(forms.ModelForm):
    """Read-only customer form used when attaching a car to a customer.

    Fix: `id` was declared twice; the second declaration silently replaced
    the first, discarding its 'required' error message. The widget and the
    error message are now merged into one declaration.
    """

    def __init__(self , *args , **kwargs):
        super ( CustomerCarForm , self ).__init__ ( *args , **kwargs )

    id = forms.IntegerField ( widget=forms.TextInput ( attrs={'readonly': 'readonly'} ) , error_messages={'required': 'ID is empty.'} )
    name = forms.CharField ( widget=forms.TextInput ( attrs={'readonly': 'readonly'} ) )

    class Meta:
        model = Customer
        fields = "__all__"
class CarForm(forms.ModelForm):
    """Car ModelForm; plate and year formats are validated with regex fields."""
    model = forms.CharField(error_messages={'required': 'Model is empty.'})
    # Plate format: three uppercase letters followed by four digits (AAA1111).
    plate = forms.RegexField (initial="", regex=r'^[A-Z]{3}\d{4}$' , max_length=7, error_messages={'invalid': 'Invalid plate. (EX: AAA1111)', 'required': 'Plate is empty.'})
    # Year: exactly four digits.
    yearCar = forms.RegexField (initial="", regex=r'^\d{4}$' , min_length=4 , max_length=4, error_messages={'invalid': "Invalid car's year.", 'required': 'Year is empty.'} )
    marketVal = forms.DecimalField(initial="", max_digits=12 , decimal_places=2, error_messages={'required': 'Market Value is empty.', 'invalid':'Invalid Market value.'})
    imageCar = forms.ImageField(error_messages={'required': 'Car image is empty.'})
    description = forms.CharField(error_messages={'required': 'Description is empty.'})

    class Meta:
        model = Car
        fields = "__all__"
import json
from tqdm import tqdm
from collections import Counter
import operator
import decimal
import sklearn.model_selection
from nltk.tokenize import RegexpTokenizer
from nltk.stem.snowball import SnowballStemmer
import numpy as np
if __name__ == '__main__':
    # Train and evaluate a hand-rolled naive-Bayes-style text classifier on a
    # Wikipedia dump, sweeping the smoothing parameter alpha.
    file_data_set = {}
    data_set = []
    classes = []
    tokenizer = RegexpTokenizer(r'\w+')
    snowball = SnowballStemmer(language='english')

    with open("../../wiki_dump.json", "r", encoding="utf8") as my_file_read:
        file_data_set = json.load(my_file_read)
        # NOTE(review): close() is redundant inside the `with` block.
        my_file_read.close()

    """ Fill classes. """
    # NOTE(review): the loop variable shadows the builtin `dict`.
    for dict in file_data_set:
        classes.append(dict["category"])

    # Build (label, stemmed tokens) pairs from every article.
    for dict in tqdm(file_data_set): # dict includes (category:"", subcategories:[])
        for subcat in dict['subcategories']: # subcat is dictionary (name:"", articles:[])
            for article in subcat['articles']: # article is string
                label = dict["category"]
                tokenized_msg = tokenizer.tokenize(article)
                stemmed_msg = [snowball.stem(token) for token in tokenized_msg]
                data_set.append((label, stemmed_msg))

    train_set, test_set = sklearn.model_selection.train_test_split(data_set, train_size=0.8)
    print(f'\nTrain len: {len(train_set)}. Test len: {len(test_set)} \n')

    """ Prepare train dictionaries containing counted words. """
    train_dicts = []
    for _ in range(len(classes)):
        train_dicts.append(Counter([]))

    """ Prepare counter dictionary for every word which occurred in class. """
    train_class_cnts = Counter([])

    """ Train naive Bayes qualifier dictionaries. """
    # Per-class word counts and total word count per class.
    for i in tqdm(range(len(train_set))):
        for j in range(len(classes)):
            if train_set[i][0] == classes[j]:
                for k in range(len(train_set[i][1])):
                    train_class_cnts[train_set[i][0]] += 1
                    train_dicts[j][train_set[i][1][k]] += 1
                break

    """ Show size categories percentage. """
    # print("\n\n### TRAINING ###")
    # for i in range(len(classes)):
    #     class_size_perc = (train_class_cnts[classes[i]] / sum(train_class_cnts.values())) * 100
    #     print("{:.3f}% of messages was {}.".format(class_size_perc, classes[i].upper()))

    """ ------------------------------------------------------------------------------------------------ """

    # Sweep small smoothing values and record classification accuracy for each.
    alpha_values = [x / 100000.0 for x in range(1, 100, 1)]
    correctness = []
    for alpha in alpha_values:
        print("Alpha value: " + str(alpha))
        """ Prepare qualifier variables. """
        #alpha = 0.1
        test_class_cnts = Counter([])
        corr_ans = 0
        all_words = 0
        for i in range(len(classes)):
            all_words += sum(train_dicts[i].values())

        """ Qualify test messages. """
        # Score each class by summed word counts with additive smoothing and
        # pick the argmax; count correct predictions.
        for i in range(len(test_set)):
            prob_vector = np.zeros(len(classes))
            for j in range(len(classes)):
                for word in test_set[i][1]:
                    prob_vector[j] += train_dicts[j][word]
                words_number_class = sum(train_dicts[j].values())
                prob_vector[j] += alpha
                prob_vector[j] = prob_vector[j] / (words_number_class + (all_words * alpha))
            max_class_index, prob = max(enumerate(prob_vector), key=operator.itemgetter(1))
            if test_set[i][0] == classes[max_class_index]:
                corr_ans += 1
            test_class_cnts[test_set[i][0]] += 1

        # """ Show size categories percentage. """
        # print("\n### TEST ###")
        # for i in range(len(classes)):
        #     class_size_perc = (test_class_cnts[classes[i]] / sum(test_class_cnts.values())) * 100
        #     print("{:.3f}% of messages was {}.".format(class_size_perc, classes[i].upper()))
        #
        # print("\n### CORRECTNESS ###")
        # print("{:.3f}% of messages was qualified correctly.".format((corr_ans / len(test_set)) * 100))
        correctness.append((corr_ans / len(test_set)) * 100)

    # Find the best alpha.
    max_corr_index, corr = max(enumerate(correctness), key=operator.itemgetter(1))
    print("Maximum correctness: " + str(corr) +"% for alpha: " + str(alpha_values[max_corr_index]))
|
def soma_elementos(lista):
    """Return the sum of the elements of *lista* (0 for an empty list)."""
    # The builtin sum() replaces the manual accumulator loop.
    return sum(lista)
import logging
from .logger import get_handler, get_logger, \
LOG_NOSET, LOG_DEBUG, LOG_INFO, \
LOG_ERROR, LOG_WARNING, LOG_CRITICAL, \
set_name, set_level, add_console_handler, \
add_loghub_handler, debug, info, warning, \
error, critical, oss, init_logger
logging.getLogger = get_logger
|
#! usr/bin/env python
from ryu.ofproto.ether import ETH_TYPE_IP, ETH_TYPE_ARP,ETH_TYPE_LLDP,ETH_TYPE_MPLS,ETH_TYPE_IPV6
from ryu.ofproto.inet import IPPROTO_ICMP, IPPROTO_TCP, IPPROTO_UDP,IPPROTO_SCTP
from flow_addition import FlowAdd
import logging
class Construct():
    """
    Constructs an OFPMatch from the supplied fields and installs a flow for it.

    The default value of all parameters is the don't-care match-all wildcard.
    If no parameters are given, the returned match matches everything.
    Only a subset of the accepted keyword arguments is actually consumed when
    building the match; the rest are accepted for interface completeness.
    """
    def __init__(self):
        logging.info("Rule will be constructed")

    def add_flow(self,datapath,actions, priority = 1000 ,in_port=None, in_phy_port=None, metadata=None, eth_dst=None, eth_src=None, eth_type=None,
                vlan_vid=None, vlan_pcp=None, ip_dscp=None, ip_ecn=None, ip_proto=None, ipv4_src=None, ipv4_dst=None,
                tcp_src=None, tcp_dst=None, udp_src=None, udp_dst=None, sctp_src=None, sctp_dst=None, icmpv4_type=None,
                icmpv4_code=None, arp_op=None, arp_spa=None, arp_tpa=None, arp_sha=None, arp_tha=None,
                ipv6_src=None, ipv6_dst=None, ipv6_flabel=None, icmpv6_type=None, icmpv6_code=None,
                ipv6_nd_target=None, ipv6_nd_sll=None, ipv6_nd_tll=None, mpls_label=None, mpls_tc=None, mpls_bos=None,
                pbb_isid=None, tunnel_id=None, ipv6_exthdr=None):
        """Build an OFPMatch for the given fields and install the flow.

        `datapath` and `actions` are mandatory; `eth_type` selects which
        branch of match construction runs (IP additionally needs `ip_proto`).
        """
        assert (datapath is not None),"Datapath Object is Not set. "
        assert (actions is not None),"Actions Object is Not set. "
        parser = datapath.ofproto_parser
        """ please check for actions that where it fits and what is its advantage """
        matchflow = FlowAdd()
        # Fallback: match-all wildcard when no recognised eth_type is given.
        match = parser.OFPMatch()
        if (eth_type is not None):
            if (eth_type == ETH_TYPE_IP):
                if (ip_proto is not None):
                    # For ICMP flow rules.
                    if (ip_proto == IPPROTO_ICMP):
                        match = parser.OFPMatch(in_port = in_port, eth_type = eth_type, ip_proto= ip_proto,
                                icmpv4_type = icmpv4_type, ipv4_src = ipv4_src, ipv4_dst = ipv4_dst)
                    elif(ip_proto == IPPROTO_TCP):
                        match = parser.OFPMatch(in_port = in_port, eth_type = eth_type, ip_proto= ip_proto,
                                ipv4_src = ipv4_src, ipv4_dst = ipv4_dst,
                                tcp_src = tcp_src, tcp_dst = tcp_dst)
                    elif(ip_proto == IPPROTO_UDP):
                        match = parser.OFPMatch(in_port = in_port, eth_type = eth_type, ip_proto= ip_proto,
                                ipv4_src = ipv4_src, ipv4_dst = ipv4_dst,
                                udp_src = udp_src, udp_dst = udp_dst)
                    elif (ip_proto == IPPROTO_SCTP):
                        match = parser.OFPMatch(in_port = in_port, eth_type = eth_type,
                                eth_src=eth_src, eth_dst=eth_dst,
                                ip_proto= ip_proto)
                    else:
                        # default case
                        logging.info("Please check OFPMatch--> ip_proto parameter in order to continue.")
                else:
                    logging.info("Please set OFPMatch--> ip_proto parameter in order to continue.")
            elif (eth_type == ETH_TYPE_ARP):
                match = parser.OFPMatch(in_port = in_port, eth_type = eth_type,eth_src=eth_src,eth_dst=eth_dst)
            elif (eth_type == ETH_TYPE_LLDP):
                match = parser.OFPMatch(eth_type = eth_type)
            elif (eth_type == ETH_TYPE_IPV6):
                match = parser.OFPMatch(in_port = in_port, eth_type = eth_type,eth_src=eth_src,eth_dst=eth_dst)
            elif (eth_type == ETH_TYPE_MPLS):
                match = parser.OFPMatch(in_port = in_port, eth_type = eth_type,eth_src=eth_src,eth_dst=eth_dst)
            else:
                logging.info("Please set OFPMatch--> eth_type parameter in order to continue.")
        #Finally, add this match to flow table entry.
        # NOTE(review): `match` is never None (the wildcard is the fallback),
        # so the else branch below is unreachable as written - confirm intent.
        if match is not None:
            matchflow.add_flow(datapath, priority, match, actions)
        else:
            logging.info("Sorry, no matching rule found or added.")
|
#!/usr/bin/python
"""Generate random test-case files keys<i>.in for the "keys" problem.

Each file holds a header line "n kk x0", a line of n values, and a line of
kk values. Modernized from Python 2: print()/open(), floor division kept
via // so the midpoint matches the original integer-division result, and
the unused `b = choice([0, 2])` leftover was removed.
"""
from random import randint

prob = "keys"
# (n, k) bounds per case: kk is drawn uniformly from [n, k].
cases = [
    (100, 200),
    (100, 200),
    (100, 200),
    (100, 200),
    (5000, 10000),
    (5000, 10000),
    (5000, 10000),
    (5000, 10000),
    (5000, 10000),
    (5000, 10000),
    (5000, 10000),
]

for cur, (n, k) in enumerate(cases):
    print("make %d..." % cur)
    # Generate the data: values jitter around the midpoint of [1, 10^9].
    kk = randint(n, k)
    mid = (1 + 10**9) // 2  # // keeps the Python 2 integer-division value
    cout = "%d %d %d\n" % (n, kk, mid + randint(-3 * n, 3 * n))
    cout += "".join("%d " % (mid + randint(-3 * n, 3 * n)) for _ in range(n))
    cout += "\n"
    cout += "".join("%d " % randint(1, 10**9) for _ in range(kk))
    cout += "\n"
    # Write the case file; `with` guarantees the handle is closed.
    with open(prob + str(cur) + ".in", "w") as f:
        f.write(cout)
|
#
# Learning Python and Data Science by Doing (Saengneung Publishing, 2020)
# LAB 5-5: computing a factorial with a loop, p. 126
#
n = int(input("정수를 입력하시오: "))
fact = 1
# 1 is the multiplicative identity, so the running product can start at 2.
for factor in range(2, n + 1):
    fact *= factor
print(n, "!은", fact, "이다.")
import tkinter as tk
import shapes

# Draw every shape once, in the same order as before, then hand control
# to the Tk event loop so the window stays open.
for draw in (
    shapes.circle,
    shapes.triangle,
    shapes.hexagon,
    shapes.octagon,
    shapes.pentagon,
    shapes.square,
    shapes.star,
):
    draw()
tk.mainloop()
|
def bsearch(l, x):
    """Return the index of *x* in sorted list *l* using binary search.

    Returns -1 if the element cannot be found.

    The original sliced the list on every step (`l = l[mid+1:]`), which
    copies O(n) elements per iteration — O(n log n) time and extra memory.
    This version tracks [lo, hi) bounds instead, probing the exact same
    midpoints, for O(log n) time and O(1) extra space.
    """
    lo, hi = 0, len(l)
    while lo < hi:
        # Same midpoint the slicing version used: lo + len(slice) // 2.
        mid = lo + (hi - lo) // 2
        pivot = l[mid]
        if pivot == x:
            return mid
        elif pivot < x:
            lo = mid + 1
        else:
            hi = mid
    return -1
t1 = [1, 2, 3, 4, 5, 6, 7]
# (target, expected index) pairs: a miss, both ends, and interior hits.
for target, expected in ((0, -1), (1, 0), (3, 2), (4, 3), (5, 4), (7, 6)):
    assert bsearch(t1, target) == expected
t2 = [2, 3]
assert bsearch(t2, 0) == -1
print('All tests pass!')
|
from utils.startToTree import StartToTree
from taskUtils import setupTableEnv
from taskUtils import setupTableEnv
import nodes.meta
import nodes.robot
from tableNodes.item2bin import Item2Bin
from taskUtils import tableManip
import numpy
import random
import math
import rospkg, roslib
objects_path = rospkg.RosPack().get_path('modular_action_planning') + '/ordata/objects/'
def add_arguments(parser):
    """Hook for task-specific CLI arguments; this task registers none."""
    pass
def init_env(common):
    """Set up the execution environment: table, bin, robot pose and glasses.

    Expects ``common`` to provide 'execEnv' and 'execRobot'. When running on
    real HERB with detection enabled, object poses come from the perception
    pipeline; otherwise glasses are placed at hard-coded table coordinates.

    Fix: removed a leftover ``import IPython; IPython.embed()`` debugging
    breakpoint that blocked execution after placing the glasses.
    """
    env = common['execEnv']
    robot = common['execRobot']
    table = setupTableEnv.add_table(env, robot)
    table.SetName('table')
    setupTableEnv.set_robot_pose(env, robot, table)
    # TIM's workspace needs the bin shifted along y; other robots use defaults.
    if robot.GetName() != 'TIM':
        bin = setupTableEnv.add_bin(env)
    else:
        bin = setupTableEnv.add_bin(env, y=1.3)
    #tableManip.reset_env(env, robot, N=2, includePlate=False, includeBowls=False)
    if robot.GetName() != 'TIM':
        robot.head.SetStiffness(1)
        robot.head.MoveTo([math.pi/16, -math.pi/16])
    detect = False  # flip to True to use the perception pipeline on real HERB
    if robot.GetName() == 'HERB' and not robot.head.simulated and detect:
        import percy.kinbody as kb
        detector = kb.KinBodyDetector(env, '', 'object_poses_array', objects_path, '/herb_base', '/kinect2_rgb')
        detector.update()
    else:
        #setupTableEnv.place_bowl_on_table(env, table, .7, .8)
        setupTableEnv.place_glass_on_table(env, table, .7, .8)
        setupTableEnv.place_glass_on_table(env, table, .6, .7)
        setupTableEnv.place_glass_on_table(env, table, .65, .55)
        setupTableEnv.place_glass_on_table(env, table, .75, .6)
        snap_to_table(env, table, [ obj for obj in env.GetBodies() if obj.GetName().startswith('tag_glass') ])
    if robot.GetName() == 'HERB':
        robot.left_arm.SetStiffness(1)
def snap_to_table(env, table, objects):
    """Teleport each object so it stands upright just above the table surface.

    Bowls get a slightly larger clearance (.03) than other objects (.01).
    """
    with env:
        aabb = table.ComputeAABB()
        # Top of the table = AABB center z plus its half-extent.
        surface_z = aabb.pos()[2] + aabb.extents()[2]
        for body in objects:
            pose = body.GetTransform()
            pose[0:3, 0:3] = numpy.eye(3)  # clear any rotation: stand upright
            clearance = .03 if 'bowl' in body.GetName() else .01
            pose[2, 3] = surface_z + clearance
            body.SetTransform(pose)
def reset_env(env, robot):
    """Placeholder: environment reset is currently a no-op (see commented call)."""
    pass
    #tableManip.reset_env(env, robot)
def get_plan(common):
    """Build a NormalizedSeq plan that moves every movable tabletop item to the bin."""
    arm = common['robot'].left_arm
    env = common['env']
    # Count bodies whose name marks them as movable; str.startswith accepts a
    # tuple, and the four prefixes are mutually exclusive per body name.
    movable_prefixes = ('glass', 'tag_glass', 'bowl', 'plate')
    num_to_move = sum(
        1 for body in env.GetBodies()
        if body.GetName().startswith(movable_prefixes)
    )
    # One fresh Item2Bin subtask per item to relocate.
    subnodes = [
        nodes.meta.Exec(Item2Bin(arm, **common))
        for _ in xrange(num_to_move)
    ]
    return nodes.meta.NormalizedSeq(subnodes)
|
"""Fetch the pomber COVID-19 time series and publish one Avro record per
(country, day) to the Apache Pulsar topic 'covid19'.

Fixes: removed a duplicate ``import json``; corrected a stale comment that
claimed records go to a "covid19US" topic (the producer uses 'covid19').
"""
import json
import datetime
import urllib.request
import pulsar
from pulsar.schema import *


class Covid19(Record):
    # Avro schema for one daily per-country record.
    date = String()
    confirmed = Integer()
    deaths = Integer()
    recovered = Integer()
    country = String()


# Download the aggregated JHU data set (one dict keyed by country name).
with urllib.request.urlopen("https://pomber.github.io/covid19/timeseries.json") as url:
    covid = json.loads(url.read().decode())

client = pulsar.Client('pulsar://pulsar:6650')
producer = client.create_producer(topic='covid19', schema=AvroSchema(Covid19))

# Parse the covid19 data and publish one message per (country, day).
count = 0
for key in covid:
    print("Processing all entries for -> " + key)
    for i in covid[key]:
        # Normalise dates so merged data streams agree on the format,
        # i.e. "date":"2020-01-02" != "date":"2020-1-2"
        record_date = datetime.datetime.strptime(i['date'], '%Y-%m-%d')
        record_date = record_date.strftime('%Y-%m-%d')
        i['date'] = record_date
        i['country'] = key
        record = Covid19(date=i['date'], country=i['country'], confirmed=i['confirmed'], deaths=i['deaths'], recovered=i['recovered'])
        # Partition by date so all countries for one day land together.
        producer.send(partition_key=record_date, content=record)
        count += 1

print(str(count) + " Covid19 Records Loaded into Apache Pulsar!")
client.close()
"""
课程表
链接:https://leetcode-cn.com/problems/course-schedule
你这个学期必须选修 numCourses 门课程,记为 0 到 numCourses - 1 。
在选修某些课程之前需要一些先修课程。
先修课程按数组 prerequisites 给出,其中 prerequisites[i] = [ai, bi] ,
表示如果要学习课程 ai 则必须先学习课程 bi 。
例如,先修课程对 [0, 1] 表示:想要学习课程 0 ,你需要先完成课程 1 。
请你判断是否可能完成所有课程的学习?如果可以,返回 true ;否则,返回 false 。
示例 1:
输入:numCourses = 2, prerequisites = [[1,0]]
输出:true
解释:总共有 2 门课程。学习课程 1 之前,你需要完成课程 0 。这是可能的。
示例 2:
输入:numCourses = 2, prerequisites = [[1,0],[0,1]]
输出:false
解释:总共有 2 门课程。学习课程 1 之前,你需要先完成课程 0 ;并且学习课程 0 之前,你还应先完成课程 1 。这是不可能的。
提示:
1 <= numCourses <= 105
0 <= prerequisites.length <= 5000
prerequisites[i].length == 2
0 <= ai, bi < numCourses
prerequisites[i] 中的所有课程对互不相同。
解法:
1. 拓扑排序 - DFS。
通过 DFS 判断图中是否有环。
算法:
1)使用标记列表 flags,用于记录和判断每个节点 i 的状态:
1.1)未被 DFS 访问的节点 flags[i] = 0;
1.2)已被其他节点的 DFS 访问过 flags[i] = -1;
1.3)已被当前节点的 DFS 访问过 flags[i] = 1,存在环。
2)对给定的节点列表依次执行 DFS,判断每个节点 DFS 是否存在环,存在就立即返回 False。
2.1)DFS 流程:
2.1.1)终止条件:
- 当 flags[i] = -1 时,说明当前节点已被其他节点访问过,无需重复搜索,返回 True。
- 当 flags[i] = 1 时,说明本轮 DFS 中节点 i 被第 2 次访问,即存在环,直接返回 False。
2.1.2)将当前访问节点 i 对应的 flags[i] 置 1,即标记其本轮被访问过。
2.1.3)递归访问当前节点的所有邻接节点 j,发现环时直接返回 False。
2.1.4)当前节点的所有邻接节点都被遍历过了,且没有发现环,则将当前节点 flag 置为 -1,并返回 True。
3)整个图遍历完了也没发现环,返回 True。
时间复杂度:O(N+M),遍历图需要访问所有节点和邻边,N 是节点数量,M 是邻边数量。
空间复杂度:O(N+M),建立邻接表需要的空间,adjacency 长为 N,存储了 M 条邻边的数据。
2. 入度表 - BFS。
算法:
1)统计每个课程的入度,生成入度表(indegress)。
2)使用队列(queue),将入度为 0 的节点入队。
3)当队列(queue)非空时,依次将队首节点出队,并在课程安排图中删除该节点(prev):
3.1)并不是真的删除,而是通过将此节点(prev)的所有邻接节点(curr)的入度 - 1,即 indegress[curr] -= 1。
3.2)当入度 - 1 后,邻接节点的入度若为 0,则说明 curr 的所有的前驱节点已被 “删除”,此时将 curr 入队。
4)在每次 prev 出队时,使 numCourses - 1,
- 若整个课程安排表是有向无环图,则所有节点一定都入队和出队过,即完成拓扑排序过。
换句话说,若图中存在环,一定有节点的入度始终不为 0。
- 因此,拓扑排序出队次数等于课程个数,返回 numCourses == 0 判断是否存在环。
时间复杂度:O(N+M),遍历一个图需要访问所有节点和所有邻边,N 和 M 分别为节点数量和邻边数量。
空间复杂度:O(N+M),为建立邻接表所需额外空间,adjacency 长度为 N ,并存储 M 条邻边的数据。
"""
import unittest
from collections import deque
from typing import List
class Solution:
    """Course-schedule feasibility (LeetCode 207): is the prerequisite graph acyclic?"""

    def canFinish(self, numCourses: int, prerequisites: List[List[int]]) -> bool:
        """Return True when all courses can be completed (DFS cycle detection).

        Time O(N + M), space O(N + M) for N courses and M prerequisite edges.
        """
        # Adjacency list: index = prerequisite course, values = courses it unlocks.
        adjacency: List[List[int]] = [[] for _ in range(numCourses)]
        for curr, prev in prerequisites:
            adjacency[prev].append(curr)
        # Per-course flag:
        #   0  -> never visited
        #  -1  -> fully explored by an earlier DFS (known cycle-free)
        #   1  -> on the current DFS path (seeing it again means a cycle)
        flags: List[int] = [0] * numCourses
        # Run DFS from every course; any detected cycle makes the plan infeasible.
        for i in range(numCourses):
            if not self.dfs(i, adjacency, flags):
                return False
        # No cycle anywhere in the graph.
        return True

    def dfs(self, i: int, adjacency: List[List[int]], flags: List[int]) -> bool:
        """Return False iff a cycle is reachable from course *i*."""
        if flags[i] == -1:
            # Already proven cycle-free by a previous traversal — skip.
            return True
        if flags[i] == 1:
            # Revisited on the current path: cycle found.
            return False
        flags[i] = 1  # mark as on the current DFS path
        for j in adjacency[i]:
            if not self.dfs(j, adjacency, flags):
                return False
        flags[i] = -1  # fully explored, no cycle passes through this course
        return True

    def canFinish2(self, numCourses: int, prerequisites: List[List[int]]) -> bool:
        """BFS variant (Kahn's algorithm): repeatedly remove zero in-degree nodes.

        Returns True when every course gets dequeued, i.e. the graph is acyclic.
        """
        # In-degree per course and adjacency list.
        indegress: List[int] = [0] * numCourses
        adjacency: List[List[int]] = [[] for _ in range(numCourses)]
        for curr, prev in prerequisites:
            indegress[curr] += 1
            adjacency[prev].append(curr)
        # Seed the queue with every course that has no prerequisites.
        queue = deque()
        for i in range(len(indegress)):
            if indegress[i] == 0:
                queue.append(i)
        while queue:
            # BUG FIX: was `deque.popleft()` — a call on the *class* (TypeError
            # at runtime); pop from the `queue` instance instead.
            prev = queue.popleft()
            # "Delete" prev from the graph by decrementing successors' in-degree.
            for curr in adjacency[prev]:
                indegress[curr] -= 1
                # No remaining prerequisites: curr becomes schedulable.
                if indegress[curr] == 0:
                    queue.append(curr)
            # One course scheduled per dequeue.
            numCourses -= 1
        # In a cycle, some node never reaches in-degree 0, leaving the count > 0.
        return numCourses == 0
class TestSolution(unittest.TestCase):
    """Unit tests for the DFS-based canFinish."""

    def setUp(self) -> None:
        self.s = Solution()

    def test_canFinish(self) -> None:
        # Acyclic prerequisite graphs: all courses can be finished.
        solvable = (
            (2, [[1, 0]]),
            (3, [[1, 0], [2, 0]]),
            (4, [[3, 2], [2, 1], [1, 0]]),
        )
        for num_courses, prereqs in solvable:
            self.assertTrue(self.s.canFinish(num_courses, prereqs))
        # Mutual prerequisites form a cycle: impossible.
        self.assertFalse(self.s.canFinish(2, [[1, 0], [0, 1]]))
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
    unittest.main()
|
from decimal import Decimal, getcontext

# Binary floats accumulate representation error; Decimal('0.1') is exact.
float_residue = 0.1 + 0.1 + 0.1 - 0.3
print('0.1 + 0.1 + 0.1 - 0.3 =', float_residue)
decimal_residue = Decimal('0.1') + Decimal('0.1') + Decimal('0.1') - Decimal('0.3')
print('Decimal(0.1) + Decimal(0.1) + Decimal(0.1) - Decimal(0.3) =', decimal_residue)
# Division honours the context precision (default: 28 significant digits).
print('Decimal(1) / Decimal(7) =', Decimal(1) / Decimal(7))
getcontext().prec = 4  # shrink working precision to 4 significant digits
print('Decimal(1) / Decimal(7) =', Decimal(1) / Decimal(7))
|
"""Base case, provide cluster specific assertion and cluster
facilities to make test easy to read.
"""
from . import cluster
from docker import errors
class ClusterTestCase:
    """Base case: cluster-specific assertion helpers that keep tests readable.

    Fixes: removed a stray ``pass`` after an assignment in an ``except``
    clause, and added an explicit assertion so a missing-but-expected
    container fails with a clear message instead of an AttributeError on
    ``None``.
    """

    def __init__(self):
        # One shared cluster handle per test-case instance.
        self.cluster = cluster.Cluster()

    def assert_key_exists(self, key):
        """Make sure a key exists in the consul k/v store"""
        assert key in self.cluster.consul.kv

    def assert_volume_exists_only_on(self, volume, node_name, kind='local'):
        """Assert *volume* exists (with driver *kind*) on *node_name* and nowhere else."""
        for name, node in self.cluster.nodes.items():
            volumes = node['docker_cli'].volumes.list(
                filters=dict(name=volume)
            )
            if node_name == name:
                assert len(volumes) == 1, \
                    "We expect 1 volume named {} on node {}, " \
                    "found {} volumes {}".format(
                        volume, node_name, len(volumes),
                        [v.name for v in volumes]
                    )
                assert volumes[0].attrs['Driver'] == kind,\
                    "Volume {} on node {} use {} driver, {} was " \
                    "expected".format(
                        volume, node_name, volumes[0].attrs['Driver'], kind
                    )
            else:
                assert len(volumes) == 0, \
                    "We expect 0 volume called {} on node {}, " \
                    "found {} volumes {}".format(
                        volume, node_name, len(volumes),
                        [v.name for v in volumes]
                    )

    def assert_consul_service_on_node(self, service_id, node):
        """Assert the consul catalog places *service_id* on *node*."""
        assert self.cluster.consul.catalog.service(
            service_id
        )[0]['Node'] == node

    def assert_btrfs_scheduled(self, kind, volume, nodes):
        """Assert btrfs scheduled are present on given nodes and absent on
        others"""
        def filter_scheduled(scheduled, start, end):
            # Keep only schedule lines for this action kind and volume name.
            return [
                s for s in scheduled if (
                    s.startswith(start) and s.endswith(end)
                )
            ]
        for name, node in self.cluster.nodes.items():
            container = node['docker_cli'].containers.get(
                'buttervolume_plugin_1'
            )
            scheduled = filter_scheduled(
                container.exec_run(
                    'buttervolume scheduled'
                ).output.decode('utf-8').split('\n'),
                kind,
                volume
            )
            if name in nodes:
                assert len(scheduled) == 1, \
                    "We expected 1 schedul {} on node {} for {} volume, " \
                    "but {} were found.".format(
                        kind, name, volume, len(scheduled)
                    )
            else:
                assert len(scheduled) == 0, \
                    "We expected 0 schedul {} on node {} for {} volume, " \
                    "but {} were found.".format(
                        kind, name, volume, len(scheduled)
                    )

    def assert_container_running_on(self, containers, nodes):
        """Assert each named container is running on every node in *nodes* and
        absent from all other nodes."""
        for name, node in self.cluster.nodes.items():
            for container_name in containers:
                try:
                    container = node['docker_cli'].containers.get(
                        container_name
                    )
                except errors.NotFound:
                    container = None
                if name in nodes:
                    # Fail with a clear message instead of AttributeError on
                    # None when the container is missing on an expected node.
                    assert container is not None, \
                        "Container {} not found on node {}".format(
                            container_name, name
                        )
                    assert container.status == 'running'
                else:
                    assert container is None
|
#!/usr/bin/env python3
# Filename: teireader.py
"""
# Script for reading selected text from TEI files.
"""
import re
import os
import glob
from lxml import etree
def teireader(inpath):
    """Script for reading selected text from TEI files.

    For every file matching the glob *inpath*, parses the TEI XML, extracts
    text via one of the XPath selectors below (currently the full body text),
    lightly cleans it, and writes it to ./txt/<basename>.txt.
    Assumes the ./txt output directory already exists — TODO confirm.
    """
    for file in glob.glob(inpath):
        with open(file, "r") as infile:
            # NOTE(review): `infile` is never read — the file is re-parsed
            # below by etree.parse(file); the open() only scopes the block.
            # Output basename drops the 4-char extension; idno is its prefix.
            filename = os.path.basename(file)[:-4]
            idno = filename[:5]
            print(idno)
            xml = etree.parse(file)
            namespaces = {'tei':'http://www.tei-c.org/ns/1.0'}
            ### Removes tags but conserves their text content.
            #etree.strip_tags(xml, "{http://www.tei-c.org/ns/1.0}hi")
            ### Removes elements and their text content.
            #etree.strip_elements(xml, "{http://www.tei-c.org/ns/1.0}reg")
            #etree.strip_elements(xml, "{http://www.tei-c.org/ns/1.0}orig")
            #etree.strip_elements(xml, "{http://www.tei-c.org/ns/1.0}note")
            #etree.strip_elements(xml, "{http://www.tei-c.org/ns/1.0}l")
            #etree.strip_elements(xml, "{http://www.tei-c.org/ns/1.0}p")
            #etree.strip_elements(xml, "{http://www.tei-c.org/ns/1.0}head")
            #etree.strip_elements(xml, "{http://www.tei-c.org/ns/1.0}stage")
            #etree.strip_elements(xml, "{http://www.tei-c.org/ns/1.0}speaker")
            ### XPath defining which text to select (alternatives kept for reuse)
            xp_bodyprose = "//tei:body//tei:p//text()"
            xp_bodyverse = "//tei:body//tei:l//text()"
            xp_bodytext = "//tei:body//text()"
            xp_alltext = "//text()"
            xp_castlist = "//tei:castList//text()"
            xp_stage = "//tei:stage//text()"
            xp_hi = "//tei:body//tei:hi//text()"
            ### Applying one of the above XPaths
            text = xml.xpath(xp_bodytext, namespaces=namespaces)
            text = "\n".join(text)
            ### Some cleaning up
            # NOTE(review): this substitution deletes every occurrence of the
            # space-like character in the pattern — presumably intended for
            # non-breaking spaces (U+00A0), not ASCII spaces; confirm the
            # pattern's actual byte before relying on this.
            text = re.sub(" ", "", text)
            #text = re.sub(" ", "", text)
            #text = re.sub("\n{1,6}", "", text)
            # Collapse runs of newlines; applied twice so runs longer than 6
            # (left partially collapsed by the first pass) shrink further.
            text = re.sub("\n{1,6}", "\n", text)
            text = re.sub("\n{1,6}", "\n", text)
            outtext = str(text)
            outfile = "./txt/" + filename + ".txt"
            with open(outfile,"w") as output:
                output.write(outtext)
def main(inpath):
    """Entry point: run teireader over every TEI file matching *inpath*."""
    teireader(inpath)
# Kicks off processing of the default TEI folder on import/execution.
main("./tei/*.xml")
|
import os
import yaml
def load_config(configpath):
    """Load a YAML experiment config and prepare its output directories.

    The experiment id is the config file's basename without extension. It is
    stored under cfg['experiment_id'], and for each configured output root
    (model_dir, log_dir, vis_dir) a per-experiment subdirectory is created
    if missing. Note: the joined per-experiment paths are NOT written back
    into cfg['output'] — callers still see the base paths, exactly as in
    the original code.

    Args:
        configpath: path to a YAML file containing an 'output' mapping.

    Returns:
        The parsed config dict, augmented with 'experiment_id'.
    """
    with open(configpath) as f:
        cfg = yaml.safe_load(f)
    experiment_id = os.path.splitext(os.path.basename(configpath))[0]
    cfg['experiment_id'] = experiment_id
    # One loop instead of three copy-pasted blocks, in the original order.
    for key in ('model_dir', 'log_dir', 'vis_dir'):
        base_dir = cfg['output'][key]
        if base_dir:
            # exist_ok avoids the check-then-create race of the original
            # os.path.exists() guard.
            os.makedirs(os.path.join(base_dir, experiment_id), exist_ok=True)
    return cfg
# Generated by Django 2.1.7 on 2019-06-25 18:11
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: widens AnnualLeave.year choices to 2017-2028.
    # NOTE: generated migrations should normally not be edited by hand.

    dependencies = [("small_small_hr", "0006_auto_20181209_0108")]

    operations = [
        migrations.AlterField(
            model_name="annualleave",
            name="year",
            field=models.PositiveIntegerField(
                # (value, label) pairs; the label equals the stored year.
                choices=[
                    (2017, 2017),
                    (2018, 2018),
                    (2019, 2019),
                    (2020, 2020),
                    (2021, 2021),
                    (2022, 2022),
                    (2023, 2023),
                    (2024, 2024),
                    (2025, 2025),
                    (2026, 2026),
                    (2027, 2027),
                    (2028, 2028),
                ],
                db_index=True,
                default=2017,
                verbose_name="Year",
            ),
        )
    ]
|
from event_testing.resolver import SingleActorAndObjectResolver, PhotoResolver
from event_testing.test_events import TestEvent, cached_test
from event_testing.tests import TunableTestSet
from interactions import ParticipantType
from sims4.tuning.tunable import HasTunableSingletonFactory, AutoFactoryInit
import event_testing
class TookPhotoTest(HasTunableSingletonFactory, AutoFactoryInit, event_testing.test_base.BaseTest):
    """Event test that fires when a photo is taken and runs a tuned test set
    against the photographer, the photo object, and the photo targets."""
    # Only PhotoTaken events trigger this test.
    test_events = (TestEvent.PhotoTaken,)
    # Tells the event system to forward raw event data into __call__.
    USES_EVENT_DATA = True
    FACTORY_TUNABLES = {'tests': TunableTestSet(description='\n A set of tests that are run with the photographer as the actor,\n and the photograph as the object and PhotographyTargets as the\n subjects.\n ')}

    def get_expected_args(self):
        """Map test arguments: the actor, plus photo object/targets pulled
        from the event data payload."""
        return {'subject': ParticipantType.Actor, 'photo_object': event_testing.test_constants.FROM_EVENT_DATA, 'photo_targets': event_testing.test_constants.FROM_EVENT_DATA}

    @cached_test
    def __call__(self, photo_object=None, subject=None, photo_targets=None):
        """Run the tuned test set against a resolver built from the photo event."""
        resolver = PhotoResolver(subject, photo_object, photo_targets, source=self)
        return self.tests.run_tests(resolver)
|
from django.db import models
from customuser.models import Customuser
# Create your models here.
#name,gender,email,phno,birthdate,specilized_in,hospitalname,hospitaladdress,hospitalstate,hospitaldistrict,shift,address,city,state,pincode,country,photo,username,password
class Doctor(models.Model):
    """Doctor profile tied to a site user, with hospital and shift details."""
    # Owning account; deleting the user cascades to this profile.
    user=models.ForeignKey(Customuser,on_delete=models.CASCADE)
    Gender_choices=(('Male','Male'),
                    ('Female','Female'),
                    )
    Shift_choices=(('Morning(7am-1pm)','Morning(7am-1pm)'),
                   ('Afternoon(1pm to 7pm)','Afternoon(1pm to 7pm)'),
                   )
    gender = models.CharField(max_length=10,choices=Gender_choices)
    # Also serves as the display name (see __str__); enforced unique.
    email = models.EmailField(unique=True)
    ph = models.CharField(max_length=100)
    # NOTE(review): only the date part is meaningful for a birthdate —
    # consider DateField (would require a migration).
    birthdate = models.DateTimeField()
    specilized_in = models.CharField(max_length=100)
    hospitalname = models.CharField(max_length=100)
    hospitaladdress = models.CharField(max_length=100)
    hospitalstate = models.CharField(max_length=100)
    hospitaldistrict = models.CharField(max_length=100)
    shift = models.CharField(max_length=100,choices=Shift_choices)
    address = models.CharField(max_length=100)
    city = models.CharField(max_length=100)
    pincode= models.IntegerField()
    state = models.CharField(max_length=100)
    country = models.CharField(max_length=100)
    photo=models.ImageField(upload_to ='uploads/')
    # Admin approval gate; newly registered doctors start unapproved.
    is_approved=models.BooleanField(default=False)
    def __str__(self):
        """Doctors are displayed by their unique email."""
        return self.email
|
from django.conf import settings
from django.conf.urls import patterns, url
# NOTE(review): `patterns()` with string view names is a pre-Django-1.8 API
# (removed in Django 1.10); this file targets an older Django release.
urlpatterns = patterns('apps.userprofile.views',
    # Only user's requests.
    url(r'^(?P<id>\d+)/$', 'user_profile',
        {'template': 'user.html', 'profile': False}, name='user_requests'),
    # Show user's full profile (private if this user is logged on,
    # otherwise public).
    url(r'^(?P<id>\d+)/profile/$', 'user_profile',
        {'template': 'user.html', 'profile': True}, name='user_profile'),
    # Update the user's profile (same view module, POST handler).
    url(r'^(?P<id>\d+)/profile/update/$', 'user_profile_update',
        {'template': 'user.html'}, name='user_profile_update'),
    # Change userpic.
    url(r'^(?P<id>\d+)/userpic/$', 'user_set_userpic',
        {'template': 'user.html'}, name='user_set_userpic'),
)
"""This module contains all the elastic search field type mapped to django"""
from elasticsearch_dsl import Object, Text, Date, DocType
from elasticsearch_dsl.connections import connections
from elasticsearch.helpers import bulk
# A collection has many items, so these collection fields are repeated on
# every item document — define the Object field once and reuse it.
collection_field = Object(
    properties={
        'collection_name': Text(),
        'collection_description': Text(),
    }
)
class CollectionDoc(DocType):
    """Collection DocType: Elasticsearch mapping for a library collection."""
    created_date = Date()
    updated_date = Date()
    collection_name = Text()
    collection_description = Text()
    community_name = Text()
    title = Text()

    class Meta:
        # All documents share the "pustakalaya" index under the
        # "collection" mapping type.
        index = "pustakalaya"
        doc_type = "collection"
def index_collection():
    """Bulk-index every Collection model instance into Elasticsearch."""
    from .models import Collection

    # Create the index and push the field mappings first.
    CollectionDoc.init()
    # Shared Elasticsearch client from the connection registry.
    es_client = connections.get_connection()
    # TODO: logging
    print("Indexing communities and collection")
    # Lazily stream one index action per collection row.
    actions = (collection.index() for collection in Collection.objects.all().iterator())
    bulk(client=es_client, actions=actions)
|
"""Codecademy-style list/dict exercises.

Fix: converted Python 2 `print` statements (SyntaxError on Python 3) to
the print() function; behavior and printed output are unchanged.
"""
animals = ["cat", "ant", "bat"]
animals.sort()
for animal in animals:
    print(animal)

# Write a for-loop that iterates over start_list and .append()s each number squared (x ** 2) to square_list.
# Then sort square_list!
start_list = [5, 3, 1, 2, 4]
square_list = []

# Your code here!
for number in start_list:
    square_list.append(number ** 2)
square_list.sort()
print(square_list)

# Key Value print
residents = {'Puffin' : 104, 'Sloth' : 105, 'Burmese Python' : 106}
print(residents['Puffin'])  # Prints Puffin's room number
__author__ = 'Dave', 'Ryan'
#!/usr/bin/python
# NOTE(review): the shebang above is not the first line of the file, so it
# has no effect when the script is invoked directly.
# Import Tkinter for GUI (Python 2 name first, Python 3 fallback)
#-----------------------------------------------------------------------------------------------------------------------
try:
    from Tkinter import *
except ImportError:
    from tkinter import *
#-----------------------------------------------------------------------------------------------------------------------
# Import library for Hue lights
#-----------------------------------------------------------------------------------------------------------------------
from phue import Bridge
#-----------------------------------------------------------------------------------------------------------------------
from rgb_cie import Converter
converter = Converter()
converter.rgbToCIE1931(255,0,0) #See https://github.com/benknight/hue-python-rgb-converter
# Import time library for creating delays in scroll function
#-----------------------------------------------------------------------------------------------------------------------
import time
#-----------------------------------------------------------------------------------------------------------------------
# Toggle variables for each button: 0 = lamp deselected, otherwise the lamp id
#-----------------------------------------------------------------------------------------------------------------------
toggle1 = 0
toggle2 = 0
toggle3 = 0
toggle4 = 0
#-----------------------------------------------------------------------------------------------------------------------
# List used to store each set of hue/sat/brightness for the scroll function
# (flat list: every three consecutive entries form one stored color)
#-----------------------------------------------------------------------------------------------------------------------
store_list = []
#-----------------------------------------------------------------------------------------------------------------------
# Connects to the Bridge and creates the main bridge object, b
# NOTE(review): bridge IP is hard-coded for one specific LAN.
#-----------------------------------------------------------------------------------------------------------------------
b=Bridge("192.168.1.159")
b.connect()
#-----------------------------------------------------------------------------------------------------------------------
# Creates the window for the GUI
#-----------------------------------------------------------------------------------------------------------------------
master = Tk(className="Hue Controller")
master.geometry("300x400+810+340")
#-----------------------------------------------------------------------------------------------------------------------
# Gets all devices connected to the bridge (this program is set up for controlling 1-3 lights)
#-----------------------------------------------------------------------------------------------------------------------
lights = b.get_light_objects('id')
#-----------------------------------------------------------------------------------------------------------------------
# Function for lamp 1 control button - Toggles whether or not the light will respond to the slider settings
#-----------------------------------------------------------------------------------------------------------------------
def lampselect1():
    """Toggle whether lamp 1 follows the sliders; selecting also powers it on."""
    global toggle1
    if toggle1 == 1:
        # Deselect: restore the button label; toggle 0 drops it from the
        # slider target set.
        b1.config(text="lamp 1")
        toggle1 = 0
        return
    if toggle1 == 0:
        # Select: the toggle stores the lamp id used by the slider callbacks.
        b1.config(text="selected")
        b.set_light(1, 'on', True)
        toggle1 = 1
#-----------------------------------------------------------------------------------------------------------------------
# Function for lamp 1 power button - Toggles the light on and off
#-----------------------------------------------------------------------------------------------------------------------
def lamp1_toggle():
    """Toggle lamp 1's power and flip the button label between on/off."""
    turning_on = b1_toggle.cget("text") == "off"
    b.set_light(1, 'on', turning_on)
    b1_toggle.config(text="on" if turning_on else "off")
#-----------------------------------------------------------------------------------------------------------------------
# Function for lamp 2 control button - Toggles whether or not the light will respond to the slider settings
#-----------------------------------------------------------------------------------------------------------------------
def lampselect2():
    """Toggle whether lamp 2 follows the sliders; selecting also powers it on."""
    global toggle2
    if toggle2 == 2:
        # Deselect: restore the button label; toggle 0 drops it from the
        # slider target set.
        b2.config(text="lamp 2")
        toggle2 = 0
        return
    if toggle2 == 0:
        # Select: the toggle stores the lamp id used by the slider callbacks.
        b2.config(text="selected")
        b.set_light(2, 'on', True)
        toggle2 = 2
#-----------------------------------------------------------------------------------------------------------------------
# Function for lamp 2 power button - Toggles the light on and off
#-----------------------------------------------------------------------------------------------------------------------
def lamp2_toggle():
    """Toggle lamp 2's power and flip the button label between on/off."""
    turning_on = b2_toggle.cget("text") == "off"
    b.set_light(2, 'on', turning_on)
    b2_toggle.config(text="on" if turning_on else "off")
#-----------------------------------------------------------------------------------------------------------------------
# Function for lamp 3 control button - Toggles whether or not the light will respond to the slider settings
#-----------------------------------------------------------------------------------------------------------------------
def lampselect3():
    """Toggle whether lamp 3 follows the sliders; selecting also powers it on."""
    global toggle3
    if toggle3 == 3:
        # Deselect: restore the button label; toggle 0 drops it from the
        # slider target set.
        b3.config(text="lamp 3")
        toggle3 = 0
        return
    if toggle3 == 0:
        # Select: the toggle stores the lamp id used by the slider callbacks.
        b3.config(text="selected")
        b.set_light(3, 'on', True)
        toggle3 = 3
#-----------------------------------------------------------------------------------------------------------------------
# Function for lamp 3 power button - Toggles the light on and off
#-----------------------------------------------------------------------------------------------------------------------
def lamp3_toggle():
    """Toggle lamp 3's power and flip the button label between on/off."""
    turning_on = b3_toggle.cget("text") == "off"
    b.set_light(3, 'on', turning_on)
    b3_toggle.config(text="on" if turning_on else "off")
#-----------------------------------------------------------------------------------------------------------------------
# Function for hue slider - Sets the hue of the selected lights to the value on the slider
#-----------------------------------------------------------------------------------------------------------------------
def hueslider(val):
    """Apply the hue slider value to every currently selected lamp.

    Each nonzero toggle holds its lamp's id, so filtering zeros yields the
    list of selected lamp ids.
    """
    selected = [lamp_id for lamp_id in (toggle1, toggle2, toggle3) if lamp_id]
    b.set_light(selected, 'hue', int(val))
#-----------------------------------------------------------------------------------------------------------------------
# Function for saturation slider - Sets the sat of the selected lights to the value on the slider
#-----------------------------------------------------------------------------------------------------------------------
def satslider(val):
    """Apply the saturation slider value to every currently selected lamp.

    Each nonzero toggle holds its lamp's id, so filtering zeros yields the
    list of selected lamp ids.
    """
    selected = [lamp_id for lamp_id in (toggle1, toggle2, toggle3) if lamp_id]
    b.set_light(selected, 'sat', int(val))
#-----------------------------------------------------------------------------------------------------------------------
# Function for brightness slider - Sets the brightness of the selected lights to the value on the slider
#-----------------------------------------------------------------------------------------------------------------------
def brightslider(val):
    """Apply the brightness slider value to every currently selected lamp.

    Each nonzero toggle holds its lamp's id, so filtering zeros yields the
    list of selected lamp ids.
    """
    selected = [lamp_id for lamp_id in (toggle1, toggle2, toggle3) if lamp_id]
    b.set_light(selected, 'bri', int(val))
#-----------------------------------------------------------------------------------------------------------------------
# Function for scroll button - Starts scrolling through the stored hue/sat/bri sets at the specified transition time
#-----------------------------------------------------------------------------------------------------------------------
def scrollfunction():
    """Cycle all lights through the stored hue/sat/bri triples.

    Pressing the button while scrolling flips its label back to
    "start scroll", which ends the while loop on the next iteration.
    NOTE(review): this busy-loop blocks the Tk mainloop except for the
    explicit master.update() call, and `transition` is a widget defined
    elsewhere in the file — confirm it exists before the first press.
    """
    global store_list
    if (b4.cget("text")=="start scroll"):
        b4.config(text="stop scroll")
        i = 0
        # Keep cycling until a second press flips the button text back.
        while (b4.cget("text")=="stop scroll"):
            # Entries i*3 .. i*3+2 form one stored color.
            hue = store_list[3*i]
            sat = store_list[3*i+1]
            bri = store_list[3*i+2]
            timedelay = int(transition.get())
            # Hue transitiontime is in deciseconds, hence the factor of 10.
            command = {'transitiontime' : int(10*timedelay), 'on' : True, 'hue' : hue, 'sat' : sat, 'bri' : bri}
            b.set_light([1,2,3],command)
            time.sleep(timedelay)
            i += 1
            # Wrap around after the last stored color.
            if (i == (len(store_list)/3)):
                i = 0
            # Process pending Tk events so the stop press can register.
            master.update()
    else:
        b4.config(text="start scroll")
#-----------------------------------------------------------------------------------------------------------------------
# Function for set color button - Stores the current values of the hue/sat/bri sliders into 'store_list' to be used
# by the scroll function
#-----------------------------------------------------------------------------------------------------------------------
def setcolor():
    """Store the current hue/sat/bri slider values for the scroll function
    and bump the set-color button label to the next color number."""
    # Label format is "set color N": bytes 0-9 are the prefix, the rest is N.
    label = custom_wash.cget("text")
    next_number = int(label[10:]) + 1
    custom_wash.config(text=label[:10] + str(next_number))
    # Append one flat hue/sat/bri triple, in that order.
    store_list.append(slider1.get())
    store_list.append(slider2.get())
    store_list.append(slider3.get())
#-----------------------------------------------------------------------------------------------------------------------
# Function for the clear button - Clears all of the stored colors in 'store_list'
#-----------------------------------------------------------------------------------------------------------------------
def clearcolors():
    """Forget every stored color and reset the set-color button label."""
    global store_list
    store_list = []
    custom_wash.config(text="set color 1")
#-----------------------------------------------------------------------------------------------------------------------
# Widget construction. Every widget is a module-level global because the
# callbacks above refer to them directly by name.
#-----------------------------------------------------------------------------------------------------------------------
# Lamp 1 select button and its on/off toggle
b1 = Button(master, text="lamp 1", command=lampselect1)
b1.config(height=1, width=10)
b1.place(x=5, y=25)
b1_toggle = Button(master, text="on", command=lamp1_toggle)
b1_toggle.config(height=1, width=10)
b1_toggle.place(x=5, y=60)
# Lamp 2 select button and toggle
b2 = Button(master, text="lamp 2", command=lampselect2)
b2.config(height=1, width=10)
b2.place(x=105, y=25)
b2_toggle = Button(master, text="on", command=lamp2_toggle)
b2_toggle.config(height=1, width=10)
b2_toggle.place(x=105, y=60)
# Lamp 3 select button and toggle
b3 = Button(master, text="lamp 3", command=lampselect3)
b3.config(height=1, width=10)
b3.place(x=205, y=25)
b3_toggle = Button(master, text="on", command=lamp3_toggle)
b3_toggle.config(height=1, width=10)
b3_toggle.place(x=205, y=60)
# Hue / saturation / brightness sliders (hue spans 0-65534, others 0-254,
# matching the ranges the Hue API accepts)
slider1 = Scale(master,from_=0, to=65534, orient=HORIZONTAL)
slider1.config(length=217, command=hueslider)
slider1.place(x=75, y=100)
slider2 = Scale(master,from_=0, to=254, orient=HORIZONTAL)
slider2.config(length=217, command=satslider)
slider2.place(x=75, y=150)
slider3 = Scale(master,from_=0, to=254, orient=HORIZONTAL)
slider3.config(length=217, command=brightslider)
slider3.place(x=75, y=200)
# Slider labels
label1 = Label(master, text="hue: ")
label1.place(x=5, y=119)
label2 = Label(master, text="sat: ")
label2.place(x=5, y=169)
label3 = Label(master, text="bright: ")
label3.place(x=5, y=219)
# Scroll start/stop button — its label doubles as the scroll state flag
b4 = Button(master, text="start scroll", command=scrollfunction)
b4.config(height=1, width=10)
b4.place(x=105, y=255)
# Store-current-color button — its label tracks how many colors are stored
custom_wash = Button(master, text="set color 1", command=setcolor)
custom_wash.config(height=1, width=10)
custom_wash.place(x=205, y=320)
# Clear stored colors button
delete_wash = Button(master, text="clear colors", command=clearcolors)
delete_wash.config(height=1, width=10)
delete_wash.place(x=205, y=350)
# Transition-time entry (seconds) plus its labels
transition = Entry(master, width=2)
transition.place(x=210, y=295)
label4 = Label(master, text="transition time: ")
label4.place(x=120, y=295)
label5 = Label(master, text="sec")
label5.place(x=235, y=295)
# Enter the tkinter event loop; runs until the window is closed.
mainloop()
|
from django.shortcuts import render
from .models import Status
from .serializers import StatusSerializer
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import generics,mixins,permissions
from rest_framework.authentication import SessionAuthentication
from account_app.permissions import IsOwnerOrReadOnly
from account_app.serializers import UserPublicSerializer
from django.shortcuts import get_object_or_404
import json
# from rest_framework import
#custom jwt payload handler
from django.conf import settings
from django.utils import timezone
import datetime
# JWT refresh window, read once at import time from Django settings.
expire_delta = settings.JWT_AUTH['JWT_REFRESH_EXPIRATION_DELTA']
def jwt_response_payload_handler(token,user=None,request=None):
    """Shape the JWT login response: token, username, and expiry moment.

    The reported expiry is 3000 seconds before the real refresh deadline,
    giving clients a safety margin to refresh in time.
    """
    expires = timezone.now() + expire_delta - datetime.timedelta(seconds=3000)
    return {
        'token': token,
        'user': user.username,
        'expires': expires,
    }
##
def is_json(json_data):
    """Return True when *json_data* (str or bytes) parses as JSON.

    TypeError is caught as well, so non-string input (e.g. None) is
    reported as "not JSON" instead of raising.
    """
    try:
        json.loads(json_data)
    except (ValueError, TypeError):
        return False
    return True
# Create your views here.
class StatusListAPIView(APIView):
    """Plain APIView that returns every Status serialized as a list."""
    def get(self,request,format=None):
        # format=None lets DRF content negotiation pick the renderer.
        qs = Status.objects.all()
        serializer = StatusSerializer(qs,many=True)
        return Response(serializer.data)
#generic LIST VIEW
class StatusListSearchAPIView(generics.ListAPIView):
    """List Statuses, filtered by ?q=<substring of content> when given.

    NOTE(review): search_fields/ordering_fields only take effect with the
    matching DRF filter backends configured — none are visible here;
    confirm against the project settings.
    """
    queryset = Status.objects.all()
    serializer_class = StatusSerializer
    search_fields = ('user__username','content')
    ordering_fields = ('user__username','timestamp')
    def get_queryset(self):
        # Manual ?q= filter, independent of the search_fields above.
        qs = Status.objects.all()
        query = self.request.GET.get('q')
        if query is not None:
            qs = qs.filter(content__icontains=query)
        return qs
#generics CREATE VIEW
class StatusCreateAPIView(generics.CreateAPIView):
    """Create a Status; the author is always the requesting user."""
    queryset = Status.objects.all()
    serializer_class = StatusSerializer
    def perform_create(self,serializer):
        # Bind the new status to the authenticated requester.
        serializer.save(user =self.request.user)
#generic Detail View
class StatusDetailAPIView(generics.RetrieveAPIView):
    """Retrieve a single Status looked up by the 'id' URL kwarg."""
    lookup_field = 'id'  # lookup by id rather than the default pk/slug
    queryset = Status.objects.all()
    serializer_class = StatusSerializer
#generic Update API View
class StatusUpdateAPIView(generics.UpdateAPIView):
    """Update a Status via PUT/PATCH (default pk lookup)."""
    queryset = Status.objects.all()
    serializer_class = StatusSerializer
#generic Delete API View
class StatusDeleteAPIView(generics.DestroyAPIView):
    """Delete a Status via DELETE (default pk lookup)."""
    queryset = Status.objects.all()
    serializer_class = StatusSerializer
#### MIXINS ############## ############
class StatusListCreateAPIView(mixins.CreateModelMixin,generics.ListAPIView):
    """List Statuses (with ?q= substring filtering) and create via POST.

    Anonymous users get read-only access; authenticated users may create,
    and the created status is bound to the requesting user.
    """
    serializer_class = StatusSerializer
    queryset = Status.objects.all()
    permission_classes = [permissions.IsAuthenticatedOrReadOnly,]
    def get_queryset(self):
        # (debug print of the requesting user removed)
        qs = Status.objects.all()
        query = self.request.GET.get('q')
        if query is not None:
            qs = qs.filter(content__icontains=query)
        return qs
    def post(self,request,*args,**kwargs):
        return self.create(request,*args,**kwargs)
    def perform_create(self,serializer):
        serializer.save(user=self.request.user)
# class StatusRetrieveUpdateDeleteAPIView(mixins.DestroyModelMixin,mixins.UpdateModelMixin,generics.RetrieveAPIView):
class StatusRetrieveUpdateDeleteAPIView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve, update, or delete one Status (default pk lookup)."""
    queryset = Status.objects.all()
    serializer_class = StatusSerializer
#Single API View TO DO ALL CRUD
class StatusCrudAPIView(mixins.RetrieveModelMixin,mixins.CreateModelMixin,mixins.DestroyModelMixin,
                        mixins.UpdateModelMixin,generics.ListAPIView):
    """Single endpoint handling list/retrieve/create/update/delete.

    The target object id may arrive in the query string (?id=...), in a
    raw JSON request body ({"id": ...}), or — for PUT — in the parsed
    request data; the first one found wins.
    """
    serializer_class = StatusSerializer
    # Cached id extracted by get()/put(); get_object() falls back to it.
    passed_id = None

    def _resolve_passed_id(self, request, extra_id=None):
        """Return the first id found among the query string, the raw JSON
        body, and *extra_id*, or None when no id was supplied anywhere."""
        url_id = request.GET.get('id', None)
        json_id = None
        body_ = request.body
        # Parse the body once; the old code validated and re-parsed it.
        if is_json(body_):
            json_id = json.loads(body_).get('id', None)
        return url_id or json_id or extra_id or None

    # list view
    def get_queryset(self):
        """All statuses, optionally filtered by ?q=<substring of content>."""
        qs = Status.objects.all()
        query = self.request.GET.get('q')
        if query is not None:
            qs = qs.filter(content__icontains=query)
        return qs

    # retrieve view
    def get_object(self):
        """Resolve the object for the passed id, enforcing object-level
        permissions; returns None when no id was supplied."""
        request = self.request
        passed_id = request.GET.get('id', None) or self.passed_id
        queryset = self.get_queryset()
        obj = None
        if passed_id is not None:
            obj = get_object_or_404(queryset, id=passed_id)
            self.check_object_permissions(request, obj)
        return obj

    def get(self, request, *args, **kwargs):
        """Retrieve one object when an id is supplied, otherwise list."""
        self.passed_id = self._resolve_passed_id(request)
        if self.passed_id is not None:
            return self.retrieve(request, *args, **kwargs)
        return super().get(request, *args, **kwargs)

    # create view
    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)

    def put(self, request, *args, **kwargs):
        # PUT additionally accepts the id inside the parsed request data.
        self.passed_id = self._resolve_passed_id(request,
                                                 request.data.get('id'))
        return self.update(request, *args, **kwargs)

    def patch(self, request, *args, **kwargs):
        return self.update(request, *args, **kwargs)

    def perform_destroy(self, instance):
        # get_object() may return None (no id supplied); deleting nothing
        # is preferable to an AttributeError here.
        if instance is not None:
            return instance.delete()
        return None

    def delete(self, request, *args, **kwargs):
        return self.destroy(request, *args, **kwargs)
######## Status new detail view
class StatusNewDetailAPIView(mixins.UpdateModelMixin,mixins.DestroyModelMixin,generics.RetrieveAPIView):
    """Retrieve/update/delete a single Status; writes are owner-only."""
    permission_classes = [permissions.IsAuthenticatedOrReadOnly,IsOwnerOrReadOnly]
    # NOTE(review): misspelled — DRF reads 'authentication_classes', so this
    # line currently has no effect. Fixing the spelling would disable the
    # default authentication entirely; confirm intent before renaming.
    authentications_classes = []
    serializer_class = StatusSerializer
    queryset = Status.objects.all()
    lookup_field = 'id'
    def put(self,request,*args,**kwargs):
        return self.update(request,*args,**kwargs)
    def patch(self,request,*args,**kwargs):
        return self.update(request,*args,**kwargs)
    def delete(self,request,*args,**kwargs):
        # BUG FIX: previously delegated to self.update(), so an HTTP DELETE
        # performed an update instead of removing the object.
        return self.destroy(request,*args,**kwargs)
|
import os
from django.core.paginator import Paginator
from django.core.files.storage import FileSystemStorage
from django.shortcuts import redirect
from django.shortcuts import render
from django.shortcuts import get_object_or_404
from django.urls import reverse
from django.http import HttpResponse
from django.views.generic import View
from django.contrib.auth.mixins import LoginRequiredMixin
from blog.models import Tag
from blog.models import Post
from blog.models import User
from blogsite.utils import ObjectDetailMixin
from blogsite.utils import ObjectCreateMixin
from blogsite.utils import ObjectUpdateMixin
from blogsite.utils import ObjectDeleteMixin
from blog.forms import TagForm
from blog.forms import PostForm
from blog.forms import UserForm
class TagCreate(LoginRequiredMixin, ObjectCreateMixin, View):
    """Create a Tag; login required (403 rather than login redirect)."""
    form_model = TagForm
    template = 'blog/tag_create.html'
    raise_exception = True
class TagDetail(ObjectDetailMixin, View):
    """Public detail page for one Tag."""
    model = Tag
    template = 'blog/tag_detail.html'
class TagUpdate(LoginRequiredMixin, ObjectUpdateMixin, View):
    """Edit an existing Tag; login required."""
    model = Tag
    form_model = TagForm
    template = 'blog/tag_update.html'
    raise_exception = True
class TagDelete(LoginRequiredMixin, ObjectDeleteMixin, View):
    """Delete a Tag, then redirect to the posts list; login required."""
    model = Tag
    template = 'blog/tag_delete.html'
    redirect_url = 'posts_list_url'
    raise_exception = True
class PostCreate(LoginRequiredMixin, ObjectCreateMixin, View):
    """Create a Post; overrides post() so uploaded files reach the form."""
    form_model = PostForm
    template = 'blog/post_create.html'
    raise_exception = True
    def post(self, request):
        # request.FILES is passed so the post's image upload is bound.
        bound_form = self.form_model(request.POST, request.FILES)
        if bound_form.is_valid():
            new_obj = bound_form.save()
            return redirect(new_obj)
        return render(request, self.template,
                      context={'form': bound_form})
class PostUpdate(LoginRequiredMixin, ObjectUpdateMixin, View):
    """Edit an existing Post; login required."""
    model = Post
    form_model = PostForm
    template = 'blog/post_update.html'
    raise_exception = True
    def post(self, request, slug):
        """Validate the submitted form against the existing post and save."""
        # 404 on an unknown slug instead of an unhandled DoesNotExist.
        obj = get_object_or_404(self.model, slug__iexact=slug)
        # BUG FIX: bind the form to the existing instance; without
        # instance=obj a valid submission saved a brand-new Post instead
        # of updating the one being edited.
        bound_form = self.form_model(request.POST, request.FILES, instance=obj)
        if bound_form.is_valid():
            new_obj = bound_form.save()
            return redirect(new_obj)
        return render(request, self.template,
                      context={'form': bound_form,
                               self.model.__name__.lower(): obj})
class PostDetail(ObjectDetailMixin, View):
    """Public detail page for one Post."""
    model = Post
    form_model = PostForm
    template = 'blog/post_detail.html'
class PostDelete(LoginRequiredMixin, ObjectDeleteMixin, View):
    """Delete a Post, then redirect to the posts list; login required."""
    model = Post
    template = 'blog/post_delete.html'
    redirect_url = 'posts_list_url'
    raise_exception = True
class UserCreate(LoginRequiredMixin, ObjectCreateMixin, View):
    """Create a User profile; login required."""
    form_model = UserForm
    template = 'blog/user_create.html'
    raise_exception = True
class UserDetail(ObjectDetailMixin, View):
    """Public detail page for one User."""
    model = User
    template = 'blog/user_detail.html'
class UserUpdate(LoginRequiredMixin, ObjectUpdateMixin, View):
    """Edit a User profile; login required."""
    model = User
    form_model = UserForm
    template = 'blog/user_update.html'
    raise_exception = True
class UserDelete(LoginRequiredMixin, ObjectDeleteMixin, View):
    """Delete a User, then redirect to the posts list; login required."""
    model = User
    template = 'blog/user_delete.html'
    redirect_url = 'posts_list_url'
    raise_exception = True
def search_list(request):
    """Render posts whose title contains the ?search= query.

    An empty or missing query shows every post rather than an empty page.
    """
    search_query = request.GET.get('search', '')
    if search_query:
        posts = Post.objects.filter(title__icontains=search_query)
    else:
        # Redundant `elif not search_query` collapsed to a plain else.
        posts = Post.objects.all()
    context = {
        'search': posts,
        'search_name': search_query,
    }
    return render(request, 'blog/search.html',
                  context=context)
def posts_list(request):
    """Render the paginated post index (3 posts per page)."""
    posts = Post.objects.all()
    next_url, prev_url, is_paginated, page = paginator_for_obj(request, posts)
    context = {
        'next_url': next_url,
        'prev_url': prev_url,
        'is_paginated': is_paginated,
        'page_object': page,
    }
    return render(request, 'blog/index.html',
                  context=context)
def tags_list(request):
    """Render the full list of tags."""
    return render(request, 'blog/tags_list.html',
                  context={'tags': Tag.objects.all()})
def users_list(request):
    """Render the full list of users."""
    return render(request, 'blog/users_list.html',
                  context={'users': User.objects.all()})
def paginator_for_obj(request, obj):
    """Paginate *obj* at three items per page.

    Returns a tuple (next_url, prev_url, is_paginated, page) where the
    urls are '?page=N' query strings, or '' at either end of the range.
    """
    page_number = request.GET.get('page', 1)
    page = Paginator(obj, 3).get_page(page_number)
    prev_url = '?page={}'.format(page.previous_page_number()) if page.has_previous() else ''
    next_url = '?page={}'.format(page.next_page_number()) if page.has_next() else ''
    return (next_url, prev_url, page.has_other_pages(), page)
def upload_image(request):
    """Handle the image upload form.

    On POST with a 'document' file, store it and re-render the page with
    its URL; on GET (or a POST without a file) render the empty form.
    """
    if request.method == 'POST' and 'document' in request.FILES:
        uploaded_file = request.FILES['document']
        fs = FileSystemStorage()
        # fs.save() may rename the file to avoid collisions, so every URL
        # must be built from the returned name, not the original one.
        name = fs.save(uploaded_file.name, uploaded_file)
        # BUG FIX: previously joined with uploaded_file.name, which points
        # at the wrong file whenever the storage renamed the upload.
        img = os.path.join(fs.base_url, name)
        return render(request, 'blog/upload.html',
                      context={
                          'url': fs.url(name),
                          'img': img
                      })
    return render(request, 'blog/upload.html')
import RPi.GPIO as GPIO
import time

# Blink the output on physical pin 16 (BOARD numbering) ten times.
GPIO.setmode(GPIO.BOARD)
GPIO.setup(16, GPIO.OUT)
try:
    for x in range(10):
        GPIO.output(16, GPIO.LOW)
        time.sleep(0.25)
        GPIO.output(16, GPIO.HIGH)
        time.sleep(0.25)
        GPIO.output(16, GPIO.LOW)
        time.sleep(0.25)
finally:
    # Release the pin so later scripts don't warn that it is already in use.
    GPIO.cleanup()
|
import sys
from tqdm import tqdm
from glob import glob
from utils import Node, traverse_label, traverse
import numpy as np
import pickle
import os
import torch
from collections import Counter
import re
from os.path import abspath
import nltk
from transformers import *
import warnings
warnings.filterwarnings("ignore")
# Map model-family name -> (config class, model class, tokenizer class).
MODEL_CLASSES = {
    "bert": (BertConfig, BertModel, BertTokenizer)}
config_class, model_class, tokenizer_class = MODEL_CLASSES["bert"]
# Load tokenizer and encoder from a local bert-base-uncased checkpoint dir.
tokenizer = tokenizer_class.from_pretrained("./bert-base-uncased")
model = model_class.from_pretrained('./bert-base-uncased')
def parse(path):
    """Parse one serialized AST file and embed every node label with BERT.

    Expected file layout (inferred from the reads below — confirm against
    the dump generator): first line = node count, then one "<idx> <label>"
    line per node, then "<parent> <child>" edge lines up to a blank line,
    and finally one natural-language summary line.

    Returns (root, features, adj): root is node 0, features[i] is the
    summed last-hidden-state BERT embedding of node i's label, and adj is
    the edge list as [parent, child] index pairs.
    """
    features = []
    adj = []
    with open(path, "r",errors='ignore') as f:
        num_objects = f.readline()
        nodes = [Node(num=i, children=[]) for i in range(int(num_objects))]
        for i in range(int(num_objects)):
            # Drop the leading index token and the trailing newline.
            label = " ".join(f.readline().split(" ")[1:])[:-1]
            tokens = tokenizer.tokenize(label)
            small_tokens = []
            for token in tokens:
                # Skip punctuation tokens that carry no semantic content.
                if token in ['(', ')', '=', '\'', '_']:
                    continue
                small_tokens.append(token)
            input_ids = torch.tensor(tokenizer.convert_tokens_to_ids(small_tokens)).unsqueeze(0)
            # Cap the sub-token sequence at 100 to bound BERT input size.
            if input_ids.size()[1] > 100:
                input_ids = input_ids[:,:100]
            outputs = model(input_ids)
            last_hidden_states = outputs[0]
            last_hidden_array = last_hidden_states.detach().numpy()
            feature_multi_len = last_hidden_array[0]
            # Sum over the token axis -> one fixed-size vector per label.
            feature = feature_multi_len.sum(0)
            features.append(feature)
            nodes[i].label = label
        # Edge section: "<parent> <child>" lines terminated by a blank line.
        while 1:
            line = f.readline()
            if line == "\n":
                break
            p, c = map(int, line.split(" "))
            adj.append([p,c])
            nodes[p].children.append(nodes[c])
            nodes[c].parent = nodes[p]
        # Final line: natural-language description (read but unused here).
        nl = f.readline()[:-1]
    return nodes[0],features,adj
def get_method_name(root):
    """Return the method name found under the 'name (SimpleName)' child.

    The name child's first grandchild label is sliced with [12:-1] —
    presumably stripping an 'identifier (' prefix and the closing ')'
    (confirm against the AST dump format). Returns None when the tree
    has no name node.
    """
    name_node = next(
        (c for c in root.children if c.label == "name (SimpleName)"), None)
    if name_node is None:
        return None
    return name_node.children[0].label[12:-1]
def is_invalid_tree(root):
    """Return True when this method AST should be skipped.

    A tree is invalid when it is a constructor, has 100+ nodes, has no
    extractable method name, or the name starts with a test/getter/setter
    prefix.
    """
    labels = traverse_label(root)
    if root.label == 'root (ConstructorDeclaration)':
        return True
    if len(labels) >= 100:
        return True
    method_name = get_method_name(root)
    if method_name is None:
        # BUG FIX: slicing None raised a TypeError here; a nameless tree
        # is treated as invalid instead.
        return True
    return method_name.startswith(("test", "Test", "set", "Set", "get", "Get"))
def parse_dir(data_dir, path_to_dir):
    """Parse every AST file in *path_to_dir* and pickle its features.

    Each file's {"features": ..., "adj": ...} dict is written to
    <data_dir>/extractfeatures/<set_name>/<file_number>.
    """
    files = sorted(glob(path_to_dir + "/*"))
    set_name = path_to_dir.split("/")[-1]
    for file in tqdm(files, "parsing {}".format(path_to_dir)):
        number = int(file.split("/")[-1])
        # The root node is not needed here; only features and edges are kept.
        _, features, adj = parse(file)
        new_dict = {"features": features, "adj": adj}
        out_path = os.path.join(data_dir, "extractfeatures", set_name, str(number))
        # Default buffering: the old buffering=1 is meaningless for binary mode.
        with open(out_path, "wb") as f:
            pickle.dump(new_dict, f)
def pickling():
    """Entry point: create the extractfeatures output tree under the data
    directory given in argv[1], then parse the train/test/dev splits.

    Raises:
        Exception: when no data directory argument was supplied.
    """
    args = sys.argv
    if len(args) <= 1:
        raise Exception("(usage) $ python preprocessing_task.py [dir]")
    data_dir = args[1]
    out_root = os.path.join(data_dir, "extractfeatures")
    for d in [out_root] + [os.path.join(out_root, s)
                           for s in ("train", "dev", "test")]:
        # exist_ok avoids the check-then-create race of exists()+mkdir().
        os.makedirs(d, exist_ok=True)
    for path in [os.path.join(data_dir, s) for s in ("train", "test", "dev")]:
        parse_dir(data_dir, path)
if __name__ == "__main__":
    #nltk.download('punkt')  # one-time NLTK resource download, if needed
    # Deep ASTs can exceed the default recursion limit during traversal.
    sys.setrecursionlimit(10000)
    pickling()
|
import tkinter
from time import sleep
from math import sin, cos, atan2, pi
# Parameters for canvas appearance
CANVAS_WIDTH = 600
CANVAS_HEIGHT = 300
CANVAS_BACKGROUND_COLOR = 'black'
CANVAS_TITLE = 'Topological Defects'
# Pause time in seconds between canvas updates (caps at ~100 redraws/sec)
PAUSE = 1/100.0
# Parameters for arrow appearance
ARROW_LENGTH = 10
ARROW_WIDTH = 1
# Default orientation; canvas y grows downward, so -pi/2 points "up".
ARROW_ANGLE_DEFAULT = -pi / 2
# tkinter create_line arrowshape tuple (d1, d2, d3 dimensions)
ARROW_SHAPE = (3, 4, 2)
ARROW_COLOR_DEFAULT = 'white'
ARROW_COLOR_USE_COLORMAP = True
ARROW_COLORMAP = 'hsv' # or 'twilight'
# Parameters for arrow array
ARROW_SPACING = 12
BORDER_OFFSET = ARROW_LENGTH
# Number of arrows that fit per dimension inside the border margin
NUM_ARROW_X = (CANVAS_WIDTH - BORDER_OFFSET * 2) // ARROW_SPACING + 1
NUM_ARROW_Y = (CANVAS_HEIGHT - BORDER_OFFSET * 2) // ARROW_SPACING + 1
# Parameters for defect marker appearance
DEFECT_X = CANVAS_WIDTH // 4
DEFECT_Y = CANVAS_HEIGHT // 2
DEFECT_RADIUS = 3
DEFECT_COLOR = 'white'
def main():
    """
    Draw an array of arrows whose angles follow a pair of
    defect (fixed) and antidefect (following cursor position)
    """
    canvas = make_canvas(CANVAS_WIDTH, CANVAS_HEIGHT, CANVAS_TITLE)
    arrows = initialize_arrows(canvas)
    defect = initialize_defect(canvas)
    # Animation loop: runs until the window is closed (which will raise
    # a TclError out of the tkinter calls and end the process).
    while True:
        # NOTE(review): winfo_pointerxy() reports *screen* coordinates,
        # not canvas coordinates; no offset is applied here — confirm
        # this is the intended behavior for the antidefect position.
        antidefect = canvas.winfo_pointerxy()
        rotate_arrows(canvas, arrows, defect, antidefect)
        canvas.update()
        sleep(PAUSE)
def rotate_arrows(canvas, arrows, defect, antidefect):
    """
    Re-aim every arrow in the grid at the angle implied by the defect and
    antidefect positions, updating its color to match the new angle.
    :param canvas: the tkinter canvas object
    :param arrows: nested list of canvas line ids, [column1, column2, ...]
    :param defect: (x, y) tuple with the defect position
    :param antidefect: (x, y) tuple with the antidefect position
    """
    for column in arrows:
        for item in column:
            # Recover the arrow's center from its current endpoints.
            x0, y0, x1, y1 = canvas.coords(item)
            cx = (x0 + x1) / 2
            cy = (y0 + y1) / 2
            # Re-aim the arrow around its center and recolor it.
            theta = compute_angle(cx, cy, defect, antidefect)
            canvas.coords(item,
                          *compute_arrow_coords(cx, cy, ARROW_LENGTH, theta))
            canvas.itemconfig(item, fill=get_arrow_color(theta))
def compute_angle(x, y, defect, antidefect):
    """
    Return the rotation angle (radian) for an arrow at (x, y), given the
    defect and antidefect positions as length-2 (x, y) tuples. The canvas
    coordinate system is left-handed, hence the mirrored antidefect term.
    """
    dx, dy = x - defect[0], y - defect[1]
    adx, ady = x - antidefect[0], antidefect[1] - y
    return atan2(dy, dx) + atan2(ady, adx) + ARROW_ANGLE_DEFAULT
def initialize_arrows(canvas):
    """
    Draw the full arrow grid and return it as a list of columns; each
    column is a list of canvas line ids whose inner index runs along y.
    """
    arrows = []
    for col in range(NUM_ARROW_X):
        xcenter = BORDER_OFFSET + col * ARROW_SPACING
        column = []
        for row in range(NUM_ARROW_Y):
            ycenter = BORDER_OFFSET + row * ARROW_SPACING
            coords = compute_arrow_coords(xcenter, ycenter,
                                          ARROW_LENGTH, ARROW_ANGLE_DEFAULT)
            column.append(canvas.create_line(*coords, width=ARROW_WIDTH,
                                             arrow=tkinter.LAST,
                                             arrowshape=ARROW_SHAPE))
        arrows.append(column)
    return arrows
def compute_arrow_coords(xcenter, ycenter, length, angle):
    """
    Return (xstart, ystart, xend, yend) — the rounded endpoints of a line
    segment centered at (xcenter, ycenter) with the given length, rotated
    by `angle` radians.
    """
    half = length / 2
    dx = half * cos(angle)
    dy = half * sin(angle)
    return (round(xcenter - dx), round(ycenter - dy),
            round(xcenter + dx), round(ycenter + dy))
def initialize_defect(canvas):
    """
    Draw a small filled oval marking the (fixed) defect position and
    return that position as an (x, y) tuple.
    """
    x, y = DEFECT_X, DEFECT_Y
    canvas.create_oval(x - DEFECT_RADIUS, y - DEFECT_RADIUS,
                       x + DEFECT_RADIUS, y + DEFECT_RADIUS,
                       fill=DEFECT_COLOR, outline=DEFECT_COLOR)
    return (x, y)
def get_arrow_color(angle):
    """
    Return the color string for an arrow at the given rotation angle
    (radian): either the fixed default color, or a palette entry when
    the colormap option is enabled.
    """
    if not ARROW_COLOR_USE_COLORMAP:
        return ARROW_COLOR_DEFAULT
    return get_color(angle, ARROW_COLORMAP)
########################################
##### The following functions are #####
##### provided. Do not modify them #####
########################################
def make_canvas(width, height, title):
    """
    Creates and returns a drawing canvas
    of the given int size with a blue border,
    ready for drawing.
    """
    top = tkinter.Tk()
    top.minsize(width=width, height=height)
    top.title(title)
    # +1 so the full width/height is usable inside the canvas border.
    canvas = tkinter.Canvas(top, bg=CANVAS_BACKGROUND_COLOR,
                            width=width + 1, height=height + 1)
    canvas.pack()
    return canvas
def get_color(angle, colormap):
    """
    Returns a string in the hex format #rrggbb (8 bits per color)
    that corresponds to the color of a given angle (radian). colormap
    is a string that contains the name of the colormap to be used.
    Currently only supports 'hsv' and 'twilight'.
    """
    # Map angle (mod 2*pi) linearly onto the 256-entry palette index.
    index = int(angle % (2 * pi) / (2 * pi) * 255)
    colormap = colormap.lower()
    if colormap == 'twilight':
        return TWILIGHT[index]
    elif colormap == 'hsv':
        return HSV[index]
    raise ValueError("Invalid colormap! Only supports 'hsv' and 'twilight'.")
"""
Color map for twilight and hsv, exported from the colormap in matplotlib
using the following snippet. You can create your own by selecting other
palettes available in matplotlib, or using the matplotlib module directly.
.. code-block:: python
from pylab import *
cmap = cm.get_cmap('hsv', 256)
for i in range(cmap.N):
rgb = cmap(i)[:3]
print(matplotlib.colors.rgb2hex(rgb))
"""
TWILIGHT = ["#e2d9e2", "#e1d9e2", "#e0d9e2", "#ded9e1", "#ddd9e0", "#dcd9df",
"#dad8df", "#d8d8de", "#d6d7dd", "#d4d6dc", "#d2d5db", "#d0d4d9",
"#cdd3d8", "#cbd2d7", "#c8d0d6", "#c5cfd5", "#c2ced4", "#bfccd3",
"#bccbd1", "#b9c9d0", "#b6c8cf", "#b3c6ce", "#b0c5cd", "#adc3cd",
"#aac2cc", "#a7c0cb", "#a4beca", "#a1bdc9", "#9ebbc9", "#9cb9c8",
"#99b8c8", "#96b6c7", "#93b4c6", "#92b3c6", "#8eb1c5", "#8cafc5",
"#89adc5", "#88acc4", "#85a9c4", "#82a7c3", "#80a5c3", "#7fa5c3",
"#7ca2c2", "#7aa0c2", "#789ec2", "#779dc2", "#759ac1", "#7398c1",
"#7196c1", "#7195c0", "#6e92c0", "#6d90c0", "#6c8ebf", "#6b8dbf",
"#698abf", "#6888be", "#6786be", "#6785be", "#6682bd", "#657fbd",
"#647dbc", "#647cbc", "#6379bb", "#6277bb", "#6275ba", "#6172ba",
"#6171b9", "#606eb8", "#606cb8", "#6069b7", "#6067b6", "#5f65b5",
"#5f62b4", "#5f60b4", "#5f5fb3", "#5f5bb2", "#5f59b1", "#5f57b0",
"#5e54ae", "#5e52ad", "#5e4fac", "#5e4dab", "#5e4caa", "#5e48a8",
"#5e46a6", "#5e43a5", "#5d41a3", "#5d3ea1", "#5d3ca0", "#5d3a9e",
"#5c389d", "#5c359a", "#5b3298", "#5b3095", "#5a2e93", "#5a2b90",
"#59298e", "#58278b", "#58268a", "#572385", "#562182", "#551f7f",
"#531e7c", "#521c79", "#511a75", "#4f1972", "#4f1970", "#4c176b",
"#4b1668", "#491564", "#471461", "#46145e", "#44135a", "#421257",
"#411256", "#3f1251", "#3d114e", "#3c114b", "#3a1149", "#391146",
"#371144", "#361142", "#361141", "#34113e", "#33113c", "#32123a",
"#311339", "#301437", "#301437", "#311337", "#331237", "#341238",
"#341238", "#361139", "#381139", "#3a113a", "#3b113b", "#3d113c",
"#3f123d", "#41123d", "#43123e", "#461240", "#481341", "#4a1342",
"#4d1443", "#4f1444", "#521545", "#541546", "#561546", "#591648",
"#5c1749", "#5f174a", "#61184b", "#64194b", "#67194c", "#691a4d",
"#6c1b4e", "#6f1c4e", "#711d4f", "#741e4f", "#761f4f", "#792050",
"#7b2150", "#7e2250", "#7f2350", "#832550", "#852650", "#872750",
"#8a2950", "#8c2a50", "#8e2c50", "#902e50", "#922f50", "#943150",
"#963350", "#983550", "#9a3750", "#9c3950", "#9e3b50", "#a03d50",
"#a03e50", "#a34150", "#a54350", "#a64550", "#a84750", "#a94950",
"#ab4b50", "#ac4d51", "#ae5051", "#af5251", "#b15452", "#b25652",
"#b35953", "#b55b53", "#b65d54", "#b75f55", "#b86155", "#b96456",
"#ba6657", "#bb6958", "#bc6b59", "#bd6e5a", "#be705b", "#bf725d",
"#c0755e", "#c1775f", "#c27a61", "#c27c63", "#c37f64", "#c48166",
"#c58468", "#c5866a", "#c6876b", "#c68b6e", "#c78e71", "#c89073",
"#c89275", "#c99578", "#c9977b", "#ca9a7d", "#ca9c80", "#cb9f83",
"#cca186", "#cca389", "#cda68c", "#cda88f", "#ceab92", "#cfad96",
"#cfae97", "#d0b29c", "#d1b4a0", "#d1b6a3", "#d2b8a7", "#d3baaa",
"#d4bdad", "#d5bfb1", "#d6c1b4", "#d7c3b8", "#d8c5bb", "#d8c7be",
"#d9c9c2", "#dacbc5", "#dbccc8", "#dccecb", "#dccfcd", "#ddd1d1",
"#ded3d3", "#dfd4d6", "#dfd5d8", "#e0d6da", "#e0d7db", "#e1d8dd",
"#e1d8df", "#e2d9e0", "#e2d9e1", "#e2d9e2"]
HSV = ["#ff0000", "#ff0600", "#ff0c00", "#ff1200", "#ff1800", "#ff1e00",
"#ff2300", "#ff2900", "#ff2f00", "#ff3500", "#ff3b00", "#ff4100",
"#ff4700", "#ff4d00", "#ff5300", "#ff5900", "#ff5f00", "#ff6400",
"#ff6a00", "#ff7000", "#ff7600", "#ff7c00", "#ff8200", "#ff8800",
"#ff8e00", "#ff9400", "#ff9a00", "#ff9f00", "#ffa500", "#ffab00",
"#ffb100", "#ffb700", "#ffbd00", "#ffc300", "#ffc900", "#ffcf00",
"#ffd500", "#ffdb00", "#ffe000", "#ffe600", "#ffec00", "#fef100",
"#fcf500", "#faf900", "#f8fd00", "#f4ff00", "#eeff00", "#e8ff00",
"#e2ff00", "#ddff00", "#d7ff00", "#d1ff00", "#cbff00", "#c5ff00",
"#bfff00", "#b9ff00", "#b3ff00", "#adff00", "#a7ff00", "#a2ff00",
"#9cff00", "#96ff00", "#90ff00", "#8aff00", "#84ff00", "#7eff00",
"#78ff00", "#72ff00", "#6cff00", "#66ff00", "#61ff00", "#5bff00",
"#55ff00", "#4fff00", "#49ff00", "#43ff00", "#3dff00", "#37ff00",
"#31ff00", "#2bff00", "#25ff00", "#20ff00", "#1aff00", "#14ff00",
"#0eff00", "#08ff00", "#06ff04", "#04ff08", "#02ff0c", "#00ff10",
"#00ff16", "#00ff1b", "#00ff21", "#00ff27", "#00ff2d", "#00ff33",
"#00ff39", "#00ff3f", "#00ff45", "#00ff4b", "#00ff51", "#00ff57",
"#00ff5c", "#00ff62", "#00ff68", "#00ff6e", "#00ff74", "#00ff7a",
"#00ff80", "#00ff86", "#00ff8c", "#00ff92", "#00ff97", "#00ff9d",
"#00ffa3", "#00ffa9", "#00ffaf", "#00ffb5", "#00ffbb", "#00ffc1",
"#00ffc7", "#00ffcd", "#00ffd3", "#00ffd8", "#00ffde", "#00ffe4",
"#00ffea", "#00fff0", "#00fff6", "#00fffc", "#00fcff", "#00f6ff",
"#00f0ff", "#00eaff", "#00e5ff", "#00dfff", "#00d9ff", "#00d3ff",
"#00cdff", "#00c7ff", "#00c1ff", "#00bbff", "#00b5ff", "#00afff",
"#00aaff", "#00a4ff", "#009eff", "#0098ff", "#0092ff", "#008cff",
"#0086ff", "#0080ff", "#007aff", "#0074ff", "#006eff", "#0069ff",
"#0063ff", "#005dff", "#0057ff", "#0051ff", "#004bff", "#0045ff",
"#003fff", "#0039ff", "#0033ff", "#002dff", "#0028ff", "#0022ff",
"#001cff", "#0016ff", "#0010ff", "#020cff", "#0408ff", "#0604ff",
"#0800ff", "#0e00ff", "#1300ff", "#1900ff", "#1f00ff", "#2500ff",
"#2b00ff", "#3100ff", "#3700ff", "#3d00ff", "#4300ff", "#4900ff",
"#4f00ff", "#5400ff", "#5a00ff", "#6000ff", "#6600ff", "#6c00ff",
"#7200ff", "#7800ff", "#7e00ff", "#8400ff", "#8a00ff", "#9000ff",
"#9500ff", "#9b00ff", "#a100ff", "#a700ff", "#ad00ff", "#b300ff",
"#b900ff", "#bf00ff", "#c500ff", "#cb00ff", "#d000ff", "#d600ff",
"#dc00ff", "#e200ff", "#e800ff", "#ee00ff", "#f400ff", "#f800fd",
"#fa00f9", "#fc00f5", "#fe00f1", "#ff00ed", "#ff00e7", "#ff00e1",
"#ff00db", "#ff00d5", "#ff00cf", "#ff00c9", "#ff00c3", "#ff00bd",
"#ff00b7", "#ff00b1", "#ff00ac", "#ff00a6", "#ff00a0", "#ff009a",
"#ff0094", "#ff008e", "#ff0088", "#ff0082", "#ff007c", "#ff0076",
"#ff0071", "#ff006b", "#ff0065", "#ff005f", "#ff0059", "#ff0053",
"#ff004d", "#ff0047", "#ff0041", "#ff003b", "#ff0035", "#ff0030",
"#ff002a", "#ff0024", "#ff001e", "#ff0018"]
if __name__ == '__main__':
main() |
# Complete the jumpingOnClouds function below.
def jumpingOnClouds(c):
    """Return the minimum number of jumps needed to reach the last cloud.

    *c* is a list of 0/1 flags; 1 marks a thundercloud that must be avoided,
    and both the first and last clouds are guaranteed safe (0).  From cloud
    ``i`` you may jump to ``i+1`` or ``i+2``.

    The original body contained a garbled conditional ("if i+2 c[i+2] or ...")
    that did not parse; this is the canonical greedy reconstruction.
    """
    steps = 0
    i = 0
    last = len(c) - 1
    while i < last:
        # Greedy: a 2-jump is always at least as good as two 1-jumps, so take
        # it whenever it stays in bounds and lands on a safe cloud.
        if i + 2 <= last and c[i + 2] == 0:
            i += 2
        else:
            i += 1
        steps += 1
    return steps
import datetime
import time
from . import room_info_bp, new_room_info_bp, hot_room_bp, country_hp, room_hp, type_hp, personal_hp
from flask import request, render_template
from ...utils.mysql_db import db
from ...utils.util import unix_time
@room_info_bp.route("/room_info")
def room_info():
    """Render a paginated (15 rows/page) listing of room_info rows.

    Query args: ``p`` -- 1-based page number (defaults to 1 on bad input),
    ``sort`` -- column name to sort by descending.
    """
    try:
        page = int(request.args.get("p", 1))
    except ValueError:
        page = 1
    sort_str = request.args.get("sort")
    skip_num = (page - 1) * 15
    sql = "select * from room_info limit {} ,15".format(skip_num)
    # Only accept a plain identifier as the sort column: interpolating an
    # arbitrary request value into the SQL string is a SQL-injection hole.
    if sort_str and sort_str.isidentifier():
        sql = "select * from room_info order by {} desc,day_sumamount desc limit {} ,15".format(sort_str, skip_num)
    result_tuple = db.fetchall(sql)
    # Map the tuple positions we care about onto template-friendly keys.
    keys_and_cols = [
        ("barid", 0), ("barlevel", 3), ("onlinenum", 4), ("heatnow", 5),
        ("name", 6), ("isofficial", 7), ("labelname", 8), ("data_time", 9),
        ("day_sumamount", 11), ("week_sumamount", 12), ("membernum", 13),
    ]
    data = [{key: row[col] for key, col in keys_and_cols} for row in result_tuple]
    return render_template("room_info.html", data=data)
@new_room_info_bp.route("/new_room_info")
def new_room_info():
    """Render a paginated (15 rows/page) listing of new_room_info rows.

    Query args: ``p`` -- 1-based page number (defaults to 1 on bad input),
    ``sort`` -- column name to sort by descending.
    """
    try:
        page = int(request.args.get("p", 1))
    except ValueError:
        page = 1
    sort_str = request.args.get("sort")
    skip_num = (page - 1) * 15
    sql = "select * from new_room_info limit {} ,15".format(skip_num)
    # Only accept a plain identifier as the sort column (SQL-injection guard).
    if sort_str and sort_str.isidentifier():
        sql = "select * from new_room_info order by {} desc limit {} ,15".format(sort_str, skip_num)
    result_tuple = db.fetchall(sql)
    # Column positions differ from room_info: this table has one extra
    # leading column, shifting most fields by one.
    keys_and_cols = [
        ("barid", 0), ("barlevel", 4), ("onlinenum", 5), ("heatnow", 6),
        ("name", 7), ("isofficial", 8), ("labelname", 9), ("data_time", 10),
        ("day_sumamount", 12), ("week_sumamount", 13), ("membernum", 14),
    ]
    data = [{key: row[col] for key, col in keys_and_cols} for row in result_tuple]
    return render_template("new_room_info.html", data=data)
@hot_room_bp.route("/hot_room")
# NOTE(review): this view reuses the function name ``new_room_info`` and so
# shadows the view defined above at module level.  The route itself was
# registered at decoration time, so routing presumably still works, but the
# duplicate name should be fixed once url_for/endpoint callers are audited.
# Kept as-is here to avoid changing the endpoint name.
def new_room_info():
    """Render a paginated (15 rows/page) listing of hot_room_info rows."""
    try:
        page = int(request.args.get("p", 1))
    except ValueError:
        page = 1
    sort_str = request.args.get("sort")
    skip_num = (page - 1) * 15
    sql = "select * from hot_room_info limit {} ,15".format(skip_num)
    # Only accept a plain identifier as the sort column (SQL-injection guard).
    if sort_str and sort_str.isidentifier():
        sql = "select * from hot_room_info order by {} desc limit {} ,15".format(sort_str, skip_num)
    result_tuple = db.fetchall(sql)
    keys_and_cols = [
        ("barid", 0), ("barlevel", 4), ("onlinenum", 5), ("heatnow", 6),
        ("name", 7), ("isofficial", 8), ("labelname", 9), ("data_time", 10),
    ]
    data = [{key: row[col] for key, col in keys_and_cols} for row in result_tuple]
    return render_template("hot_room.html", data=data)
@country_hp.route("/country")
def country():
    """Render per-country aggregate stats for one day.

    Defaults to today; falls back to yesterday while today's capture (18h
    window, 64800 s) is not yet complete.
    """
    data_time = request.args.get("data_time", '')
    # The value is interpolated into the SQL string below, so only accept a
    # strict YYYY-MM-DD date; anything else falls back to the default.
    try:
        datetime.datetime.strptime(data_time, "%Y-%m-%d")
    except ValueError:
        data_time = ''
    if not data_time:
        data_time = (datetime.date.today()).strftime("%Y-%m-%d")
    if int(time.time()) - 64800 < unix_time(dt=data_time):
        data_time = (datetime.datetime.now() + datetime.timedelta(days=-1)).date().strftime('%Y-%m-%d')
    sql = "select * from country_aggregate_info where data_time='{}' ORDER BY room_num DESC".format(data_time)
    result_tuple = db.fetchall(sql)
    keys = ["date_time", "name", "room_num", "day_sumamount", "avg_day_sumamount",
            "week_sumamount", "avg_week_sumamount", "onlinenum", "avg_onlinenum"]
    data = [dict(zip(keys, row)) for row in result_tuple]
    # The query already orders by room_num; re-sort defensively in Python as
    # the original did.
    data = sorted(data, key=lambda x: x['room_num'], reverse=True)
    return render_template("country.html", data=data)
@room_hp.route("/room")
def room():
    """Render the full room aggregate history, newest first."""
    rows = db.fetchall("select * from room_aggregate_info ORDER BY data_time DESC")
    # Tuple positions 0..7 map onto these template keys, in order.
    columns = ("date_time", "room_num", "day_sumamount", "avg_day_sumamount",
               "onlinenum", "avg_onlinenum", "week_sumamount", "avg_week_sumamount")
    data = [dict(zip(columns, row)) for row in rows]
    return render_template("room.html", data=data)
@type_hp.route("/type")
def type_demo():
    """Render per-type aggregate stats for one day, plus the last 7 capture
    dates for the template's date selector."""
    date_time = request.args.get("date_time", '')
    # The value is interpolated into the SQL string below, so only accept a
    # strict YYYY-MM-DD date; anything else falls back to the default.
    try:
        datetime.datetime.strptime(date_time, "%Y-%m-%d")
    except ValueError:
        date_time = ''
    if not date_time:
        date_time = (datetime.date.today()).strftime("%Y-%m-%d")
    if int(time.time()) - 64800 < unix_time(dt=date_time):
        date_time = (datetime.datetime.now() + datetime.timedelta(days=-1)).date().strftime('%Y-%m-%d')
    # Last 7 distinct capture dates.
    sql_data_time = "select data_time from type_aggregate_info GROUP BY data_time desc limit 7"
    date_time_list = [row[0] for row in db.fetchall(sql_data_time)]
    sql = "select * from type_aggregate_info where data_time='{}' ORDER BY room_num DESC".format(date_time)
    keys = ["date_time", "name", "room_num", "day_sumamount", "avg_day_sumamount",
            "week_sumamount", "avg_week_sumamount", "onlinenum", "avg_onlinenum"]
    data = [dict(zip(keys, row)) for row in db.fetchall(sql)]
    data = sorted(data, key=lambda x: x['room_num'], reverse=True)
    return render_template("type.html", data=data, datatime=date_time_list)
@personal_hp.route("/personal")
def personal():
    """Render the full history for a single room identified by ``barid``."""
    try:
        # barid is interpolated into the SQL string below -- force it to an
        # integer so a crafted query argument cannot inject SQL.  A missing
        # or non-numeric barid renders an empty page instead of a DB error.
        barid = int(request.args.get("barid"))
    except (TypeError, ValueError):
        return render_template("personal.html", data=[])
    sql = "select * from room_info where barid= {} order by data_time desc ".format(barid)
    keys_and_cols = [
        ("barid", 0), ("barlevel", 3), ("onlinenum", 4), ("heatnow", 5),
        ("name", 6), ("isofficial", 7), ("labelname", 8), ("data_time", 9),
        ("day_sumamount", 11), ("week_sumamount", 12), ("membernum", 13),
    ]
    data = [{key: row[col] for key, col in keys_and_cols} for row in db.fetchall(sql)]
    return render_template("personal.html", data=data)
|
#!/usr/local/bin/python
#encoding:utf8
'''
Classify goods by name and rules.
Name: ../data/goods_name.kv
Rules: ../data/catwords.tsv
In catwords.tsv,
group name \t must have words \t optional words
'''
import sys
import os
# Keyword sets for the coarse first-level gender/age split of a goods name.
# A name containing any keyword of a category maps to that category (a name
# may match more than one).  Keywords are Chinese, e.g. 男 = male,
# 女 = female, 童 = child.
man_woman_child_rule = {
    'man':set('男 商务 绅士 新郎'.split()),
    'woman':set('女 妇 软妹 淑女 雪纺 文胸 裙 OL 通勤 优雅 甜美 性感 露肩 小香风 公主 妈妈 孕妇 包臀 蕾丝 波西米亚 旗袍 打底裤 显瘦 旗袍'.split()),
    'child':set('童 婴儿 宝贝 宝宝 爬服'.split())
}
def read_rule(fname):
    """Load category rules from a tab-separated file.

    Each line: category name \\t comma-separated must-have words
    [\\t comma-separated optional words].  Lines with any other field count
    are skipped.  Returns {'man'|'woman'|'child': {cname: (must_set, opt_set)}},
    bucketed by the first character of the category name (男/女/童); other
    prefixes are ignored.
    """
    rules = {'man': {}, 'woman': {}, 'child': {}}
    prefix_to_bucket = {'男': 'man', '女': 'woman', '童': 'child'}
    # Context manager closes the file even on error (the original leaked the
    # handle opened in the for-statement).
    with open(fname, 'r') as fin:
        for line in fin:
            parts = line.strip().split('\t')
            if len(parts) == 2:
                parts.append('')          # no optional words
            elif len(parts) != 3:
                continue                  # malformed line
            bucket = prefix_to_bucket.get(parts[0][:1])
            if bucket is not None:
                rules[bucket][parts[0]] = (set(parts[1].split(',')),
                                           set(parts[2].split(',')))
    return rules
def first_level(name):
    """Return every coarse category (man/woman/child) whose keyword set hits
    *name*; empty list when none match."""
    global man_woman_child_rule
    matches = []
    for category, keywords in man_woman_child_rule.items():
        # One keyword hit is enough to claim the category.
        if any(word in name for word in keywords):
            matches.append(category)
    return matches
def word_in_name(word, name):
    """True when *word* occurs in *name*.  A '|' in *word* separates
    alternatives, any one of which may match."""
    alternatives = word.split('|')
    return any(alt in name for alt in alternatives)
def classify(name, rules):
    """Score *name* against every rule of its first-level categories.

    Returns {category_name: score} for the top three scoring categories, or
    {} when no first-level category matches.  A rule scores 100 when all of
    its must-have words occur in the name, plus the length of every optional
    word that also occurs.
    """
    first_categories = first_level(name)
    if not first_categories:
        return {}
    score_board = {}  # cname -> score
    for first_category in first_categories:
        crules = rules[first_category]
        for cname in crules:
            mset, oset = crules[cname]
            if not mset:
                continue
            # Every must-have word (possibly 'a|b' alternatives) must match.
            if not all(word_in_name(item, name) for item in mset):
                continue
            score_board[cname] = 100
            # NOTE(review): optional words use plain substring matching, not
            # word_in_name, so '|' alternatives are not honoured here --
            # presumably intentional, but worth confirming.
            for item in oset:
                if item in name:
                    score_board[cname] += len(item)
    # dict.items().sort() only worked on Python 2; sorted() works on both.
    top_items = sorted(score_board.items(), key=lambda x: x[1], reverse=True)[:3]
    return dict(top_items)
def main():
if len(sys.argv) != 3:
print 'Usage:<goods.name> <rule>'
sys.exit(0)
rules = read_rule(sys.argv[2])
fgoodsname = open(sys.argv[1])
for no, line in enumerate(fgoodsname):
parts = line.strip().split('\t')
if len(parts) < 2:
continue
gid = int(parts[0])
name = parts[1]
categories = classify(name, rules) # in {}, classname -> weight
if not categories:
print '%d\t%s\tNOCAT' % (gid, name)
else:
for category in categories:
print '%d\t%s\t%s\t%.2f' % (gid, name, category, categories[category])
fgoodsname.close()
# Script entry point.
if __name__ == '__main__':
    main()
|
import requests
import os
import shutil
import pandas
import datetime
import pickle
from pandas_datareader import data as pd_data
from pandas_datareader import base as pd_base
from bs4 import BeautifulSoup
from formats.price_history import Instruments, Indice
from formats.fundamentals import Valuations, StackedValuations
class Storage():
    """Filesystem layout manager for investing data under ``root_folder``.

    A "resource" object is expected to provide ``select_folder(storage)``
    (which calls back into one of the folder-selector methods below),
    ``filename()``, ``load_from(path)`` and ``save_to(path)`` -- see load()
    and save().
    """
    def __init__(self, exchange = "ASX", root_folder = "D:\\Investing\\"):
        # Root of the whole data tree, and the exchange sub-folder used for
        # per-stock data.
        self.root = root_folder
        self.exchange = exchange
    @property
    def data(self):
        # Per-exchange data folder, e.g. <root>/Data/ASX.
        return os.path.join(self.root, "Data", self.exchange)
    @property
    def valuations(self):
        # Per-exchange valuations folder, e.g. <root>/Valuations/ASX.
        return os.path.join(self.root, "Valuations", self.exchange)
    def workspace(self, resource):
        # Shared scratch folder; *resource* is currently unused here.
        return os.path.join(self.root, "Workspace")
    def load(self, resource):
        """Load *resource* from the folder it selects for itself."""
        folder = resource.select_folder(self)
        filename = resource.filename()
        return resource.load_from(os.path.join(folder, filename))
    def save(self, resource):
        """Save *resource* into its selected folder, creating it if needed."""
        folder = resource.select_folder(self)
        self.check_directory(folder)
        file_path = os.path.join(folder, resource.filename())
        resource.save_to(file_path)
    def exchange_information(self, resource):
        # Exchange-level metadata lives directly under <root>/Data.
        return os.path.join(self.root, "Data")
    def indice_folder(self, resource):
        # Index data shared across exchanges.
        return os.path.join(self.root, "Data", "Indices")
    def stock_folder(self, resource):
        # Per-ticker folder, e.g. <root>/Data/ASX/<ticker>.
        return os.path.join(self.data, resource.ticker)
    def financials(self, resource):
        return os.path.join(self.stock_folder(resource), "Financials")
    def CMCsummary(self, resource):
        # CMC summaries are stored alongside the other financials.
        return self.financials(resource)
    def annual_financials(self, resource):
        return os.path.join(self.stock_folder(resource), "Financials", "Annual")
    def interim_financials(self, resource):
        return os.path.join(self.stock_folder(resource), "Financials", "Interim")
    def price_history(self, resource):
        return self.stock_folder(resource)
    def analysis_summary(self, resource):
        return self.stock_folder(resource)
    def valuation_summary(self, resource):
        return self.valuations
    def check_directory(self, path):
        """Create *path* if missing; when *path* looks like a file path, its
        parent directory is created instead."""
        # Heuristic: a dot in the last component means *path* is a file name.
        if "." in os.path.basename(path):
            path = os.path.dirname(path)
        if not os.path.exists(path):
            os.makedirs(path)
    def list_files(self, root_dir, search_term = ""):
        """Return the names in *root_dir* containing *search_term*
        (all names when *search_term* is empty)."""
        all_files = os.listdir(root_dir)
        return [filename for filename in all_files if search_term in filename]
    def migrate_file(self, old_folder, destination, filename):
        """Move *filename* from *old_folder* into *destination*, creating the
        destination directory when missing."""
        dest_file = os.path.join(destination, filename)
        self.check_directory(dest_file)
        shutil.move(os.path.join(old_folder, filename), dest_file)
    def get_instruments(self, excluded_tickers = None):
        """Load this exchange's Instruments, optionally excluding tickers."""
        instruments = Instruments(self.exchange)
        instruments = self.load(instruments)
        if excluded_tickers is not None:
            instruments.exclude(excluded_tickers)
        return instruments
    def get_valuations(self, type, date = None):
        """Load StackedValuations of *type*; defaults to the latest dated
        file found in the valuations folder."""
        if date is None:
            # Find the most recent valuations
            files = os.listdir(self.valuations)
            filename = StackedValuations(type).filename()
            date = self.find_latest_date(files, filename)
        valuations = StackedValuations(type, date)
        return self.load(valuations)
    def get_indice(self, ticker):
        """Load the Indice resource for *ticker*."""
        indice = Indice(ticker)
        return self.load(indice)
    def find_latest_date(self, files, filename):
        """
        From a list of files (e.g. retrieved from a directory), and the
        filename format of a file type find the latest dated file of that
        type. Relies on the date format being of form YYYYMMDD.
        Used for data formats which include a date in the filename, e.g.
        Valuations, ValuationMetrics.
        """
        # *filename* is a pattern like "prefix*.suffix"; the '*' marks where
        # the YYYYMMDD date sits.
        label = filename[:filename.find("*")]
        suffix = filename[(filename.find("*") + 1):]
        valuation_files = [file for file in files if label in file]
        # NOTE(review): max() raises ValueError when no matching files exist
        # -- callers presumably guarantee at least one; confirm.
        datenums = [int(file.replace(label, '').replace(suffix, '')) for file in valuation_files]
        latest_date = max(datenums)
        return str(latest_date)
|
# t0pic - pic.t0.vc
# MIT License
import random
import string
from flask import abort, Flask, request, redirect
from pathlib import Path
from PIL import Image
PICS = Path('data')
MAX_SIZE = 1920
PORT = 5003
URL = 'https://pic.t0.vc'
POST = 'pic'
def help():
    """Return the landing page body: an HTML upload form embedded in a
    man-page-style usage text."""
    # {0} in the form template is URL; the second .format argument (POST) is
    # unused by this template but kept as-is.
    form = (
        '<form action="{0}" method="POST" accept-charset="UTF-8" enctype="multipart/form-data">'
        '<input name="web" type="hidden" value="true">'
        '<input name="pic" type="file" accept="image/*" />'
        '<br><br><button type="submit">Submit</button></form>'.format(URL, POST)
    )
    # Outer template placeholders: {0}=POST field name, {1}=site URL,
    # {2}=upload form HTML, {3}=max image dimension.
    return """
<pre>
pic.t0.vc
NAME
    t0pic: command line image host.
USAGE
    <image output> | curl -F '{0}=@/dev/stdin' {1}
    or upload from the web:
    {2}
    or paste the image into this page.
DESCRIPTION
    I got sick of imgur not working on mobile, so I built this
    Images are resized to a max dimension of {3} px
    Don't use this for anything serious
EXAMPLES
    ~$ cat kitten.jpg | curl -F '{0}=@/dev/stdin' {1}
    {1}/YXKV.jpg
    ~$ firefox {1}/YXKV.jpg
    Add this to your .bashrc:
    alias {0}="curl -F '{0}=@/dev/stdin' {1}"
    Now you can pipe directly into {0}!
SOURCE CODE
    https://txt.t0.vc/CQQE
    nginx config: https://txt.t0.vc/ZKEH
    https://github.com/tannercollin/t0pic
SEE ALSO
    https://txt.t0.vc
    https://url.t0.vc
</pre>""".format(POST, URL, form, MAX_SIZE)
def paste():
    """Return the page's JavaScript: on paste, grab the clipboard image,
    POST it as form field 'pic', then navigate to the URL the server
    returns (or alert on failure)."""
    return """
window.addEventListener('paste', e => {
    const file = e.clipboardData.items[0].getAsFile();
    const url = URL.createObjectURL(file);
    const image = new Image();
    image.src = url;
    const form = new FormData();
    form.append('pic', file);
    fetch('/', {
        method: 'POST',
        body: form
    })
    .then(r => r.text())
    .then(u => window.location = u)
    .catch(e => alert(e.message));
});
"""
def new_id():
    """Return a random four-character ID drawn from A-Z."""
    chars = [random.choice(string.ascii_uppercase) for _ in range(4)]
    return ''.join(chars)
# Flask application object and routes.
flask_app = Flask(__name__)
@flask_app.route('/', methods=['GET'])
def index():
    # Landing page: the paste-upload script plus the help/usage text.
    return '<html><script>{}</script><body>{}</body></html>'.format(paste(), help())
@flask_app.route('/', methods=['POST'])
def new():
    """Accept an uploaded PNG/JPEG, store it under a fresh 4-letter ID, and
    return its URL (redirect for web-form uploads).  Any invalid upload is
    rejected with HTTP 400."""
    try:
        # Draw IDs until one is unused.
        nid = new_id()
        while nid in [p.stem for p in PICS.iterdir()]:
            nid = new_id()
        pic = request.files['pic']
        if not pic:
            # The original used a bare `raise` with no active exception,
            # which itself raised RuntimeError -- reaching abort(400) only
            # by accident.  Raise explicitly instead.
            raise ValueError('missing pic upload')
        pic = Image.open(pic)
        if pic.format == 'PNG':
            ext = '.png'
        elif pic.format == 'JPEG':
            ext = '.jpg'
        else:
            raise ValueError('unsupported image format')
        filename = nid + ext
        # Image.ANTIALIAS was removed in Pillow 10; LANCZOS is the same filter.
        pic.thumbnail([MAX_SIZE, MAX_SIZE], Image.LANCZOS)
        pic.save(str(PICS.joinpath(filename)))
        print('Adding pic {}'.format(nid))
        url = URL + '/' + nid
        if 'web' in request.form:
            return redirect(url)
        else:
            return url + '\n'
    except Exception:
        # Anything that went wrong is treated as a client error.
        abort(400)
flask_app.run(port=PORT)
|
# /usr/bin/env python3.5
# -*- mode: python -*-
# =============================================================================
# @@-COPYRIGHT-START-@@
#
# Copyright (c) 2020, Qualcomm Innovation Center, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-3-Clause
#
# @@-COPYRIGHT-END-@@
# =============================================================================
""" Implements straight through gradient computation for Quant op"""
import torch
def compute_dloss_by_dx(x, grad, encoding_min, encoding_max):
    """
    Compute the derivative w.r.t. the input using the straight-through
    estimator: the incoming gradient passes through unchanged wherever
    encoding_min <= x <= encoding_max and is zeroed elsewhere.
    :param x: input tensor seen on the forward pass
    :param grad: gradient flowing from downstream
    :param encoding_min: encoding min grid param used on forward pass
    :param encoding_max: encoding max grid param used on forward pass
    :return: gradient w.r.t input
    """
    # Build a 0/1 mask for the in-range region, then scale the gradient.
    min_tensor = torch.Tensor([encoding_min]).to(x.device)
    within_range = torch.le(min_tensor, x) & torch.le(x, encoding_max)
    pass_through = torch.where(within_range,
                               torch.ones_like(x),
                               torch.zeros_like(x))
    return pass_through * grad
|
def count_words(arr):
    """Return a dict mapping each word in *arr* to its occurrence count."""
    counts = {}
    for word in arr:
        counts[word] = counts.get(word, 0) + 1
    return counts
|
import time
import random
from FixedLightStrip import FixedLightStrip
from ZenShiftLightStrip import ZenShiftLightStrip
if __name__ == '__main__':
    # Drive the LED strip forever: advance its animation state and push the
    # new colors out roughly ten times per second.
    light_strip = ZenShiftLightStrip()
    while True:
        light_strip.get_next_state()
        light_strip.update()
        time.sleep(0.1)
|
from datetime import timedelta, datetime
import MySQLdb
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import urllib3
def query_mysql(fecha_fin, minutos_antes):
    """Fetch accelerometer samples for sensor id=6 from the remote endpoint.

    Slides a window of *minutos_antes* minutes back from *fecha_fin* until a
    window holds a plausible sample count (within 10% of 24000), then returns
    (DataFrame with numeric x/y/z columns, window start time).  A negative
    *minutos_antes* queries forwards instead.  Gives up (empty DataFrame)
    after 10 retries or when the window falls before the hard lower bound.
    """
    # Hard lower bound for the backwards search.
    horizonte_temporal = datetime.strptime('2018-07-19T23:00:00', '%Y-%m-%dT%H:%M:%S')
    flag = False
    cont = 0
    while not flag:
        fecha_inicio = fecha_fin - timedelta(minutes=minutos_antes)
        http = urllib3.PoolManager()
        if minutos_antes > 0:
            url = "http://innotech.selfip.com:8282/consulta_ssvv.php?ini='{}'&fin='{}'&id=6".format(fecha_inicio, fecha_fin)
        else:
            # Negative window: the "start" lies after fecha_fin, so the
            # bounds are swapped in the query string.
            url = "http://innotech.selfip.com:8282/consulta_ssvv.php?ini='{}'&fin='{}'&id=6".format(fecha_fin, fecha_inicio)
        r = http.request('GET', url)
        # Rows are <br>-separated; the last two fragments are trailer junk.
        resultado = list(str(r.data).split("<br>"))[:-2]
        # Drop fragments containing 'r' -- presumably '\\r' escape remnants
        # from str(bytes); TODO(review): confirm the intent of this filter.
        for i in np.arange(len(resultado), 0, -1):
            if 'r' in resultado[i - 1]:
                del (resultado[i - 1])
        if 0.9 * 24000 <= len(resultado) <= 1.1 * 24000:
            flag = True
        else:
            cont += 1
            fecha_fin = fecha_inicio
            if fecha_inicio.date() < horizonte_temporal.date() or cont > 10:
                print('Busqueda de ventana temporal fuera de rango.\nNo es posible estudiar en frecuencia este intervalo.\n')
                df = pd.DataFrame()
                return df, fecha_inicio
    print(f'{fecha_inicio} & {fecha_fin}')
    # Each row looks like "<timestamp>,x,y,z"; keep only the axis values.
    lista = [None] * len(resultado)
    for i, row in enumerate(resultado):
        u = row.split(',')
        lista[i] = u[1:]
    df = pd.DataFrame(lista, columns=['x', 'y', 'z'])
    df['x'] = pd.to_numeric(df.x)
    df['y'] = pd.to_numeric(df.y)
    df['z'] = pd.to_numeric(df.z)
    return df, fecha_inicio
def busqueda_back(path):
    """Step backwards from a fixed start time in one-minute windows, saving
    a scatter plot of the x channel for each window into *path*."""
    cursor = datetime.strptime('2019-01-03T18:10:00', '%Y-%m-%dT%H:%M:%S')
    for _ in range(20):
        df, cursor = query_mysql(cursor, 1)
        plt.scatter(df.index.values, df['x'].values, alpha=0.2)
        plt.savefig(f'{path}/Backward-{cursor}.png', dpi=500)
        plt.close()
    return
def busqueda_for(path):
    """Step forwards from a fixed start time in one-minute windows, saving
    a scatter plot of the x channel for each window into *path*."""
    cursor = datetime.strptime('2019-01-03T18:10:00', '%Y-%m-%dT%H:%M:%S')
    for _ in range(20):
        df, cursor = query_mysql(cursor, -1)
        plt.scatter(df.index.values, df['x'].values, alpha=0.2)
        plt.savefig(f'{path}/Forward-{cursor}.png', dpi=500)
        plt.close()
    return
if __name__ == '__main__':
    # Every 3 minutes
    ruta = '/home/arielmardones/Documentos/Respaldo-Ariel/SensoVal/delay_mysql/val6'
    # Every minute
    # path = '/home/arielmardones/Documentos/Respaldo-Ariel/SensoVal/delay_mysql/val6/minutes_backward'
    busqueda_back(ruta)
    busqueda_for(ruta)
|
#!/usr/bin/env python
import sys, os
import urllib2, urllib, cookielib
import socket, random
import time
import gzip
try:
from cStringIO import StringIO
except Exception, e:
from StringIO import StringIO
class urllibUtil(object):
    """Python 2 urllib2 helper: fetch URLs with a random browser User-Agent,
    transparently decompress gzip responses, and download files."""
    def __init__(self):
        # Global default timeout for all sockets opened by urllib2.
        socket.setdefaulttimeout(10)
    def openUrl(self, url, data=None):
        """Open *url* and return the response handle, or None on any error.

        NOTE(review): *data* is accepted but never sent -- every request is
        a GET; confirm callers do not expect POST behaviour.
        """
        # Fresh cookie jar per call so requests do not share session state.
        cookie_support = urllib2.HTTPCookieProcessor(cookielib.CookieJar())
        self.opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler, urllib2.HTTPRedirectHandler)
        urllib2.install_opener(self.opener)
        # proxy_support = urllib2.ProxyHandler({'http', 'http://XX.XX.XX.XX:XXXX'})
        # opener = urllib2.build_opener(proxy_support, urllib2.HTTPHandler)
        # urllib2.install_opener(opener)
        # Rotate the User-Agent so requests look like an ordinary browser.
        user_agents = [
            'Mozilla/5.0 (Windows; U; Windows NT 5.1; it; rv:1.8.1.11) Gecko/20071127 Firefox/2.0.0.11',
            'Opera/9.25 (Windows NT 5.1; U; en)',
            'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)',
            'Mozilla/5.0 (compatible; Konqueror/3.5; Linux) KHTML/3.5.5 (like Gecko) (Kubuntu)',
            'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.12) Gecko/20070731 Ubuntu/dapper-security Firefox/1.5.0.12',
            'Lynx/2.8.5rel.1 libwww-FM/2.14 SSL-MM/1.4.1 GNUTLS/1.2.9',
            "Mozilla/5.0 (X11; Linux i686) AppleWebKit/535.7 (KHTML, like Gecko) Ubuntu/11.04 Chromium/16.0.912.77 Chrome/16.0.912.77 Safari/535.7",
            "Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:10.0) Gecko/20100101 Firefox/10.0 ",
        ]
        agent = random.choice(user_agents)
        self.opener.addheaders = [('User-agent', agent),("Accept","*/*"),('Referer',url)]
        try:
            handler = self.opener.open(url)
        except Exception, e:
            return None
        return handler
    def getHtmls(self, url):
        """Fetch *url* and return the response body, gunzipping it when the
        server sent gzip content-encoding.

        NOTE(review): if openUrl() returned None, handler.read() raises
        AttributeError here (and `handler` may even be unbound when openUrl
        itself raised) -- confirm callers only pass reachable URLs.
        """
        try:
            handler = self.openUrl(url)
            if handler.headers.has_key('content-encoding'):
                if 'gzip' in handler.headers['content-encoding']:
                    # Buffer the compressed body and decompress it in memory.
                    fileobj = StringIO()
                    fileobj.write(handler.read())
                    fileobj.seek(0)
                    gzip_file = gzip.GzipFile(fileobj = fileobj)
                    return gzip_file.read()
                else:
                    return handler.read()
        except Exception, e:
            return handler.read()
        return ''
    @staticmethod
    def retrievefile(url, storage=None, callback=None):
        """Download *url* to *storage* (defaults to the URL's basename),
        reporting progress via *callback* (defaults to a percentage print)."""
        def cbk(a, b, c):
            # a: blocks transferred, b: block size, c: total size.
            per = 100.0 * a * b /c
            if per > 100:
                per = 100
            print '%.2f%%' % per
        if callback is None:
            callback = cbk
        if storage is None:
            storage = url.split('/')[-1]
        urllib.urlretrieve(url, storage, callback)
    @staticmethod
    def testescape():
        """Demo of urllib quoting/encoding helpers (prints to stdout)."""
        # url escape
        data = 'name=dasf'
        data1 = urllib.quote(data)
        print data1
        print urllib.unquote(data1)
        # json file
        data3 = urllib.urlencode({ 'name': 'dark-bull', 'age': 200 })
        print data3
        data4 = urllib.pathname2url(r'd:/a/b/c/23.php')
        print data4 # result: ///D|/a/b/c/23.php
        print urllib.url2pathname(data4) # result: D:/a/b/c/23.php
if __name__ == '__main__':
    # Demo: print url-encoded credentials, install a basic-auth opener, then
    # fetch the GitHub login page and dump response metadata.
    data = {'email': '523720676@qq.com', 'password':'123456ygs'}
    print urllib.urlencode(data)
    url = 'https://github.com/login'
    auth_handler = urllib2.HTTPBasicAuthHandler()
    auth_handler.add_password(
        realm='PDQ Application',
        uri='https://github.com/login',
        user='523720676@qq.com',
        passwd='123456ygs')
    opener = urllib2.build_opener(auth_handler)
    urllib2.install_opener(opener)
    # NOTE(review): openUrl() returns None on failure, which would make the
    # prints below raise AttributeError -- no guard here.
    handler = urllibUtil().openUrl(url, data)
    print handler.getcode()
    print handler.info()
    print handler.geturl()
|
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.template.loader import render_to_string
from django.db.models import Max
class Post(models.Model):
    """A short (42-character) status post.

    ``last_changed`` (auto_now) drives the polling helpers below: clients
    remember the newest ``last_changed`` they have seen and request only
    rows changed since then.
    """
    post = models.CharField(max_length=42)  # message body
    user = models.ForeignKey(User,on_delete=models.CASCADE)  # author
    time = models.DateTimeField(auto_now_add=True)  # created at
    last_changed = models.DateTimeField(auto_now=True)  # touched on every save
    def __str__(self):
        return self.post
    @staticmethod
    def get_max_time():
        """Newest last_changed over all posts (epoch sentinel when empty)."""
        return Post.objects.all().aggregate(Max('last_changed'))['last_changed__max'] or "1970-01-01T00:00+00:00"
    @staticmethod
    def get_max_time_follower(req_user):
        """Newest last_changed among posts by users *req_user* follows."""
        profile=Profile.objects.get(user=req_user)
        followees = profile.followees.all()
        posts = Post.objects.filter(user__in=followees).distinct()
        return posts.aggregate(Max('last_changed'))['last_changed__max'] or "1970-01-01T00:00+00:00"
    @staticmethod
    def get_max_time_profile(profile_user):
        """Newest last_changed among posts authored by *profile_user*."""
        posts = Post.objects.filter(user=profile_user).distinct()
        return posts.aggregate(Max('last_changed'))['last_changed__max'] or "1970-01-01T00:00+00:00"
    @staticmethod
    def update(time="1970-01-01T00:00+00:00"):
        """Return all posts changed strictly after *time*."""
        posts = Post.objects.filter(last_changed__gt=time).distinct()
        return posts
    @staticmethod
    def update_follower(req_user, time="1970-01-01T00:00+00:00"):
        """Posts by *req_user*'s followees changed after *time*, oldest first."""
        profile=Profile.objects.get(user=req_user)
        followees = profile.followees.all()
        posts = Post.objects.filter(user__in=followees, last_changed__gt=time).distinct().order_by("time")
        return posts
    @staticmethod
    def update_profile(profile_user, time="1970-01-01T00:00+00:00"):
        """Posts by *profile_user* changed after *time*, oldest first."""
        posts = Post.objects.filter(user=profile_user, last_changed__gt=time).distinct().order_by("time")
        return posts
    @property
    def html(self):
        """This post rendered through post.html with newlines stripped
        (so it can be embedded safely in a single-line response)."""
        return render_to_string("post.html", {"user":self.user,"post":self.post,"time":self.time,"post_id":self.id}).replace("\n", "");
class Comment(models.Model):
    """A short (42-character) comment on a Post, with the same
    polling-by-last_changed helpers as Post."""
    content = models.CharField(max_length=42)  # comment body
    user = models.ForeignKey(User,on_delete=models.CASCADE)  # author
    post = models.ForeignKey(Post,on_delete=models.CASCADE)  # parent post
    time = models.DateTimeField(auto_now_add=True)  # created at
    last_changed = models.DateTimeField(auto_now=True)  # touched on every save
    def __str__(self):
        return self.content
    @staticmethod
    def update(id, changeTime="1970-01-01T00:00+00:00"):
        """Comments on post *id* changed after *changeTime*, oldest first."""
        post = Post.objects.get(id=id)
        comms = Comment.objects.filter(post=post, last_changed__gt=changeTime).distinct().order_by("time")
        return comms
    @property
    def html(self):
        """This comment rendered through comment.html, newlines stripped."""
        return render_to_string("comment.html", {"user":self.user,"content":self.content,"time":self.time,"comment_id":self.id}).replace("\n", "");
    @staticmethod
    def get_max_time():
        """Newest last_changed over all comments (epoch sentinel when empty)."""
        return Comment.objects.all().aggregate(Max('last_changed'))['last_changed__max'] or "1970-01-01T00:00+00:00"
    @staticmethod
    def get_max_time_follower(post_id):
        """Newest last_changed among comments on post *post_id*.

        NOTE(review): despite the name, this is per-post, not per-follower;
        consider renaming once callers are audited.
        """
        post = Post.objects.get(id=post_id)
        comments = Comment.objects.filter(post=post).distinct()
        return comments.aggregate(Max('last_changed'))['last_changed__max'] or "1970-01-01T00:00+00:00"
class Profile(models.Model):
    """Per-user profile data plus the follow graph."""
    age = models.IntegerField()  # user's age (required)
    bio = models.CharField(max_length=420, default="Say something about yourself here.", blank=True)  # free-text bio
    user = models.OneToOneField(User, primary_key=True, on_delete=models.CASCADE)  # owning auth user (also the PK)
    picture = models.ImageField(upload_to="profile_pictures",default="profile.png")  # avatar image
    followees = models.ManyToManyField(User,related_name='following')  # users this profile follows
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 1 15:19:49 2021
@author: Qalbe
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Load the position-level / salary data: X = level (kept 2-D for sklearn),
# y = salary.
dataset = pd.read_csv('Poly_dataSet.csv')
X = dataset.iloc[:, 0:1].values
y = dataset.iloc[:, 1].values

# Fit a random-forest regressor on the full data set.
from sklearn.ensemble import RandomForestRegressor
rfr = RandomForestRegressor(n_estimators = 10, random_state = 0)
rfr.fit(X, y)

# Visualise on a fine grid so the step shape of the forest prediction shows.
x_grid = np.arange(min(X), max(X), 0.1)
x_grid = x_grid.reshape(len(x_grid), 1)
plt.scatter(X, y, color = 'red')
plt.plot(x_grid, rfr.predict(x_grid), color = 'blue')
# Title previously said "DT Regression", but this is a random forest.
plt.title('Truth or Bluff (Random Forest Regression)')
plt.xlabel('Position level')
plt.ylabel('Salary')
plt.show()

# Predict the salary for position level 1; sklearn expects a 2-D input
# (one sample, one feature).  np.reshape(1, (1, 1)) worked but obscured that.
rfr.predict(np.array([[1]]))
import requests
from datetime import datetime
from django.conf import settings
from celery import shared_task
from celery_progress.backend import ProgressRecorder
from .models import Channel, Video
@shared_task(bind=True)
def get_video_stats(self):
    """Celery task: rebuild the Video table from the YouTube Data API.

    Deletes all Video rows, then walks every Channel's playlist page by page
    (50 items per page), fetching per-video statistics and saving one Video
    row per item while reporting page-level progress.
    """
    progress_recorder = ProgressRecorder(self)
    Video.objects.all().delete()
    channels = Channel.objects.all()
    channel_ids = ','.join([channel.playlist_id for channel in channels])
    url = f'https://www.googleapis.com/youtube/v3/playlists?id={channel_ids}&part=contentDetails&key={settings.YOUTUBE_API_KEY}'
    res = requests.get(url)
    # One playlistItems page covers 50 items -> itemCount // 50 + 1 pages.
    total_requests = 0
    for item in res.json()['items']:
        total_requests += int(item['contentDetails']['itemCount']) // 50 + 1
    i = 0
    for channel in channels:
        playlist_api_url = f'https://www.googleapis.com/youtube/v3/playlistItems?playlistId={channel.playlist_id}&part=snippet&maxResults=50&key={settings.YOUTUBE_API_KEY}'
        while True:
            playlist_res = requests.get(playlist_api_url)
            results = playlist_res.json()
            videos_ids = [item['snippet']['resourceId']['videoId'] for item in results['items']]
            video_ids_string = ','.join(videos_ids)
            video_api_url = f'https://www.googleapis.com/youtube/v3/videos?id={video_ids_string}&part=snippet,statistics&key={settings.YOUTUBE_API_KEY}'
            video_res = requests.get(video_api_url)
            for item in video_res.json()['items']:
                date_published = datetime.strptime(item['snippet']['publishedAt'], '%Y-%m-%dT%H:%M:%SZ')
                stats = item['statistics']
                video = Video(
                    title=item['snippet']['title'],
                    views=stats.get('viewCount', 0),
                    # 'likeCount' is absent when a video hides like counts;
                    # the original raised KeyError and killed the task.
                    likes=stats.get('likeCount', 0),
                    youtube_id=item['id'],
                    date_published=date_published,
                    channel=channel
                )
                video.save()
            # Progress is counted per API page, matching total_requests.
            i += 1
            progress_recorder.set_progress(i, total_requests, f'On iteration {i}')
            if 'nextPageToken' in results:
                nextPageToken = results['nextPageToken']
                playlist_api_url = f'https://www.googleapis.com/youtube/v3/playlistItems?playlistId={channel.playlist_id}&part=snippet&maxResults=50&key={settings.YOUTUBE_API_KEY}&pageToken={nextPageToken}'
            else:
                break
from processamento import Processar_Strings # Bibliotecas de suporte à execução do programa
import pandas as pd
import numpy as np
import math
import random
class NaiveBayes:
    def __init__(self):
        """Initialise all state used across the Naive Bayes pipeline."""
        self.ficheiro = None  # Raw data read from "spam.csv"
        self.lista = []  # Text corpus after string preprocessing
        self.x_treino = []  # Training inputs
        self.y_treino = []  # Training labels
        self.x_teste = []  # Test inputs
        self.y_teste = []  # Test labels
        self.x_validacao = []  # Validation inputs
        self.y_validacao = []  # Validation labels
        self.spam = 0  # Count of spam messages
        self.ham = 0  # Count of ham messages
        self.totalPalavras = []  # Vocabulary: all distinct training words
        self.matrizP = {}  # Word-count matrix P
        self.matrizP_Relativa = {}  # Normalised (relative) matrix P
        self.b = 0  # Bias term b
        self.c = 158698421  # Constant c -- magic value; purpose not visible here
        self.confMatriz = None  # Confusion matrix
        self.confMatriz_Relativa = None  # Relative confusion matrix
def algoritmo(self): # Método que corre os vários passos do algoritmo
self.processamento() # Chamada do método processamento()
self.train_X_Y() # Chamada do método train_X_Y()
self.lista_Palavras(self.getX_treino()) # Chamada do método lista_Palavras()
self.computacao_Ham_Spam(self.getX_treino(), self.getY_treino()) # Chamada do método computacao_Ham_Spam()
self.inicializa_P() # Chamada do método inicializa_P()
self.contar_Palavras() # Chamada do método contar_Palavras()
self.normalizar_Contagem() # Chamada do método normalizar_Contagem()
self.inicializa_B() # Chamada do método inicializa_B()
self.classificar(self.getX_treino(), self.getY_treino()) # Chamada do método classificar()
def processamento(self): # Método para ler o ficheiro e processar o texto
self.ficheiro = pd.read_csv("spam.csv", encoding = "latin-1")
self.ficheiro = self.ficheiro.drop(["Unnamed: 2", "Unnamed: 3", "Unnamed: 4"], axis = 1)
self.ficheiro = self.ficheiro.rename(columns = {"v1": "Tipo", "v2": "Texto"})
proc_palavra = Processar_Strings(self.ficheiro["Texto"], self.lista)
proc_palavra.processamentoPal()
def train_X_Y(self): # Método para definir o conjunto de treino, teste e validação
y = self.ficheiro["Tipo"]
y = np.array(y)
validacao_Inverso = round(len(self.lista) * 0.7)
teste_Inverso = round(len(self.lista) * 0.85)
self.x_treino, self.y_treino, = self.lista[:validacao_Inverso], y[:validacao_Inverso]
self.x_validacao, self.y_validacao = self.lista[validacao_Inverso:teste_Inverso], y[validacao_Inverso:teste_Inverso]
self.x_teste, self.y_teste = self.lista[teste_Inverso:], y[teste_Inverso:]
def lista_Palavras(self, x_treino): # Método para criar uma lista de todas as palavras contidas no conjunto de treino
for email in x_treino:
for palavra in email.split():
if palavra not in self.totalPalavras:
self.totalPalavras.append(palavra)
def computacao_Ham_Spam(self, X_treino, Y_treino): # Método para calcular o número de mensagens spam e ham
x_treino = np.array(X_treino)
y_treino = np.array(Y_treino)
total = 0
for tipo in y_treino:
if(tipo == "spam"):
self.spam += 1
else:
self.ham += 1
total += 1
print("\n------------- Naïve Bayes -------------")
print("\nNúmero de mensagens de treino Spam: ", self.spam)
print("Número de mensagens de treino Ham: ", self.ham)
print("Número Total de mensagens de treino: ", total)
def inicializa_B(self): # Método para calcular o valor da variável b
self.b = math.log(self.c) + math.log(self.ham) - math.log(self.spam)
def inicializa_P(self): # Método para criar uma matriz P com todas as palavras do totalPalavras
for palavra in self.totalPalavras:
self.matrizP[palavra] = [1, 1]
def contar_Palavras(self): # Método para contar quantas determinada palavra surge em sms/email spam ou sms/email ham
X_treino = np.array(self.x_treino)
Y_treino = np.array(self.y_treino)
for i in range(len(X_treino)):
if Y_treino[i] == "spam":
for palavra in self.totalPalavras:
if palavra in X_treino[i]:
self.matrizP[palavra][0] += 1
else:
for palavra in self.totalPalavras:
if palavra in X_treino[i]:
self.matrizP[palavra][1] += 1
def normalizar_Contagem(self): # Método para converter os valores absolutos da Matriz P em valores relativos, numa nova matriz
palavras_totais_spam = 0
palavras_totais_ham = 0
self.matrizP_Relativa = dict()
for x in self.matrizP:
palavras_totais_spam += self.matrizP[x][0]
palavras_totais_ham += self.matrizP[x][1]
self.matrizP_Relativa[x] = [self.matrizP[x][0], self.matrizP[x][1]]
for x in self.matrizP_Relativa:
self.matrizP_Relativa[x][0] = self.matrizP_Relativa[x][0] / palavras_totais_spam
self.matrizP_Relativa[x][1] = self.matrizP_Relativa[x][1] / palavras_totais_ham
self.palavras_totais_spam = palavras_totais_spam
self.palavras_totais_ham = palavras_totais_ham
def classificar(self, x, y): # Método para classificar o sms/email como spam ou ham e obter o valor das métricas
x_treino = np.array(x)
y_treino = np.array(y)
verdadeiros_Positivos = 0
falsos_Negativos = 0
verdadeiros_Negativos = 0
falsos_Positivos = 0
for i in range(len(x_treino)):
t = self.limite_T(x_treino[i])
if t > 0 and y_treino[i] == "spam":
verdadeiros_Positivos += 1
if t > 0 and y_treino[i] == "ham":
falsos_Negativos += 1
if t < 0 and y_treino[i] == "ham":
verdadeiros_Negativos += 1
if t < 0 and y_treino[i] == "spam":
falsos_Positivos += 1
t = self.limite_T(x_treino[i])
self.exatidao = ((verdadeiros_Positivos + verdadeiros_Negativos) /
(verdadeiros_Positivos + verdadeiros_Negativos +
falsos_Positivos + falsos_Negativos)) * 100
self.sensibilidade = (verdadeiros_Positivos / (verdadeiros_Positivos + falsos_Negativos)) * 100
self.precisao = (verdadeiros_Positivos / (verdadeiros_Positivos
+ falsos_Positivos)) * 100
self.fScore = 2 * (float(self.sensibilidade * self.precisao) /
float(self.sensibilidade + self.precisao))
self.confMatriz = np.array([[verdadeiros_Positivos, falsos_Negativos], [falsos_Positivos, verdadeiros_Negativos]])
self.confMatriz_Relativa = np.array([[round(verdadeiros_Positivos/(verdadeiros_Positivos + falsos_Positivos), 3),
round(falsos_Negativos/(verdadeiros_Negativos + falsos_Negativos), 3)],
[round(falsos_Positivos/(verdadeiros_Positivos + falsos_Positivos), 3),
round(verdadeiros_Negativos/(verdadeiros_Negativos + falsos_Negativos), 3)]])
print("\n------ Métricas de Classificação ------\n")
print("Exatidão {:0.3f}" .format(self.exatidao))
print("Sensibilidade {:0.3f}" .format(self.sensibilidade))
print("Precisão {:0.3f}" .format(self.precisao))
print("fScore {:0.3f}\n" .format(self.fScore))
print("---------- Matriz de Confusão ---------\n")
print(self.confMatriz)
print()
print(self.confMatriz_Relativa)
def limite_T(self, email): # Método para calcular t
t = self.b * (-1)
vetor = []
vetorAux = []
for word in email.split():
vetor.append(word)
for ele in vetor:
if ele in vetorAux:
pass
else:
if str(ele) in self.matrizP_Relativa:
t += vetor.count(ele) * (math.log(self.matrizP_Relativa[ele][0] - math.log(self.matrizP_Relativa[ele][1])))
vetorAux.append(ele)
else:
pass
return t
def obterLogC(self): # Método para calcular o valor de C
self.computacao_Ham_Spam(self.x_treino, self.y_treino)
self.lista_Palavras(self.x_treino)
self.inicializa_P()
self.contar_Palavras()
self.normalizar_Contagem()
previ = 90
i = 1
vetor = list(range(100000000, 1000000000, 1))
random.shuffle(vetor)
while self.precisao <= 90:
if previ < 90:
vetor.remove(vetor[0])
previ = self.precisao
print("Valor de C: ", self.c)
print("processo: ", i)
self.inicializa_B()
self.classificar(self.x_validacao, self.y_validacao)
print(previ)
i += 1
self.c = vetor[0]
def getX_treino(self): # Método para retornar o valor de x_treino
return self.x_treino
def getY_treino(self): # Método para retornar o valor de y_treino
return self.y_treino |
# -*- coding: utf-8 -*-
import scrapy
import re
import datetime
class FootballResultSpider(scrapy.Spider):
    """Scrape the last week of football match results from info.sporttery.cn."""
    name = 'football_result'
    allowed_domains = ['info.sporttery.cn']
    # Query window: the 7 days up to today, fixed once at class-definition time.
    edate = datetime.date.today()
    sdate = edate - datetime.timedelta(days=7)
    start_urls = [
        'http://info.sporttery.cn/football/match_result.php?search_league=0&start_date={sdate}&end_date={edate}&dan='.format(
            sdate=sdate, edate=edate)]
    # Chinese weekday name -> weekday number string (Monday = "1" ... Sunday = "7").
    weekday_number = {
        "周一": "1",
        "周二": "2",
        "周三": "3",
        "周四": "4",
        "周五": "5",
        "周六": "6",
        "周日": "7",
    }
    def parse(self, response):
        """Read the pagination bar and schedule one request per result page."""
        # Collect the pagination labels (both the <a> links and the current page's <span>).
        pages = response.xpath(
            '//div[@class="match_list"]//table[@class="m-page"]//li[contains(@class,"u-pg")]/a/text()|//div[@class="match_list"]//table[@class="m-page"]//li[contains(@class,"u-pg")]/span/text()').extract()
        page_list = []
        for item in pages:
            if item.isdigit():
                page_list.append(int(item))
        if page_list:
            last_page = max(page_list)
            for item in range(1, last_page + 1):
                # Query one week of results for this page number.
                url = "match_result.php?page={page}&search_league=0&start_date={sdate}&end_date={edate}&dan=".format(
                    sdate=self.sdate,
                    edate=self.edate, page=item)
                yield scrapy.Request(url=response.urljoin(url),
                                     callback=self.parse_item,
                                     dont_filter=True)
    def parse_item(self, response):
        """Parse one result page and yield a dict with data for all finished matches."""
        match_tables = response.xpath('//div[@class="match_list"]/table[@class="m-tab"]')
        page_data = {'data':[]}
        if len(match_tables) > 0:
            match_table = match_tables[0]
            trs = match_table.xpath('./tr')
            for tr in trs:
                tds = tr.xpath('./td')
                if len(tds) <= 1:  # skip the pagination row
                    continue
                match_date = tds[0].xpath('./text()').extract_first()
                # Split e.g. "周六3" into the weekday name and the match index.
                # NOTE(review): re.match returns None when the cell text does not
                # fit this shape, which would raise AttributeError below — confirm.
                pattern = re.compile(r'([\u4e00-\u9fff]+)(\d+)', re.I)
                m = re.match(pattern, tds[1].xpath('./text()').extract_first())
                match_weekday_str, match_index = m.groups()
                match_weekday = self.weekday_number[match_weekday_str]
                # Match number format: YYYYMMDD*weekday*index.
                match_number = match_date.replace("-",
                                                  "").strip() + "*" + match_weekday + "*" + match_index.strip()
                status = tds[9].xpath('./text()').extract_first()  # status text: "已完成" (finished) / "进行中" (in progress)
                result_detail_url = tds[10].xpath('./a/@href').extract_first()
                if "已完成" in status:  # only finished matches carry final scores
                    rqs = tds[3].xpath('.//span/text()').extract_first()   # handicap cell, e.g. "...(N)"
                    bcbf = tds[4].xpath('./span/text()').extract_first()   # half-time score "home:away"
                    qcbf = tds[5].xpath('./span/text()').extract_first()   # full-time score "home:away"
                    # NOTE(review): the leading empty group "()" looks unintentional;
                    # only the second group (the text inside parentheses) is used — confirm.
                    pattern = re.compile(r'()\((.*?)\)', re.I)
                    rqs_data = pattern.findall(rqs)
                    ft_let_point_multi = 0
                    if len(rqs_data) <= 0:
                        raise Exception("让球数 格式解析错误")
                    else:
                        ft_let_point_multi = int(rqs_data[0][1])
                    ft_half_home_point, ft_half_away_point = bcbf.split(":")
                    ft_home_point, ft_away_point = qcbf.split(":")
                    data = {
                        "ft_half_home_point": int(ft_half_home_point),
                        "ft_half_away_point": int(ft_half_away_point),
                        "ft_home_point": int(ft_home_point),
                        "ft_away_point": int(ft_away_point),
                        "match_status": 2,  # 2 = match finished (result available)
                        "ft_let_point_multi": int(ft_let_point_multi),
                        "match_number": match_number
                    }
                    page_data['data'].append(data)
                    # yield scrapy.Request(url=response.urljoin(result_detail_url),
                    #                      callback=self.parse_award,
                    #                      dont_filter=True)
        yield page_data
    def parse_award(self, response):
        """Debug callback for a result-detail page: dump the raw response body."""
        print(response.body)
|
# Copyright (c) 2020-2022, Manfred Moitzi
# License: MIT License
import pathlib
import ezdxf
from ezdxf import zoom
# Output directory for the generated DXF file.
CWD = pathlib.Path("~/Desktop/Outbox").expanduser()
if not CWD.exists():
    # Fall back to the current working directory when the Outbox folder is missing.
    CWD = pathlib.Path(".")
# ------------------------------------------------------------------------------
# This example shows how to convert an ARC into a SPLINE.
# ------------------------------------------------------------------------------
def main():
    """Create a full-circle ARC, derive a SPLINE from it and save the drawing."""
    doc = ezdxf.new()
    modelspace = doc.modelspace()
    circle_arc = modelspace.add_arc(
        center=(0, 0),
        radius=1.0,
        start_angle=0,
        end_angle=360,
        dxfattribs={"layer": "arc"},
    )
    # Keep the original ARC (replace=False) and place the converted SPLINE
    # on its own layer with a distinct color.
    bspline = circle_arc.to_spline(replace=False)
    bspline.dxf.layer = "B-spline"
    bspline.dxf.color = 1
    zoom.extents(modelspace)
    doc.saveas(CWD / "spline_from_arc.dxf")


if __name__ == "__main__":
    main()
|
from django.db import models
# Create your models here.
class Movie(models.Model) :
    """Scraped movie record; every scalar attribute is stored as raw text."""
    # NOTE(review): max_length=20 is tight for real movie titles/actor names —
    # confirm against the scraped data.
    title = models.CharField('title', max_length=20)
    genre = models.CharField('genre', max_length=10)
    year = models.CharField('year', max_length=5)
    date = models.CharField('date', max_length=10)
    rating = models.CharField('rating', max_length=10)
    vote_count = models.CharField('vote_count', max_length=10)
    plot = models.TextField('plot')
    main_act = models.CharField('main_act', max_length=20)
    supp_act = models.CharField('supp_act', max_length=20)
    page_url = models.URLField('page_url')
    img_url = models.URLField('img_url')
class movieTitle(models.Model) :
    """Holds just a movie title (lookup table of known titles)."""
    title = models.CharField('title', max_length=20)
class movieInfo(models.Model) :
    """Movie record variant that also stores the title with spaces removed."""
    title = models.CharField('title', max_length=20)
    # Title with whitespace stripped (e.g. for URL/search matching).
    titleNoSpace = models.CharField('titleNoSpace', max_length=20)
    genre = models.CharField('genre', max_length=10)
    year = models.CharField('year', max_length=5)
    date = models.CharField('date', max_length=10)
    rating = models.CharField('rating', max_length=10)
    vote_count = models.CharField('vote_count', max_length=10)
    plot = models.TextField('plot')
    main_act = models.CharField('main_act', max_length=20)
    supp_act = models.CharField('supp_act', max_length=20)
    page_url = models.URLField('page_url')
    img_url = models.URLField('img_url')
class sentiment(models.Model) :
    """A single rating value (up to two characters of text)."""
    # NOTE(review): max_length on a TextField is only enforced by forms, not at
    # the database level — a CharField may be intended here; confirm.
    rating = models.TextField('rating', max_length=2)
from PylabUtils.misc.normCols import normCols
from PylabUtils.misc.normRows import normRows
from PylabUtils.misc.timing import Timer, tic, toc
from PylabUtils.misc.expecting import expecting
from PylabUtils.misc.find2 import find2
from PylabUtils.misc.minimizedAngle import minimizedAngle
from PylabUtils.misc.circularMean import circularMean
|
import csv
import math
import random
# Categorical-value -> integer-code lookup tables, one per CSV column.
age_group = {}  # placeholder: age passes through unencoded
jobs = {"admin.": 0,"blue-collar": 1,"entrepreneur": 2,"housemaid": 3,"management": 4,"retired": 5,"self-employed": 6,"services": 7,"student": 8,"technician": 9,"unemployed": 10}
marital = {"single": 0, "married": 1, "divorced": 2}
education = {"illiterate": 0, "basic.4y": 1, "basic.6y": 2, "basic.9y": 3, "high.school": 4, "university.degree": 5, "professional.course": 6}
contact = {"telephone": 0, "cellular": 1}
yes_no = {"no": 0, "yes": 1}
poutcome = {"nonexistent": 0, "failure": 1, "success": 2}
month = {"jan":0, "feb": 1, "mar": 2, "apr":3, "may": 4, "jun":5, "jul": 6, "aug": 7, "sep": 8, "oct": 9, "nov": 10, "dec": 11}
days_week = {"mon": 0, "tue":1, "wed": 2, "thu": 3, "fri": 4}
# Per-column encoder list (index = column number); empty dicts mark columns
# that are copied verbatim.
vector = [{},jobs, marital, education, yes_no, yes_no, contact, \
month, days_week, {}, {}, {}, {}, poutcome, \
{}, {}, {}, {},{}, yes_no]
vector_not_in = [0,9,10,11,12,14,15,16,17,18]  # numeric columns passed through unchanged
vector_empty = []  # accumulates the encoded rows
responses = {"yes":0, "no":0}  # tally of the target label (column 19)
def getKey(item):
    """Sort key: the response label stored in column 19 of an encoded row."""
    label = item[19]
    return label
def main():
    """Encode bank_cleaned.csv and write it to bank_cleaned_preprocessed.csv.

    Categorical columns are replaced by their integer codes from ``vector``;
    the columns listed in ``vector_not_in`` are copied verbatim.
    Side effects: appends every encoded row to the module-level ``vector_empty``
    list and tallies the target label (column 19) in ``responses``.
    """
    with open('bank_cleaned.csv') as csvfile:
        csv_reader = csv.reader(csvfile, delimiter=',')
        next(csv_reader, None)  # skip the header row
        for row in csv_reader:
            new_vector = []
            for column_count, column in enumerate(row):
                if column_count in vector_not_in:
                    # numeric columns pass through unchanged
                    new_vector.append(column)
                else:
                    # categorical columns are replaced by their integer code
                    new_vector.append(vector[column_count][column])
                    if column_count == 19:
                        responses[column] += 1
            vector_empty.append(new_vector)
    # BUG FIX: open with newline='' so csv.writer does not emit blank lines
    # between rows on Windows (per the csv module documentation).
    with open('bank_cleaned_preprocessed.csv', 'w', newline='') as csvfile2:
        wr = csv.writer(csvfile2, delimiter=',', quotechar='"')
        wr.writerows(vector_empty)
    # Disabled class-balancing step kept for reference:
    '''
    vector_empty_sorted = sorted(vector_empty,key=getKey)
    vector_no = vector_empty_sorted[0:responses["no"]-1]
    vector_yes = vector_empty_sorted[responses["no"]:-1]
    vector_no = random.sample(vector_no, responses["yes"])
    new_vector = random.sample(vector_no + vector_yes, (2*responses["yes"])-1)
    with open('./result/bank_cleaned_preprocessed_balanced.csv', 'w') as csvfile2:
        wr = csv.writer(csvfile2, delimiter=',', quotechar='"')
        row_count = 0
        for row in new_vector:
            wr.writerow(row)
    '''


# BUG FIX: guard the entry point so importing this module does not trigger the
# full preprocessing run.
if __name__ == "__main__":
    main()
|
# --*-- coding : utf-8 --*--
# Project : Python_app
# Current file : app_01.py
# Author : 大壮
# Create time : 2019-12-21 11:14
# IDE : PyCharm
# TODO 成长很苦,进步很甜,加油!
from appium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from appium.webdriver.common.mobileby import MobileBy
# Desired capabilities describing the target device and the app under test.
desired_caps = {
    'platformName': 'Android',  # operating system
    'platformVersion': '5.1',  # OS version
    'deviceName': 'huawei',  # device name
    'noReset': True,  # keep app state between sessions
    # appPackage is the app's unique package name
    # (taken from the apk).
    'appPackage': 'com.lemon.lemonban',  # package name
    'appActivity': 'com.lemon.lemonban.activity.MainActivity'
}
# Connect to the Appium server.
driver = webdriver.Remote('http://127.0.0.1:4723/wd/hub', desired_caps)
wait = WebDriverWait(driver, 20)
# Wait until the "My Lemon" navigation element is visible.
loc = (MobileBy.ID, "com.lemon.lemonban:id/navigation_my")
wait.until(EC.visibility_of_element_located(loc))
# Tap the "My Lemon" tab.
driver.find_element(*loc).click()
# Locating by class name (disabled example):
# driver.find_element_by_class_name("android.widget.FrameLayout")
# Locate by content-desc (accessibility id).
# NOTE(review): called without a locator argument — this raises TypeError at
# runtime; the element's accessibility id must be supplied.
driver.find_element_by_accessibility_id()
"""
1、python代码写好了
2、打开appium server ,与 appium 简历链接,发送你的命令
3、打开模拟器/真机,USB 调试,保证设置时可以被识别的
adb命令:检测已链接的设备-命令:adb devices
emulator-5554
1、usb连接了一个设备(android5.1)到电脑端,开启了USB调试模式
2、appium server --(android/IOS)
3、python代码
任务:通过写一段python代码,在android设备上,打开 柠檬班app.
1、你告诉appium server,你要在XX设备上,打开XXapp
2、appium收到你的命令之后,检测一下是否有XX设备,检测一下设备上是否有XXapp
3、2)确认成功,就执行命令。
获取应用包名和入口activity:aapt命令
aapt目录:
安卓sdk的build-tools目录下
示例:adt-bundle-windows-x86_64-20140702\sdk\build-tools\android-4.4W
命令语法:
aapt dump badging apk应用名
示例:aapt dump badging D:\BaiduNetdiskDownload\Future-release-2018.apk
"""
|
from flask_wtf import FlaskForm
from flask_babel import lazy_gettext as _l
from flask_pagedown.fields import PageDownField
from wtforms.fields import StringField,TextAreaField,SubmitField,PasswordField,BooleanField
from wtforms.ext.sqlalchemy.fields import QuerySelectField
from wtforms.validators import DataRequired,Length,EqualTo,Email
from app.mysql import articleType_db,articleSource_db
class PageDownForm(FlaskForm):
    """Markdown article editor: title, source, type, summary, tags and body."""
    title = StringField(_l('Title'), validators=[DataRequired(),Length(1, 64)])
    # Source and type choices are loaded from the database tables.
    source = QuerySelectField(_l('Source'),query_factory=articleSource_db.getAll,get_label="name")
    type = QuerySelectField(_l('Type'),query_factory=articleType_db.getAll,get_label="name")
    summary = TextAreaField(_l('Summary'), validators=[DataRequired()])
    tags = StringField(_l('Tags'))
    content = PageDownField(_l("Enter MarkDown Text"),validators=[DataRequired()])
    # Placeholder; AdvancedPageDownForm overrides this with a checkbox field.
    no_clean = None
    submit = SubmitField(_l('Submit'))
class AdvancedPageDownForm(PageDownForm):
    """PageDownForm variant that adds the "Using Advanced Mode" checkbox (no_clean)."""
    no_clean = BooleanField(_l("Using Advanced Mode"))
class DeleteForm(FlaskForm):
    """Single-button confirmation form used for delete actions."""
    submit = SubmitField(_l("Confirm"))
class ProfileEditForm(FlaskForm):
    """Edit the current user's profile: username and e-mail address."""
    username = StringField(_l('Username'), validators=[DataRequired(), Length(4, 16)])
    email = StringField(_l('Email'), validators=[DataRequired(), Email(),Length(7, 31)])
    submit = SubmitField(_l('Update Info'))
class ChangePasswordForm(FlaskForm):
    """Password-change form: the new password is entered twice and must match."""
    password = PasswordField(_l('Password'), validators=[DataRequired(), Length(6, 16),EqualTo("confirm")])
    confirm= PasswordField(_l('ConfirmedPassword'), validators=[DataRequired(), Length(6, 16)])
    # CONSISTENCY FIX: wrap the label in _l() like every other form in this
    # module so the submit button label is translatable.
    submit = SubmitField(_l('Submit'))
|
import os
import requests
import slackPostman as sp
from bs4 import BeautifulSoup
line_career_url = 'https://recruit.linepluscorp.com/lineplus/career/list?classId=148'  # job-listing page to poll
base_url = 'https://recruit.linepluscorp.com/'  # prefix for the relative posting links
def get_stored_seqs():
    """Return the posting sequence ids already saved in line.txt.

    Each line of line.txt starts with the sequence id; returns an empty list
    when the file does not exist yet.
    """
    seqs = []
    if os.path.isfile('line.txt'):
        # BUG FIX: use a context manager so the file handle is always closed
        # (the original never closed it).
        with open('line.txt', 'r', -1, 'utf-8') as file:
            for line in file:
                tokens = line.split()
                if tokens:  # robustness: skip blank lines instead of crashing
                    seqs.append(tokens[0])
    return seqs
def save_new_recruits(new_recruits):
    """Append each new posting line to line.txt (the file is created if absent).

    BUG FIX: the original opened the file without ever closing it; the context
    manager guarantees the handle is flushed and closed.
    """
    with open('line.txt', 'a', -1, 'utf-8') as file:
        for recruit in new_recruits:
            file.write(recruit + '\n')
def send_updates(new_recruits):
    """Post a single Slack message listing all newly discovered postings."""
    if not new_recruits:
        return  # nothing new: stay silent
    sp.send('\n'.join(['새 공고가 올라왔습니다.'] + new_recruits))
def recruit_tostring(seq, link, title, level, due):
    """Render one posting as "seq level title due <absolute url>"."""
    fields = (seq, level, title, due, base_url + link)
    return ' '.join(fields)
# Fetch the listing page and extract the table of postings.
line_html = requests.get(line_career_url).text
soup = BeautifulSoup(line_html, 'html.parser')
table = soup.select('body > div.container > div.jobs_wrap > table > tbody > tr')
# PERF FIX: read line.txt once instead of calling get_stored_seqs() — which
# re-reads the whole file — for every table row.
stored_seqs = get_stored_seqs()
print(stored_seqs)
new_recruits = []
for tr in table:
    td = tr.find_all('td')
    seq = td[0].text                    # posting sequence id
    link = td[1].find('a').get('href')  # relative detail-page link
    title = td[1].find('a').text
    level = td[3].text
    due = td[4].text
    if seq not in stored_seqs:
        new_recruits.append(recruit_tostring(seq, link, title, level, due))
save_new_recruits(new_recruits)
send_updates(new_recruits)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.