index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
987,700 | cde3014c218bbe4793fdaea163eaae55fc61bf63 | # -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
# Autogenerated By : src/main/python/generator/generator.py
# Autogenerated From : scripts/builtin/shortestPath.dml
from typing import Dict, Iterable
from systemds.operator import OperationNode, Matrix, Frame, List, MultiReturn, Scalar
from systemds.script_building.dag import OutputType
from systemds.utils.consts import VALID_INPUT_TYPES
def shortestPath(G: Matrix,
                 sourceNode: int,
                 **kwargs: Dict[str, VALID_INPUT_TYPES]) -> Matrix:
    """
    Computes the minimum distances (shortest-path) between a single source vertex and every other vertex in the graph.

    Grzegorz Malewicz, Matthew H. Austern, Aart J. C. Bilk,
    James C. Dehnert, Ikkan Horn, Naty Leiser and Grzegorz Czajkowski:
    Pregel: A System for Large-Scale Graph Processing, SIGMOD 2010

    :param G: adjacency matrix of the labeled graph: Such graph can be directed
        (G is symmetric) or undirected (G is not symmetric).
        The values of G can be 0/1 (just specifying whether the nodes
        are connected or not) or integer values (representing the weight
        of the edges or the distances between nodes, 0 if not connected).
        NOTE(review): the symmetry remarks above look swapped (an undirected
        graph normally has a symmetric adjacency matrix); text kept as
        generated from scripts/builtin/shortestPath.dml — confirm upstream.
    :param maxi: Integer max number of iterations accepted (0 for FALSE, i.e.
        max number of iterations not defined)
    :param sourceNode: node index to calculate the shortest paths to all other nodes.
    :param verbose: flag for verbose debug output
    :return: Output matrix (double) of minimum distances (shortest-path) between
        vertices: The value of the ith row and the jth column of the output
        matrix is the minimum distance shortest-path from vertex i to vertex j.
        When the value of the minimum distance is infinity, the two nodes are
        not connected.
    """
    # Collect required args plus any optional named args (e.g. maxi, verbose)
    # and hand them to the SystemDS DAG as one named-input dictionary.
    params_dict = {'G': G, 'sourceNode': sourceNode}
    params_dict.update(kwargs)
    return Matrix(G.sds_context,
                  'shortestPath',
                  named_input_nodes=params_dict)
|
987,701 | 3283f68099fef800f3d625bf014d52e23eb742b9 | import os
from django.shortcuts import render, redirect
from django.views.generic import ListView, DetailView, View
from django.contrib.auth.models import User, auth
from .models import UserProfile, UnusedPins, UsedPins, BulkStudent, AttendClass
from django.contrib.auth.decorators import login_required
import random
import string
import xlwt
import requests
from django.http import HttpResponse
import base64
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
from cryptography.fernet import Fernet
import uuid
from SchoolManagement.settings import salt
import qrcode
from PIL import Image
from pyzbar.pyzbar import decode
def generate_fernet_key(master_key, salt):
    """Derive a urlsafe-base64 Fernet key from a passphrase via PBKDF2-SHA512.

    Both *master_key* and *salt* are str; the result is a str suitable for
    ``cryptography.fernet.Fernet``.
    """
    derivation = PBKDF2HMAC(algorithm=hashes.SHA512(),
                            length=32,
                            salt=salt.encode(),
                            iterations=100000,
                            backend=default_backend())
    raw_key = derivation.derive(master_key.encode())
    return base64.urlsafe_b64encode(raw_key).decode("utf-8")
def encrypt_text(text, key):
    """Encrypt *text* with the given Fernet key and return the token as str."""
    token = Fernet(key).encrypt(text.encode())
    return token.decode()
key="adebowaleadeolu"
key= generate_fernet_key(key,salt)
@login_required
def index(request):
    """Dashboard router: build headline counts, then render the page that
    matches the logged-in user's account type."""
    items = ["stats"]
    stats = {
        "stats": {"students": UserProfile.objects.all().filter(user_type="Student").count(), "pins": UnusedPins.objects.all().count(), "teachers": UserProfile.objects.all().filter(user_type="Teacher").count(), "parents": UserProfile.objects.all().filter(user_type="Parent").count(), "staffs": UserProfile.objects.all().filter(user_type="Students").count()}
    }
    # NOTE(review): "staffs" filters on user_type="Students" — looks like a
    # typo (perhaps "Staff" was intended); confirm against UserProfile choices.
    params = {"items": []}
    for item in items:
        new_params = stats[item]
        params["items"].append(new_params)
    # NOTE(review): declared global but never assigned in this function.
    global user_type
    user = request.user
    account_type = user.profile.user_type
    if (account_type == "Student"):
        # NOTE(review): redirect() does not take a template context — the dict
        # is treated as a positional URL argument; confirm intended behavior.
        return redirect("auth-lock-screen.html", params)
    elif(account_type == "Parent"):
        return render(request, "index.html", params)
    elif(account_type == "Teacher"):
        return render(request, "index.html", params)
    elif(account_type == "Admin"):
        # First visit with an empty session label: remember the posted one.
        if user.profile.session == "":
            user.profile.session = request.POST.get('session')
            user.profile.save()
        return render(request, "index1.html", params)
    elif(account_type == "Liberian"):
        return render(request, "index.html")
    elif(account_type == "Accountant"):
        return render(request, "index.html")
    else:
        # Unknown account type: fall back to the login page.
        return render(request, 'auth-login.html')
def login(request):
    """Authenticate the posted credentials and open a session on success."""
    if request.method != 'POST':
        return render(request, 'auth-login.html')
    username = request.POST['username']
    password = request.POST['userpassword']
    user = auth.authenticate(username=username, password=password)
    if user is None:
        return render(request, 'auth-login.html', {"message": "The user does not exist"})
    auth.login(request, user)
    return redirect("index1.html")
def recover(request):
    """Render the password-recovery page."""
    return render(request, "auth-recoverpw.html")
def verify(request):
    """Activate a one-time pin: a pin found in UnusedPins is bound to the
    user's profile and moved from UnusedPins to UsedPins."""
    if request.method == 'POST':
        secret_key = (request.POST['secret_pin'])
        user = request.user
        if UnusedPins.objects.filter(pin=secret_key):
            if not UsedPins.objects.filter(pin=secret_key):
                # Fresh pin: store it on the profile and consume it.
                profile = user.profile
                profile.secret_pin = secret_key
                profile.save()
                special = UsedPins.objects.create(pin=secret_key)
                special.save()
                # NOTE(review): this local `key` shadows the module-level
                # Fernet key inside this function.
                key = UnusedPins.objects.filter(pin=secret_key)
                key.delete()
                return render(request, "index.html")
            # NOTE(review): a pin present in BOTH tables falls through here and
            # the view returns None (HTTP 500 in Django) — likely missing branch.
        else:
            if UsedPins.objects.filter(pin=secret_key):
                # User has used the key before
                return render(request, "index.html")
            else:
                # user doesnt have a key, hes probably forging or putting a pin incorrectly
                # NOTE(review): returning a bare str is not a valid Django
                # response — this should be a render()/HttpResponse with a message.
                return "Please input a valid pin"
    else:
        return render(request, 'auth-lock-screen.html')
@login_required
def logout(request):
    """End the current session and send the user to the login page."""
    auth.logout(request)
    # NOTE(review): redirect() is given a template filename; this only resolves
    # if a URL pattern with that exact name exists.
    return redirect("auth-login.html")
def randomStringDigits(stringLength=6):
    """Generate a random string of ASCII letters and digits.

    :param stringLength: number of characters to draw (default 6)
    :return: str of *stringLength* independently chosen characters
    """
    lettersAndDigits = string.ascii_letters + string.digits
    # random.choices draws with replacement, matching the original
    # per-character loop (which also never used its loop variable).
    # NOTE(review): for security-sensitive pins prefer the `secrets` module.
    return ''.join(random.choices(lettersAndDigits, k=stringLength))
def generate(request):
    """Create N new 12-character one-time pins (POST) and list unused pins."""
    params = {"pins": UnusedPins.objects.all()}
    if request.method == 'POST':
        number = request.POST['number']
        x = int(number)
        for i in range(x):
            code = UnusedPins.objects.create(pin=randomStringDigits(12))
            code.save()
            # NOTE(review): this prints a freshly drawn string, NOT the pin
            # that was just saved — presumably debug output; confirm.
            print(randomStringDigits(12))
        return render(request, "gen.html", params)
    else:
        # GET: render without the pin list context.
        return render(request, "gen.html")
# so create a database for used and unused and delete used ones from unused and add to used and assign the current used to user
def addStudent(request):
    """Add-student page: single add, bulk add, or XLS export.

    Dispatches on the posted hidden field ``form_type``:
    - "addstudent": create a User + student UserProfile, write a QR code of
      the encrypted name, and notify a remote face-matching service.
    - "bulkstudent": store a minimal BulkStudent record.
    - "csv": stream an .xls export of all student profiles.
    """
    # `account` is computed but never used below — left as-is.
    account_type = UserProfile.objects.all().filter(user_type="Admin")
    account = account_type.values('session')
    context = {"parents": UserProfile.objects.all().filter(user_type="Parent")}
    if request.method == 'POST':
        if request.POST.get('form_type') == "addstudent":
            name = request.POST['name']
            username = request.POST['username']
            parent = request.POST['parent']
            class_room = request.POST['class_room']
            section = request.POST['section']
            gender = request.POST['gender']
            school_type = request.POST['school_type']
            birthday = request.POST['birthday']
            phone_number = request.POST['phone_number']
            address = request.POST['address']
            image = request.FILES['image']
            email = request.POST['email']
            password1 = request.POST['password1']
            password2 = request.POST['password2']
            # Encrypt the student name; the ciphertext is the QR payload.
            text= name
            detail=encrypt_text(text, key)
            UserProfile_id = username  # unused — left as-is
            if password1 == password2:
                if User.objects.filter(email=email).exists():
                    # NOTE(review): render() does not accept a second context
                    # dict — `context` lands in the content_type/status slots.
                    return render(request, 'add-student.html', {"message": "The user is already registered"}, context)
                else:
                    user = User.objects.create(
                        username=username, password=password1, email=email)
                    # Re-hash the raw password stored by create().
                    user.set_password(user.password)
                    user.save()
                    # Write the QR image to disk and record an absolute path.
                    # NOTE(review): hard-coded C:\Users\USER\... path breaks on
                    # any other machine — should derive from MEDIA_ROOT.
                    m =qrcode.make(detail)
                    qrfilename="media\\" +name + "_qr.jpg"
                    m.save(qrfilename)
                    good='C:\\Users\\USER\\Desktop\\SchoolManagement\\' + qrfilename
                    profile = UserProfile.objects.create(user=user, name=name, user_type='Student', parent=parent, class_room=class_room, section=section,
                                                         gender=gender, school_type=school_type, birthday=birthday, phone_number=phone_number, address=address, image=image, qr_image=good)
                    profile.save()
                    # Register the uploaded photo with the remote face service.
                    new_image='http://advancescholar.com/media/' + str(image)
                    print(new_image)
                    r = requests.get("http://ec2-3-21-174-239.us-east-2.compute.amazonaws.com/save_user", params={
                        "name": name,
                        "image": new_image
                    }).json()
                    if r["success"]:
                        print("Saved Picture")
                    return redirect('add-student.html', {"message": "Student Added"}, context)
            else:
                return render(request, 'add-student.html', {"message": "The passwords don't match"}, context)
        elif request.POST.get('form_type') == "bulkstudent":
            # Minimal fields; .get(..., False) tolerates missing inputs.
            name = request.POST.get('name', False)
            parent = request.POST.get('parent', False)
            gender = request.POST.get('gender', False)
            email = request.POST.get('email', False)
            password1 = request.POST.get('password1', False)
            password2 = request.POST.get('password2', False)
            BulkStudent_id = name  # unused — left as-is
            if password1 == password2:
                if User.objects.filter(email=email).exists():
                    return render(request, "add-student.html", {"message": "The user is already registered"})
                else:
                    # NOTE(review): password is stored in plain text on BulkStudent.
                    profile = BulkStudent.objects.create(
                        name=name, parent=parent, gender=gender, email=email, password=password1)
                    profile.save()
                    return redirect("add-student.html", {"message": "Student Added"}, context)
            else:
                return render(request, "add-student.html", {"message": "The passwords don't match"}, context)
        elif request.POST.get("form_type") == "csv":
            # Build an .xls attachment of every student profile.
            response = HttpResponse(content_type='application/ms-excel')
            response['Content-Disposition'] = 'attachment; filename="students.xls"'
            wb = xlwt.Workbook(encoding='utf-8')
            ws = wb.add_sheet('Students Data')
            row_num = 0
            font_style = xlwt.XFStyle()
            font_style.font.bold = True
            columns = ['Name', 'Username', 'Parent', 'Class', 'Section',
                       'Gender', 'School Type', 'Birthday', 'Phone Number', 'Address']
            for col_num in range(len(columns)):
                ws.write(row_num, col_num, columns[col_num], font_style)
            # One row per student; data cells reuse the bold header style.
            rows = UserProfile.objects.filter(user_type="Student").values_list(
                'user', 'name', 'parent', 'class_room', 'section', 'gender', 'school_type', 'birthday', 'phone_number', 'address')
            for row in rows:
                row_num += 1
                for col_num in range(len(row)):
                    ws.write(row_num, col_num, row[col_num], font_style)
            wb.save(response)
            return response
            # NOTE(review): unreachable — follows the return above.
            return redirect("add-student.html", context)
        else:
            return render(request, "add-student.html", context)
    else:
        return render(request, "add-student.html", context)
def Student(request):
    """List students; on POST, apply edits to the profile identified by the
    posted ``previous_name``."""
    context = {"students": UserProfile.objects.all().filter(
        user_type="Student")}
    if request.method == 'POST':
        previous_name = request.POST['previous_name']
        data = UserProfile.objects.get(name=previous_name)
        data.name = request.POST['name']
        data.username = request.POST['username']
        data.gender = request.POST['gender']
        data.address = request.POST['address']
        data.phone_number = request.POST['phone_number']
        data.email = request.POST['email']
        email = data.email
        if User.objects.filter(email=email).exists():
            # NOTE(review): this rejects the edit whenever ANY user owns this
            # email — including the student being edited; confirm intent.
            return render(request, "student.html", {"message": "The user is already registered"}, context)
        else:
            data.save()
            return redirect("student.html", {"message": "Edited"}, context)
    return render(request, "student.html", context)
def timeTable(request):
    """Render the time-table page template."""
    return render(request, "time-table.html")


def syllabus(request):
    """Render the syllabus page template."""
    return render(request, "syllabus.html")


def subject(request):
    """Render the subject page template."""
    return render(request, "subject.html")


def sms(request):
    """Render the SMS page template."""
    return render(request, "sms.html")


def profile(request):
    """Render the profile page template."""
    return render(request, "profile.html")
def manageTeacher(request):
    """Create or edit teacher accounts (POST ``form_type``), or list them."""
    # `account` is computed but never used — left as-is.
    account_type = UserProfile.objects.all().filter(user_type="Admin")
    account = account_type.values('session')
    context = {"teachers": UserProfile.objects.all().filter(
        user_type="Teacher")}
    if request.method == 'POST':
        if request.POST.get('form_type') == "create":
            name = request.POST['name']
            username = request.POST['username']
            gender = request.POST['gender']
            phone_number = request.POST['phone_number']
            address = request.POST['address']
            # NOTE(review): image read from POST, not request.FILES — confirm
            # the form really submits a plain value here.
            image = request.POST['image']
            email = request.POST['email']
            password1 = request.POST['password1']
            password2 = request.POST['password2']
            UserProfile_id = username  # unused — left as-is
            if password1 == password2:
                if User.objects.filter(email=email).exists():
                    return render(request, 'manage-teacher.html', {"message": "The user is already registered"})
                else:
                    user = User.objects.create(
                        username=username, password=password1, email=email)
                    # Re-hash the raw password stored by create().
                    user.set_password(user.password)
                    user.save()
                    profile = UserProfile.objects.create(
                        user=user, name=name, user_type='Teacher', gender=gender, phone_number=phone_number, address=address, image=image)
                    profile.save()
                    # NOTE(review): message says "Student Added" on the teacher page.
                    return redirect('manage-teacher.html', {"message": "Student Added"}, context)
            else:
                return render(request, 'manage-teacher.html', {"message": "The passwords don't match"}, context)
        elif request.POST.get('form_type') == "edit":
            # Look up the profile by the name it had before editing.
            previous_name = request.POST['previous_name']
            data = UserProfile.objects.get(name=previous_name)
            data.name = request.POST['name']
            data.username = request.POST['username']
            data.gender = request.POST['gender']
            data.address = request.POST['address']
            data.phone_number = request.POST['phone_number']
            data.email = request.POST['email']
            email = data.email
            if User.objects.filter(email=email).exists():
                return render(request, "manage-teacher.html", {"message": "The user is already registered"}, context)
            else:
                data.save()
                return redirect("manage-teacher.html", {"message": "Edited"}, context)
        else:
            return redirect("manage-teacher.html", context)
    else:
        return render(request, "manage-teacher.html", context)
def manageStudent(request):
    """Render the manage-student page template."""
    return render(request, "manage-student.html")


def manageParent(request):
    """Create or edit parent accounts (POST ``form_type``), or list them."""
    # `account` is computed but never used — left as-is.
    account_type = UserProfile.objects.all().filter(user_type="Admin")
    account = account_type.values('session')
    context = {"parents": UserProfile.objects.all().filter(user_type="Parent")}
    if request.method == 'POST':
        if request.POST.get('form_type') == "create":
            name = request.POST['name']
            username = request.POST['username']
            gender = request.POST['gender']
            phone_number = request.POST['phone_number']
            address = request.POST['address']
            # NOTE(review): image read from POST, not request.FILES.
            image = request.POST['image']
            email = request.POST['email']
            password1 = request.POST['password1']
            password2 = request.POST['password2']
            UserProfile_id = username  # unused — left as-is
            if password1 == password2:
                if User.objects.filter(email=email).exists():
                    return render(request, 'manage-parent.html', {"message": "The user is already registered"})
                else:
                    user = User.objects.create(
                        username=username, password=password1, email=email)
                    # Re-hash the raw password stored by create().
                    user.set_password(user.password)
                    user.save()
                    profile = UserProfile.objects.create(
                        user=user, name=name, user_type='Parent', gender=gender, phone_number=phone_number, address=address, image=image)
                    profile.save()
                    # NOTE(review): message says "Student Added" on the parent page.
                    return redirect('manage-parent.html', {"message": "Student Added"}, context)
            else:
                return render(request, 'manage-parent.html', {"message": "The passwords don't match"}, context)
        elif request.POST.get('form_type') == "edit":
            # Look up the profile by the name it had before editing.
            previous_name = request.POST['previous_name']
            data = UserProfile.objects.get(name=previous_name)
            data.name = request.POST['name']
            data.username = request.POST['username']
            data.gender = request.POST['gender']
            data.address = request.POST['address']
            data.phone_number = request.POST['phone_number']
            data.email = request.POST['email']
            email = data.email
            if User.objects.filter(email=email).exists():
                return render(request, "manage-parent.html", {"message": "The user is already registered"}, context)
            else:
                data.save()
                return redirect("manage-parent.html", {"message": "Edited"}, context)
        else:
            return redirect("manage-parent.html", context)
    else:
        return render(request, "manage-parent.html", context)
def manageLibarian(request):
    """Create or edit librarian accounts (POST ``form_type``), or list them.

    NOTE(review): the user_type string is "Liberian" throughout the app —
    kept as-is since other views and data depend on the same spelling.
    """
    # `account` is computed but never used — left as-is.
    account_type = UserProfile.objects.all().filter(user_type="Admin")
    account = account_type.values('session')
    context = {"libarians": UserProfile.objects.all().filter(
        user_type="Liberian")}
    if request.method == 'POST':
        if request.POST.get('form_type') == "create":
            name = request.POST['name']
            username = request.POST['username']
            gender = request.POST['gender']
            phone_number = request.POST['phone_number']
            address = request.POST['address']
            # NOTE(review): image read from POST, not request.FILES.
            image = request.POST['image']
            email = request.POST['email']
            password1 = request.POST['password1']
            password2 = request.POST['password2']
            UserProfile_id = username  # unused — left as-is
            if password1 == password2:
                if User.objects.filter(email=email).exists():
                    return render(request, 'manage-libarian.html', {"message": "The user is already registered"})
                else:
                    user = User.objects.create(
                        username=username, password=password1, email=email)
                    # Re-hash the raw password stored by create().
                    user.set_password(user.password)
                    user.save()
                    profile = UserProfile.objects.create(
                        user=user, name=name, user_type='Liberian', gender=gender, phone_number=phone_number, address=address, image=image)
                    profile.save()
                    # NOTE(review): message says "Student Added" on the librarian page.
                    return redirect('manage-libarian.html', {"message": "Student Added"}, context)
            else:
                return render(request, 'manage-libarian.html', {"message": "The passwords don't match"}, context)
        elif request.POST.get('form_type') == "edit":
            # Look up the profile by the name it had before editing.
            previous_name = request.POST['previous_name']
            data = UserProfile.objects.get(name=previous_name)
            data.name = request.POST['name']
            data.username = request.POST['username']
            data.gender = request.POST['gender']
            data.address = request.POST['address']
            data.phone_number = request.POST['phone_number']
            data.email = request.POST['email']
            email = data.email
            if User.objects.filter(email=email).exists():
                # NOTE(review): sibling views use render() here; redirect()
                # does not accept a context dict — confirm intent.
                return redirect("manage-libarian.html", {"message": "The user is already registered"}, context)
            else:
                data.save()
                return redirect("manage-libarian.html", {"message": "Edited"}, context)
        else:
            return render(request, "manage-libarian.html", context)
    else:
        return render(request, "manage-libarian.html", context)
def manageAdmin(request):
    """Create or edit admin accounts (POST ``form_type``), or list them.

    "create" builds a Django User plus an Admin UserProfile; "edit" updates
    the logged-in admin's own profile from the posted fields.
    """
    context = {"admins": UserProfile.objects.all().filter(user_type="Admin")}
    if request.method == 'POST':
        if request.POST.get('form_type') == "create":
            name = request.POST['name']
            username = request.POST['username']
            gender = request.POST['gender']
            phone_number = request.POST['phone_number']
            address = request.POST['address']
            image = request.POST['image']
            email = request.POST['email']
            password1 = request.POST['password1']
            password2 = request.POST['password2']
            if password1 == password2:
                if User.objects.filter(email=email).exists():
                    return render(request, 'manage-admin.html', {"message": "The user is already registered"})
                else:
                    user = User.objects.create(
                        username=username, password=password1, email=email)
                    # Re-hash the raw password stored by create().
                    user.set_password(user.password)
                    user.save()
                    profile = UserProfile.objects.create(
                        user=user, name=name, user_type='Admin', gender=gender, phone_number=phone_number, address=address, image=image)
                    profile.save()
                    return redirect('manage-admin.html', {"message": "Admin Added"}, context)
            else:
                return render(request, 'manage-admin.html', {"message": "The passwords don't match"}, context)
        elif request.POST.get('form_type') == "edit":
            # BUG FIX: this branch previously referenced an undefined local
            # `user`, raising UnboundLocalError on every edit. The original
            # assignments targeted `user.profile.*`, so edit the requesting
            # admin's own profile (TODO confirm this matches the form's intent
            # vs. the sibling views' previous_name lookup).
            profile = request.user.profile
            profile.name = request.POST['name']
            profile.username = request.POST['username']
            profile.gender = request.POST['gender']
            profile.address = request.POST['address']
            profile.phone_number = request.POST['phone_number']
            profile.email = request.POST['email']
            email = profile.email
            if User.objects.filter(email=email).exists():
                return render(request, "manage-admin.html", {"message": "The user is already registered"}, context)
            else:
                profile.save()
                return redirect("manage-admin.html", {"message": "Edited"}, context)
        else:
            return redirect("manage-admin.html", context)
    else:
        return render(request, "manage-admin.html", context)
def manageAccountant(request):
    """Create or edit accountant accounts (POST ``form_type``), or list them."""
    # `account` is computed but never used — left as-is.
    account_type = UserProfile.objects.all().filter(user_type="Admin")
    account = account_type.values('session')
    context = {"accountants": UserProfile.objects.all().filter(
        user_type="Accountant")}
    if request.method == 'POST':
        if request.POST.get('form_type') == "create":
            name = request.POST['name']
            username = request.POST['username']
            gender = request.POST['gender']
            phone_number = request.POST['phone_number']
            address = request.POST['address']
            # NOTE(review): image read from POST, not request.FILES.
            image = request.POST['image']
            email = request.POST['email']
            password1 = request.POST['password1']
            password2 = request.POST['password2']
            UserProfile_id = username  # unused — left as-is
            if password1 == password2:
                if User.objects.filter(email=email).exists():
                    return render(request, 'manage-accountant.html', {"message": "The user is already registered"})
                else:
                    user = User.objects.create(
                        username=username, password=password1, email=email)
                    # Re-hash the raw password stored by create().
                    user.set_password(user.password)
                    user.save()
                    profile = UserProfile.objects.create(
                        user=user, name=name, user_type='Accountant', gender=gender, phone_number=phone_number, address=address, image=image)
                    profile.save()
                    return render(request, 'manage-accountant.html', context)
            else:
                # NOTE(review): unlike sibling views, no "passwords don't
                # match" message is shown here.
                return render(request, 'manage-accountant.html', context)
        elif request.POST.get('form_type') == "edit":
            # Look up the profile by the name it had before editing.
            previous_name = request.POST['previous_name']
            data = UserProfile.objects.get(name=previous_name)
            data.name = request.POST['name']
            data.username = request.POST['username']
            data.gender = request.POST['gender']
            data.address = request.POST['address']
            data.phone_number = request.POST['phone_number']
            data.email = request.POST['email']
            email = data.email
            if User.objects.filter(email=email).exists():
                return render(request, "manage-accountant.html", {"message": "The user is already registered"}, context)
            else:
                data.save()
                return redirect("manage-accountant.html", {"message": "Edited"}, context)
        else:
            return redirect("manage-accountant.html", context)
    else:
        return render(request, "manage-accountant.html", context)
def Dept(request):
    """Render the department page template."""
    return render(request, "department.html")


def Attend(request):
    """Render the daily-attendance page template."""
    return render(request, "daily-attendance.html")


def Class(request):
    """Render the class page template."""
    return render(request, "class.html")


def Chat(request):
    """Render the chat page template."""
    return render(request, "chat.html")


def Calendar(request):
    """Render the calendar page template."""
    return render(request, "calendar.html")


def Admission(request):
    """Render the admission page template."""
    return render(request, "admission.html")
def test_attend(request):
    """Attendance via webcam snapshot: store the uploaded image and ask the
    remote face-matching service whether it recognizes the person."""
    context = {"pics": AttendClass.objects.all()}
    if request.method == "POST":
        # This is your base64 string image
        image = request.FILES.get('snapshot')
        attend = AttendClass.objects.create(image=image)
        attend.save()
        # NOTE(review): the uploaded-file object is sent as a URL query
        # parameter — requests will transmit its string repr, not the bytes;
        # confirm the remote API expects that.
        r = requests.get("http://ec2-3-21-174-239.us-east-2.compute.amazonaws.com/match_user", params={
            "image": image
        }).json()
        if r["success"]:
            name = r["name"]
            # NOTE(review): message text appears copy-pasted from registration.
            return redirect("test_attend.html",{"message": "The user is already registered"+ name})
    return render(request, "test_attend.html", context)
def decrypt_text(hash, key):
    """Invert encrypt_text: decrypt a Fernet token back to the original str."""
    plaintext = Fernet(key).decrypt(hash.encode())
    return plaintext.decode("utf-8")
def decode_qr(request):
    """Validate a student by decoding and decrypting the QR code in an
    uploaded snapshot, then matching it against registered student names."""
    if request.method=="POST":
        image=request.FILES['snapshot']
        # pyzbar returns a list of decoded symbols; only the first is used.
        data = decode(Image.open(image))
        first_result = data[0].data.decode("utf-8")
        # The QR payload is the Fernet-encrypted student name.
        second_result=decrypt_text(first_result,key)
        # NOTE(review): raises IndexError when no student matches; also the
        # filter already guarantees equality, so the comparison below is
        # always True whenever a row exists.
        valid = UserProfile.objects.all().filter(name=second_result, user_type="Student").values_list('name')[0][0]
        if second_result==valid:
            # NOTE(review): missing space before "is a registered student".
            return render(request,"qr_code.html", {"message": second_result + "is a registered student"})
        else:
            return redirect("qr_code.html", {"message": "Not a registered student"})
    return render(request, "qr_code.html")
|
987,702 | 52af129d7375a9717d88e2253515dc1dbefe498d | import json
class Pago:
    """One sale record, filled field-by-field from sequential user input."""

    # Order in which set_value() fills the record's fields.
    _FIELDS = ("nombre", "apellido", "monto", "descripcion", "metodo")

    def __init__(self):
        for field in self._FIELDS:
            setattr(self, field, None)

    def set_value(self, info):
        """Store *info* into the first still-empty field.

        Returns False when the sentinel "stop" is received, True otherwise.
        """
        if info == "stop":
            return False
        for field in self._FIELDS:
            if not getattr(self, field):
                setattr(self, field, info)
                break
        return True

    def write(self):
        """Append this record as a single labeled line to the "venta" file."""
        with open("venta", 'a') as v:
            v.write(f"nombre: {self.nombre}. apellido: {self.apellido}. "
                    f"monto: {self.monto}. descripcion: {self.descripcion}. "
                    f"metodo: {self.metodo}\n")
def convertir_a_json():
    """Re-read the plain-text "venta" log and dump it to venta.json as one
    numbered object per sale line."""
    data = {}
    for y, line in enumerate(open("venta")):
        d_obj = {}
        # Tokens alternate label/value: "nombre:", <v>, "apellido:", <v>, ...
        # NOTE(review): values containing spaces break this positional split.
        L = line.split()
        for x in range(5):
            # Key = the prompt text from inp() minus its trailing ": ".
            # NOTE(review): inp()'s labels ("descripción", "metodo de pago")
            # differ from those written to the file ("descripcion", "metodo")
            # — confirm which naming downstream JSON readers expect.
            d_obj[inp(x)[:-2]] = L[x*2+1]
        data[y] = d_obj
    with open("venta.json", "w") as v:
        json.dump(data, v, indent=4)
def inp(idx):
    """Return the input prompt for field number *idx* (0-4), or None for any
    other index."""
    labels = ("nombre: ", "apellido: ", "monto: ",
              "descripción: ", "metodo de pago: ")
    if 0 <= idx < len(labels):
        return labels[idx]
    return None
if __name__ == "__main__":
print("stop para terminar")
stop = True
while stop:
pago = Pago()
count = 0
while count < 5:
stop = pago.set_value(input(inp(count)))
if stop:
count += 1
else:
break
if count == 5:
pago.write()
for line in open("venta"):
print(line.rstrip())
convertir_a_json()
|
def merge(A, m, B, n):
    """Merge sorted list B (n elements) into sorted list A in place.

    A holds its m sorted elements first and has at least m+n slots total;
    after the call, A's first m+n slots contain the merged sorted values.
    """
    write = m + n - 1
    a, b = m - 1, n - 1
    # Fill A from the back; once B is exhausted, A's remaining prefix is
    # already in position.
    while b >= 0:
        if a >= 0 and A[a] > B[b]:
            A[write] = A[a]
            a -= 1
        else:
            A[write] = B[b]
            b -= 1
        write -= 1
|
987,704 | afd066c93c5c756f9214d6aebe6b71665f2a73ed | from django.shortcuts import render
from rest_framework.views import APIView
from django.http import HttpResponseRedirect
from django.http import JsonResponse
from collections import defaultdict
from backend.notification import *
class Serializers(object):
    """Helpers that group raw notification objects and render them as dicts
    suitable for the JSON notifications feed."""

    @staticmethod
    def notifications_aggregator(notifications):
        """Group notifications that describe the same event target.

        Notifications whose action object was deleted are removed as a side
        effect. Returns an iterable of lists, one list per (verb, target)
        group.
        """
        groups = defaultdict(list)
        for obj in notifications:
            if obj.action_object is None:
                # The object this notification points at no longer exists.
                obj.delete()
            elif obj.verb == 'new series video':
                groups[(obj.verb, obj.action_object.series.id)].append(obj)
            elif obj.verb == 'new question':
                groups[(obj.verb, obj.action_object.video.id)].append(obj)
            elif obj.verb == 'new question response':
                groups[(obj.verb, obj.action_object.is_instructor, obj.action_object.question.id)].append(obj)
        return groups.values()

    @staticmethod
    def notification_serializer(notifications):
        """Render one group of notifications as a {ids, timestamp,
        description, link} dict; returns [] for an empty group."""
        if len(notifications) == 0:
            return []
        # BUG FIX: the original called sorted() and discarded the result, so
        # `first` was not actually the newest notification in the group.
        notifications = sorted(notifications, key=lambda x: x.timestamp, reverse=True)
        first = notifications[0]
        verb = first.verb
        countint = len(notifications)
        count = str(countint)
        data = {}
        # Materialized list: a lazy map object has no len() and would not
        # JSON-serialize on Python 3.
        data["ids"] = [x.id for x in notifications]
        data["timestamp"] = first.timestamp
        if verb == 'new series video':
            username = first.actor.username
            seriesname = first.action_object.series.name
            seriesid = first.action_object.series.uuid
            videoid = first.action_object.video.uuid
            if countint > 1:
                data["description"] = username + ' added ' + count + ' videos to the series ' + seriesname
                data["link"] = '/s/' + seriesid
                return data
            else:
                data["description"] = username + ' added a video to the series ' + seriesname
                data["link"] = '/s/' + seriesid + '/watch#' + videoid
                return data
        if verb == 'new question':
            videoname = first.action_object.video.name
            seriesid = first.action_object.video.series_video.series.uuid
            videoid = first.action_object.video.uuid
            if countint > 1:
                data["description"] = count + ' new questions in the video ' + videoname
                data["link"] = '/s/' + seriesid + '/watch#' + videoid
                return data
            else:
                data["description"] = 'Someone asked a question in the video ' + videoname
                data["link"] = '/s/' + seriesid + '/watch#' + videoid
                return data
        if verb == 'new question response':
            videoname = first.action_object.question.video.name
            seriesid = first.action_object.question.video.series_video.series.uuid
            videoid = first.action_object.question.video.uuid
            if first.action_object.is_instructor:
                data["description"] = 'An instructor responded to your question in the video ' + videoname
                data["link"] = '/s/' + seriesid + '/watch#' + videoid
                return data
            if countint > 1:
                data["description"] = count + ' new responses to your question in the video ' + videoname
                data["link"] = '/s/' + seriesid + '/watch#' + videoid
                return data
            else:
                data["description"] = 'Someone responded to your question in the video ' + videoname
                data["link"] = '/s/' + seriesid + '/watch#' + videoid
                return data
class GetNotifications(APIView):
    """Return the current user's unread notifications, aggregated by event
    and sorted newest-first, as JSON."""

    def get(self, request):
        # NOTE(review): `is_anonymous()` is called as a method (pre-Django-1.10
        # style); on newer Django it is a property — confirm installed version.
        if request.user.is_anonymous():
            return JsonResponse({'notifications': [], 'num': "0"})
        unread = request.user.notifications.unread().all()
        if len(unread) == 0:
            return JsonResponse({'notifications': [{"description": "No new notifications at this time", "timestamp": "", "link": ""}], 'num': "0"})
        # BUG FIX: materialize the map — on Python 3 a bare map object has no
        # len() and would not JSON-serialize.
        aggregated_unread = list(map(Serializers.notification_serializer, Serializers.notifications_aggregator(unread)))
        num = len(aggregated_unread)
        return JsonResponse({'notifications': sorted(aggregated_unread, key=lambda x: x["timestamp"], reverse=True),
                             'num': num})
class MarkAsRead(APIView):
    """Mark the posted notification ids as read for the requesting user."""

    def post(self, request):
        if not request.user.is_anonymous():
            for n_id in request.POST.getlist('ids[]'):
                note = Notification.objects.get(id=n_id)
                # Only the recipient may mark their own notification read.
                if note is not None and note.recipient.id == request.user.id:
                    note.mark_as_read()
        return JsonResponse({})
987,705 | 77bef87cd8f738bcdd7f2eccd3c7df1fe72b6ff5 | # coding=utf-8
#
#
# !/data1/Python2.7/bin/python27
#
# 全局变量在多进程里面是不能够共享的,进程是拥有自己的数据,代码段的
# ;
#
import os
from multiprocessing import Process
# Module-level counter; after fork, each child process works on its own copy.
num = 100


def run():
    # Child-process entry point.
    print 'slave: {} start'.format(os.getpid())
    # Modifying a global in the child process does not affect the parent's
    # copy: after fork the child uses a completely separate instance of the
    # variable.
    global num
    num += 1
    print 'run num: {}'.format(num)
    print 'slave: {} end'.format(os.getpid())


def main():
    # Spawn one child, wait up to 10 seconds, then show that the parent's
    # `num` is unchanged (still 100) despite the child's increment.
    print "master: {} start".format(os.getpid())
    multiRun = Process(target=run)
    multiRun.start()
    multiRun.join(timeout=10)
    print "num:{}".format(num)
    print "master: {} end".format(os.getpid())


if __name__ == '__main__':
    main()
|
987,706 | 14f7a908e38a36a7504fa5fe7f274323921ae1a4 | #Creaate a program that generates a password of 6 random alphanumeric characters in the range abcdefghijklmnopqrstuvwxyz01234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%^&*()?
import random
import string

# Alphabet of allowed password characters: lowercase, digits, uppercase and
# the symbol set from the assignment. Built from `string` constants instead of
# the original hand-typed list, which contained the digit '0' twice and so
# slightly skewed the character distribution.
# NOTE(review): for real credentials prefer the `secrets` module over `random`.
password_range = string.ascii_lowercase + string.digits + string.ascii_uppercase + "!@#$%^&*()?"
# Draw 6 independent characters (with replacement) and join them.
password = "".join(random.choice(password_range) for _ in range(6))
print(password)
|
987,707 | eb16436d9f9079eab2bea77e8c7502ebdeb8e7c8 | from math import ceil
from collections import defaultdict
# Per-letter frequency tables for s and t (key = letter index 0-25).
d1 = defaultdict(int)
d2 = defaultdict(int)
n, m = map(int, input().split())
s = input()
t = input()
for i in range(n):
    # ord(c) - 65 maps 'A'..'Z' to 0..25 — assumes uppercase input (TODO confirm).
    d1[ord(s[i]) - 65] += 1
for i in range(m):
    d2[ord(t[i]) - 65] += 1
ans = 0
for i in range(26):
    if d1[i] == 0:
        continue
    if d1[i] > 0 and d2[i] == 0:
        # s needs a letter that t cannot supply at all — impossible.
        print(-1)
        exit()
    if d1[i] > 0:
        # Number of copies of t needed to cover this letter's demand in s;
        # the answer is the maximum over all letters.
        ans = max(ceil(d1[i] / d2[i]), ans)
print(ans)
|
987,708 | 3b401d15487a4ad1b150e0aeab3aa0d13b8c62c2 | # -*- coding: UTF-8 -*-
from __future__ import division
import sys
import xlsxwriter
import re
# Regex matching Arabic tashkeel (diacritic) marks; these are stripped from
# each text before word lengths are measured (Python 2 unicode-raw literal).
tashkeel_patt = ur"[\u0617-\u061A\u064B-\u0652]+"
# Primary workbook for the first 16000 columns of results (see main loop).
workbook = xlsxwriter.Workbook((sys.argv[4])+'_shell_output/'+(sys.argv[4])+'avgWordL.xlsx')
worksheet = workbook.add_worksheet()
# Overflow workbook for file indices >= 16000.
workbook2 = xlsxwriter.Workbook((sys.argv[4])+'_shell_output/'+(sys.argv[4])+'avgWordL_2.xlsx')
worksheet2 = workbook2.add_worksheet()
# Bold white-on-green 16pt cell format (NOTE: `format` shadows the builtin).
format = workbook.add_format()
format.set_bold()
format.set_font_color('white')
format.set_bg_color('green')
format.set_font_size(16)
# Cell cursors: data starts at row 3, column 1 in both worksheets.
row = 3
row2 = 3
col = 1
col2=1
# sys.argv[3] is the number of input files; +1 so range(1, size) covers all.
size=int(sys.argv[3])+1
def wordAvgLength( str ):
#
# sentence=""
words = sentence.split()
average=0
p=0
pp=0
for word in words:
if len(word) > 1:
average = average + len(word)
else:
p+=1
print p
print len(words)
print sum(len(i) > 1 for i in words)
if sum(len(i) > 1 for i in words) >0:
average = average / sum(len(i) > 1 for i in words)
print average
return average
# Running state: current file text, its average, and the sum of averages.
sentence=""
avg=0
avgAvg=0
# Plain-text log of one average per input file.
o = open((sys.argv[4])+'_shell_output/'+str(sys.argv[4])+'_averageWordLength.txt',"w")
for i in range(1, size):
    print i
    # Input files are named <dir>/<prefix><i>.txt (argv[1], argv[2]).
    f = open(sys.argv[1]+"/"+sys.argv[2]+str(i)+".txt", "r")
    sentence = f.read()
    sentence = unicode(sentence, "utf-8")
    # Strip diacritics so they do not inflate word lengths.
    sentence = re.sub(tashkeel_patt,u"",sentence)
    avg=wordAvgLength(sentence)
    o.write(str(avg))
    o.write("\n")
    # First 16000 results go to the styled workbook, the rest overflow
    # into workbook2 (xlsxwriter column limits).
    if i < 16000:
        worksheet.write(row, col, str(i) , format)
        worksheet.write(row + 1, col, avg, format)
    else:
        worksheet2.write(row, col2, str(i) )
        worksheet2.write(row + 1, col2, avg)
        col2+=1
    avgAvg+=avg
    avg=0
    col += 1
    row = 3
print "avg"
# Write the grand mean (avgAvg / number of files) across 100 cells of row 5
# in both workbooks.
row = 5
size=size-1
for k in range(1,101):
    worksheet.write(row, k, avgAvg/size,format)
row = 5
for k in range(1,101):
    worksheet2.write(row, k, avgAvg/size)
print avg
o.close()
workbook.close()
workbook2.close()
987,709 | 1a23b9efe3d65338d26295467cd957782fc461ba | #!/usr/bin/env python
# coding: utf-8
"""
Este programa conta as ocorrências das letras de A a Z em um arquivo
texto em formato UTF-8, e exibe uma lista com as letras em ordem
decrescente de quantidade.
As letras acentuadas e letras minúsculas são convertidas para seus
equivalentes maiúsculos e sem acentos. O cedilha é contado como C.
Para usar, passe o nome do arquivo texto como argumento na linha de
comando.
"""
import sys
from unicodedata import decomposition
from string import ascii_uppercase
# Map of uppercase ASCII letter -> occurrence count.
ocorrencias = {}
for linha in file(sys.argv[1]):
    for car_uni in linha.decode('utf-8'): # convert the line to unicode
        if not car_uni.strip():
            continue # skip whitespace
        try: # first try a straight ASCII conversion
            car = car_uni.encode('ascii')
        except UnicodeEncodeError: # otherwise fall back to decomposition
            partes = decomposition(car_uni)
            if partes: # if the character can be decomposed...
                ascii = partes.split()[0] # the first part is the base ASCII code...
                car = chr(int(ascii, 16)) # convert the hexadecimal code point
            else: # if the character cannot be decomposed...
                continue # then it has no ASCII equivalent
        car = car.upper() # convert to upper case
        if car in ascii_uppercase:
            # finally, count the occurrence
            if car in ocorrencias:
                ocorrencias[car] += 1
            else:
                ocorrencias[car] = 1
# Invert to (count, letter) pairs so sorting orders by frequency.
indice = [(qtd, car) for (car, qtd) in ocorrencias.items()]
indice = sorted(indice)
print 'letra ocorrencias'
print '----- -----------'
for qtd, car in reversed(indice):
    print '%5s %11d' % (car, qtd)
|
987,710 | 0842d219612fd8b7e70fb08b1c0208fcdc0b9c60 | import re
import os
def find_html_refs(text):
    """Return (prefix, '.html') tuples for each '.html' reference in text."""
    return re.findall(r"(.*)(\.html)", text)


if __name__ == "__main__":
    # Scan dare.txt line by line, printing the matches found on each line.
    # BUG FIX: the original re.findall call omitted the string to search,
    # which raises TypeError at runtime.
    with open("dare.txt", "r") as f:
        for l in f:
            print(find_html_refs(l))
987,711 | 150a4819bf8511457821f4f43e7a8bfe245dd689 | from typing import List, Any
from fastapi import APIRouter, HTTPException
from api.api_v1.endpoints.util import get_nodes_and_relationships
from models.intermediary import INTERMEDIARY
from schema.intermediary import IntermediaryOut, IntermediaryIn, Intermediary
from schema.react_force_graph import ReactForceGraphInput
router = APIRouter()
# All endpoints in this module operate on the INTERMEDIARY neomodel class.
model = INTERMEDIARY
# Node label used in error messages.
label = model.__label__
@router.get("/", response_model=List[Intermediary])
def read_intermediaries(skip:int=0, limit:int=25) -> Any:
    """List intermediary nodes, paged by slicing the neomodel node set."""
    results = model.nodes[skip:limit]
    return [Intermediary(**result.serialize.get("node_properties")) for result in results]
@router.get("/{node_id}", response_model=IntermediaryOut)
def read_intermediary(node_id: str) -> Any:
    """Fetch one intermediary plus its connected nodes; 404 if absent."""
    node = model.nodes.get_or_none(node_id=node_id)
    if not node:
        raise HTTPException(status_code=404, detail=f"{label.title()} not found")
    return IntermediaryOut(**node.serialize.get("node_properties"), connected_nodes=node.serialize_connections)
@router.post("/search", response_model=List[IntermediaryOut])
def search_intermediaries(query: IntermediaryIn, skip=0, limit=25) -> Any:
    """Filter intermediaries by the non-null fields of the query payload.

    NOTE(review): skip/limit are accepted but never applied to the result
    set -- confirm whether paging was intended here.
    """
    filters = {k: v for k, v in query.dict().items() if v is not None}
    results = model.nodes.filter(**filters)
    return [IntermediaryOut(**result.serialize.get("node_properties"), connected_nodes=result.serialize_connections) for result in results]
@router.get("/{node_id}/react-force-graph", response_model=ReactForceGraphInput)
def custom_read_intermediary(node_id: str) -> Any:
    """Return the node's neighbourhood in react-force-graph input format."""
    return get_nodes_and_relationships(model, node_id=node_id)
987,712 | a9da5e72c839d70a6d1dd187f5b46ce9f7917035 | # created by jenny trac
# created on Nov 23 2017
# program lets user choose how many rows and columns
# program shows all the numbers and calculates the average
import ui
from numpy import random
# Inclusive lower / exclusive upper bound for numpy.random.randint.
minimum = 1
maximum = 50
def make_the_array(rows, columns):
    """Build a rows x columns nested list of random ints in [minimum, maximum).

    Returns the string sentinel "-1" (kept for caller compatibility) when
    either dimension is negative.

    BUG FIX: the original assigned into `range(rows)`, which is a list only
    in Python 2; under Python 3 item assignment on a range object raises
    TypeError. Rebuilt with list comprehensions.
    """
    if rows >= 0 and columns >= 0:
        return [[random.randint(minimum, maximum) for _ in range(columns)]
                for _ in range(rows)]
    return "-1"
def calculate_average(the_2d_array):
    """Return the mean of all elements of a 2-D array (0 for an empty array).

    BUG FIX: the original divided the grand total by the number of ROWS
    rather than the number of elements, and raised ZeroDivisionError on an
    empty array.
    """
    values = [v for row in the_2d_array for v in row]
    if not values:
        return 0
    return sum(values) / len(values)
def generate_touch_up_inside(sender):
    # Button handler: build the array, then show all numbers and the average.
    # input
    number_of_rows = int(view['rows_textfield'].text)
    number_of_columns = int(view['columns_textfield'].text)
    #process
    random_array = make_the_array(number_of_rows, number_of_columns)
    if random_array == "-1":
        # Negative dimensions: surface the sentinel in both output widgets.
        view['array_textview'].text = str(random_array)
        view['average_label'].text = str(random_array)
    else:
        average_of_array = calculate_average(random_array)
        #output
        view['array_textview'].text = str(random_array)
        view['average_label'].text = "The average is: " + str(average_of_array)
# Load the Pythonista UI layout and present it as a sheet.
view = ui.load_view()
view.present('sheet')
|
987,713 | 3bba041842775bea461c49a4ea6a950d9192aa9a | def nested_sum(t):
total = 0
for x in t:
if isinstance(x, list):
total = nested_sum(x) + total
else:
total += x
return total
mylist = [1,2,3,[4,5],[6,7,8]]
print(nested_sum(mylist))
|
987,714 | 08126c38341f0da1dff305cdce9e15711059fe6d | import numpy as np
import matplotlib.pyplot as plt
import random
phi = np.array(['0.44', '0.38', '0.16', '0.05', '0.58', '0.85', '0.20', '-0.37', '0.46', '0.59', '1.25', '-0.34', '1.12', '-1.07', '-0.84', '-0.32', '0.65', '-0.38', '0.79', '1.01', '0.42', '0.45', '-0.10', '-0.26', '0.29', '0.93', '1.15', '-1.00', '0.37', '-0.83', '-0.83', '-0.19', '0.60', '0.09', '0.66', '0.36', '0.60', '-0.05', '0.25', '-0.35', '0.81', '0.85', '0.53', '-1.10', '0.71', '-0.66', '-0.07', '-0.28', '0.51', '0.34', '0.71', '0.70', '0.57', '0.44', '0.02', '0.12', '0.05', '0.51', '0.39', '-0.91', '0.08', '-0.49', '-0.84', '0.42', '0.82', '0.01', '-0.02', '0.58', '0.67', '0.26', '0.06', '-0.49', '-0.05', '1.24', '1.01', '-0.84', '1.01', '-0.72', '-0.12', '-0.27', '0.85', '0.16', '0.79', '0.91', '0.12', '0.33', '-0.13', '0.01', '0.72', '0.71', '0.73', '-1.13', '0.72', '-1.23', '-0.32', '-0.41', '1.10', '-0.05', '0.09', '1.01', '0.27', '0.30', '-0.05', '-0.15', '0.40', '0.44', '1.31', '-0.65', '0.65', '-1.32', '-1.17', '0.78', '0.52', '0.15', '0.38', '0.17', '0.27', '0.56', '-0.02', '0.23', '0.72', '0.76', '1.13', '-0.45', '1.12', '-0.63', '-1.20', '1.03', '1.07', '-0.22', '0.61', '0.44', '0.87', '0.87', '0.18', '0.13', '0.36', '0.79', '1.50', '-0.80', '1.47', '-0.77', '-0.65', '0.12', '1.34', '0.03', '0.22', '0.22', '0.47', '0.56', '-0.06', '0.37', '0.71', '1.18', '0.95', '-0.25', '1.44', '-0.38', '-0.34', '0.55', '0.46', '0.18', '0.55', '0.52', '0.42', '-0.04', '-0.10', '-0.42', '0.31', '0.78', '0.82', '-0.46', '1.15', '-0.48', '-1.05', '0.70', '0.47', '0.21', '0.29', '0.26', '1.01', '0.28', '-0.27', '-0.48', '0.20', '0.68', '0.82', '-0.32', '1.00', '-0.56', '-0.94', '0.51', '0.93', '0.37', '0.87', '0.81', '0.37', '0.73', '-0.61', '-0.06', '0.53', '0.81', '0.88', '-0.51', '1.16', '-1.21', '-0.39', '0.46', '0.97', '0.41', '0.52', '0.63', '1.01', '0.71', '0.03', '0.41', '0.63', '0.50', '0.83', '-0.45', '1.39', '-0.51', '-0.71', '-0.12', '0.72', '-0.16', '-0.00', '0.37', '0.09', '0.42', '-0.16', '0.31', '0.65', '0.55', '1.49', '-1.08', '0.91', '-0.60', 
'-0.60', '-0.47', '0.54', '-0.15', '0.81', '0.29', '0.34', '0.92', '0.18', '-0.27', '0.42', '0.31', '1.50', '-0.41', '1.02', '-0.91', '-0.68', '0.20', '0.40', '0.49', '0.82', '0.75', '0.98', '-0.06', '-0.61', '-0.46', '0.01', '0.39', '1.37', '-1.35', '0.28', '-0.89',
'0.26', '0.21', '1.00', '0.17', '0.08', '0.13', '0.71', '0.66', '-0.40', '-0.49', '0.25', '1.18', '1.24', '-1.29', '0.83', '-1.02', '-0.01', '-0.48', '0.56', '0.46', '0.14', '0.31', '0.95', '0.37', '-0.34', '-0.38', '0.92', '0.52', '1.53', '-0.60', '1.27', '-0.88', '-0.38', '0.10', '1.35', '0.50', '0.58', '0.05', '0.47', '0.55', '-0.41', '-0.52', '0.83', '0.51', '1.61', '-0.07', '1.10', '-0.87', '-0.39', '0.27', '0.69', '0.90', '0.81', '0.64', '0.59', '0.53', '-0.26', '0.09', '-0.49', '0.06', '0.16', '-0.23', '0.32', '0.26', '0.57', '-1.02', '0.30', '0.20', '0.42', '-0.20', '0.12', '0.95', '-0.31', '0.84', '-0.70', '-0.09', '0.62', '-0.18', '-0.05', '0.15', '0.58', '-1.06', '0.76', '0.46', '0.90', '0.47', '0.70', '0.68', '-0.33', '0.63', '-0.22', '0.16', '0.11', '0.35', '-0.09', '-0.22', '0.51', '-1.20', '0.07', '0.15', '0.53', '0.08', '0.51', '0.04', '0.49', '0.22', '-0.82', '-0.01', '0.77', '0.17', '0.23', '-0.12', '0.85', '-0.88', '0.25', '0.57', '0.22', '0.15', '0.41', '0.56', '0.00', '0.90', '-0.66', '0.11', '0.84', '0.30', '0.48', '-0.29', '0.14', '-0.46', '0.83', '1.04', '1.12', '0.74', '0.37', '-0.03', '-0.20', '0.62', '-0.70', '0.17', '0.65', '0.32', '-0.27', '0.01', '0.04', '-1.02', '-0.13', '0.17', '0.50', '0.56', '0.05', '0.48', '0.35', '0.62', '-0.17', '0.37', '0.49', '-0.22', '-0.12', '0.03', '0.94', '-0.59', '0.58', '0.61', '1.06', '0.09', '0.90', '0.60', '0.10', '0.79', '0.16', '0.31', '-0.19', '0.28', '-0.38', '-0.04', '0.35', '-1.08', '0.24', '0.99', '0.68', '0.14', '0.71', '0.31', '0.38', '0.92', '-0.17', '-0.16', '0.38', '0.02', '-0.36', '0.47', '-0.16', '-0.40', '0.39', '0.88', '0.94', '0.44', '0.60', '0.29', '-0.16', '0.76', '-0.15', '-0.62', '0.08', '0.08', '0.33', '0.31', '-0.09', '-0.60', '0.18', '0.30', '1.10', '-0.18', '0.39', '0.82', '0.53', '0.34', '-0.46', '0.13', '0.65', '0.25', '-0.37', '-0.46', '0.79', '-0.70', '0.04', '0.41', '0.86', '-0.09', '0.30', '0.44', '-0.14', '0.68', '-0.45', '-0.07', '-0.13', '-0.13', '-0.32', '0.18', 
'0.73', '-0.46', '0.01', '0.81', '1.15', '0.18', '0.71', '0.86', '0.09', '0.65',
'-0.14', '0.13', '0.08', '0.02', '0.59', '-0.12', '-0.22', '0.13', '0.13', '0.14', '0.88', '0.03', '0.85', '0.77', '0.29', '0.15', '-0.26', '-0.19', '0.62', '-0.05', '-0.07', '0.43', '0.26', '-0.40', '0.29', '0.61', '0.88', '0.12', '0.97', '0.86', '0.32', '0.70', '-0.84', '-0.13', '0.43', '0.06', '-0.61', '-0.15', '0.44', '-0.70', '0.78', '0.41', '0.83', '0.52', '0.15', '0.49', '0.03', '0.18', '-0.34', '-0.04', '0.61', '0.08', '-0.39', '0.19', '-0.28', '-0.73', '0.87', '0.20', '0.41', '0.08', '0.21', '0.87', '0.61', '0.52', '-0.14', '0.12', '0.01', '0.38', '0.40', '-0.06', '-0.23', '-0.15', '-0.09', '0.74', '0.77', '0.19', '0.52', '0.16', '-0.17', '0.90', '-0.68', '-0.20', '0.30', '-0.14', '0.37', '0.15', '0.18', '0.07', '0.35', '1.01', '0.88', '0.35', '0.53', '0.74', '-0.17', '0.03', '-0.04', '-0.25', '-0.05', '-0.14', '-0.01', '-0.50', '0.25', '-0.40', '0.18', '0.58', '0.86', '0.51', '0.77', '0.95', '0.22', '0.52', '-0.75', '0.26', '-0.02', '0.47', '0.28', '0.20', '0.62', '-0.57'])
simple = np.array(['0.86', '-0.08', '-0.08', '1.30', '1.41', '-0.54', '-0.07', '0.06', '0.38', '0.12', '0.14', '-0.27', '0.11', '0.04', '-0.26', '-0.26'])
mine = phi.reshape(-1,16)[:int(len(phi.reshape(-1,16))/2)]
dine = phi.reshape(-1,16)[int(len(phi.reshape(-1,16))/2):]
MINE = np.zeros(mine.shape)
DINE = np.zeros(dine.shape)
# Convert the string halves `mine`/`dine` into float matrices element-wise.
for i in range(len(mine)):
    for j in range(len(mine[0])):
        MINE[i,j] = float(mine[i,j])
        DINE[i,j] = float(dine[i,j])
# NOTE(review): `simple` is a numpy array of strings, so assigning
# float(...) back into it casts the value back to str -- this loop is
# effectively a no-op; later code re-applies float() per element.
for k in range(len(simple)):
    simple[k] = float(simple[k])
# Correlation matrices are computed but the results are discarded.
np.corrcoef(MINE)
np.corrcoef(DINE)
# Per-column difference between the (rounded) column mean and `simple`.
b = []
for i in range(len(MINE[0])):
    a = np.mean(float(round(np.mean(MINE[:,i]),2)) - float(simple[i]))
    c = np.mean(float(round(np.mean(MINE[:,i]),2)) - float(simple[i]))
    b.append(a)
plt.hist(np.mean(abs(np.array(b))))
print(np.mean(abs(np.array(b))))
# Permutation check: repeat the comparison 20 times with `simple` shuffled.
for j in range(20):
    b = []
    random.shuffle(simple)
    for i in range(len(MINE[0])):
        a = np.mean(float(round(np.mean(MINE[:,i]),2)) - float(simple[i]))
        c = np.mean(float(round(np.mean(MINE[:,i]),2)) - float(simple[i]))
        b.append(a)
    plt.hist(np.mean(abs(np.array(b))))
plt.show()
# Per-column histograms of DINE against the matching `simple` value.
for i in range(len(MINE[0])):
    plt.hist(DINE[:,i])
    plt.hist(float(simple[i]))
#plt.show()
# The following bare list literals are no-op expressions -- presumably
# pasted sample outputs kept for reference; verify before deleting.
['1.54', '-0.54', '-0.03', '1.34', '1.98', '-0.13', '-0.06', '0.13', '0.28', '0.00', '0.02', '-0.12', '-0.06', '-0.25', '-0.37', '0.13']
['0.98', '-0.30', '-0.03', '0.67', '0.58', '0.02', '-0.08', '0.08', '0.32', '0.13', '-0.19', '-0.31', '0.04', '-0.16', '-0.30', '0.10']
['1.91', '-0.42', '-0.24', '1.12', '1.66', '-0.26', '0.09', '0.11', '0.31', '0.09', '-0.19', '0.09', '0.05', '-0.32', '-0.84', '0.48']
['1.30', '0.36', '0.06', '0.39', '1.17', '0.36', '-0.11', '0.03', '0.18', '0.04', '0.03', '0.16', '-0.09', '-0.54', '-0.08', '-0.13']
# isBase + isCaptured + isAlive + [sum(mine[:6]), sum(mine[6:12])] + [sum(dine[:6]), sum(dine[6:12])] + splitDistance + baseDistance
987,715 | e7cc28ab43f77bf744564ce2eacc4b476f1cab48 | import sqlite3
DB_PATH = "../../db/users.db"


def create_table(conn):
    """Create the users table on the given connection if it does not exist."""
    conn.cursor().execute('''CREATE TABLE IF NOT EXISTS users
                 (id UNIQUE, username text, firstname text , lastname text)''')


def insert_id(info):
    """Insert a Telegram chat/user record, ignoring duplicate ids.

    `info` is a telebot-style message: reads info.chat.id and
    info.from_user.{username, first_name, last_name}.
    """
    conn = set_connection()
    try:
        # `with conn` opens a transaction and commits on success -- it does
        # NOT close the connection, hence the explicit close() below.
        with conn:
            conn.cursor().execute(
                'INSERT OR IGNORE INTO users VALUES (:id, :username, :first, :last)',
                {'id': info.chat.id, 'username': info.from_user.username,
                 'first': info.from_user.first_name, 'last': info.from_user.last_name})
    finally:
        conn.close()


def search_id(chat_id):
    """Return the users row for chat_id, or None if not found."""
    conn = set_connection()
    try:
        return conn.cursor().execute(
            '''SELECT * FROM users WHERE id = :id''', {"id": chat_id}).fetchone()
    finally:
        conn.close()


def set_connection(db_path=DB_PATH):
    """Open and return a sqlite3 connection to db_path."""
    return sqlite3.connect(db_path)
|
987,716 | ce2ca5dfee78cbac866f839b279cdb19828404d5 | from fractions import gcd
from itertools import accumulate
def max_gcd_after_removal(values):
    """Maximum possible gcd of `values` after deleting exactly one element.

    Uses prefix/suffix gcd arrays so the whole computation is O(n).
    """
    from math import gcd  # fractions.gcd was removed in Python 3.9
    prefix = [0] + list(accumulate(values, gcd))[:-1]
    suffix = list(reversed(list(accumulate(reversed(values), gcd))))[1:] + [0]
    # gcd(x, 0) == x, so the sentinel zeros at each end are harmless.
    return max(gcd(p, s) for p, s in zip(prefix, suffix))


def _main():
    # stdin: first line N (length, read but unused), second line N integers.
    int(input())
    values = tuple(map(int, input().split()))
    print(max_gcd_after_removal(values))


if __name__ == "__main__":
    _main()
|
987,717 | 9c42e6ec860d97fa1b4ddf129d130d5f762bd660 | import sqlite3
from django.shortcuts import render, redirect, reverse
from libraryapptwo.models import Library, model_factory, Book
from django.contrib.auth.decorators import login_required
from ..connection import Connection
def create_library(cursor, row):
    """sqlite3 row_factory: build a (Library, Book) pair from one joined row.

    Each row of the library/book JOIN yields a fresh Library (with an empty
    books list) and the Book from that row; the caller groups them by id.
    """
    _row = sqlite3.Row(cursor, row)
    library = Library()
    library.id = _row["id"]
    library.title = _row["title"]
    library.address = _row["address"]
    # blank books list to be populated later
    library.books = []
    book = Book()
    book.id = _row["book_id"]
    book.title = _row["book_title"]
    book.author = _row["author"]
    book.isbn = _row["isbn"]
    book.year_published = _row["year_published"]
    # return tuple containing library and book build from data in current row of data set
    return (library, book)
@login_required
def list_libraries(request):
    """GET: render every library with its books; POST: create a library.

    Uses raw sqlite3 (with the create_library row factory) rather than the
    Django ORM. NOTE(review): the inner JOIN drops libraries that have no
    books -- confirm that is intended.
    """
    if request.method == 'GET':
        with sqlite3.connect(Connection.db_path) as conn:
            conn.row_factory = create_library
            db_cursor = conn.cursor()
            db_cursor.execute("""
            SELECT
                li.id,
                li.title,
                li.address,
                b.id book_id,
                b.title book_title,
                b.author,
                b.year_published,
                b.isbn
            FROM libraryapptwo_library li
            JOIN libraryapptwo_book b ON li.id = b.location_id
            """)
            # all_libraries = []
            all_libraries = db_cursor.fetchall()
            # Group the (library, book) pairs by library id.
            library_groups = {}
            for (library, book) in all_libraries:
                # if dict has a key of current library id value, add the key and set the value to the current library
                if library.id not in library_groups:
                    library_groups[library.id] = library
                    library_groups[library.id].books.append(book)
                # if key does not exist, append current book to the list of books for current library
                else:
                    library_groups[library.id].books.append(book)
            print("test1", library_groups)
            # for row in dataset:
            #     lib = Library()
            #     lib.id = row["id"]
            #     lib.address = row["address"]
            #     lib.title = row["title"]
            #     all_libraries.append(lib)
        template = 'libraries/list.html'
        context = {
            'all_libraries': library_groups.values()
        }
        return render(request, template, context)
    elif request.method == 'POST':
        # Create a new library from the posted form fields, then redirect
        # back to the list view (POST/redirect/GET pattern).
        form_data = request.POST
        with sqlite3.connect(Connection.db_path) as conn:
            db_cursor = conn.cursor()
            db_cursor.execute("""
            INSERT INTO libraryapptwo_library
            (
                title, address
            )
            VALUES (?, ?)
            """,
            (form_data['title'], form_data['address']))
        return redirect(reverse('libraryapptwo:libraries'))
|
987,718 | 3720bb867c5828082ef281807cf2fd082e3573b9 | class Node:
    def __init__(self, value):
        # A single BST node: payload value plus left/right child links.
        self.left = None
        self.value = value
        self.right = None
class BST:
    """Binary search tree of unique values: insert, delete, search, traverse."""

    def __init__(self):
        self.root = None

    # insertion in binary search tree
    def insert(self, value):
        """Insert value iteratively; duplicates are rejected with a message."""
        if self.root is None:
            self.root = Node(value)
            return
        temp = self.root
        while temp:
            if temp.value == value:
                # BUG FIX: message typo "inserion" -> "insertion".
                print(f"insertion failed, value exist {value}")
                return
            if temp.value > value:
                if not temp.left:
                    temp.left = Node(value)
                    return
                temp = temp.left
            else:
                if not temp.right:
                    temp.right = Node(value)
                    return
                temp = temp.right

    # deletion from binary search tree
    def deleteNode(self, value):
        """Remove value from the tree if present (was an empty stub)."""
        def _delete(node, target):
            if node is None:
                return None
            if target < node.value:
                node.left = _delete(node.left, target)
            elif target > node.value:
                node.right = _delete(node.right, target)
            else:
                # Zero or one child: splice the child into the parent link.
                if node.left is None:
                    return node.right
                if node.right is None:
                    return node.left
                # Two children: copy the in-order successor's value here,
                # then delete the successor from the right subtree.
                succ = node.right
                while succ.left:
                    succ = succ.left
                node.value = succ.value
                node.right = _delete(node.right, succ.value)
            return node

        self.root = _delete(self.root, value)
        return

    # searching in binary search tree
    def searchWrap(self, root, value):
        """Recursive lookup; returns the matching Node or None."""
        if root is None or root.value == value:
            return root
        if root.value < value:
            return self.searchWrap(root.right, value)
        return self.searchWrap(root.left, value)

    def search(self, value):
        """Print whether value is present in the tree."""
        root = self.searchWrap(self.root, value)
        if root is None:
            print(f"Not found in tree {value}")
        else:
            print(f"Found in tree {value}")

    def inorder(self, root: Node):
        """Print an in-order (sorted) traversal on one line."""
        if root:
            self.inorder(root.left)
            print(root.value, end=" ")
            self.inorder(root.right)

    def showBST(self, msg):
        """Print msg, then the tree contents in sorted order."""
        print(msg)
        self.inorder(self.root)
        print()
if __name__ == "__main__":
    # Build a small tree; 10 is inserted twice to show duplicate rejection.
    tree = BST()
    for value in (30, 50, 15, 20, 10, 40, 10, 60):
        tree.insert(value)
    tree.showBST("before")
    # bt.showBST("after delete")
    tree.search(15)
    tree.search(400)
987,719 | f318b0a51e029cd4df1dc70171ccfc0b5798206d | name = "sahil"
tuples = ("sa", "ra")
lists = ["s", "a", "h", "i", "l"]
dictionary = {"sahil": "raza"}
# One binding per line; the trailing " " entry reproduces the original's
# final separator argument.
output_lines = (f'name = "{name}"', f"tuples = {tuples}", f"lists = {lists}",
                f"dictionary = {dictionary}", " ")
print("\n".join(output_lines))
987,720 | d39b8d5b5cccdd6b84b58ce4a08e1050f2aff4a6 | import argparse
class Options:
    """Command-line options for the cascade-training pipeline.

    Parses on construction and prints the resulting values; the parsed
    namespace is available as `self.options`.
    """

    def __init__(self, args=None):
        # `args` (new, backward compatible): an optional list of argument
        # strings for testing; None parses sys.argv as before.
        self.parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
        self.parser.add_argument("--video_file", type=str, default="", help="filename of video file")
        self.parser.add_argument("--positive_folder", type=str, default="positive_images",
                                 help="path to positive images folder")
        self.parser.add_argument("--negative_folder", type=str, default="negative_images",
                                 help="path to negative images folder")
        self.parser.add_argument("--sample_folder", type=str, default="sample_images",
                                 help="path to sample images folder")
        self.parser.add_argument("--output_folder", type=str, default="output",
                                 help="path to the training output")
        self.parser.add_argument("--image_size", type=int, default=32, help="height and width of the sampled image")
        self.parser.add_argument("--sample_count", type=int, default=128,
                                 help="the number of samples to generate per positive image")
        # Validation
        self.parser.add_argument("--cascade_file", type=str, default=r"classifier\haarcascade_frontalface_default.xml",
                                 help="filename of cascade file. Leave blank if cascade file does not exist")
        self.parser.add_argument("--scene_start", type=int, default=0, help="frame number of the scene start")
        self.parser.add_argument("--scene_end", type=int, default=1000, help="frame number of the scene end")
        self.options = self.parser.parse_args(args)
        # Print the option values to the console
        parsed = vars(self.options)
        print('------------ Options -------------')
        for k, v in sorted(parsed.items()):
            print('%s: %s' % (str(k), str(v)))
        print('-------------- End ----------------')
        print()
|
987,721 | bfa0406251e5deecec65e6a5555cf3aed613c92c | from weixin.contact.token import Weixin
import requests
import logging
import json
import pytest
class TestDepartment:
    """Integration tests for the WeChat Work department API (live HTTP)."""

    @classmethod
    def setup_class(cls):
        # Fetch one access token up front for the whole test class.
        print('\nsetup_class')
        Weixin.get_token()
        print(Weixin._token)

    def setup(self):
        print('setup...')

    def test_create_depth(self):
        """Create a 5-deep chain of departments, each nested in the previous."""
        parentid = 1
        for i in range(5):
            data = {
                "name": "广州研发中心分部_" + str(parentid),
                "parentid": parentid,
                "order": 1,
            }
            r = requests.post('https://qyapi.weixin.qq.com/cgi-bin/department/create',
                              params={'access_token': Weixin.get_token()}, json=data).json()
            logging.debug(r)
            # The next department is nested under the one just created.
            parentid = r['id']

    # BUG FIX: a missing comma after '東京動漫研究所' made Python concatenate
    # the last two string literals into one parameter, silently dropping a
    # test case. All four names are now distinct parameters.
    @pytest.mark.parametrize('name',
                             ['東京アニメーション研究所',
                              '도쿄 애니메이션 인스티튜트',
                              '東京動漫研究所',
                              'معهد طوكيو للرسوم المتحركة'])
    def test_create_order(self, name):
        """Department creation accepts non-ASCII names."""
        data = {
            "name": name,
            "parentid": 1,
            "order": 1,
        }
        r = requests.post('https://qyapi.weixin.qq.com/cgi-bin/department/create',
                          params={'access_token': Weixin.get_token()}, json=data).json()
        logging.debug(r)

    def test_get(self):
        """List all departments and log the JSON payload."""
        r = requests.get('https://qyapi.weixin.qq.com/cgi-bin/department/list',
                         params={'access_token': Weixin.get_token()}).json()
        logging.debug(json.dumps(r, indent=2))
|
987,722 | dc8995b61f4bc2480bac2516c92aabb1474e1a68 | #!/usr/bin/env python3
__author__ = "Christian F. Walter"
from math import factorial
def max_path_sum(triangle):
    """Return the maximum top-to-bottom path sum of a number triangle.

    `triangle` is a list of rows of numeric strings (or ints). Bottom-up
    dynamic programming: each cell absorbs the max of the two cells below.
    """
    rows = [[int(n) for n in row] for row in triangle]
    for r in range(len(rows) - 1, 0, -1):
        for i in range(len(rows[r - 1])):
            rows[r - 1][i] += max(rows[r][i], rows[r][i + 1])
    return rows[0][0]


if __name__ == "__main__":
    # Project Euler 67: read the 100-row triangle and print the best path.
    with open('p067_triangle.txt', 'r') as tri_file:
        tri = [row.strip().split() for row in tri_file]
    print(max_path_sum(tri))
|
987,723 | f144f1a7c20ffe5edd6f558237a32d224b49bf85 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import argparse
import sys
import tempfile
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf
from skimage import transform
import tensorflow as tf
import input
# load the train and test data (input is a project-local loader module)
train_data, train_label = input.load_data('image/train.txt')
test_data, test_label = input.load_data('image/val.txt')
# resize the training and testing pictures to 64x64 RGB.
# NOTE(review): the placeholder later in this file expects 28x28x3 input --
# these sizes disagree; confirm the intended resolution.
train_data_m = []
for image in train_data:
    img64 = transform.resize(image, (64, 64, 3))
    train_data_m.append(img64)
test_data_m = []
for image in test_data:
    img64 = transform.resize(image, (64, 64, 3))
    test_data_m.append(img64)
def _one_hot(labels, num_classes=5):
    """Map integer class ids in [0, num_classes) to one-hot float vectors.

    Replaces two duplicated if/elif chains. Unlike the old chains -- which
    silently skipped any label outside 0..4 and thereby misaligned labels
    with images -- out-of-range ids here surface as an error (or should be
    validated by the caller).
    """
    eye = np.eye(num_classes)
    return [eye[int(label)] for label in labels]


train_labels_ = _one_hot(train_label)
test_labels_ = _one_hot(test_label)
FLAGS = None
def conv2d(x, W):
    """conv2d returns a 2d convolution layer with full stride."""
    return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')
def max_pool_2x2(x):
    """max_pool_2x2 downsamples a feature map by 2X."""
    return tf.nn.max_pool(x, ksize=[1, 2, 2, 1],strides=[1, 2, 2, 1], padding='SAME')
def weight_variable(shape):
    """weight_variable generates a weight variable of a given shape."""
    # Small gaussian init breaks symmetry between units.
    initial = tf.truncated_normal(shape, stddev=0.1)
    return tf.Variable(initial)
def bias_variable(shape):
    """bias_variable generates a bias variable of a given shape."""
    # Slightly positive bias keeps ReLU units active initially.
    initial = tf.constant(0.1, shape=shape)
    return tf.Variable(initial)
def deepnn(x):
    """deepnn builds the graph for a deep net classifying 28x28 RGB images
    into 5 classes.

    Args:
      x: an input tensor with the dimensions (N_examples, 28*28*3), i.e. a
        flattened 3-channel image.

    Returns:
      A tuple (y_conv, keep_prob). y_conv is a tensor of shape
      (N_examples, 5) holding the class logits; keep_prob is a scalar
      placeholder for the dropout keep probability.
    """
    # Reshape the flat input back to NHWC; three channels because the
    # images are RGB.
    with tf.name_scope('reshape'):
        x_image = tf.reshape(x, [-1, 28, 28, 3])
    # First convolutional layer - maps the 3-channel image to 32 feature maps.
    with tf.name_scope('conv1'):
        # BUG FIX: the filter's in_channels must match the input's 3 RGB
        # channels (was 1, which fails when the graph is built).
        W_conv1 = weight_variable([5, 5, 3, 32])
        b_conv1 = bias_variable([32])
        h_conv1 = tf.nn.relu(conv2d(x_image, W_conv1) + b_conv1)
    # Pooling layer - downsamples by 2X.
    with tf.name_scope('pool1'):
        h_pool1 = max_pool_2x2(h_conv1)
    # Second convolutional layer -- maps 32 feature maps to 64.
    with tf.name_scope('conv2'):
        W_conv2 = weight_variable([5, 5, 32, 64])
        b_conv2 = bias_variable([64])
        h_conv2 = tf.nn.relu(conv2d(h_pool1, W_conv2) + b_conv2)
    # Second pooling layer.
    with tf.name_scope('pool2'):
        h_pool2 = max_pool_2x2(h_conv2)
    # Fully connected layer 1 -- after 2 rounds of 2x downsampling, the
    # 28x28 image is down to 7x7x64 feature maps -- maps this to 1024
    # features.
    with tf.name_scope('fc1'):
        W_fc1 = weight_variable([7 * 7 * 64, 1024])
        b_fc1 = bias_variable([1024])
        h_pool2_flat = tf.reshape(h_pool2, [-1, 7*7*64])
        h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1)
    # Dropout - controls the complexity of the model, prevents co-adaptation
    # of features.
    with tf.name_scope('dropout'):
        keep_prob = tf.placeholder(tf.float32)
        h_fc1_drop = tf.nn.dropout(h_fc1, keep_prob)
    # Map the 1024 features to the 5 output classes.
    with tf.name_scope('fc2'):
        W_fc2 = weight_variable([1024, 5])
        b_fc2 = bias_variable([5])
        y_conv = tf.matmul(h_fc1_drop, W_fc2) + b_fc2
    return y_conv, keep_prob
# Create a graph to hold the model.
graph = tf.Graph()
# Create model in the graph.
with graph.as_default():
    # Placeholders for inputs and labels (y_ = train labels, y = test labels;
    # both are one-hot over 5 classes).
    x = tf.placeholder(tf.float32, [None, 28, 28, 3])
    y_ = tf.placeholder(tf.float32, [None, 5])
    y = tf.placeholder(tf.float32, [None, 5])
    # Flatten input from: [None, height, width, channels]
    # To: [None, height * width * channels] == [None, 3072]
    x_flat = tf.contrib.layers.flatten(x)
    y_conv, keep_prob = deepnn(x_flat)
    # ============-------------===============-------------=============
    with tf.name_scope('loss'):
        # Mean softmax cross-entropy against the one-hot training labels.
        cross_entropy = tf.nn.softmax_cross_entropy_with_logits(labels=y_,
                                                                logits=y_conv)
        cross_entropy = tf.reduce_mean(cross_entropy)
    with tf.name_scope('adam_optimizer'):
        train_step = tf.train.AdamOptimizer(1e-4).minimize(cross_entropy)
    with tf.name_scope('accuracy'):
        correct_prediction = tf.equal(tf.argmax(y_conv, 1), tf.argmax(y_, 1))
        correct_prediction = tf.cast(correct_prediction, tf.float32)
        accuracy = tf.reduce_mean(correct_prediction)
    # Dump the graph definition to a temp dir for TensorBoard.
    graph_location = tempfile.mkdtemp()
    print('Saving graph to: %s' % graph_location)
    train_writer = tf.summary.FileWriter(graph_location)
    train_writer.add_graph(tf.get_default_graph())
    with tf.name_scope('test_accuracy'):
        # Same accuracy metric, computed against the test-label placeholder.
        prediction = tf.equal(tf.argmax(y_conv, 1), tf.argmax(y, 1))
        prediction = tf.cast(prediction, tf.float32)
        test_accuracy = tf.reduce_mean(prediction)
    init = tf.initialize_all_variables()
with tf.Session(graph=graph) as sess:
    sess.run(init)
    for i in range(20):
        # BUG FIX: feed the one-hot label lists (train_labels_), not the raw
        # integer labels -- y_ is a [None, 5] one-hot placeholder.
        train, loss_value, accu = sess.run(
            [train_step, cross_entropy, accuracy],
            feed_dict={x: train_data_m, y_: train_labels_, keep_prob: 0.5})
        if i % 10 == 0:
            print('step ' + str(i) + ' Loss: ', loss_value)
            print(accu)
    # Evaluate with one-hot test labels and dropout disabled: keep_prob must
    # be 1.0 at inference (0.5 would randomly zero half the fc1 activations).
    predicted = sess.run(test_accuracy,
                         feed_dict={x: test_data_m, y: test_labels_, keep_prob: 1.0})
|
987,724 | 844a8ff13f424c9dae04b209ee92c99f4ee6785d | import unittest
import asserts
import masTest
if __name__ == "__main__":
    # Aggregate the test suites from both modules and run them verbosely.
    loader = unittest.TestLoader()
    suite = unittest.TestSuite()
    for module in (asserts, masTest):
        suite.addTests(loader.loadTestsFromModule(module))
    runner = unittest.TextTestRunner(verbosity=3)
    results = runner.run(suite)
987,725 | 3fc85f6d7d9117ebf2c8378fe252db26e915c78e | import numpy as np
import pytest
from numpy.testing import assert_allclose
from scipy import stats
from sklearn.metrics import (
mean_absolute_error,
mean_squared_error,
median_absolute_error,
r2_score,
)
from xskillscore.core.np_deterministic import (
_mae,
_mape,
_median_absolute_error,
_mse,
_pearson_r,
_pearson_r_p_value,
_r2,
_rmse,
_smape,
_spearman_r,
_spearman_r_p_value,
)
@pytest.fixture
def a():
    # Random (3, 4, 5) array used as the first input of every metric test.
    return np.random.rand(3, 4, 5)
@pytest.fixture
def b():
    # Random (3, 4, 5) array used as the second input of every metric test.
    return np.random.rand(3, 4, 5)
# standard params in this testing file
skipna = False
weights = None
def test_pearson_r_nd(a, b):
    """_pearson_r matches scipy.stats.pearsonr slice-by-slice on every axis."""
    for axis in (0, 1, 2):
        ref_a = np.moveaxis(a, axis, -1)
        ref_b = np.moveaxis(b, axis, -1)
        expected = np.empty(ref_a.shape[:-1])
        for i in range(expected.shape[0]):
            for j in range(expected.shape[1]):
                expected[i, j] = stats.pearsonr(ref_a[i, j], ref_b[i, j])[0]
        actual = _pearson_r(a, b, weights, axis, skipna)
        assert_allclose(actual, expected)
def test_r2_nd(a, b):
    """_r2 matches sklearn's r2_score slice-by-slice on every axis."""
    for axis in (0, 1, 2):
        ref_a = np.moveaxis(a, axis, -1)
        ref_b = np.moveaxis(b, axis, -1)
        expected = np.empty(ref_a.shape[:-1])
        for i in range(expected.shape[0]):
            for j in range(expected.shape[1]):
                expected[i, j] = r2_score(ref_a[i, j], ref_b[i, j])
        actual = _r2(a, b, weights, axis, skipna)
        assert_allclose(actual, expected)
def test_pearson_r_p_value_nd(a, b):
    """_pearson_r_p_value matches scipy's p-value slice-by-slice on every axis."""
    for axis in (0, 1, 2):
        ref_a = np.moveaxis(a, axis, -1)
        ref_b = np.moveaxis(b, axis, -1)
        expected = np.empty(ref_a.shape[:-1])
        for i in range(expected.shape[0]):
            for j in range(expected.shape[1]):
                expected[i, j] = stats.pearsonr(ref_a[i, j], ref_b[i, j])[1]
        actual = _pearson_r_p_value(a, b, weights, axis, skipna)
        assert_allclose(actual, expected)
def test_spearman_r_nd(a, b):
    """Check _spearman_r against scipy's spearmanr correlation per axis."""
    for ax in range(3):
        am = np.moveaxis(a, ax, -1)
        bm = np.moveaxis(b, ax, -1)
        dims = [n for k, n in enumerate(np.shape(a)) if k != ax]
        expected = np.squeeze(np.take(a, 0, axis=ax)).copy()
        for i in range(dims[0]):
            for j in range(dims[1]):
                # spearmanr returns (rho, p); keep only the correlation.
                expected[i, j] = stats.spearmanr(am[i, j, :], bm[i, j, :])[0]
        actual = _spearman_r(a, b, weights, ax, skipna)
        assert_allclose(actual, expected, atol=1e-5)
def test_spearman_r_p_value_nd(a, b):
    """Check _spearman_r_p_value against scipy's spearmanr p-value per axis."""
    nan_policy = 'propagate'  # scipy default, stated explicitly
    for ax in range(3):
        am = np.moveaxis(a, ax, -1)
        bm = np.moveaxis(b, ax, -1)
        dims = [n for k, n in enumerate(np.shape(a)) if k != ax]
        expected = np.squeeze(np.take(a, 0, axis=ax)).copy()
        for i in range(dims[0]):
            for j in range(dims[1]):
                expected[i, j] = stats.spearmanr(
                    am[i, j, :], bm[i, j, :], nan_policy=nan_policy)[1]
        actual = _spearman_r_p_value(a, b, weights, ax, skipna)
        assert_allclose(actual, expected, atol=1e-5)
def test_rmse_nd(a, b):
    """Check _rmse against sqrt of sklearn's mean_squared_error per axis."""
    for ax in range(3):
        am = np.moveaxis(a, ax, -1)
        bm = np.moveaxis(b, ax, -1)
        dims = [n for k, n in enumerate(np.shape(a)) if k != ax]
        expected = np.squeeze(np.take(a, 0, axis=ax)).copy()
        for i in range(dims[0]):
            for j in range(dims[1]):
                expected[i, j] = np.sqrt(
                    mean_squared_error(am[i, j, :], bm[i, j, :]))
        actual = _rmse(a, b, weights, ax, skipna)
        assert_allclose(actual, expected)
def test_mse_nd(a, b):
    """Check _mse against sklearn's mean_squared_error per axis."""
    for ax in range(3):
        am = np.moveaxis(a, ax, -1)
        bm = np.moveaxis(b, ax, -1)
        dims = [n for k, n in enumerate(np.shape(a)) if k != ax]
        expected = np.squeeze(np.take(a, 0, axis=ax)).copy()
        for i in range(dims[0]):
            for j in range(dims[1]):
                expected[i, j] = mean_squared_error(am[i, j, :], bm[i, j, :])
        actual = _mse(a, b, weights, ax, skipna)
        assert_allclose(actual, expected)
def test_mae_nd(a, b):
    """Check _mae against sklearn's mean_absolute_error per axis."""
    for ax in range(3):
        am = np.moveaxis(a, ax, -1)
        bm = np.moveaxis(b, ax, -1)
        dims = [n for k, n in enumerate(np.shape(a)) if k != ax]
        expected = np.squeeze(np.take(a, 0, axis=ax)).copy()
        for i in range(dims[0]):
            for j in range(dims[1]):
                expected[i, j] = mean_absolute_error(am[i, j, :], bm[i, j, :])
        actual = _mae(a, b, weights, ax, skipna)
        assert_allclose(actual, expected)
def test_median_absolute_error_nd(a, b):
    """Check _median_absolute_error against sklearn per axis (no weights)."""
    for ax in range(3):
        am = np.moveaxis(a, ax, -1)
        bm = np.moveaxis(b, ax, -1)
        dims = [n for k, n in enumerate(np.shape(a)) if k != ax]
        expected = np.squeeze(np.take(a, 0, axis=ax)).copy()
        for i in range(dims[0]):
            for j in range(dims[1]):
                expected[i, j] = median_absolute_error(
                    am[i, j, :], bm[i, j, :])
        # Note: unlike the other metrics, this one takes no weights argument.
        actual = _median_absolute_error(a, b, ax, skipna)
        assert_allclose(actual, expected)
def test_mape_nd(a, b):
    """Check _mape via MAE of values normalized by the reference per axis."""
    for ax in range(3):
        am = np.moveaxis(a, ax, -1)
        bm = np.moveaxis(b, ax, -1)
        dims = [n for k, n in enumerate(np.shape(a)) if k != ax]
        expected = np.squeeze(np.take(a, 0, axis=ax)).copy()
        for i in range(dims[0]):
            for j in range(dims[1]):
                s_a = am[i, j, :]
                s_b = bm[i, j, :]
                # Dividing both series by the reference turns MAE into MAPE.
                expected[i, j] = mean_absolute_error(s_a / s_a, s_b / s_a)
        actual = _mape(a, b, weights, ax, skipna)
        assert_allclose(actual, expected)
def test_smape_nd(a, b):
    """Check _smape via MAE of values scaled by |a| + |b| per axis."""
    for ax in range(3):
        am = np.moveaxis(a, ax, -1)
        bm = np.moveaxis(b, ax, -1)
        dims = [n for k, n in enumerate(np.shape(a)) if k != ax]
        expected = np.squeeze(np.take(a, 0, axis=ax)).copy()
        for i in range(dims[0]):
            for j in range(dims[1]):
                s_a = am[i, j, :]
                s_b = bm[i, j, :]
                denom = np.absolute(s_a) + np.absolute(s_b)
                expected[i, j] = mean_absolute_error(s_a / denom, s_b / denom)
        actual = _smape(a, b, weights, ax, skipna)
        assert_allclose(actual, expected)
|
987,726 | f49e9bc61cbf04ca12e2f5fa1dd0f939a5f4bab9 | import mysql.connector as mysql
from modules.classes import Car, User, DataBaseException
def commit_req(estimate_req):
    """Insert one price-estimate request row into the estimate_req table.

    Mutates the passed dict: sets 'model_id' to None so MySQL's
    auto_increment column assigns the id.
    """
    cnx = mysql.connect(host = "db", user = "root", passwd = "root", db = "db")
    try:
        cursor = cnx.cursor()
        estimate_req['model_id'] = None
        print(estimate_req)
        insert_query='INSERT INTO estimate_req VALUES(%(model_id)s,%(Model)s,%(Coefficient)s,%(Intercept)s,%(Estimated_Price)s)'
        cursor.execute(insert_query, estimate_req)
        cnx.commit()
        cursor.close()
    finally:
        # Bug fix: the connection used to leak when execute()/commit() raised.
        cnx.close()
def setup_db():
    """Drop and recreate the estimate_req, users and cars tables."""
    # DDL statements are executed in order: each DROP precedes its CREATE.
    statements = (
        'DROP TABLE IF EXISTS estimate_req;',
        'create table estimate_req(model_id int not null auto_increment, Model varchar(100) not null, Coefficient varchar(100), Intercept varchar(100), Estimated_Price varchar(100), primary key(model_id));',
        'DROP TABLE IF EXISTS users;',
        'create table users(user_name varchar(100) not null, password varchar(100) not null, primary key(user_name));',
        'DROP TABLE IF EXISTS cars;',
        'create table cars(car_id int not null auto_increment, model varchar(100) not null, fuel varchar(100) not null, year int not null, km int not null, capacity float not null, estimated_price int not null, sale_price int not null, owner varchar(100) not null REFERENCES users(user_name), primary key(car_id));',
    )
    cnx = mysql.connect(host = "db", user = "root", passwd = "root", db = "db")
    cursor = cnx.cursor()
    for statement in statements:
        cursor.execute(statement)
    # Commit the changes
    cnx.commit()
    cursor.close()
    cnx.close()
    print('Setup completed')
def add_new(car, user_name):
    """Insert a car row owned by user_name and return (dict, Car object).

    Mutates the passed dict: sets 'owner' and fills 'car_id' from the
    auto_increment id. Raises DataBaseException on any failure.
    """
    cnx = None
    try:
        cnx = mysql.connect(host = "db", user = "root", passwd = "root", db = "db")
        car['owner'] = user_name
        cursor = cnx.cursor()
        print("car to save: ", car)
        query = "INSERT INTO cars VALUES (%(car_id)s,%(model)s,%(fuel)s,%(year)s,%(km)s,%(capacity)s,%(estimated_price)s,%(sale_price)s,%(owner)s);"
        cursor.execute(query, car)
        car['car_id'] = cursor.lastrowid
        cnx.commit()
        cursor.close()
        car_obj = Car(car['model'], car['fuel'], car['year'], car['km'], car['capacity'], car['estimated_price'], car['sale_price'], car['car_id'])
        return car, car_obj
    except Exception as e:
        print("add_new: ", e)
        raise DataBaseException("Saving the car aborted ")
    finally:
        # Bug fix: the connection used to leak whenever an exception fired
        # before the explicit close(); closing is now guaranteed.
        if cnx is not None:
            cnx.close()
def register_user(user_name, password):
    """Insert a user row and return ({'user_name', 'added'}, User object).

    Raises DataBaseException on any failure (e.g. duplicate primary key).
    """
    cnx = None
    try:
        cnx = mysql.connect(host = "db", user = "root", passwd = "root", db = "db")
        cursor = cnx.cursor()
        query = "INSERT INTO users VALUES (%(user_name)s,%(password)s);"
        cursor.execute(query, {'user_name': user_name, 'password': password})
        cnx.commit()
        cursor.close()
        user_obj = User(user_name, password)
        return {"user_name": user_name, "added": True}, user_obj
    except Exception as e:
        print(e)
        raise DataBaseException("Saving the user aborted ")
    finally:
        # Bug fix: the connection used to leak whenever an exception fired
        # before the explicit close(); closing is now guaranteed.
        if cnx is not None:
            cnx.close()
|
987,727 | b3245ee0ac37d080a6f6d74ce9df4127e0802372 | # -*- encoding: utf-8 -*-
from src.base.monitor import CXMonitor
class VectorCXMonitor(CXMonitor):
    """Vector-specific CX monitor.

    Currently inherits all behavior from CXMonitor unchanged; exists as a
    named extension point for vector-specific overrides.
    """
    pass
|
987,728 | cfe9aa4f213f39ad0e5249a6fbc55bb79cc17a73 | EDITORS = [
'subl',
'atom',
'code'
] |
987,729 | d13293b05e0145b05594b4f88231c4230bc7df35 | from flask import jsonify, request
from flask_todo import app, db, bcrypt
from flask_todo.models import Todo, TodoSchema, User, UserSchema
import datetime
from pytz import timezone
from sqlalchemy import desc
from flask_jwt_extended import (
create_access_token, create_refresh_token, jwt_required,
jwt_refresh_token_required, get_jwt_identity, )
@app.route('/protected', methods=['GET'])
@jwt_required
def protected():
    """Return the authenticated user's profile fields (name, email, id, image keys)."""
    current_user = get_jwt_identity()
    if not current_user:
        return jsonify({"message": "Bad access token"}), 401
    # Select only the columns the client needs rather than whole User rows.
    user_datas = db.session.query(User.username,
                                  User.email,
                                  User.id, User.profile_image_key,
                                  User.profile_back_image_key).\
        filter(User.id == current_user)
    return jsonify({'status': 'ok',
                    'user_datas': UserSchema(many=True).dump(user_datas)}), 200
@app.route('/refresh', methods=['POST'])
@jwt_refresh_token_required
def refresh():
    """Exchange a valid refresh token for a new (non-fresh) access token."""
    identity = get_jwt_identity()
    if not identity:
        return jsonify({"message": "Bad access token"}), 401
    payload = {'access_token': create_access_token(identity=identity, fresh=False)}
    return jsonify(payload), 200
@app.route('/home', methods=('GET',))
@jwt_required
def home():
    """List the authenticated user's todos, newest first, with JST timestamps."""
    current_user = get_jwt_identity()
    todos = db.session.query(Todo).filter(
        Todo.user_id == current_user).order_by(desc(Todo.id)).all()
    for todo in todos:
        # Stored timestamps appear to be naive UTC: they are tagged as UTC
        # here and converted to Asia/Tokyo for display.  NOTE(review): this
        # mutates the ORM objects but never commits, so rows should stay
        # unchanged in the DB — confirm intended.
        dt_naive_to_utc_replace = todo.date_posted.replace(
            tzinfo=datetime.timezone.utc)
        todo.date_posted = dt_naive_to_utc_replace.astimezone(
            timezone('Asia/Tokyo'))
    return jsonify({'status': 'ok',
                    'todos': TodoSchema(many=True).dump(todos)}), 200
@app.route('/create', methods=('POST',))
@jwt_required
def create():
    """Create a new todo for the authenticated user from the JSON body."""
    if not request.is_json:
        return jsonify({"message": "Missing JSON in request"}), 400
    owner = get_jwt_identity()
    body = request.json
    new_todo = Todo(title=body.get('title', None),
                    content=body.get('content', None),
                    post_image_key=body.get('post_image_key', None),
                    user_id=owner)
    db.session.add(new_todo)
    db.session.commit()
    return jsonify({"message": "Success create Todo"}), 200
@app.route('/delete', methods=('POST',))
@jwt_required
def delete():
    """Delete a todo owned by the authenticated user."""
    if not request.is_json:
        return jsonify({"message": "Missing JSON in request"}), 400
    current_user = get_jwt_identity()
    id = request.json.get('delete_id', None)
    # Security fix: restrict the delete to the caller's own todos.
    # Previously any authenticated user could delete any todo by id.
    db.session.query(Todo).filter(Todo.id == id,
                                  Todo.user_id == current_user).delete()
    db.session.commit()
    return jsonify({"message": "Success delete Todo"}), 200
@app.route('/signin', methods=['POST'])
def signin():
    """Authenticate by email/password and issue access + refresh JWTs."""
    if not request.is_json:
        return jsonify({"message": "Missing JSON in request"}), 400
    email = request.json.get('email', None)
    password = request.json.get('password', None)
    user = User.query.filter_by(email=email).first()
    # NOTE(review): the message says "Missing email parameter" but this
    # branch actually means "no account with that email" — consider fixing.
    if not user:
        return jsonify({"msg": "Missing email parameter"}), 400
    # NOTE(review): likewise, this branch means "wrong password", not
    # missing; the `user and` guard is redundant after the return above.
    if user and not bcrypt.check_password_hash(user.password, password):
        return jsonify({"msg": "Missing password parameter"}), 400
    ret = {
        'access_token': create_access_token(identity=user.id, fresh=True),
        'refresh_token': create_refresh_token(identity=user.id)
    }
    return jsonify(ret), 200
@app.route('/signup', methods=['POST'])
def signup():
    """Register a new user; rejects emails that already have an account."""
    if not request.is_json:
        return jsonify({"message": "Missing JSON in request"}), 400
    username = request.json.get('username', None)
    email = request.json.get('email', None)
    password = request.json.get('password', None)
    # Fix: check for an existing account BEFORE hashing — avoids wasted
    # bcrypt work (and a 500 from hashing a missing password) when the
    # email is already taken.
    user_validate = User.query.filter_by(email=email).first()
    if user_validate:
        return jsonify({"mode": "signup", "status": "error",
                        "message": "This email cannot be used"}), 400
    hashed_pass = bcrypt.generate_password_hash(password)
    user = User(username=username, email=email, password=hashed_pass)
    db.session.add(user)
    db.session.commit()
    return jsonify({"mode": "signup", "status": "success",
                    "message": "Completed"}), 200
@app.route('/signout', methods=['POST'])
def signout():
    """Acknowledge sign-out; no server-side session state is cleared here."""
    response = {"mode": "signout", "status": "success",
                "message": "Completed"}
    return jsonify(response), 200
@app.route('/profile', methods=['GET'])
@jwt_required
def profile():
    """Return the full profile row for the authenticated user."""
    uid = get_jwt_identity()
    rows = db.session.query(User).filter(User.id == uid).all()
    return jsonify({'status': 'ok',
                    'user': UserSchema(many=True).dump(rows)}), 200
@app.route('/update', methods=['POST'])
@jwt_required
def profile_update():
    """Overwrite the authenticated user's username and image keys from JSON."""
    if not request.is_json:
        return jsonify({"message": "Missing JSON in request"}), 400
    current_user = get_jwt_identity()
    profile_image_key = request.json.get('profile_image_key', None)
    profile_back_image_key = request.json.get('profile_back_image_key', None)
    user_name = request.json.get('username')
    # NOTE(review): .first() can return None for a stale token, which would
    # raise AttributeError below; also this route returns a plain 'OK'
    # string while every other route returns JSON — confirm clients accept.
    user = db.session.query(User).filter(User.id == current_user).first()
    user.profile_image_key = profile_image_key
    user.profile_back_image_key = profile_back_image_key
    user.username = user_name
    db.session.commit()
    return 'OK'
|
987,730 | 6350727c8e86967b6f44754e816c936c323c35bf | # -*- coding: utf-8 -*-
## some coding for commonly used plots
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import rcParams
# setup some plot defaults
plt.rc('text', usetex=True)
plt.rc('font', family='serif')
plt.rc('font', size=30)
rcParams.update({'figure.autolayout': True})
def plot_1d(x, y, xlab, title='', ylab=''):
    """Plot y against x on a linear scale; returns the created figure.

    The y-label and title are only set when non-empty strings are given.
    """
    figure = plt.figure(figsize=(12, 8))
    plt.plot(x, y)
    plt.xlabel(xlab)
    for setter, text in ((plt.ylabel, ylab), (plt.title, title)):
        if len(text) > 0:
            setter(text)
    return figure
def logyplot_1d(x, y, xlab, title='', ylab=''):
    """Plot y against x with a logarithmic y-axis; returns the figure.

    The y-label and title are only set when non-empty strings are given.
    """
    figure = plt.figure(figsize=(12, 8))
    plt.semilogy(x, y)
    plt.xlabel(xlab)
    for setter, text in ((plt.ylabel, ylab), (plt.title, title)):
        if len(text) > 0:
            setter(text)
    return figure
def plot_2d(z, xin, yin, zmin, zmax, xlab='', ylab='', title='', cmp='RdBu'):
    """Render a 2-d array as a color map over the xin/yin grid.

    Returns the created figure with a colorbar attached.
    """
    figure = plt.figure(figsize=(12, 8))
    x, y = np.meshgrid(xin, yin)
    bounds = [x.min(), x.max(), y.min(), y.max()]
    plt.imshow(z, cmap=cmp, vmin=zmin, vmax=zmax,
               extent=bounds,
               interpolation='nearest', origin='lower', aspect='auto')
    plt.axis(bounds)
    plt.colorbar()
    plt.xlabel(xlab)
    plt.ylabel(ylab)
    plt.title(title)
    return figure
def movie_2d(z,xin,yin,zmin,zmax,nframes,outfile,xlab='',ylab='',title='',step=1,cmp='RdBu'):
    """Animate a stack of 2-d frames z[t, :, :] and save the result to outfile.

    zmin/zmax are per-frame sequences, so each frame can use its own color
    scale.  Frames 1..nframes-1 are sampled every `step`; frame 0 is always
    included.
    """
    from matplotlib import animation
    fig = plt.figure(figsize=(12,8))
    x,y = np.meshgrid(xin,yin)
    # First frame is drawn immediately; it also provides the colorbar mapping.
    im = plt.imshow(z[0,:,:], cmap=cmp, vmin=zmin[0], vmax=zmax[0],
                    extent=[x.min(),x.max(),y.min(),y.max()],
                    interpolation='nearest', origin='lower', aspect='auto')
    plt.colorbar()
    plt.xlabel(xlab)
    plt.ylabel(ylab)
    plt.title(title)
    ims = []
    ims.append([im])
    for i in range(1,nframes,step):
        im = plt.imshow(z[i,:,:], cmap=cmp, vmin=zmin[i], vmax=zmax[i],
                        extent=[x.min(),x.max(),y.min(),y.max()],
                        interpolation='nearest', origin='lower', aspect='auto')
        ims.append([im])
    ani = animation.ArtistAnimation(fig,ims,interval=50,blit=True)
    ani.save(outfile)
def movie_1d(x,y,xmin,xmax,ymin,ymax,nframes,outfile,xlab,ylab):
    """Animate a sequence of line plots y[t, :] vs x and save to outfile.

    Axis limits are fixed to (xmin, xmax) x (ymin, ymax) for all frames.
    """
    from matplotlib import animation
    fig = plt.figure(figsize=(12,8))
    ax=plt.axes(xlim=(xmin,xmax),ylim=(ymin,ymax))
    line, = ax.plot([],[],lw=2)
    plt.xlabel(xlab)
    plt.ylabel(ylab)
    def init():
        # Blank frame used by FuncAnimation before the first real frame.
        line.set_data([],[])
        return line,
    def animate(i):
        # Frame i replaces the line's y-data with row i of y.
        line.set_data(x,y[i,:])
        return line,
    anim=animation.FuncAnimation(fig, animate, init_func=init,
                                 frames=nframes, interval=20)
    anim.save(outfile)
|
987,731 | f3a03dbf3bbc21ca9380b039326f1e7b70f77103 | # Programmer: Mr. Devet
# Date: September 9, 2021
# Description: Draws a scene with a boat and a sun
# Load the contents of the turtle module into the python
# environment
from pygameplus import *
# Create the graphics window and open it
screen = Screen(725, 310, "My Boat")
screen.background_color = "sky blue"
screen.open()
# Change the turtle's shape to be a turtle, rather than
# an arrow
mikey = Turtle()
screen.add(mikey)
mikey.line_width = 30
# Uncomment to change the speed of the turtle
mikey.speed = 700


def draw_stripe(color, turn):
    """Draw one 1000-unit hull stripe, then step down 30 and reverse heading.

    `turn` is the turn method (mikey.turn_right or mikey.turn_left) that
    alternates the sweep direction between rows.
    """
    mikey.line_color = color
    mikey.move_forward(1000)
    turn(90)
    mikey.move_forward(30)
    turn(90)


def draw_portholes():
    """Draw eight evenly spaced white portholes along a blue line segment."""
    for _ in range(8):
        mikey.line_color = "blue"
        mikey.move_forward(51.25)
        mikey.line_color = "white"
        mikey.circle(5, 360)


def step_down(turn):
    """Drop to the next row: turn, advance 30, turn again (same direction)."""
    turn(90)
    mikey.move_forward(30)
    turn(90)


# Move into position and start drawing the hull.
mikey.turn_right(90)
mikey.move_forward(190)
mikey.begin_line()
mikey.turn_left(90)
mikey.move_forward(500)
mikey.turn_left(90)
mikey.move_forward(30)
mikey.turn_left(90)

# Six alternating red/white stripes; the sweep direction flips each row.
# (Refactor: this block replaces ~230 copy-pasted turtle commands with
# loops that issue the exact same command sequence.)
for _ in range(3):
    draw_stripe("red", mikey.turn_right)
    draw_stripe("white", mikey.turn_left)

# Two porthole panels: red lead-in, portholes, step down, portholes,
# white lead-out, step down.
for _ in range(2):
    mikey.line_color = "red"
    mikey.move_forward(590)
    draw_portholes()
    step_down(mikey.turn_right)
    draw_portholes()
    mikey.line_color = "white"
    mikey.move_forward(590)
    step_down(mikey.turn_left)

# Final panel: red lead-in followed by a last row of portholes.
mikey.line_color = "red"
mikey.move_forward(590)
draw_portholes()
987,732 | 2b1d1a053f77d4861bd7217ba63c80ac22482237 | #!/usr/bin/env python
import cv2 as cv
import numpy as np
import open3d as o3d
from matplotlib import pyplot as plt
def task_1_3_extract_and_match_features(img1, img2):
    """Detect ORB features in both images and match them with a ratio test.

    Writes keypoint and match visualizations to ./output/ as a side effect.
    Returns (knn matches, keypoints1, keypoints2, match image).
    """
    orb = cv.ORB_create()
    keypoints1, descriptors1 = orb.detectAndCompute(img1, None)
    keypoints2, descriptors2 = orb.detectAndCompute(img2, None)
    img1_w_kps = cv.drawKeypoints(img1, keypoints1, None, color=(0, 255, 0), flags=0)
    img2_w_kps = cv.drawKeypoints(img2, keypoints2, None, color=(0, 255, 0), flags=0)
    cv.imwrite('./output/image1_orb.png', img1_w_kps)
    cv.imwrite('./output/image2_orb.png', img2_w_kps)
    # Brute-force matcher with k=2 so Lowe's ratio test can be applied below.
    bf = cv.BFMatcher()
    matches = bf.knnMatch(descriptors1, descriptors2, k=2)
    matchesMask = [[0,0] for i in range(len(matches))]
    for i,(m,n) in enumerate(matches):
        # Ratio test with threshold 0.8: keep a match only when the best
        # candidate is clearly better than the second best.
        if m.distance < 0.8*n.distance:
            matchesMask[i]=[1,0]
    draw_params = dict(matchColor = (0, 255, 0),
                       singlePointColor = (255, 0, 0),
                       matchesMask = matchesMask,
                       flags = cv.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS)
    img3 = cv.drawMatchesKnn(img1, keypoints1, img2, keypoints2, matches, None, **draw_params)
    cv.imwrite('./output/orb_matching.png', img3)
    return matches, keypoints1, keypoints2, img3
def task_1_4_compute_fundamental_matrix(matches, kp1, kp2, img1, img2):
    """Estimate the fundamental matrix from ratio-tested matches via RANSAC.

    Also draws epipolar lines on both (grayscale) images and writes them to
    ./output/.  Returns (F, lines-on-img1 image, lines-on-img2 image).
    """
    good = []
    pts1 = []
    pts2 = []
    for i,(m,n) in enumerate(matches):
        # Same 0.8 ratio test as in the matching step.
        if m.distance < 0.8 * n.distance:
            good.append(m)
            pts2.append(kp2[m.trainIdx].pt)
            pts1.append(kp1[m.queryIdx].pt)
    pts1 = np.int32(pts1)
    pts2 = np.int32(pts2)
    F, mask = cv.findFundamentalMat(pts1, pts2, cv.FM_RANSAC)
    # Keep only the RANSAC inliers for visualization.
    pts1 = pts1[mask.ravel()==1]
    pts2 = pts2[mask.ravel()==1]
    def drawlines(img1, img2, lines, pts1, pts2):
        # Draw each epipolar line (a*x + b*y + c = 0) across img1 and mark
        # the corresponding point pair in a shared random color.
        h, w = img1.shape
        img1 = cv.cvtColor(img1, cv.COLOR_GRAY2BGR)
        img2 = cv.cvtColor(img2, cv.COLOR_GRAY2BGR)
        for r, pt1, pt2 in zip(lines, pts1, pts2):
            color = tuple(np.random.randint(0, 255, 3).tolist())
            (x0, y0) = map(int, [0, -r[2]/r[1]])
            (x1, y1) = map(int, [w, -(r[2]+r[0]*w)/r[1]])
            img1 = cv.line(img1, (x0, y0), (x1, y1), color, 1)
            img1 = cv.circle(img1, tuple(pt1), 5, color, -1)
            img2 = cv.circle(img2, tuple(pt2), 5, color, -1)
        return img1, img2
    # Lines in image 1 induced by points of image 2, and vice versa.
    lines1 = cv.computeCorrespondEpilines(pts2.reshape(-1, 1, 2), 2, F)
    lines1 = lines1.reshape(-1, 3)
    img5, img6 = drawlines(img1, img2, lines1, pts1, pts2)
    lines2 = cv.computeCorrespondEpilines(pts1.reshape(-1, 1, 2), 1, F)
    lines2 = lines2.reshape(-1, 3)
    img3, img4 = drawlines(img2, img1, lines2, pts2, pts1)
    cv.imwrite('./output/epipolar_lines_1.png', img5)
    cv.imwrite('./output/epipolar_lines_2.png', img3)
    return F, img5, img3
def task_1_5_compute_essential_matrix(F, K):
    """Compute the essential matrix E = K^T · F · K from the fundamental
    matrix F and camera intrinsics K."""
    KtF = np.matmul(np.transpose(K), F)
    return np.matmul(KtF, K)
def task_1_6_get_camera_poses(E):
    """Decompose an essential matrix into the two rotation candidates and
    the translation direction (last left-singular vector)."""
    U, S, Vt = np.linalg.svd(E, full_matrices=True)
    W = np.array([[0, -1, 0],
                  [1, 0, 0],
                  [0, 0, 1]])
    rot_a = np.matmul(U, W)
    rot_a = np.matmul(rot_a, Vt)
    rot_b = np.matmul(U, np.transpose(W))
    rot_b = np.matmul(rot_b, Vt)
    translation = U[:, 2]
    return rot_a, rot_b, translation
def task_2_1_rectify_images(K, R, t, distCoeffs, img1, img2):
    """Stereo-rectify a pair of images sharing intrinsics K, given the
    relative pose (R, t).

    Writes the rectified images to ./output/ and returns (rect1, rect2).
    alpha=1.0 keeps all source pixels in the rectified views.
    """
    rectify_params = dict(alpha = 1.0, flags=0)
    R1, R2, P1, P2, Q, roi1, roi2 = cv.stereoRectify(K, distCoeffs, K, distCoeffs, (img1.shape[1], img1.shape[0]), R, t, **rectify_params)
    map1, map2 = cv.initUndistortRectifyMap(K, distCoeffs, R1, P1, (img1.shape[1], img1.shape[0]), cv.CV_32FC1)
    map3, map4 = cv.initUndistortRectifyMap(K, distCoeffs, R2, P2, (img2.shape[1], img2.shape[0]), cv.CV_32FC1)
    rect1 = cv.remap(img1, map1, map2, cv.INTER_NEAREST)
    rect2 = cv.remap(img2, map3, map4, cv.INTER_NEAREST)
    cv.imwrite('./output/rectified_1.png', rect1)
    cv.imwrite('./output/rectified_2.png', rect2)
    return rect1, rect2
if __name__ == "__main__":
    # Camera intrinsics and distortion coefficients (presumably from a prior
    # calibration of this camera — TODO confirm source).
    K = np.array([[1112.60959, 0.0, 724.315664],
                  [0.0, 1122.20828, 520.183459],
                  [0.0, 0.0, 1.0]])
    distCoeffs = np.transpose([0.28864512, -1.51224766, -0.00702437, 0.00289229, 2.43952476])
    img1 = cv.imread('./data/lab1.jpg', cv.IMREAD_GRAYSCALE)
    img2 = cv.imread('./data/lab2.jpg', cv.IMREAD_GRAYSCALE)
    matches, kp1, kp2, matched_img = task_1_3_extract_and_match_features(img1, img2)
    F, epipolar_lines_1, epipolar_lines_2 = task_1_4_compute_fundamental_matrix(matches, kp1, kp2, img1, img2)
    E = task_1_5_compute_essential_matrix(F, K)
    R1, R2, t = task_1_6_get_camera_poses(E)
    # NOTE(review): rectification is applied to the epipolar-line
    # visualization images rather than the raw img1/img2 — confirm intended.
    rect1, rect2 = task_2_1_rectify_images(K, R1, t, distCoeffs, epipolar_lines_1, epipolar_lines_2)
    plt.imshow(matched_img)
    plt.show()
    print('Fundamental Matrix F:')
    print(F)
    plt.imshow(epipolar_lines_1)
    plt.show()
    plt.imshow(epipolar_lines_2)
    plt.show()
    print('Essential Matrix E:')
    print(E)
    print('Camera Rotation Candidate R1:')
    print(R1)
    print('Camera Rotation Candidate R2:')
    print(R2)
    print('Camera Transition t:')
    print(t)
    plt.imshow(rect1)
    plt.show()
    plt.imshow(rect2)
    plt.show()
987,733 | 18184048e0091e02df6685f99ff7625e10e11d1d | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2015-2016, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
#We have adapted network_api_demo.py for our own research.
#We are only using the SensorRegion from the Network API. Encoded data is being fed to the latest
#spatial pooler, temporal memory, and SDR classifier algorithms.
#The sample data is our own and is generated from our SketchUp ruby extension.
import copy
import csv
import json
import os
import numpy
from itertools import izip as zip, count
from nupic.research.temporal_memory import TemporalMemory as TM
from nupic.research.spatial_pooler import SpatialPooler as SP
from nupic.algorithms.sdr_classifier import SDRClassifier as SDR
from pkg_resources import resource_filename
from nupic.engine import Network
from nupic.encoders import MultiEncoder, ScalarEncoder
from nupic.data.file_record_stream import FileRecordStream
_VERBOSITY = 0
_SEED = 1956
# Input CSV shipped with the nupic datafiles; output path for predictions.
_INPUT_FILE_PATH = resource_filename(
  "nupic.datafiles", "extra/tm5.csv"
)
_OUTPUT_PATH = "point3d008-7out.csv"
_NUM_RECORDS = 9
# Temporal memory over 2048 minicolumns; seeded for reproducibility.
tm = TM(columnDimensions = (2048,),
        cellsPerColumn=18,
        minThreshold=8,
        maxNewSynapseCount=20,
        permanenceIncrement=0.1,
        activationThreshold=8,
        seed=_SEED,
        )
# Spatial pooler mapping the 165-bit encoder output onto the same 2048
# columns used by the temporal memory above.
sp = SP(inputDimensions=(165,),
        columnDimensions=(2048,),
        potentialRadius=16,
        potentialPct=0.5,
        globalInhibition=True,
        localAreaDensity=-1.0,
        numActiveColumnsPerInhArea=10.0,
        stimulusThreshold=0,
        synPermInactiveDec=0.008,
        synPermActiveInc=0.05,
        synPermConnected=0.10,
        minPctOverlapDutyCycle=0.001,
        dutyCyclePeriod=1000,
        maxBoost=20.0,
        seed=-1,
        spVerbosity=0,
        wrapAround=True
        )
# SDR classifier predicting one step ahead from active-cell patterns.
sdr = SDR(steps=(1,),
          alpha=0.001,
          actValueAlpha=0.3,
          verbosity=0
          )
def createEncoder():
  """Build the multi-encoder for the three diagonal-coordinate fields.

  Also stores it in the module-level ``encoder`` global, matching the
  original side effect.
  """
  global encoder
  encoder = MultiEncoder()
  # One identically configured scalar encoder per coordinate field.
  for field in ("diagCoorA", "diagCoorB", "diagCoorC"):
    encoder.addEncoder(field, ScalarEncoder(55, 0.0, 200.0, n=200, name=field))
  return encoder
def createNetwork(dataSource):
  """Create a Network containing only a RecordSensor region fed by dataSource.

  The SP/TM/SDR algorithms are driven manually in runNetwork() rather than
  as network regions.
  """
  network = Network()
  network.addRegion("sensor", "py.RecordSensor",
                    json.dumps({"verbosity": _VERBOSITY}))
  sensor = network.regions["sensor"].getSelf()
  sensor.encoder = createEncoder()
  sensor.dataSource = dataSource
  return network
def runNetwork(network, writer):
  """Feed encoded sensor records through SP/TM/SDR and print a prediction.

  NOTE(review): the ``writer`` parameter is accepted but never used here —
  confirm whether output was meant to be written to the CSV.
  """
  sensorRegion = network.regions["sensor"]
  listDataOut = []   # encoded SDR (active bit indices) per record
  srcDataOut = []    # raw source values (first three fields) per record
  for h in xrange(_NUM_RECORDS):
    network.run(1)
    testGV = sensorRegion.getOutputData("dataOut").nonzero()[0]
    listDataOut.append(testGV)
    src = sensorRegion.getOutputData("sourceOut")
    print src
    tri = {}
    tri[0] = src[0]
    tri[1] = src[1]
    tri[2] = src[2]
    srcDataOut.append(tri)
  listCount = len(listDataOut)
  # Train on even-indexed records paired with their successors, repeating
  # each pair 200 times; the TM is reset between pairs.
  for j in xrange(listCount):
    num = int(j)
    mod = num % 2
    if mod == 0:
      if j < listCount-2:
        for k in xrange(200):
          testArr = numpy.zeros(2048, dtype="int64")
          sp.compute(listDataOut[j], 1, testArr)
          activeColumns = testArr.nonzero()[0]
          tm.compute(activeColumns, learn = True)
          actCellsProto = tm.getActiveCells()
          for kk in xrange(3):
            classification={"bucketIdx": kk+1, "actValue": srcDataOut[j][kk]}
            sdr.compute(j+1, actCellsProto, classification, 1, 0)
          testArr2 = numpy.zeros(2048, dtype="int64")
          sp.compute(listDataOut[j+1], 1, testArr2)
          activeColumns2 = testArr2.nonzero()[0]
          tm.compute(activeColumns2, learn = True)
          actCellsProto2 = tm.getActiveCells()
          for mm in xrange(3):
            classification2={"bucketIdx": mm+1, "actValue": srcDataOut[j+1][mm]}
            sdr.compute(j+2, actCellsProto2, classification2, 1, 0)
          tm.reset()
  # Inference pass on the final record (learn=False for SP and TM).
  testArr3 = numpy.zeros(2048, dtype="int64")
  sp.compute(listDataOut[listCount-1], 0, testArr3)
  activeColumns3 = testArr3.nonzero()[0]
  tm.compute(activeColumns3, learn = False)
  actCellsProto3 = tm.getActiveCells()
  # classification3 = None
  # NOTE(review): this loop overwrites classification3 each iteration, so
  # only the last value (oo == 2) is ever used below — confirm intended.
  for oo in xrange(3):
    classification3={"bucketIdx": oo+1, "actValue": srcDataOut[listCount-1][oo]}
  #classification3 = {"bucketIdx": 3, "actValue": srcDataOut[8][2]}
  predict = sdr.compute(listCount, actCellsProto3, classification3, 0, 1)
  print predict
if __name__ == "__main__":
  # Wire the file-based record stream into the network and run it, writing
  # CSV output next to this script. NOTE(review): the csv writer is passed to
  # runNetwork but never used there.
  dataSource = FileRecordStream(streamID=_INPUT_FILE_PATH)
  network = createNetwork(dataSource)
  network.initialize()
  outputPath = os.path.join(os.path.dirname(__file__), _OUTPUT_PATH)
  with open(outputPath, "w") as outputFile:
    writer = csv.writer(outputFile)
    runNetwork(network, writer)
|
987,734 | 88d396c0225c469410e1f737dd408a4662e778cc | #!/usr/bin/python
# Global state for the blockhouse-placement search (competitive-programming
# style, Python 2). NOTE(review): `map` shadows the builtin of the same name;
# the board is at most 4x4, matching the four pre-allocated rows.
map = [[],[],[],[]];   # board rows, each a string of '.', 'X' (wall), 'M' (placed)
num = 0;               # board dimension
ans = 0;               # best number of placements found so far
tmp = 0;               # placements on the current DFS path
def check(x,y):
    """Return 1 if a blockhouse may be placed at (x, y), else 0.

    A cell is legal when it is '.', in bounds, and no previously placed 'M'
    is visible along its row or column before an 'X' wall blocks the line
    of sight.
    """
    global tmp,ans,num;
    if x >= num or y >= num:
        return 0;
    if map[x][y] != '.':
        return 0;
    # Scan upward until a wall; an 'M' in sight makes the cell illegal.
    for i in range(x-1,-1,-1):
        if map[i][y] == 'M':
            return 0;
        elif map[i][y] == 'X':
            break;
    # Scan downward.
    for i in range(x+1, num):
        if map[i][y] == 'M':
            return 0;
        elif map[i][y] == 'X':
            break;
    # Scan left.
    for i in range(y-1,-1,-1):
        if map[x][i] == 'M':
            return 0;
        elif map[x][i] == 'X':
            break;
    # Scan right.
    for i in range(y+1,num):
        if map[x][i] == 'M':
            return 0;
        elif map[x][i] == 'X':
            break;
    return 1;
def map_print():
    """Debug helper: print each board row (Python 2 print statement)."""
    for i in range(0,num):
        print map[i];
    return
def dfs(pos):
    """Depth-first search over linearised cells, updating global `ans`.

    `pos` is a flat index; cells before `pos` are already decided. Relies on
    Python 2 integer division in `i/num` — under Python 3 this would need //.
    """
    global tmp,ans,num
    #print "pos:",pos
    #print map[pos/num][pos%num],
    for i in range(pos,num*num):
        x=i/num;
        y=i%num;
        #print "pos:",pos,"x:",x,"y:",y
        if check(x,y) == 1:
            tmp+=1;
            if tmp > ans:
                ans = tmp;
            # Place, recurse, then undo (strings are immutable, so rebuild).
            map[x] = map[x][:y]+'M'+map[x][y+1:];
            #print "x:",x,"y:",y
            #map_print();
            dfs(pos+1);
            map[x] = map[x][:y]+'.'+map[x][y+1:];
            tmp-=1;
    return
# Input loop: read board size then `num` rows; 0 terminates (Python 2
# raw_input/print). NOTE(review): only four rows are pre-allocated in `map`,
# so num > 4 would raise IndexError — matches the original problem's n <= 4.
while 1 :
    num = int(raw_input());
    if num == 0:
        #print "out"
        break;
    for i in range(0,num):
        map[i] = raw_input()
    ans = 0;
    tmp = 0;
    dfs(0)
    print ans
|
987,735 | e7472112fb0292a03c04342e604a1d8ae2305733 | import filecmp
import io
import os
from random import randint
import requests
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
from django.urls import reverse, resolve
from rest_framework import status
from rest_framework.test import APILiveServerTestCase
from post.apis import PostList
from post.models import Post
User = get_user_model()
class PostListViewTest(APILiveServerTestCase):
    """Tests for the /api/post/ list endpoint (post.apis.PostList)."""
    URL_API_POST_LIST_NAME = 'api-post'
    URL_API_POST_LIST = '/api/post/'
    VIEW_CLASS = PostList
    @staticmethod
    def create_user(username='dummy'):
        # Minimal test user; the custom user model requires an `age` field.
        return User.objects.create_user(username=username, age=0)
    @staticmethod
    def create_post(author=None):
        # Post with an empty in-memory file as its photo.
        return Post.objects.create(author=author, photo=File(io.BytesIO()))
    def test_post_list_url_name_reverse(self):
        """reverse() of the URL name must produce the expected path."""
        url = reverse(self.URL_API_POST_LIST_NAME)
        self.assertEqual(url, self.URL_API_POST_LIST)
    def test_post_list_url_resolve_view_class(self):
        """
        For the post.apis.PostList view, check that URL reverse, resolve,
        and the view class in use all agree.
        :return:
        """
        # Get the ResolverMatch object matching /api/post/
        resolver_match = resolve(self.URL_API_POST_LIST)
        # Check the ResolverMatch's url_name is 'api-post' (self.URL_API_POST_LIST_NAME)
        self.assertEqual(
            resolver_match.url_name,
            self.URL_API_POST_LIST_NAME)
        # Check the ResolverMatch's func is PostList (self.VIEW_CLASS)
        self.assertEqual(
            resolver_match.func.view_class,
            self.VIEW_CLASS)
    def test_get_post_list(self):
        """
        Test the GET request to PostList (the Post list).
        Create a random number of Posts and verify the response contains
        that many items.
        :return:
        """
        user = self.create_user()
        # Pick a random count between 1 and 20
        num = randint(1, 20)
        # Create `num` Posts with an explicit author
        for i in range(num):
            self.create_post(author=user)
        url = reverse(self.URL_API_POST_LIST_NAME)
        # GET the post list
        response = self.client.get(url)
        # Check the status code is 200
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Check objects.count matches num
        self.assertEqual(Post.objects.count(), num)
        # Check the returned JSON list length matches num
        self.assertEqual(len(response.data), num)
        # Check each returned object has pk, author, photo, created_at keys
        for i in range(num):
            cur_post_data = response.data[i]
            self.assertIn('pk', cur_post_data)
            self.assertIn('author', cur_post_data)
            self.assertIn('photo', cur_post_data)
            self.assertIn('created_at', cur_post_data)
    def test_get_post_list_exclude_author_is_none(self):
        """
        Check that Posts whose author is None are excluded from the
        PostList GET response.
        :return:
        """
        user = self.create_user()
        num_author_none_posts = randint(1, 10)
        num_posts = randint(11, 20)
        for i in range(num_author_none_posts):
            self.create_post()
        for i in range(num_posts):
            self.create_post(author=user)
        response = self.client.get(self.URL_API_POST_LIST)
        # Authorless posts must not be counted in the response
        self.assertEqual(len(response.data), num_posts)
    def test_create_post(self):
        """
        Create a Post and check the uploaded file equals the source file.
        :return:
        """
        # Create a test user
        user = self.create_user()
        # Force-authenticate the client as that user
        self.client.force_authenticate(user=user)
        # Path to the test image file
        path = os.path.join(settings.STATIC_DIR, 'test', 'pby63.jpg')
        # Send the file at `path` as the 'photo' key of the POST request
        with open(path, 'rb') as photo:
            response = self.client.post(self.URL_API_POST_LIST, {
                'photo': photo,
            })
        # Check the response code is 201
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        # Check exactly one post was created
        self.assertEqual(Post.objects.count(), 1)
        # Compare the file we tried to upload (at `path`) with the file
        # actually stored (the new Post's photo field).
        post = Post.objects.get(pk=response.data['pk'])
        # NOTE(review): this compares STATICFILES_STORAGE against
        # 'StaticStorage' to decide local-vs-S3; media files are normally
        # governed by DEFAULT_FILE_STORAGE — confirm this is intentional.
        if settings.STATICFILES_STORAGE == 'django.contrib.staticfiles.storage.StaticStorage':
            # Comparing two files on the local filesystem
            self.assertTrue(filecmp.cmp(path, post.photo.file.name))
            # Delete the stored file
            post.photo.delete()
        else:
            # Comparing against a file uploaded to S3
            url = post.photo.url
            # GET the S3 file URL with requests
            response = requests.get(url)
            # Open a NamedTemporaryFile as temp_file
            with NamedTemporaryFile(suffix='.jpg', delete=False) as temp_file:
                # Write the response body into temp_file
                temp_file.write(response.content)
            # Compare temp_file against the original at `path`
            self.assertTrue(filecmp.cmp(path, temp_file.name))
|
987,736 | 4d0680a87ac8c1413df316e11645785445d1065c | from time import time as now
from threading import Timer
from SocketServer import BaseServer
import logging
logger = logging.getLogger('timer.ActivityTimer')
debug = logger.debug
"""
A timer that calls BaseServer.handle_timeout when there is no
activity recorded within the servers 'timeout' variable.
Invoke ActivityTimer.notify to extend server's TTL.
"""
class ActivityTimer():
    # NOTE: the triple-quoted strings before each method below are bare
    # string statements (no-ops), not docstrings — kept as-is to preserve
    # the original bytes. This is Python 2 code (`except Exception, e`).
    def __init__(self, server):
        # Guard: the timer only works with SocketServer.BaseServer subclasses.
        if not isinstance(server, BaseServer):
            raise TypeError('Object does not extend SocketServer.BaseServer')
        self.server = server
        self._activity = 0      # timestamp of the last notify() call
        self._timer = None      # the pending threading.Timer, if any
    """
    Gets the duration of the server's timeout variable.
    """
    def getTimeout(self):
        return self.server.timeout
    """
    Notifies the timer that there was activity, extending the TTL of the server.
    """
    def notify(self):
        self._activity = now()
    """
    Calls the BaseServer's handle_timeout method if it didn't notify any
    activity within it's timeout duration.
    """
    def checkForTimeout(self):
        debug('')
        # Fire the server's timeout handler if idle too long; otherwise
        # re-arm the timer for another interval.
        if (now() - self._activity) > self.getTimeout():
            self.server.handle_timeout()
        else:
            self.restart()
    """
    Cancels the internal Timer and restarts the timer thread.
    """
    def restart(self):
        try:
            # May raise AttributeError when _timer is None — intentionally
            # swallowed (logged) so restart() also works as a first start.
            self._timer.cancel()
        except Exception, e:
            debug('%s', e)
        finally:
            self._timer = None
            self.start()
    """
    Starts the timer that will call the BaseServer.handle_timeout method if there is no
    activity recorded by ActivityTimer.notify within the duration of BaseServer.timeout.
    """
    def start(self):
        debug('timeout=%d', self.getTimeout())
        if self._timer:
            raise Exception('Timer already set.')
        self._timer = Timer(self.getTimeout(), self.checkForTimeout)
        self._timer.start()
|
987,737 | 6c3644b7bedfca03fab2771bb340a26361b8c8fd | #!/usr/bin/env python
# encoding: utf-8
class Solution:
    # @param {string} input
    # @return {integer[]}
    def diffWaysToCompute(self, input):
        """Return every value obtainable by fully parenthesising `input`.

        The expression contains non-negative integers and the binary
        operators +, - and *. Each operator position splits the string;
        the results of the two sides are combined pairwise.
        """
        results = []
        for idx, ch in enumerate(input):
            if ch in '+-*':
                left_vals = self.diffWaysToCompute(input[:idx])
                right_vals = self.diffWaysToCompute(input[idx + 1:])
                for lv in left_vals:
                    for rv in right_vals:
                        if ch == '+':
                            results.append(int(lv) + int(rv))
                        elif ch == '-':
                            results.append(int(lv) - int(rv))
                        else:
                            results.append(int(lv) * int(rv))
        # No operator found: the whole string is a single number.
        return results if results else [int(input)]
if __name__ == '__main__':
    # Smoke test (Python 2 print statements): expected multisets are
    # [0, 2, 2]-style results from the LeetCode 241 examples.
    s = Solution()
    print s.diffWaysToCompute('2-1-1')
    print s.diffWaysToCompute('2*3-4*5')
|
987,738 | f591c21fe606af39328fe12a8f50619752e1669f | #-*- coding: utf-8 -*-
# vim: set fileencoding=utf-8
import logging
import random
import string
import datetime
import json
from google.appengine.api import users
from google.appengine.ext import deferred
import httplib2
from ardux.tasks import sync_resources
from flask import redirect, g, session, request, make_response
from flask.helpers import url_for
from flask.templating import render_template
from ardux.models import ResourceDevice
import flask
from oauth2client.client import flow_from_clientsecrets
from oauth2client.client import FlowExchangeError
import constants
from ardux import app
@app.route('/')
def index():
  """Root: send authenticated users to the admin; otherwise to GAE login."""
  user = users.get_current_user()
  if user:
    return redirect(url_for('admin.index'))
  #Force user to login then admin
  return redirect(users.create_login_url(url_for('admin.index')))
@app.route('/_ah/warmup')
def warmup():
  """App Engine warmup hook; real warm-up work is still a TODO."""
  #TODO: Warmup
  return 'Warming Up...'
@app.route('/device/register', methods=['GET', 'POST'])
def device_register():
  """Create a ResourceDevice with a random 5-char uuid and return it as JSON."""
  # NOTE(review): the 5-character uppercase+digit uuid is not checked for
  # collisions — confirm uniqueness is enforced elsewhere.
  device = ResourceDevice(type=constants.TYPE_WEB, uuid=''.join(
    random.choice(string.ascii_uppercase + string.digits) for x in
    range(5)))
  device.put();
  return flask.jsonify(device.to_dict())
@app.route('/device/sync/<uuid>')
def device_sync(uuid):
  """Touch a device's last_sync timestamp and return its JSON; 404 if unknown."""
  device = ResourceDevice.query(ResourceDevice.uuid == uuid).get()
  if device:
    device.last_sync = datetime.datetime.now()
    device.put()
    # resource_key is excluded because ndb Keys are not JSON-serialisable.
    return flask.jsonify(device.to_dict(exclude=('resource_key',)))
  else:
    return 'Device not found', 404
@app.route('/cron/sync/resources')
def resources_sync():
  """Cron endpoint: enqueue the calendars/events sync as a deferred task."""
  logging.info("Scheduling calendars and events sync task...")
  deferred.defer(sync_resources)
  return "Scheduling sync task..."
@app.route('/signout')
def sign_out():
  # NOTE(review): unimplemented — returning None from a Flask view raises a
  # 500 at runtime; this route needs a real response before going live.
  pass
@app.route('/signin')
def sign_in():
  """Render the Google sign-in page with a fresh anti-forgery state token."""
  # Create a state token to prevent request forgery.
  # Store it in the session for later validation.
  # (Python 2: xrange. NOTE(review): `random` is not cryptographically
  # secure — the secrets/os.urandom APIs would be preferable for this token.)
  state = ''.join(random.choice(string.ascii_uppercase + string.digits) for
                  x in xrange(32))
  session['state'] = state
  # Set the Client ID, Token State, and Application Name in the HTML while
  # serving it.
  return render_template('signin.html',
                         CLIENT_ID=constants.OAUTH2_CLIENT_ID,
                         STATE=state,
                         APPLICATION_NAME=constants.SOURCE_APP_NAME)
@app.route('/signin/connect', methods=['POST'])
def connect():
  """Exchange the one-time authorization code for a token and
  store the token in the session.

  Returns a JSON message with 401 on state mismatch or exchange failure,
  and 200 when the user is (already) connected.
  """
  # Ensure that the request is not a forgery and that the user sending
  # this connect request is the expected user.
  if request.args.get('state', '') != session['state']:
    response = make_response(json.dumps('Invalid state parameter.'), 401)
    response.headers['Content-Type'] = 'application/json'
    return response
  # Normally, the state is a one-time token; however, in this example,
  # we want the user to be able to connect and disconnect
  # without reloading the page. Thus, for demonstration, we don't
  # implement this best practice.
  # del session['state']
  code = request.data
  try:
    # Upgrade the authorization code into a credentials object
    oauth_flow = flow_from_clientsecrets('client_secrets.json', scope='')
    oauth_flow.redirect_uri = 'postmessage'
    credentials = oauth_flow.step2_exchange(code)
  except FlowExchangeError:
    response = make_response(
        json.dumps('Failed to upgrade the authorization code.'), 401)
    response.headers['Content-Type'] = 'application/json'
    return response
  # The ID Token is trusted here because it came directly from Google over
  # HTTPS using our Client Secret; components downstream must re-validate it.
  gplus_id = credentials.id_token['sub']
  stored_credentials = session.get('credentials')
  stored_gplus_id = session.get('gplus_id')
  if stored_credentials is not None and gplus_id == stored_gplus_id:
    response = make_response(json.dumps('Current user is already connected.'),
                             200)
    response.headers['Content-Type'] = 'application/json'
    return response
  # Store the access token in the session for later use.
  session['credentials'] = credentials
  session['gplus_id'] = gplus_id
  # BUG FIX: the 200 used to be passed to json.dumps() (where it landed in
  # the skipkeys parameter) instead of to make_response(); pass the status
  # code to make_response, matching every other response in this view.
  response = make_response(json.dumps('Successfully connected user.'), 200)
  response.headers['Content-Type'] = 'application/json'
  return response
@app.route('/disconnect', methods=['POST'])
def disconnect():
  """Revoke current user's token and reset their session.

  Returns 401 if no user is connected, 200 on successful revocation, and
  400 when Google rejects the revocation request.
  """
  # Only disconnect a connected user.
  credentials = session.get('credentials')
  if credentials is None:
    response = make_response(json.dumps('Current user not connected.'), 401)
    response.headers['Content-Type'] = 'application/json'
    return response
  # Execute HTTP GET request to revoke current token.
  access_token = credentials.access_token
  url = 'https://accounts.google.com/o/oauth2/revoke?token=%s' % access_token
  h = httplib2.Http()
  result = h.request(url, 'GET')[0]
  if result['status'] == '200':
    # Reset the user's session.
    del session['credentials']
    response = make_response(json.dumps('Successfully disconnected.'), 200)
    response.headers['Content-Type'] = 'application/json'
    return response
  else:
    # For whatever reason, the given token was invalid.
    # BUG FIX: the 400 was previously passed to json.dumps() (landing in its
    # skipkeys parameter), so this error path actually returned HTTP 200.
    # Pass the status code to make_response so callers see the failure.
    response = make_response(
        json.dumps('Failed to revoke token for given user.'), 400)
    response.headers['Content-Type'] = 'application/json'
    return response
@app.before_request
def before_request():
  """Expose login/logout link text and URL to templates via flask.g."""
  if users.get_current_user():
    g.url_logout_text = 'Logout'
    g.url_logout = users.create_logout_url(url_for('admin.index'))
  else:
    # NOTE(review): the attribute is still named url_logout even though it
    # points at the login URL here — templates rely on this naming.
    g.url_logout_text = 'Login'
    g.url_logout = users.create_login_url(url_for('sign_in'))
@app.errorhandler(403)
def page_unauthorized(e):
  """Render the custom 403 page."""
  return render_template('403.html'), 403
@app.errorhandler(404)
def page_not_found(e):
  """Render the custom 404 page."""
  return render_template('404.html'), 404
@app.errorhandler(500)
def page_error(e):
  """Log the server error, then render the custom 500 page."""
  logging.error('500 error: %s', e)
  return render_template('500.html'), 500
|
987,739 | e9a9d87020420033ae8d800ea0f88208e30e75d2 | from flask import request
from flask_restful import Api, Resource
from app.api.v1.model.models import UserModels
class User(Resource):
    """REST resource for fetching a single user by id."""
    def __init__(self):
        # Data-access layer shared by all handlers on this resource.
        self.db = UserModels()
    def notFound(self):
        """Standard 404 payload used when a lookup misses."""
        return {'Message' : 'Record not found'},404
    def get(self, user_id):
        """Return the user with `user_id`, or a 404 payload."""
        record = self.db.find(user_id)
        if not record:
            return self.notFound()
        payload = {
            'Message' : 'The specific user has been returned',
            'data' : record
        }
        return payload, 200
class Users(Resource):
    """REST resource for listing and creating users."""
    def __init__(self):
        # Data-access layer shared by all handlers on this resource.
        self.db = UserModels()
    def get(self):
        """Return every stored user."""
        return{
            'Message': 'Users returned successfully',
            'data': self.db.all()
        }, 200
    def post(self):
        """Create a user from the JSON request body and echo it back.

        NOTE(review): missing keys in the body raise KeyError (an unhandled
        500) — input validation with a 400 response is presumably intended.
        """
        data = request.get_json()
        user = {
            'firstname' : data['firstname'],
            'lastname' : data['lastname'],
            'othernames' : data['othernames'],
            'email' : data['email'],
            'phoneNumber' : data['phoneNumber'],
            'username' : data['username'],
            'registered' : data['registered'],
        }
        self.db.save(user)
        return {
            'Message' : 'User saved successfully',
            'data' : user
        }, 201
987,740 | f8f448f2df7094d9c962434055dff28b411c7a34 | import urllib2
import json
def get_tweets(name):
    """Print recent tweets for `name` via the (long-retired) Twitter v1 search API.

    Python 2 code (urllib2, print statement). NOTE(review): `name` is
    interpolated into the URL without escaping — urllib.quote would be
    needed for names containing spaces or special characters.
    """
    s = urllib2.urlopen("http://search.twitter.com/search.json?q="+name).read()
    x = json.loads(s)
    for i in range(len(x['results'])):
        print "%s.%s--> %s\n" %(i, x['results'][i]['from_user_name'], x['results'][i]['text'])
# Interactive entry point (Python 2 raw_input); runs on import as well.
name = raw_input("Enter the twitter-username get recent tweets: ")
get_tweets(name)
|
987,741 | 62d190cf6cc199f4da3a37038ff4cc5716d8f9ab | from netapp.netapp_object import NetAppObject
class ArchiveRecord(NetAppObject):
    """
    Single instance of sampled data in binary form.
    """
    # Backing field for the `data` property (base64-encoded sample blob).
    _data = None
    @property
    def data(self):
        """
        Binary instance data. Average expected length
        of each return string is under 10k bytes, though this
        number will fluctuate depending on the number of
        counters being sampled. May contain error string if
        we are unable to base64 encode the particular instance.
        """
        return self._data
    @data.setter
    def data(self, val):
        # Validation hook inherited from NetAppObject; None clears nothing
        # and is accepted silently.
        if val != None:
            self.validate('data', val)
        self._data = val
    @staticmethod
    def get_api_name():
        """ONTAPI element name for this object."""
        return "archive-record"
    @staticmethod
    def get_desired_attrs():
        """Attributes requested from the API by default."""
        return [
            'data',
        ]
    def describe_properties(self):
        # NOTE: `basestring` makes this Python 2-only.
        return {
            'data': { 'class': basestring, 'is_list': False, 'required': 'required' },
        }
987,742 | 251968cf50b8af09e909e60eed0f5274b42fce44 | import sys
import logging
from lbps.network_tools import LBPSWrapper
from lbps.structure.base_station import BaseStation
from lbps.structure.relay_node import RelayNode
from lbps.structure.user_equipment import UserEquipment
from itertools import count
class DRX(LBPSWrapper):
    """Discrete-event simulation of LTE DRX sleep cycles over a relay topology.

    Each relay node (RN) and user equipment (UE) cycles through:
    inactivity timer -> short sleep cycles -> long sleep cycles, waking on
    downlink data. Timing parameters are supplied to __init__ in TTIs.
    """
    # Shared counter used only to generate default instance names.
    count = count(0)
    def __init__(
        self,
        root,
        inactivity_timer,
        short_cycle_count,
        short_cycle_time,
        long_cycle_time,
        name=None
    ):
        self.name = (
            name or '_'.join(
                [self.__class__.__name__, str(next(self.count))]
            )
        )
        self.root = root
        self.inactivity_timer = inactivity_timer
        self.short_cycle_count = short_cycle_count
        self.short_cycle_time = short_cycle_time
        self.long_cycle_time = long_cycle_time
        self.demo_meta = None
        self.demo_summary = None
        self.__metadata = {}
        # TDD configs are read from the first relay node; all RNs are
        # presumably configured identically — confirm before reuse.
        rn = self.root.target_device[0]
        self.__tdd_config = {
            'backhaul': rn.backhaul.tdd_config,
            'access': rn.access.tdd_config
        }
    def __metadata_update(self, metadata, src, record=True):
        """Advance `src` one TTI through its DRX state machine (no data case).

        Priority: inactivity timer > short sleep > short-cycle awake check >
        long sleep > long-cycle awake check. `record=False` advances state
        without counting the TTI (used during the post-simulation flush).
        """
        # in drx mode: awake > no data > inactivity timer counting
        target = metadata[src]
        if target['inactivity_timer']:
            target['inactivity_timer'] -= 1
            if record: target['awake'] += 1
            src.sleep = False
        elif target['short_cycle_time']:
            target['short_cycle_time'] -= 1
            if record: target['sleep'] += 1
            src.sleep = True
        elif target['short_cycle_count']:
            target['short_cycle_count'] -= 1
            target['short_cycle_time'] = self.short_cycle_time
            if record: target['awake'] += 1
            src.sleep = False
        elif target['long_cycle_time']:
            target['long_cycle_time'] -= 1
            if record: target['sleep'] += 1
            src.sleep = True
        else:
            target['long_cycle_time'] = self.long_cycle_time
            if record: target['awake'] += 1
            src.sleep = False
    def __metadata_reset(self, metadata, src, record=True):
        """Wake `src` (data arrived): restart its inactivity timer and cycles."""
        src.sleep = False
        if record: metadata[src]['awake'] += 1
        metadata[src].update({
            'inactivity_timer': self.inactivity_timer,
            'short_cycle_count': self.short_cycle_count,
            'short_cycle_time': 0,
            'long_cycle_time': 0
        })
    def __transmit_packet(self, TTI, timeline, metadata=None, flush=False):
        """Move packets one hop (eNB->RN backhaul, then RN->UE access) for one TTI.

        `flush=True` ignores sleep states and stops recording awake/sleep
        counters (used to drain buffers after simulation time ends).
        Returns the (possibly freshly created) metadata dict.
        """
        def __transmit(pkt, src, dest, cap):
            # Move one packet and return the remaining link capacity.
            dest.buffer.append(pkt)
            src.buffer.remove(pkt)
            return cap-pkt['size']
        record = True if not flush else False
        if not metadata:
            metadata = {
                d: {
                    'inactivity_timer': self.inactivity_timer,
                    'short_cycle_count': self.short_cycle_count,
                    'short_cycle_time': 0,
                    'long_cycle_time': 0,
                    'sleep': 0,
                    'awake': 0,
                    'delay': 0
                } for d in self.all_devices(self.root, RelayNode, UserEquipment)
            }
        # backhaul
        backhaul_activate_rn = []
        if self.__tdd_config['backhaul'][TTI%10] == 'D':
            # transmission: deliver to awake RNs until capacity runs out
            available_cap = self.root.wideband_capacity
            for pkt in self.root.buffer:
                rn = pkt['device'].ref_access
                if available_cap < pkt['size']: break
                if flush or not rn.sleep:
                    backhaul_activate_rn.append(rn)
                    available_cap = __transmit(
                        pkt, self.root, rn.backhaul, available_cap)
            backhaul_activate_rn = list(set(backhaul_activate_rn))
            # metadata: RNs that received data wake up; their UEs keep cycling
            if not flush:
                for rn in backhaul_activate_rn:
                    self.__metadata_reset(metadata, rn, record=record)
                    for ue in rn.access.target_device:
                        self.__metadata_update(metadata, ue, record=record)
        # access
        for rn in self.root.target_device:
            transmit_direction = rn.access.tdd_config[TTI%10]
            # An RN busy on backhaul this TTI cannot also transmit on access.
            if rn in backhaul_activate_rn or transmit_direction != 'D':
                continue
            if not rn.backhaul.buffer:
                # Nothing queued: everyone under this RN just cycles.
                self.__metadata_update(metadata, rn, record=record)
                for ue in rn.access.target_device:
                    self.__metadata_update(metadata, ue, record=record)
                continue
            self.__metadata_reset(metadata, rn, record=record)
            available_cap = rn.access.wideband_capacity
            activate_ue = []
            # transmission: deliver to awake UEs, accumulating per-UE delay
            for pkt in rn.backhaul.buffer:
                ue = pkt['device']
                if available_cap < pkt['size']: break
                if flush or not ue.sleep:
                    activate_ue.append(ue)
                    available_cap = __transmit(
                        pkt, rn.backhaul, ue, available_cap)
                    metadata[ue]['delay'] += TTI - pkt['arrival_time']
            # metadata: served UEs wake, the rest keep cycling
            if not flush:
                for ue in rn.access.target_device:
                    if ue in activate_ue: self.__metadata_reset(metadata, ue, record=record)
                    else: self.__metadata_update(metadata, ue, record=record)
        return metadata
    def run(self, timeline):
        """Run the full simulation over `timeline` ({TTI: [packets]}).

        After simulated time ends, extra TTIs are run (flush mode, not
        recorded) until all buffers drain. Returns the summary dict.
        """
        _time = self.root.simulation_time
        _all_src = self.all_devices(self.root, RelayNode, UserEquipment)
        _all_rn = self.all_devices(self.root, RelayNode)
        b_tdd = self.__tdd_config['backhaul']
        a_tdd = self.__tdd_config['access']
        # A TTI is "downlink" if either hop's TDD config transmits downlink.
        is_downlink = lambda x: b_tdd[x%10] == 'D' or a_tdd[x%10] == 'D'
        metadata = {
            d: {
                'inactivity_timer': self.inactivity_timer,
                'short_cycle_count': self.short_cycle_count,
                'short_cycle_time': 0,
                'long_cycle_time': 0,
                'sleep': 0,
                'awake': 0,
                'delay': 0
            } for d in _all_src
        }
        logging.info('* Simulation begin with lambda {} Mbps = load {}'.format(
            self.root.lambd, self.root.load))
        self.clear_env(self.root)
        for d in _all_src: d.sleep = False
        # simulate packet arriving, TTI = 0 ~ simulation_time
        for TTI, pkt in timeline.items():
            if pkt: self.root.buffer += pkt
            if is_downlink(TTI):
                self.__transmit_packet(TTI, timeline, metadata)
            else:
                for d in _all_src: self.__metadata_update(metadata, d, record=True)
        # out of simulation time: keep transmitting (flush) until empty
        is_flush = lambda x, y: not x.buffer and all([not _.backhaul.buffer for _ in y])
        TTI = len(timeline)
        while not is_flush(self.root, _all_rn):
            if is_downlink(TTI):
                metadata = self.__transmit_packet(TTI, timeline, metadata, flush=True)
            else:
                for d in _all_src: self.__metadata_update(metadata, d, record=False)
            TTI += 1
        logging.info('* Simulation end with TTI {}'.format(TTI))
        self.demo_meta = metadata
        summary = self.summary_metadata(self.root, metadata)
        summary['lambda'] = self.root.lambd
        self.demo_summary = summary
        logging.info('summary = {}'.format(summary))
        return summary
987,743 | b35625419f9de20301d72a7e53152c1c8c8512f8 | import sys
sys.path.append('/home/aistudio')
import paddle
import paddle.fluid as fluid
from TPN.models.solver import Solver
from TPN.models.evaluator import Evaluator
from TPN.readers.rawframes_dataset import RawFramesDataset
from TPN.utils.config import cfg
def evaluate(evaluator, epoch):
    """Evaluate the checkpoint saved at `epoch` over the full eval dataset.

    Runs under a fresh PaddlePaddle dygraph guard; per-step metrics are
    logged every cfg.solver.log_interval steps and a summary is written
    at the end.
    """
    with fluid.dygraph.guard():
        print("evaluating models...")
        evaluator.reset_summary()
        evaluator.load_models(epoch)
        for step, data in evaluator.read_data():
            images, label = data
            loss, accuracy = evaluator.forward(images, label)
            if step % cfg.solver.log_interval == 0:
                evaluator.update_logger(epoch, step, loss, accuracy)
        evaluator.write_summary(epoch)
def train():
    """Train TPN on the HMDB raw-frames dataset, periodically checkpointing.

    Resumes from checkpoint cfg.solver.start_epoch - 1 when start_epoch is
    nonzero; every cfg.solver.save_interval epochs the model is saved and
    evaluated.
    """
    with fluid.dygraph.guard():
        dataset = RawFramesDataset(**cfg.dataset.hmdb)
        batch_reader = dataset.batch_reader(cfg.solver.batch_size)
        solver = Solver(batch_reader)
        # Separate reader/config for the evaluation split.
        eval_dataset = RawFramesDataset(**cfg.eval.dataset.hmdb)
        eval_batch_reader = eval_dataset.batch_reader(cfg.eval.solver.batch_size)
        evaluator = Evaluator(eval_batch_reader)
        if cfg.solver.start_epoch != 0:
            solver.load_models(cfg.solver.start_epoch - 1)
        for epoch in range(cfg.solver.start_epoch, cfg.solver.max_epoch):
            print("Begin to train epoch " + str(epoch))
            for step, data in solver.read_data():
                images, label = data
                loss, accuracy = solver.forward(images, label)
                solver.backward(loss)
                if step % cfg.solver.log_interval == 0:
                    solver.update_logger(epoch, step, loss, accuracy)
            if epoch % cfg.solver.save_interval == 0:
                solver.save_models(epoch)
                evaluate(evaluator, epoch)
# Script entry point.
if __name__ == '__main__':
    train()
987,744 | 5182647f2134618426dea66c6d8027d54ec60bd8 | from cs50 import SQL
db = SQL("sqlite:///finance.db")
rows = db.execute("SELECT * FROM log")
print(rows)
new_rows = []
for i in range(len(rows)):
if rows[i]["symbol"] not in [item["symbol"] for item in new_rows]:
new_rows.append(rows[i])
print("NO DUBLICATE")
else:
for row in new_rows:
if row["symbol"] == rows[i]["symbol"]:
row["shares"] += rows[i]["shares"]
print(new_rows)
|
987,745 | 3c98ed65f0c6e75c1ef225cd421334963da544a5 | #!/usr/bin/env python3
from glob import glob
import logging
import os
import sys
from fastapi import FastAPI, File, UploadFile
from fastapi.responses import FileResponse
from fastapi.middleware.cors import CORSMiddleware
from uvicorn.logging import ColourizedFormatter
# Module-level setup: colourised logging, map discovery, FastAPI app + CORS.
logger = logging.getLogger('traffic-editor-file-server')
console_formatter = ColourizedFormatter(
    "{levelprefix:<8} {name}: {message}",
    style="{",
    use_colors=True
)
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setFormatter(console_formatter)
logger.addHandler(ch)
logger.debug("hello world")
# MAP_DIR is required; exit early with a clear error otherwise.
if 'MAP_DIR' not in os.environ:
    logger.error("MAP_DIR must be set in the environment")
    sys.exit(1)
map_dir = os.getenv("MAP_DIR")
logger.info(f"serving from {map_dir}")
# spin through MAP_DIR and use the first .building.yaml file we see
map_filenames = glob(os.path.join(map_dir, "*.building.yaml"))
if not map_filenames:
    logger.error(f"couldn't find a .building.yaml file in {map_dir}")
    sys.exit(1)
map_filename = map_filenames[0]
logger.info(f"using {map_filename} as the map")
app = FastAPI()
# Wide-open CORS — acceptable for a local editor tool, not for production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=['*'],
    allow_credentials=True,
    allow_methods=['*'],
    allow_headers=['*']
)
# BUG FIX: the route was "/file/(unknown)", which contains no {filename}
# placeholder, so FastAPI could never bind the `filename` path parameter
# (it would be treated as a query parameter instead). Use the standard
# FastAPI path-parameter syntax.
@app.get("/file/{filename}")
async def get_file(filename: str):
    """Placeholder endpoint: echo the requested filename as JSON."""
    # todo: sanitize the filename
    return {"filename": filename}
@app.get("/map_file")
async def get_map_file():
    """Serve the discovered .building.yaml map file."""
    return FileResponse(map_filename)
@app.post("/map_file")
async def write_map_file(file: UploadFile = File(...)):
    """Accept a map upload; persisting it to map_filename is still a TODO."""
    # todo: write the uploaded file to map_filename
    return {"status": "ok"}
|
987,746 | 259db2033aebd5ff00539c9a81d168092faa9c37 | """
Python program to get a list, sorted in increasing order
by the last element in each tuple from a given list of non-empty tuples.
"""
t1=(1,'s','c','d','f',6)
t2=('x','y')
t3=('p','q','r','s')
t4=(t1,)+(t2,)+(t3,)
l=[t4]
print(l)
t4=t1+t2+t3
print(l)
|
987,747 | 177254d3d51cf3f499aa45b712df2d22a9f4cd98 | import cv2
import sys

import numpy as np
def gradient(img,y,x):
    """Return the (negated) discrete Laplacian of `img` at (y, x).

    Computed as 4*center minus the four 4-connected neighbours; works
    per-channel for multi-channel images. (y, x) must not lie on the
    image border (y-1/x-1 wrap via negative indexing otherwise).
    """
    # Fix: the previous `grad = np.array([0.0, 0.0, 0.0])` initialisation was
    # dead code (immediately overwritten) and wrongly assumed 3 channels.
    return img[y, x] * 4 - img[y + 1, x] - img[y - 1, x] - img[y, x + 1] - img[y, x - 1]
def isophote(patch, patchSize):
    """Return the unit forward-difference gradient at the patch centre.

    NOTE(review): p = patchSize//2 + 1 points one cell past the geometric
    centre for odd patch sizes (centre would be patchSize//2) — confirm this
    offset is intentional. Returns NaN if the gradient is zero (0/0).
    """
    p = patchSize//2+1
    # Stack the vertical and horizontal forward differences, then normalise.
    pGrad = np.array([patch[p,p,:] - patch[p+1,p,:], patch[p,p,:] - patch[p,p+1,:]])
    return pGrad / np.linalg.norm(pGrad)
def orthoomega(bitPatch, patchSize):
    """Return the unit Sobel gradient of the mask around the patch centre.

    Used as the (approximate) normal to the fill-front boundary. Returns
    NaN components if the 3x3 neighbourhood is constant (zero vector / 0).
    """
    p = patchSize//2+1
    # BUG FIX: the slice was p-1:p+1 (a 2x2 window), but the Sobel sums
    # below index rows/columns 0..2 of a 3x3 window, which raised
    # IndexError. Slice end is exclusive, so p+2 gives the 3x3 window.
    small = bitPatch[p-1:p+2,p-1:p+2,:]
    sobelX = -small[0,0,:] - 2*small[0,1,:] - small[0,2,:] + small[2,0,:] + 2*small[2,1,:] + small[2,2,:]
    sobelY = -small[0,0,:] - 2*small[1,0,:] - small[2,0,:] + small[0,2,:] + 2*small[1,2,:] + small[2,2,:]
    sobelVec = np.array([sobelX,sobelY])
    return sobelVec / np.linalg.norm(sobelVec)
def confidence(bitPatch, patchSize):
    """Return the fraction of known pixels (mask value 0) in the patch.

    The mask uses nonzero = unknown/to-fill, zero = known, so confidence is
    the proportion of zero entries in the patchSize x patchSize window.
    """
    # Vectorised replacement of the O(patchSize^2) Python loop: count zero
    # entries directly (the trailing singleton channel, if any, contributes
    # exactly one element per pixel, matching the original per-pixel test).
    known = np.count_nonzero(bitPatch[:patchSize, :patchSize] == 0)
    return known / (patchSize * patchSize)
def data(patch, bitPatch, patchSize):
    """Return the data term D(p) for exemplar-based inpainting.

    D(p) = |grad I(p)| * |isophote(p) . n(p)|, favouring patches where strong
    image structure meets the fill front.
    """
    # p is the coordinate of the center pixel of the patch
    p = patchSize//2+1
    # find the scalar magnitude of the gradient at p
    gradMag = np.linalg.norm(np.linalg.norm(gradient(patch, p, p)))
    # BUG FIX: the original assigned to a local named `isophote`, which made
    # the name local to this function and raised UnboundLocalError on the
    # call itself (the module-level function was shadowed). Use a distinct
    # local name.
    iso = isophote(patch, patchSize)
    # unit normal to the fill front at p
    normalAtP = orthoomega(bitPatch, patchSize)
    return gradMag * np.linalg.norm(np.dot(iso, normalAtP))
def inpaint(img, bitMask, patchSize):
    """Select the highest-priority fill-front pixel (Criminisi-style).

    NOTE(review): unfinished — the final exemplar search/copy step is a
    TODO and the function currently returns None, so the caller's
    cv2.imwrite receives None.
    """
    height = img.shape[0]
    width = img.shape[1]
    importantX = 0
    importantY = 0
    importance = 0
    # go through every pixel checking its importance
    for i in range(patchSize // 2 + 1, height - patchSize // 2):
        for j in range(patchSize // 2 + 1, width - patchSize // 2):
            # check if pixel is on an edge
            # we're not too worried about selecting a pixel not on the seam since
            # confidence would be too low to select it
            # (bitMask[i,j,:] is a 1-element slice; its truthiness is the
            # truthiness of that single mask value)
            if bitMask[i,j,:]:
                # select a neighborhood around the current pixel of size patchSize*patchSize
                bitPatch = bitMask[i-patchSize//2:i+patchSize//2,j-patchSize//2:j+patchSize//2,:]
                patch = img[i-patchSize//2:i+patchSize//2,j-patchSize//2:j+patchSize//2,:]
                # compute the confidence and data terms around this pixel
                cP = confidence(bitPatch, patchSize)
                dP = data(patch, bitPatch, patchSize)
                # if the combined score is higher than the highest "importance" score,
                # select this pixel instead
                if cP*dP > importance:
                    importance = cP * dP
                    importantY = i
                    importantX = j
    # now search for the known patch that's most similar and replace our unknown patch
if __name__ == '__main__':
    # Usage: script.py <input-image> <bit-mask> <output-image> <patch-size>
    inputImgSrc = sys.argv[1]
    bitMaskSrc = sys.argv[2]
    outputImgSrc = sys.argv[3]
    # BUG FIX: argv entries are strings, but patchSize is used in integer
    # arithmetic (patchSize // 2, range(patchSize)) — convert it once here.
    # (`import sys` was also missing from this file; added at the top.)
    patchSize = int(sys.argv[4])
    inputImg = cv2.imread(inputImgSrc)
    # keep a single-channel mask
    bitMask = cv2.imread(bitMaskSrc)[:,:,:1]
    # set any non-zero values to 1
    bitMask[bitMask > 0] = 1
    outputImg = inpaint(inputImg, bitMask, patchSize)
    cv2.imwrite(outputImgSrc, outputImg)
|
987,748 | 267ae35db74b443a9eaa07ac8983c1dad6e7d77b | import os
basedir = os.path.abspath(os.path.dirname(__file__))
jwt_secret="mykey"
# class Config(object):
# DEBUG = False
# # MONGO_URI = 'mongodb://localhost:27017/mydb'
# MONGO_URI = os.getenv('MONGO_URL')
# class DevelopmentConfig(Config):
# DEBUG = True
# class ProductionConfig(Config):
# DEBUG = False
# app_config = {
# 'development': DevelopmentConfig,
# 'production': ProductionConfig
# }
# # # Database
# # MONGO_URI = 'mongodb://localhost:27017/mydb'
# # # MONGO_USERNAME = 'mydb'
# # # MONGO_PASSWORD = 'password'
# # # Debugging
# # DEBUG = False
# # Networking
# # PORT = 5000
# PREFERRED_URL_SCHEME = 'https'
# SERVER_NAME = 'mywebsite.com'
|
987,749 | 1067ac71b92a55ff826134a3e734dc352f603515 | import numpy as np
import matplotlib.pyplot as plt
from matplotlib import style
import csv
import pickle
style.use('classic')
def input_file_csv(type1,angle=None):
    """Plot the VSWR trace of antenna `type1` (optionally at `angle`) for
    each of the five hard-coded measurement dates."""
    date = '08-03-2019'
    normalize_csv(date,type1,angle)
    date = '11-03-2019'
    normalize_csv(date,type1,angle)
    date = '25-02-2019'
    normalize_csv(date,type1,angle)
    date = '26-02-2019'
    normalize_csv(date,type1,angle)
    date = '27-02-2019'
    normalize_csv(date,type1,angle)
def normalize_csv(date,type1,angle):
    """Build the CSV path for a date/type/angle and plot it.

    Falls back to a 'DTF' file name when the type-named file is missing.
    """
    if angle == None:
        csv_path = date + '/' + type1 + '/'
    else:
        csv_path = date + '/' + type1 + '/' + angle + '/'
    try:
        normalize_csv_(type1,date,csv_path + type1)
    except FileNotFoundError:
        normalize_csv_(type1,date,csv_path + 'DTF')
def normalize_csv_(type1,date,csv_path):
    """Parse `csv_path`.csv and add its trace to the current plot."""
    x,y = normalize_csv__(csv_path + '.csv')
    plt.title(type1)
    plt.plot(x,y,label=date)
def normalize_csv__(filename):
    """Parse the first data section of an instrument CSV export.

    Skips comment lines (containing '!') and 'BEGIN' markers, collects
    (x, y) pairs from the first two columns, and stops at the first 'END'
    marker. Returns two float64 numpy arrays.
    """
    xs = []
    ys = []
    with open(filename, 'r') as f:
        reader = csv.reader(f)
        for row in reader:
            # Robustness fix: blank lines yield empty rows, which previously
            # raised IndexError on row[0].
            if not row:
                continue
            s = row[0]
            # Only the first section is wanted: stop at the first END marker.
            # (The original end_index counter broke at 1, so its "reset the
            # lists" else-branch was unreachable dead code — removed.)
            if s.find('END') >= 0:
                break
            if s.find('!') >= 0 or s.find('BEGIN') >= 0:
                continue
            xs.append(row[0])
            # NOTE(review): the original had identical if/else branches both
            # appending row[1]; row[2] (e.g. the imaginary part) may have
            # been intended for multi-column rows — behaviour preserved.
            ys.append(row[1])
    return np.array(xs, dtype=np.float64), np.array(ys, dtype=np.float64)
# Script driver: plot one antenna's traces (others kept commented out for
# quick switching), then decorate and show the figure.
input_file_csv('H1D25','0')
#input_file_csv('H1D30-20','0')
#input_file_csv('H1D50-20','0')
#input_file_csv('H1D65','0')
#input_file_csv('H1D75','0')
#input_file_csv('H1D80','0')
#input_file_csv('H1N')
#plt.xticks(np.arange(0, 5, step=0.1))
plt.xlabel('Meters')
plt.ylabel('VSWR')
plt.legend()
plt.grid()
plt.show()
987,750 | 5ddeddab84dff65113be7ee853d916f8c5f3b5d2 | import numpy as np
import pandas as pd
from pandas_datareader import data as pdr
import matplotlib.pyplot as plt
import datetime
from datetime import date
import statsmodels
from statsmodels.tsa.stattools import coint
from pykalman import KalmanFilter
from math import sqrt
import yfinance as yf
yf.pdr_override()
#set starting variables (only need to manuably set tickers)
today = date.today()
fiveYearsAgo = today - datetime.timedelta(days = 5 * 365)
endDate = today
startDate = fiveYearsAgo
tickers = ['SLV', 'GDX', 'GLD', 'IAU', 'GDXJ', 'GLDM', 'XME', 'SIVR', 'BAR', 'SIL', 'SLVP']
#get data for each ticker
data = pdr.get_data_yahoo(tickers, start = startDate, end = endDate)
prices = data["Adj Close"].dropna(axis='columns')
#set up data for test
keysList = prices.keys()
keySize = len(keysList)
uniquePairs = (keySize * (keySize - 1)) / 2
pValMax = 0.05
pairsList = []
print('\n' + str(keySize) + " tickers span a valid backtest with " + str(int(uniquePairs)) + " possible pair(s).", end = '\n\n')
#run cointegration test on all possible pairs
for i in range(keySize):
for j in range(i + 1, keySize):
stock1 = prices[keysList[i]]
stock2 = prices[keysList[j]]
result = coint(stock1, stock2)
pvalue = result[1]
if(pvalue < pValMax):
pairsList.append((keysList[i], keysList[j], pvalue))
print(str(len(pairsList)) + " possible cointegrated pairs with p-values less than " + str(pValMax) + ":")
#print out valid pairs with sufficient p-value
for pair in pairsList:
print(str(pair[0]) + " and " + str(pair[1]) + " have p-value = " + str(pair[2])) |
987,751 | 84ad68849a82de44fd45963f86faa2af55cb2854 | from netaddr import IPSet
from app.scope.scan_manager import IPScanManager
def test_new_scan_manager(app):
    """A /24 scope minus one blacklisted host yields 255 ready targets."""
    manager = IPScanManager(IPSet(["10.0.0.0/24"]), IPSet(["10.0.0.5/32"]), False)
    assert manager.get_total() == 255
    assert manager.get_ready()
def test_scan_cycle_complete_coverage(app):
    """One full scan cycle hands out every address at most once."""
    manager = IPScanManager(IPSet(["10.0.0.0/24"]), IPSet(), False)
    seen = [manager.get_next_ip() for _ in range(manager.get_total())]
    assert len(seen) == len(set(seen))
|
987,752 | 69bba5bea0699741d26cfc769dd68e042ad7810d | from market_observer_interface import ObserverInterface, ObservableInterface
class Observable(ObservableInterface):
    """Subject in the observer pattern: keeps a roster of subscribers and
    broadcasts every update to each of them."""

    def __init__(self):
        self.observers = []

    def register(self, observer):
        # Only Observer instances may subscribe; duplicates are ignored.
        assert isinstance(observer, Observer), "Observer objects available to register only"
        if observer not in self.observers:
            self.observers.append(observer)

    def unregister(self, observer):
        # Silently ignore observers that were never registered.
        if observer in self.observers:
            self.observers.remove(observer)

    def send_update(self, *args, **kwargs):
        # Fan the update out to every subscriber.
        for subscriber in self.observers:
            subscriber.update(*args, **kwargs)

    def unregister_all(self):
        self.observers.clear()
class Observer(ObserverInterface):
    """Base subscriber: carries a display name, a per-instance id and a
    'lead' flag refreshed on each update."""

    def __init__(self, name):
        self.name = name
        self.id = id(self)
        self.lead = False

    def update(self, *args, **kwargs):
        # React to a broadcast; only the 'lead' flag is tracked here.
        # TODO(review): move this into a dedicated subclass if it grows.
        self.lead = kwargs.get('lead', False)
class AmericanMarket(Observer):
    """US-market subscriber that logs every broadcast it receives."""

    def update(self, *args, **kwargs):
        message = "American Market get next updates:{}; {}".format(args, kwargs)
        print(message)
class EuropeanMarket(Observer):
    """EU-market subscriber that logs every broadcast it receives."""

    def update(self, *args, **kwargs):
        message = "European Market get next updates:{}; {}".format(args, kwargs)
        print(message)
|
987,753 | 712e2fa267d4387003efba57d45652b1a39e561a | from django.test import TestCase
from .models import Post, UserPostLikes
from django.contrib.auth.models import User
from rest_framework.test import APIClient
from rest_framework import status
from django.urls import reverse
# Create your tests here.
class ModelTestCase(TestCase):
    """Model-level tests: saving a Post or a User grows the table."""

    def setUp(self):
        """Define the test client and other test variables."""
        self.user = User(first_name="David", last_name="Komljenovic", username="WhiteSeraph")
        self.user.save()
        self.post = Post(post_text="This is a test post!", user=self.user)

    def test_model_can_create_a_post(self):
        """Test the post model can create a post."""
        count_before = Post.objects.count()
        self.post.save()
        self.assertNotEqual(count_before, Post.objects.count())

    def test_model_can_create_a_user(self):
        self.test_user = User(first_name="David1", last_name="Komljenovic", username="WhiteSeraph1")
        count_before = User.objects.count()
        self.test_user.save()
        self.assertNotEqual(count_before, User.objects.count())
class ViewTestCase(TestCase):
    """API-level tests for the post create/detail endpoints."""

    def setUp(self):
        self.client = APIClient()
        self.user = User(first_name="David", last_name="Komljenovic", username="WhiteSeraph")
        self.user.save()
        self.post = Post(post_text="This is a test post!", user=self.user)
        self.post.save()

    def test_api_can_create_a_post(self):
        """Test the api can create a post."""
        self.post_data = {"post_text": "This is a test post!", "user": self.user.id}
        self.response = self.client.post(
            reverse('create'),
            self.post_data,
            format="json")
        self.assertEqual(self.response.status_code, status.HTTP_201_CREATED)

    def test_api_can_get_a_post(self):
        """Test the api can get a given post."""
        post = Post.objects.get()
        response = self.client.get(
            reverse('details', kwargs={'pk': post.id}), format="json")
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertContains(response, post)

    def test_api_can_update_post(self):
        """Test the api can update a given post."""
        post = Post.objects.get()
        # NOTE(review): Post appears to use 'post_text', not 'name' — this
        # payload may be ignored by the serializer; confirm the field name.
        change_post = {'name': 'Something new'}
        res = self.client.put(
            reverse('details', kwargs={'pk': post.id}),
            change_post, format='json'
        )
        self.assertEqual(res.status_code, status.HTTP_200_OK)

    def test_api_can_delete_post(self):
        """Test the api can delete a post."""
        post = Post.objects.get()
        response = self.client.delete(
            reverse('details', kwargs={'pk': post.id}),
            format='json',
            follow=True)
        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
|
987,754 | b5d74aad33f783564c8a2d370b39aa836f8f24bc | from django import forms
from .models import Client, MealTracker
class ClientForm(forms.ModelForm):
    """ModelForm exposing a client's personal, physical and contact fields."""
    class Meta:
        model = Client
        fields = ('client_name', 'gender', 'email', 'height', 'weight', 'client_age', 'BMR', 'account_number', 'address',
                  'city', 'state', 'zipcode', 'phone_number')
class MealTrackerForm(forms.ModelForm):
    """ModelForm for logging a single meal entry for a client."""
    class Meta:
        model = MealTracker
        fields = ('client_name', 'meal_category', 'meal_description', 'time', 'calories')
|
987,755 | abe5f9f7281eab2604d207bdb4f04cb2c4232a0f | import unittest
from core.constants import ErrorMessages
from problem2.processor import Processor
class TestPrblm2Processor(unittest.TestCase):
    """Unit tests for problem 2's input parser and kingdom validator."""

    def setUp(self):
        self.processor = Processor()

    def tearDown(self):
        self.processor = None

    def test_parser(self):
        # Input strings are split and lower-cased into kingdom lists.
        self.assertEqual(self.processor.parse_input('Air Land'), ['air', 'land'])
        self.assertEqual(self.processor.parse_input('Air'), ['air'])

    def test_validator(self):
        # Unknown kingdom names are rejected with a formatted message.
        ok, msg = self.processor.validate(['air', 'lands'])
        self.assertEqual(ok, False)
        self.assertEqual(msg, ErrorMessages.INVALID_KINGDOM.value.format('lands'))
        # Duplicate kingdoms are rejected too.
        ok, msg = self.processor.validate(['air', 'air'])
        self.assertEqual(ok, False)
        self.assertEqual(msg, ErrorMessages.DUPLICATE_MESSAGE.value)
|
987,756 | 33b3728ab4a39717c5d55248a77848b406a6be6f | #!/user/bin/env python
# coding: utf-8
from jinja2 import nodes
from jinja2.ext import Extension
from base import BaseExtension
from uri import UriExtension
class CssType:
    """Kinds of CSS resource a template can declare."""
    INLINE = 1 # inline <style> content
    EXTRA = 2 # external stylesheet <link>
# Custom Jinja2 extension class, inheriting from jinja2.ext.Extension.
class CssExtension(BaseExtension):
    """Jinja2 extension implementing {% css %}...{% endcss %} blocks.

    With an argument the block registers an external or named inline CSS
    resource; without one it registers auto-named inline CSS.  Collected
    resources live in environment.css_list and are rendered by build_css().
    """
    # Keyword that triggers this extension: the {% css %} template tag.
    tags = set(["css"])
    # Counter used to generate ids for anonymous inline blocks.
    _id = 1
    def __init__(self, environment):
        # Initialise the parent class (required by jinja2.ext.Extension).
        super(CssExtension, self).__init__(environment)
        # Extend the Jinja2 environment with our attributes so that they
        # are reachable later as env.css / env.css_support / env.css_list.
        environment.extend(
            css=self,
            css_support=True,
            css_list=[]
        )
    def ready(self):
        # Hook called when the environment is ready: start from a clean slate.
        self.reset()
    def reset(self):
        # Drop all collected CSS and restart the anonymous-id counter.
        self.environment.css_list = []
        self._id = 1
    # Override of jinja2.ext.Extension.parse: the main handler for the
    # {% css %} statement found in a template.
    def parse(self, parser):
        # Entering this function means a {% css %} tag was found.
        # Record the template line number where the tag appears.
        lineno = next(parser.stream).lineno
        # Try to read the tag's argument, e.g. {% css 'path' %} yields a
        # jinja2.nodes.Const whose value is 'path'.  The tag may be
        # argument-less, in which case parse_expression raises.
        lang_type = None
        try:
            lang_type = parser.parse_expression()
        except:
            pass
        # Wrap the (optional) argument in a list for call_method below.
        args = []
        if lang_type is not None:
            args.append(lang_type)
        # A second comma-separated argument could be supported like this,
        # but it is not needed here:
        # if parser.stream.skip_if('comma'):
        #     args.append(parser.parser_expression())
        # else:
        #     args.append(nodes.Const(None))
        # Parse everything between {% css %} and {% endcss %} into 'body'
        # and advance the token stream past {% endcss %}.
        body = parser.parse_statements(['name:endcss'], drop_needle=True)
        # Build a CallBlock node carrying the original line number; it will
        # call _do_add_link (with an id/url) or _do_add_inline_link
        # (anonymous inline CSS) with the parsed body.
        method = '_do_add_link'
        if len(args) == 0:
            method = '_do_add_inline_link'
        return nodes.CallBlock(self.call_method(method, args), [], [], body).set_lineno(lineno)
    # Register a resource declared with an explicit id/url.
    def _do_add_link(self, url, caller):
        # caller() renders the block body parsed above ({% css %}...{% endcss %}).
        content = caller().rstrip()
        if content == '' and url != '':
            # Empty body + url: treat as an external stylesheet reference.
            self.add_extra(url)
        elif content != '':
            # Non-empty body: store as named inline CSS.
            self.add_inline(url, content + "\n")
        return ''
    # Register an anonymous inline resource (no argument given to the tag).
    def _do_add_inline_link(self, caller):
        content = caller().strip()
        if content != '':
            # Generate a fresh synthetic id, then delegate to _do_add_link.
            self._id += 1
            return self._do_add_link('__auto_link_%s' % self._id, caller)
        return ''
    # Return the index of the resource with the given id, or -1 if absent.
    def _index_of(self, id):
        css_list = self.environment.css_list
        for index,item in enumerate(css_list):
            if item["id"] == id:
                return index
        return -1
    # Add (or replace) an inline CSS entry by id.
    def add_inline(self, id, content):
        index = self._index_of(id)
        css_list = self.environment.css_list
        if index != -1:
            # Same id seen again: newest content wins.
            css_list[index]["content"] = content
        else:
            css_list.append({ "type": CssType.INLINE, "id": id, "content": content })
    # Add an external stylesheet URL (deduplicated by url).
    def add_extra(self, url):
        # Let the uri extension rewrite/resolve the link when available.
        if hasattr(self.environment, "uri"):
            url = self.environment.uri.query_resource(url)
        # Only append if the url has not been registered yet.
        if self._index_of(url) != -1:
            pass
        else:
            self.environment.css_list.append({ "type": CssType.EXTRA, "id": url, "content": url })
    # Render every collected resource as <link>/<style> HTML tags.
    def build_css(self):
        css_list = self.environment.css_list
        contents = []  # final generated markup, one tag per entry
        for item in css_list:
            item_type = item["type"]
            if item_type == CssType.EXTRA:
                # External stylesheet.
                url = item["content"]
                contents.append('<link href="%s" rel="stylesheet" />' % url)
            elif item_type == CssType.INLINE:
                # Inline style (the id may be synthetic).
                css = item["content"]
                contents.append('<style>%s</style>' % css)
        # endfor
        return '\n'.join(contents)
987,757 | 5be5042da70eddaed798233ac0b3315baa33c47d |
# Node-type tag constants used to classify parsed template/widget entries.
ATTRIBUTE = "ATTRIBUTE"
CONTENT = "CONTENT"
IMPORT = "IMPORT"
MOD = "MOD"
WIDGET = "WIDGET"
987,758 | 5d64aedabfbba26cb6f35b696598647a081703e8 |
import tkinter as kin
import tkinter as tk
from tkinter import *
from tkinter import messagebox as tkmessagebox
from tkinter.font import Font
from tkinter import messagebox
import time
import random
from time import sleep
import time
# Root window for the demo UI, sized for a full-HD screen.
top = kin.Tk()
top.geometry("1920x1080")
def irsensor():
    """Open an IR-sensor demo window and drive LEDs from GPIO pin 16.

    Wiring (BOARD numbering): pin 8 and pin 10 are LED outputs, pin 16 is
    the IR sensor input.  Side effects only: creates a Tk window and
    toggles GPIO pins; returns nothing.
    """
    import RPi.GPIO as IO
    IO.setwarnings(False)
    IO.setmode(IO.BOARD)
    IO.setup(8,IO.OUT)    # board pin 8  -> LED output
    IO.setup(10,IO.OUT)   # board pin 10 -> LED output
    IO.setup(16,IO.IN)    # board pin 16 -> IR sensor input
    win1=Toplevel(bg='black',height='1000',width='2000')
    win1.title('IR SENSOR')
    g=Label(win1,text='connect ir sensor to pins 5V-pin2: gnd-pin14: out-pin16 along with ground connection', font=('Blackletter', 17),fg='orange')
    g.pack()
    def df():
        # Bug fix: 'i' used to be assigned in this function while also being
        # read from the enclosing scope, which made it local to df() and
        # raised UnboundLocalError on 'while i==0'.  It is now initialised
        # locally, so the loop performs one detection pass as before intended.
        i = 0
        while i==0:
            if(IO.input(16)==True):   # object is far away
                IO.output(8,True)
                IO.output(10,False)
            if(IO.input(16)==False):  # object is near
                # NOTE(review): both branches drive pin 8 high and pin 10
                # low; presumably the 'near' branch should light the other
                # LED — confirm against the hardware.
                IO.output(8,True)
                IO.output(10,False)
            i=1
    def ah():
        win1.destroy()
    exitbut=Button(win1,text='exit',fg='blue',height='1',width='4',command=ah)
    detect=Button(win1,text='lets detect',command=df,height='2',width='6')
    detect.place(x=20,y=56)
    detect.pack()
    exitbut.pack()
    # Start with both LEDs off.
    IO.output(8,False)
    IO.output(10,False)
def uvsensor():
    """Ultrasonic (HC-SR04) distance demo: background thread measures the
    distance on pins TRIG=36 / ECHO=38 and shows it in a Tk window.

    Side effects only: spawns a daemon-like thread, manipulates GPIO pins
    and enters the Tk main loop.
    """
    import RPi.GPIO as GPIO
    import threading
    GPIO.setmode(GPIO.BOARD)
    GPIO.setwarnings(False)
    TRIG=36
    ECHO=38
    GPIO.setup(TRIG, GPIO.OUT)
    GPIO.setup(ECHO, GPIO.IN)
    # Shared with do_task()/exitProgram(); thread1 is the measurement thread.
    global is_busy,thread1
    is_busy=0
    win=Toplevel(bg='black',height='1000',width='2000')
    def exitProgram():
        # Ask the worker thread to stop, give it time to notice, then close.
        # if messagebox.askyesno("Print", "Exit?"):
        thread1.exit = 1
        sleep(0.300)
        win.destroy()
    originalPlantImage = tk.PhotoImage(file="asd.png")
    image = originalPlantImage.subsample(15, 15)
    exitb = tk.Button(win,text="Exit",image=image,font=("Helvetica", 14,'bold'),compound="left",borderwidth=3,
                      width = 60,height = 30,bg="lightskyblue",fg='black',command= exitProgram,
                      activebackground="dark gray")
    g=Label(win,text='connect uv sensor in pins trig-36, echo-38 along with ground connection', font=('Blackletter', 10),)
    g.pack()
    exitb.pack(fill=X,padx=2)
    def measure_dis():
        # One HC-SR04 measurement: 10us trigger pulse, then time the echo.
        GPIO.output(TRIG, False)
        sleep(2)
        GPIO.output(TRIG, True)
        sleep(0.00001)
        GPIO.output(TRIG, False)
        while GPIO.input(ECHO)==0:
            pulse_start= 0
            pulse_start= time.time()
        while GPIO.input(ECHO)==1:
            pulse_end= 0
            pulse_end= time.time()
        pulse_duration = pulse_end-pulse_start
        # NOTE(review): 18000 converts echo time to cm (half of ~34300 cm/s
        # rounded) — confirm the calibration constant.
        distance= pulse_duration*18000
        distance= round(distance,2)
        DistMax.set(str(distance)+' cm')
        print('dist:',distance,'cm')
    def do_task():
        # Toggle continuous measurement on/off; button text mirrors state.
        global is_busy,thread1
        if is_busy:
            is_busy = 0
            thread1.doit = 0
            loop_it.configure(text="start")
        else:
            is_busy=1
            thread1.doit = 1
            loop_it.configure(text="stop")
    loop_it = tk.Button(win,text="start",font=("Helvetica", 12,'bold'),
                        compound="left",
                        borderwidth=3,
                        width = 20,
                        height = 10,
                        bg="lightskyblue",
                        fg='white',
                        command= do_task,
                        activebackground="dark gray")
    loop_it.pack(side=LEFT,padx=20)
    class HC_SR04_Thread (threading.Thread):
        """Worker thread: measures while doit==1, stops when exit==1."""
        def __init__(self):
            threading.Thread.__init__(self)
            self.doit=0
            self.exit = 0
        def run(self):
            while self.exit == 0:
                if self.doit == 1:
                    try:
                        measure_dis()
                    except:
                        print('Error1')
                        pass
                GPIO.output(TRIG, False)
                sleep(2)
    thread1 = HC_SR04_Thread()
    thread1.start()
    # Tk variable backing the distance read-out label.
    DistMax=StringVar()
    DistMax.set('')
    dist_max = Label(win, textvariable=DistMax,width=10,height=2, font=("Helvetica", 14),bg= 'black',fg="white", borderwidth=2,relief="sunken")#ridge
    dist_max.pack(side =LEFT,padx=10)
    top.mainloop()
def ak():
    """LED demo window: two buttons toggle the LED on board pin 24."""
    import RPi.GPIO as IO
    import time
    IO.setwarnings(False)
    IO.setmode(IO.BOARD)
    IO.setup(24,IO.OUT) # board pin 24 -> LED as output
    win=Toplevel(bg='black',height='1000',width='2000')
    win.title('LED button clickz')
    g=Label(win,text='connect led to pin 24 along with ground connection pin 6', font=('Blackletter', 17),)
    g.pack()
    g.place(x=0, y=0)
    def lt():
        # Turn the LED on.
        IO.output(24,True)
    def li():
        # Turn the LED off.
        IO.output(24,False)
    def exitprogram():
        # Switch the LED off before closing the window.
        IO.output(24,False)
        win.quit()
        win.destroy()
    ledbutton=kin.Button(win, text='TURN LED ON', command=lt, bg='pink' ,height=1, width=24)
    ledbutton.place(x=24 ,y=45)
    ledbutton=kin.Button(win, text='TURN LED OFF', command=li, bg='pink' ,height=1, width=24)
    ledbutton.place(x=24 ,y=75)
    exib=kin.Button(win, text='exit', command=exitprogram, bg='cyan', height=1, width=4)
    exib.place(x=50, y=120)
def aran():
    """Sensor-selection window with buttons for the UV/IR/gas sensor demos."""
    win=Toplevel(bg='black',height='1000',width='2000')
    img5=PhotoImage(file="rsz_sensors.png", height='1000', width='2000')
    la1=Label(win, image=img5, height='1000', width='1500')
    # Keep a reference so the PhotoImage is not garbage-collected.
    la1.image=img5
    la1.pack()
    uv=kin.Button(win,text='UV SENSOR',height='2', width='7', bd=1, command=uvsensor)
    uv.pack()
    uv.place(x=380,y=100)
    ir=kin.Button(win,text='IR SENSOR',height='2', width='7', bd=1,command=irsensor)
    ir.pack()
    ir.place(x=380,y=175)
    gs=kin.Button(win,text='GAS SENSOR',height='2', width='7', bd=1)
    gs.pack()
    gs.place(x=380,y=250)
    # NOTE(review): win.quit() here exits the mainloop as soon as the window
    # is built — this looks unintended; confirm whether it should be removed.
    win.quit()
def anil():
    """Replace the splash button with the main menu (LED/sensors/wifi/cloud)."""
    # Hide the full-screen splash button defined at module level.
    b1.forget()
    img5=PhotoImage(file="background.png", height='1000', width='2000')
    la=Label(top, image=img5, height='1000', width='1500')
    # Keep a reference so the PhotoImage is not garbage-collected.
    la.image=img5
    la.pack()
    bla=kin.Button(top,text='LEDBULB',height='1', width='5', font=('comicsans'), bd=0, command=ak)
    bla.pack()
    bla.place(x=290,y=110)
    redbutton = Button(top, text="SENSORs",fg="red", height='1', font=('comicsans'), width='5', bd=0, command=aran)
    redbutton.pack()
    redbutton.place(x=230,y=145)
    greenbutton = Button(top, text="WIFI", fg="brown", bg='white', height='1', font=('comicsans'), width='5', bd=0)
    greenbutton.pack()
    greenbutton.place(x=360,y=145)
    bluebutton = Button(top,text='CLOUD', fg="yellow", height='1', font=('comicsans'), width='5', bd=0 )
    bluebutton.pack()
    bluebutton.place(x=290,y=190)
# Full-screen splash image; clicking it builds the main menu via anil().
img=kin.PhotoImage(file="asd.png")
b1=Button(top, justify=CENTER, image=img, height='1080', width='1920', command=anil)
b1.config(image=img, compound=LEFT )
b1.pack()
top.mainloop()
987,759 | 350c94faa7a0b15e4517318eccec4d882da89d3d | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright (c) 2013, Rui Carmo
Description: Experimental Cython compile script
License: MIT (see LICENSE.md for details)
"""
import os, sys
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
from Cython.Build import cythonize
from glob import glob
# Guard: fail with a hint when Cython is missing.  NOTE(review): Cython is
# already imported unconditionally above, so this guard fires too late —
# a missing Cython would raise before reaching it; confirm and reorder.
try:
    from Cython.Distutils import build_ext
except:
    print "You don't seem to have Cython installed"
    sys.exit(1)
def scandir(dir, files=None):
    """Recursively collect dotted module names for every .py file under *dir*.

    :param dir: directory to walk
    :param files: accumulator list; a fresh list is created when omitted.
        (The previous mutable default argument was shared across calls,
        so repeated top-level calls leaked earlier results.)
    :return: list of dotted module names, e.g. "pkg.sub.mod"
    """
    if files is None:
        files = []
    for file in os.listdir(dir):
        path = os.path.join(dir, file)
        if os.path.isfile(path) and path.endswith(".py"):
            # "pkg/sub/mod.py" -> "pkg.sub.mod"
            files.append(path.replace(os.path.sep, ".")[:-3])
        elif os.path.isdir(path):
            scandir(path, files)
    return files
def makeExtension(extName):
    """Build a distutils Extension for the dotted module name *extName*."""
    # Map "pkg.mod" back to the on-disk path "pkg/mod.py".
    extPath = extName.replace(".", os.path.sep)+".py"
    return Extension(
        extName,
        [extPath],
        include_dirs = ["."],
        extra_compile_args = ["-O3", "-Wall"],
        extra_link_args = ['-g'],
        libraries = [],
        )
# Discover all modules under the package and wrap each as a Cython extension.
extNames = scandir("miniredis")
extensions = [makeExtension(name) for name in extNames]
setup(
    name = "miniredis",
    # 'packages' must be a list of package names; the bare string
    # "miniredis" was not a valid value for distutils.
    packages = ["miniredis"],
    ext_modules=extensions,
    cmdclass = {'build_ext': build_ext},
    setup_requires=['nose'],
    test_suite='nose.main',
)
987,760 | 61959d83e66f4edb454a71fe8294e33c10681d9a | # Generated by Django 2.2.3 on 2019-07-24 23:31
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the 'place' and 'place_convert' fields from DocumentConvert."""
    dependencies = [
        ('myapp', '0010_auto_20190724_2322'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='documentconvert',
            name='place',
        ),
        migrations.RemoveField(
            model_name='documentconvert',
            name='place_convert',
        ),
    ]
|
x=input("Enter a num:")
y=input("Enter a num:")
# Report whether the two entered strings differ (string comparison, not numeric).
if x != y:
    print("yes")
else:
    print("no")
|
987,762 | d1a1c0ce64ebf4715b3e761ff19fec0bcd2e3baa | import numpy as np
# Q-learning hyper-parameters: discount factor and learning rate.
gamma = 0.75
alpha = 0.9
# Map each named location L1..L9 to a state index 0..8.
location_to_state = {
    'L1' : 0,
    'L2' : 1,
    'L3' : 2,
    'L4' : 3,
    'L5' : 4,
    'L6' : 5,
    'L7' : 6,
    'L8' : 7,
    'L9' : 8
}
actions= [0,1,2,3,4,5,6,7,8]
# Adjacency matrix: rewards[i][j] == 1 when a direct move i -> j is allowed.
rewards = np.array([[0,1,0,0,0,0,0,0,0],
              [1,0,1,0,0,0,0,0,0],
              [0,1,0,0,0,1,0,0,0],
              [0,0,0,0,0,0,1,0,0],
              [0,1,0,0,0,0,0,1,0],
              [0,0,1,0,0,0,0,0,0],
              [0,0,0,1,0,0,0,1,0],
              [0,0,0,0,1,0,1,0,1],
              [0,0,0,0,0,0,0,1,0]])
# Inverse mapping: state index back to location name.
state_to_location = dict((state ,location) for location,state in location_to_state.items())
def get_optimal_route(start_location,end_location):
    """Learn Q-values over the location graph and return the best route
    (list of location names) from start_location to end_location."""
    rewards_new = np.copy(rewards)
    goal_state = location_to_state[end_location]
    # Make the goal strongly self-rewarding so the agent learns to reach it.
    rewards_new[goal_state, goal_state] = 999
    # --- Q-learning ---
    Q = np.array(np.zeros([9,9]))
    for _ in range(1000):
        state = np.random.randint(0,9)
        # Actions with a positive reward are the playable moves from here.
        playable = [j for j in range(9) if rewards_new[state, j] > 0]
        chosen = np.random.choice(playable)
        # Temporal-difference update toward the best next-state value.
        td = rewards_new[state, chosen] + gamma * Q[chosen, np.argmax(Q[chosen, ])] - Q[state, chosen]
        Q[state, chosen] += alpha * td
    # Greedily follow the learned Q-values from start to goal.
    route = [start_location]
    next_location = start_location
    while next_location != end_location:
        current_state = location_to_state[start_location]
        best_state = np.argmax(Q[current_state, ])
        next_location = state_to_location[best_state]
        route.append(next_location)
        start_location = next_location
    return route
# Query a route from L9 to L6 (the adjacent string literals concatenate:
# 'L6' + '' == 'L6').
print(get_optimal_route('L9','L6'
''))
987,763 | e505a0f4a8625d5b6d8a74494a5d530c1101f65a | import sys
from Library.constrainedPolymer import ConstrainedPolymerPackNBB as CPNBB
import Utilities.fileIO as fIO
import numpy as np
class peptideHairpinGenerator(CPNBB):
# A class for a buildingBlockGenerator object
# that creates random peptide hairpins between two points.
# Static values are acquired from a file
# and dynamic values are applied at runtime.
def __init__(self, paramFilename):
# initialise the parameter dictionary for the base classes
# and create an NPack constrained polymer object
# use the same filename for both so that variables common
# to both have the same value
CPNBB.__init__(self, paramFilename)
def initialiseParameters(self):
# initialise the constrained polymer parent
CPNBB.initialiseParameters(self)
if self.noLoadErrors == False:
print "Critical Parameters are undefined for hairpin"
sys.exit()
def generateBuildingBlock(self,
numPoints,
pointA,
pointB,
alpha1,
alpha2,
beta1,
beta2,
minDist,
bondLength,
innerSphereR,
outerSphereR,
spherePos,
polarity):
self.polarity = polarity
self.numResidues = int(np.ceil(numPoints /3.0))
self.numPoints = self.numResidues * 3
if self.numPoints != numPoints:
print "Warning: numpoints changed to give integer number of residues."
return CPNBB.generateBuildingBlock(self, numPoints, pointA, pointB, rotation, alpha1, alpha2, beta1, beta2, minDist, bondLength, innerSphereR, outerSphereR, spherePos)
def generateBuildingBlockNames(self):
if (self.polarity == "NC"):
names = ['N', 'C', 'C'] * self.numResidues
if (self.polarity == "CN"):
names = ['C', 'C', 'N'] * self.numResidues
return names
if __name__ == "__main__":
    # get the file name from the command line
    filename = sys.argv[1]
    # create the backbone generator object.
    hairPinGen = peptideHairpinGenerator(filename)
    # generate a backbone: hairpin of 18 points between two fixed endpoints.
    numPoints = 18
    pointA = np.array([-5, 0, 0])
    pointB = np.array([ 5, 0, 0])
    rotation = 90
    alpha1 = -180
    alpha2 = 180
    beta1 = 130
    beta2 = 160
    minDist = 1.0
    bondLength = 2.0
    innerSphereR = 0
    outerSphereR = 100
    spherePos = np.array([0, 0, -5])
    polarity = 'NC'
    # build building block and dump to file
    hairpinBuildingBlock = hairPinGen.generateBuildingBlock(numPoints, pointA, pointB, rotation, alpha1, alpha2, beta1, beta2, minDist, bondLength, innerSphereR, outerSphereR, spherePos, polarity)
    hairpinBuildingBlock.exportBBK(fIO.fileRootFromInfile(filename, 'txt'))
    print "hairpin done"
987,764 | 1860713de22a88398b79900e95b9aef9a08b2c86 | import math
import cmath
import string
import sys
import bisect
import heapq
from queue import Queue,LifoQueue,PriorityQueue
from itertools import permutations,combinations
from collections import deque,Counter
from functools import cmp_to_key
import math
import cmath
import string
import sys
import bisect
import heapq
from queue import Queue,LifoQueue,PriorityQueue
from itertools import permutations,combinations
from collections import deque,Counter
from functools import cmp_to_key
if __name__=="__main__":
    # 0/1 knapsack with a 2-D DP table: t = capacity, m = number of items.
    t,m=list(map(int,input().strip().split()))
    w=[]
    val=[]
    # 1-based indexing: slot 0 is a sentinel item with zero weight/value.
    w.append(0)
    val.append(0)
    for i in range(m):
        x,y=list(map(int,input().strip().split()))
        w.append(x)
        val.append(y)
    dp=[[0 for _ in range(1005)] for _ in range(105)]
    for i in range(1,m+1):
        j=t# j runs from high capacity down to 0
        while j>=0:
            if j>=w[i]:#!!!!!!
                # Either skip item i or take it once.
                dp[i][j]=max(dp[i-1][j],dp[i-1][j-w[i]]+val[i])
            else:
                dp[i][j]=dp[i-1][j]
            j-=1
    print(dp[m][t])
if __name__=="__main__":
    # Same 0/1 knapsack, space-optimised to a 1-D DP array.  NOTE(review):
    # this second __main__ block runs right after the first and reads input
    # again — presumably one of the two was meant to be removed.
    t,m=list(map(int,input().strip().split()))
    w=[]
    val=[]
    w.append(0)
    val.append(0)
    for i in range(m):
        x,y=list(map(int,input().strip().split()))
        w.append(x)
        val.append(y)
    dp=[0 for _ in range(1005)]
    for i in range(1,m+1):
        # Iterate capacities from high to low so each item is used once.
        j=t
        while j>=0:
            if j>=w[i]:
                dp[j]=max(dp[j],dp[j-w[i]]+val[i])
            j-=1
    print(dp[t])
987,765 | ae96e8a9a597b97942a11ac7d72d59add2a52b9c | import utils
if __name__ == '__main__':
    # Use a module-level variable
    print(utils.name)
    # Use a function
    print(utils.sum(10, 5))
    # Use a class
    person = utils.Person()
    person.say_hello()
987,766 | 278074db6b815136c55e3af5960611ee4897b7e7 | # Generated by Django 3.0.1 on 2020-01-04 04:33
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adjust Room timestamp fields: auto-updated last_activity and
    auto-set creation timestamp."""
    dependencies = [
        ('chat', '0002_auto_20200104_0433'),
    ]
    operations = [
        migrations.AlterField(
            model_name='room',
            name='last_activity',
            field=models.DateTimeField(auto_now=True, verbose_name='Last activity date'),
        ),
        migrations.AlterField(
            model_name='room',
            name='timestamp',
            field=models.DateTimeField(auto_now_add=True, verbose_name='Creation Date'),
        ),
    ]
|
def middle_three_char(word=None):
    """Print and return the middle three characters of *word*.

    :param word: the string to slice; when None (the default) the string
        is read interactively from stdin, preserving the original behaviour.
    :return: the middle three characters (fewer for very short input)
    """
    if word is None:
        word = input('Enter the string: ')
    middle_index = len(word) // 2  # integer midpoint (was float division + int())
    middle_three = word[middle_index - 1:middle_index + 2]
    print(middle_three)
    return middle_three


if __name__ == '__main__':
    # Guarded so importing this module does not block on stdin.
    middle_three_char()
987,768 | e6129916b7924cf6888fa18dc9eb61f161b04582 | from sklearn.model_selection import LeaveOneOut
import numpy as np
def loo_risk(X,y,regmod):
    """
    Construct the leave-one-out square error risk for a regression model
    Input: design matrix, X, response vector, y, a regression model, regmod
    Output: scalar LOO risk
    """
    splitter = LeaveOneOut()
    splitter.get_n_splits(X)
    losses = []
    # Fit on all-but-one point, score the held-out point, repeat.
    for fit_idx, held_idx in splitter.split(X):
        regmod.fit(X[fit_idx], y[fit_idx])
        prediction = regmod.predict(X[held_idx])
        losses.append(np.sum((prediction - y[held_idx]) ** 2))
    return np.mean(losses)
def emp_risk(X,y,regmod):
    """
    Return the empirical risk for square error loss
    Input: design matrix, X, response vector, y, a regression model, regmod
    Output: scalar empirical risk
    """
    regmod.fit(X, y)
    residuals = regmod.predict(X) - y
    return np.mean(residuals ** 2)
987,769 | a24ce25d4a3ecc57598f51c52a5a333913d59a43 | #!BPY
# -*- coding: UTF-8 -*-
#
# read/write file
#
#
# 2017.08.29 Natukikazemizo
import bpy
import codecs
# Read data/in.txt next to the .blend file, echo each line to data/out.txt
# and collect the tab-separated fields into 'data'.
data =[]
f_in = codecs.open(bpy.path.abspath("//") + "data/in.txt", 'r', 'utf-8')
f_out = codecs.open(bpy.path.abspath("//") + "data/out.txt", 'w', 'utf-8')
data1 = f_in.read()
f_in.close()
# Bug fix: the original split on the literal letter 'n' instead of the
# newline escape '\n', which mangled every word containing an 'n'.
lines1 = data1.split('\n')
for line in lines1:
    # Drop the trailing two characters before splitting on tabs.
    # NOTE(review): [:-2] looks like a CRLF workaround from the old
    # letter-'n' split; confirm it is still wanted after the fix.
    data += line[:-2].split('\t')
    print(line)
    f_out.write(line)
f_out.close()
print(data)
987,770 | ff1154cdc6c21d4b8adc6f0318dc5f52d2bad0f4 | from django.db import models
from people.models import Person
class Checkin(models.Model):
    """Base check-in record tying a Person to entry metadata.

    NOTE(review): there is no Meta.abstract here, so subclasses use Django
    multi-table inheritance against this concrete table — confirm intended.
    """
    person = models.ForeignKey(Person,
                               verbose_name='Pessoa',
                               related_name='person',
                               on_delete=models.PROTECT)
    observation = models.TextField("Observação",
                                   max_length=600,
                                   blank=True,
                                   null=True)
    active = models.BooleanField("Ativo", default=True)
    created_at = models.DateTimeField(auto_now_add=True,
                                      verbose_name="Criado em")
    updated_at = models.DateTimeField(auto_now=True,
                                      verbose_name="Atualizado em")
class PatientCheckin(Checkin):
    """Check-in for a patient: treatment reasons, optional companion and
    administrative flags."""
    class Meta:
        verbose_name_plural = "Check-in's de paciente"
        verbose_name = "Check-in de paciente"
        ordering = ['-created_at']
    companion = models.ForeignKey(Person,
                                  blank=True,
                                  null=True,
                                  related_name='companion',
                                  verbose_name='Acompanhante',
                                  on_delete=models.PROTECT)
    # Boolean flags for the reason(s) of this visit.
    chemotherapy = models.BooleanField(default=False,
                                       verbose_name='Quimioterapia')
    radiotherapy = models.BooleanField(default=False,
                                       verbose_name='Radioterapia')
    surgery = models.BooleanField(default=False, verbose_name='Cirurgia')
    exams = models.BooleanField(default=False, verbose_name='Exames')
    appointment = models.BooleanField(default=False, verbose_name='Consultas')
    other = models.BooleanField(default=False, verbose_name='Outros')
    ca_number = models.CharField(max_length=20,
                                 blank=True,
                                 null=True,
                                 verbose_name='Número C.A.')
    social_vacancy = models.BooleanField(blank=True,
                                         null=True,
                                         verbose_name="Vaga Social?")
    @property
    def companion_name(self):
        # Companion is optional; returns None when absent.
        if self.companion:
            return self.companion.name
    @property
    def person_name(self):
        return self.person.name
    def __str__(self):
        return self.person.name + " " + self.created_at.strftime(
            "(Entrada em %d/%m/%Y %H:%M)")
class CompanionCheckin(Checkin):
    """Check-in for a companion, linked to the patient they accompany."""
    class Meta:
        verbose_name_plural = "Check-in's de acompanhante"
        verbose_name = "Check-in de acompanhante"
        ordering = ['-created_at']
    patient = models.ForeignKey(Person,
                                related_name='patient',
                                verbose_name='Paciente',
                                on_delete=models.PROTECT)
    def __str__(self):
        return self.person.name + " " + self.created_at.strftime(
            "(Entrada em %d/%m/%Y %H:%M)")
class OtherPeopleCheckin(Checkin):
    """Check-in for non-patient visitors (staff, volunteers, visitors, other)."""
    class Meta:
        verbose_name_plural = "Check-in's de outras pessoas"
        verbose_name = "Check-in de outras pessoas"
        ordering = ['-created_at']
    REASON_CHOICES = [('professional', 'Profissional'),
                      ('voluntary', 'Voluntário'), ('visitor', 'Visitante'),
                      ('other', 'Outro')]
    reason = models.CharField(max_length=12,
                              choices=REASON_CHOICES,
                              verbose_name='Tipo de check-in')
    def __str__(self):
        return self.person.name + " " + self.created_at.strftime(
            "(Entrada em %d/%m/%Y %H:%M)")
class ChangeCompanion(models.Model):
    """Audit record of a companion swap on a patient check-in."""
    checkin = models.ForeignKey(PatientCheckin,
                                verbose_name='Checkin de paciente',
                                related_name='checkin',
                                on_delete=models.PROTECT)
    new_companion = models.ForeignKey(Person,
                                      verbose_name="Novo acompanhante",
                                      related_name='new_companion',
                                      on_delete=models.PROTECT)
    created_at = models.DateTimeField(auto_now_add=True,
                                      verbose_name="Criado em")
    updated_at = models.DateTimeField(auto_now=True,
                                      verbose_name="Atualizado em")
    def __str__(self):
        return self.checkin.companion.name + " -> " + self.new_companion.name + \
            " " + self.created_at.strftime("(Trocado em %d/%m/%Y %H:%M)")
|
987,771 | 4917b340a84f27a07b0de1ec26db044b291d132c | # import json
# stringOfJsonData = '{"name": "Zophie","isCat":"True","miceCaught":0,"felineIQ":null}'
# jsonDataAsPythonValue = json.loads(stringOfJsonData)
# print(jsonDataAsPythonValue)
# import json
# pythonValue = {'name': 'Zophie','isCat':'True','miceCaught':0,'felineIQ':None}
# stringOfJsonData = json.dumps(pythonValue)
# print(stringOfJsonData)
import json, requests, pprint
# Open-Meteo hourly temperature forecast for the hard-coded coordinates.
url = 'https://api.open-meteo.com/v1/forecast?latitude=51.40&longitude=21.15&hourly=temperature_2m'
response = requests.get(url)
# Abort with an HTTPError on any non-2xx status.
response.raise_for_status()
weather_data = json.loads(response.text)
pprint.pprint(weather_data)
987,772 | 1e4148cf381ee5d3ff9a2a79fc64b738575caae4 | import math
import numpy as np
import matplotlib.pyplot as plt
from time import sleep
# Input: rotation angle (degrees -> radians) and number of repetitions.
v = math.radians(float(input("Rotation in degrees: ")))
count = int(input("Amount of times to rotate: "))
# Definitions: the 2-D rotation matrix for angle v and the starting vector.
vector = np.array([5, 0])
rotated_vector = vector
the_matrix = np.array([[math.cos(v), -math.sin(v)], [math.sin(v), math.cos(v)]])
# Rotate vector <count> times, animating each step.
for _ in range(count):
    # Reset the figure for this frame.
    plt.clf()
    plt.xlim(-10, 10)
    plt.ylim(-10, 10)
    # Rotate vector
    rotated_vector = the_matrix.dot(rotated_vector) # Performs matrix multiplication on The Matrix and the vector, resulting in rotating the vector
    # Draw vectors
    plt.quiver([0, 0], [0, 0], [vector[0], rotated_vector[0]], [vector[1], rotated_vector[1]], color=['r', 'g'], angles='xy', scale_units='xy', scale=1)
    plt.annotate('original vector', xy=(vector[0] + 0.5, vector[1] - 0.25))
    plt.annotate('rotated vector', xy=(rotated_vector[0] + 0.5, rotated_vector[1] - 0.25))
    plt.pause(0.25)
plt.show()
987,773 | 5ba831c46268d1c8b11a544ddd136ec2c08d377a | import time
import pygame
import taichi as ti
import numpy as np
from pygame.locals import *
def mainloop(res, title, img, render):
    """Run a fullscreen pygame loop that displays a Taichi image field.

    :param res: (width, height) of the window and of 'img'
    :param title: window caption
    :param img: Taichi vector field holding float RGB in [0, 1]
    :param render: zero-argument callable that refreshes 'img' each frame
    """
    # u8 staging buffer, note the transposed (height, width) layout.
    dat = ti.Vector.field(3, ti.u8, res[::-1])
    @ti.kernel
    def export():
        # Convert float [0,1] pixels to clamped u8, flipping the y axis.
        for i, j in dat:
            dat[i, j] = min(255, max(0, int(img[j, res[1] - 1 - i] * 255)))
    pygame.init()
    screen = pygame.display.set_mode(res, DOUBLEBUF | HWSURFACE | FULLSCREEN)
    pygame.display.set_caption(title)
    fpsclk = pygame.time.Clock()
    last = time.time()
    while True:
        # Quit on window close or Escape.
        for event in pygame.event.get():
            if event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE):
                pygame.quit()
                exit()
        render()
        export()
        data = dat.to_numpy()
        data = pygame.image.frombuffer(data.tobytes('C'), res, 'RGB')
        screen.blit(data, (0, 0))
        pygame.display.flip()
        # Cap at 60 FPS and report the measured frame rate.
        fpsclk.tick(60)
        t = time.time()
        dt = t - last
        print(f'({1 / dt:.2f} FPS)')
        #pygame.display.set_caption(f'{title} ({1 / dt:.2f} FPS)')
        last = t
if __name__ == '__main__':
    # Demo: render a static red/green gradient at full-HD resolution.
    res = 1920, 1080
    img = ti.Vector.field(3, float, res)
    @ti.kernel
    def render():
        for i, j in img:
            img[i, j] = [i / res[0], j / res[1], 0]
    mainloop(res, 'THREE', img, render)
987,774 | 97ae9af9354d6e6a8c7f7cebc401445138d366ab | from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String
class Create():
    """Holder for the SQLAlchemy declarative base.

    NOTE(review): unusual pattern — models inherit from Create.Base rather
    than a module-level Base; confirm this indirection is intentional.
    """
    Base = declarative_base()
class Modules(Create.Base):
    """ORM model for the 'modules' table (module/plugin metadata)."""
    __tablename__ = 'modules'
    id = Column(Integer, primary_key=True)
    type = Column(String)
    name = Column(String)
    name_frankenstyle = Column(String)
    desc = Column(String)
    lastrelease = Column(String)
    url = Column(String)
class Code(Create.Base):
    """ORM model for the 'code' table (currently only a primary key)."""
    __tablename__ = 'code'
    id = Column(Integer, primary_key=True)
987,775 | 647c363d975cfd31429e848b0a5dc29c27709d36 | # coding:utf-8
from __future__ import print_function
import os, time, sys, json, re
from lib.common import *
from lib.ip.ip import *
# Author: dongdongqiang
# Conventional backdoor detection:
# 1. LD_PRELOAD backdoor detection
# 2. LD_AOUT_PRELOAD backdoor detection
# 3. LD_ELF_PRELOAD backdoor detection
# 4. LD_LIBRARY_PATH backdoor detection
# 5. ld.so.preload backdoor detection
# 6. PROMPT_COMMAND backdoor detection
# 7. crontab backdoor detection
# 8. alias backdoor
# 9. ssh backdoor, e.g. ln -sf /usr/sbin/sshd /tmp/su; /tmp/su -oPort=5555;
# 10. SSH server wrapper backdoor (/usr/sbin/sshd replaced by a script)
# 11. /etc/inetd.conf backdoor
# 12. /etc/xinetd.conf/ backdoor
# 13. system startup item backdoor detection
class Backdoor_Analysis:
    def __init__(self):
        # Collected findings: list of dicts describing each suspected backdoor.
        self.backdoor = []
        # Known malicious signature entries.
        self.malware_infos = []
        # Load the malicious signature information.
        self.get_malware_info()
        # Regexes: URLs with raw IPv4 hosts, bare IPv4 addresses, LAN ranges.
        self.ip_http = r'(htt|ft)p(|s)://(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
        self.ip_re = r'(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)'
        self.lan_ip = r'(127\.0\.0\.1)|(localhost)|(10\.\d{1,3}\.\d{1,3}\.\d{1,3})|(172\.((1[6-9])|(2\d)|(3[01]))\.\d{1,3}\.\d{1,3})|(192\.168\.\d{1,3}\.\d{1,3})'
    # LD_PRELOAD backdoor detection
    def check_LD_PRELOAD(self):
        """Flag any non-trivial LD_PRELOAD value; returns (suspicious, malice)."""
        suspicious, malice = False, False
        try:
            infos = os.popen("echo $LD_PRELOAD").read().splitlines()
            for info in infos:
                # Skip empty/near-empty output lines.
                if not len(info) > 3: continue
                self.backdoor.append(
                    {u'异常类型': u'LD_PRELOAD 后门', u'异常信息': info, u'手工确认': u'[1]echo $LD_PRELOAD [2]unset LD_PRELOAD'})
                malice = True
            return suspicious, malice
        except:
            return suspicious, malice
# LD_AOUT_PRELOAD后门检测
def check_LD_AOUT_PRELOAD(self):
suspicious, malice = False, False
try:
infos = os.popen("echo $LD_AOUT_PRELOAD").read().splitlines()
for info in infos:
if not len(info) > 3: continue
self.backdoor.append(
{u'异常类型': u'LD_AOUT_PRELOAD 后门', u'异常信息': info,
u'手工确认': u'[1]echo $LD_AOUT_PRELOAD [2]unset LD_AOUT_PRELOAD'})
malice = True
return suspicious, malice
except:
return suspicious, malice
# LD_ELF_PRELOAD后门检测
def check_LD_ELF_PRELOAD(self):
suspicious, malice = False, False
try:
infos = os.popen("echo $LD_ELF_PRELOAD").read().splitlines()
for info in infos:
if not len(info) > 3: continue
self.backdoor.append(
{u'异常类型': u'LD_ELF_PRELOAD 后门', u'异常信息': info,
u'手工确认': u'[1]echo $LD_ELF_PRELOAD [2]unset LD_ELF_PRELOAD'})
malice = True
return suspicious, malice
except:
return suspicious, malice
# LD_LIBRARY_PATH后门检测
def check_LD_LIBRARY_PATH(self):
suspicious, malice = False, False
try:
infos = os.popen("echo $LD_LIBRARY_PATH").read().splitlines()
for info in infos:
if not len(info) > 3: continue
self.backdoor.append(
{u'异常类型': u'LD_LIBRARY_PATH 后门', u'异常信息': info,
u'手工确认': u'[1]echo $LD_LIBRARY_PATH [2]unset LD_LIBRARY_PATH'})
malice = True
return suspicious, malice
except:
return suspicious, malice
# ld.so.preload后门检测
def check_ld_so_preload(self):
suspicious, malice = False, False
try:
if not os.path.exists('/etc/ld.so.preload'): return suspicious, malice
with open('/etc/ld.so.preload') as f:
for line in f:
if not len(line) > 3: continue
if line[0] != '#':
self.backdoor.append({u'异常类型': u'ld.so.preload 后门', u'异常信息': line.replace("\n", ""),
u'文件': u'/etc/ld.so.preload', u'手工确认': u'[1]cat /etc/ld.so.preload'})
malice = True
break
return suspicious, malice
except:
return suspicious, malice
# PROMPT_COMMAND后门检测
def check_PROMPT_COMMAND(self):
suspicious, malice = False, False
try:
infos = os.popen("echo $PROMPT_COMMAND").read().splitlines()
for info in infos:
suspicious2, malice2 = self.analysis_strings('PROMPT_COMMAND backdoor', 'ROMPT_COMMAND', info,
'[1]echo $PROMPT_COMMAND')
if suspicious2: suspicious = True
if malice2: malice = True
return suspicious, malice
except:
return suspicious, malice
# 分析cron定时任务后门
def check_cron(self):
suspicious, malice = False, False
try:
cron_dir_list = ['/var/spool/cron/', '/etc/cron.d/', '/etc/cron.daily/', '/etc/cron.weekly/',
'/etc/cron.hourly/', '/etc/cron.monthly/']
for cron in cron_dir_list:
files = [os.path.join(cron, i) for i in os.listdir(cron) if (not os.path.isdir(os.path.join(cron, i)))]
for file in files:
for i in open(file, 'r'):
suspicious2, malice2 = self.analysis_strings('crontab backdoor', file, i, '[1]cat %s' % file)
if suspicious2: suspicious = True
if malice2: malice = True
return suspicious, malice
except:
return suspicious, malice
# 分析alias后门
def check_alias(self):
suspicious, malice = False, False
try:
infos = os.popen("alias").read().splitlines()
for info in infos:
suspicious2, malice2 = self.analysis_strings('alias backdoor', "", info, '[1]alias')
if suspicious2: suspicious = True
if malice2: malice = True
return suspicious, malice
except:
return suspicious, malice
# 分析SSH后门
def check_SSH(self):
suspicious, malice = False, False
try:
infos = os.popen("netstat -ntpl |grep -v ':22 '| awk '{if (NR>2){print $7}}'").read().splitlines()
for info in infos:
pid = info.split("/")[0]
if os.path.exists('/proc/%s/exe' % pid):
if 'sshd' in os.readlink('/proc/%s/exe' % pid):
self.backdoor.append(
{u'异常类型': u'SSH 后门', u'异常信息': u'/porc/%s/exe' % pid, u'异常文件': u'/proc/%s/exe' % pid,
u'手工确认': u'[1]ls -l /porc/%s [2]ps -ef|grep %s|grep -v grep' % (pid, pid)})
malice = True
return suspicious, malice
except:
return suspicious, malice
# 分析SSH Server wrapper 后门
def check_SSHwrapper(self):
suspicious, malice = False, False
try:
infos = os.popen("file /usr/sbin/sshd").read().splitlines()
if 'ELF' not in infos[0]:
self.backdoor.append(
{u'异常类型': u'SSHwrapper 后门', u'异常信息': infos[0], u'文件': u'/usr/sbin/sshd',
u'手工确认': u'[1]file /usr/sbin/sshd [2]cat /usr/sbin/sshd'})
malice = True
return suspicious, malice
except:
return suspicious, malice
# 分析inetd后门
def check_inetd(self):
suspicious, malice = False, False
try:
if not os.path.exists('/etc/inetd.conf'): return suspicious, malice
with open('/etc/inetd.conf') as f:
for line in f:
if '/bin/bash' in line:
self.backdoor.append(
{u'异常类型': u'inetd.conf 后门', u'异常信息': line, u'文件': u'/etc/inetd.conf',
u'手工确认': u'[1]cat /etc/inetd.conf'})
malice = True
return suspicious, malice
except:
return suspicious, malice
# 分析xinetd后门
def check_xinetd(self):
suspicious, malice = False, False
try:
if not os.path.exists('/etc/xinetd.conf/'): return suspicious, malice
for file in os.listdir('/etc/xinetd.conf/'):
with open(os.path.join('%s%s' % ('/etc/xinetd.conf/', file))) as f:
for line in f:
if '/bin/bash' in line:
fpath = os.path.join('%s%s' % ('/etc/xinetd.conf/', file))
self.backdoor.append(
{u'异常类型': u'xinetd.conf 后门', u'异常信息': line, u'文件': u'/etc/xinetd.conf/%s' % file,
u'手工确认': u'[1]cat /etc/xinetd.conf/%s' % file})
malice = True
return suspicious, malice
except:
return suspicious, malice
# 系统启动项检测
def check_startup(self):
suspicious, malice = False, False
try:
init_path = ['/etc/init.d/', '/etc/rc.d/', '/etc/rc.local', '/usr/local/etc/rc.d',
'/usr/local/etc/rc.local', '/etc/conf.d/local.start', '/etc/inittab', '/etc/systemd/system']
for path in init_path:
if not os.path.exists(path): continue
if os.path.isfile(path):
malware = self.analysis_file(path)
if malware:
self.backdoor.append(
{u'异常类型': u'系统启动项后门', u'文件': path, u'异常信息': malware,
u'手工确认': u'[1]cat %s' % path})
malice = True
continue
for file in gci(path):
malware = self.analysis_file(file)
if malware:
self.backdoor.append(
{u'异常类型': u'系统启动项后门', u'文件': path, u'异常信息': malware,
u'手工确认': u'[1]cat %s' % file})
malice = True
return suspicious, malice
except:
return suspicious, malice
# 获取配置文件的恶意域名等信息
def get_malware_info(self):
try:
if not os.path.exists('malware'): return
for file in os.listdir('./malware/'):
time.sleep(0.001) # 防止cpu占用过大
with open(os.path.join('%s%s' % ('./malware/', file))) as f:
for line in f:
if len(line) > 3:
if line[0] != '#': self.malware_infos.append(line.strip().replace("\n", ""))
except:
return
# 分析文件是否包含恶意特征或者反弹shell问题
def analysis_file(self, file):
try:
if not os.path.exists(file): return ""
if os.path.isdir(file): return ""
if os.path.islink(file): return ""
if " " in file: return ""
if 'GScan' in file: return ""
if (os.path.getsize(file) == 0) or (round(os.path.getsize(file) / float(1024 * 1024)) > 10): return ""
strings = os.popen("strings %s" % file).readlines()
for str in strings:
mal = check_shell(str)
if mal: return mal
for malware in self.malware_infos:
if malware in str: return malware
if self.check_contents_ip(str): return str
return ""
except:
return ""
# 分析字符串是否包含境外IP
def check_contents_ip(self, contents):
try:
if not re.search(self.ip_http, contents): return False
if re.search(self.lan_ip, contents): return False
for ip in re.findall(self.ip_re, contents):
if (find(ip)[0:2] != u'中国') and (find(ip)[0:3] != u'局域网') and (find(ip)[0:4] != u'共享地址'):
return True
return False
except:
return False
# 分析一串字符串是否包含反弹shell或者存在的文件路径
def analysis_strings(self, name, file, contents, solve):
suspicious, malice = False, False
try:
content = contents.replace('\n', '')
if check_shell(content):
self.backdoor.append(
{u'异常类型': name, u'文件': file, u'异常信息': content, u'类型特征': u'反弹shell类', u'手工确认': solve})
malice = True
elif self.check_contents_ip(content):
self.backdoor.append(
{u'异常类型': name, u'文件': file, u'异常信息': content, u'类型特征': u'境外IP信息', u'手工确认': solve})
malice = True
else:
for file in content.split(' '):
if not os.path.exists(file): continue
malware = self.analysis_file(file)
if malware:
self.backdoor.append(
{u'异常类型': name, u'文件': file, u'异常信息': content, u'类型特征': malware, u'手工确认': solve})
malice = True
return suspicious, malice
except:
return suspicious, malice
def run(self):
print(u'\n开始恶意后门类安全扫描')
print(align(u' [1]LD_PRELOAD 后门检测', 30) + u'[ ', end='')
file_write(u'\n开始后门类安全扫描\n')
file_write(align(u' [1]LD_PRELOAD 后门检测', 30) + u'[ ')
sys.stdout.flush()
suspicious, malice = self.check_LD_PRELOAD()
if malice:
pringf(u'存在风险', malice=True)
elif suspicious and (not malice):
pringf(u'警告', suspicious=True)
else:
pringf(u'OK', security=True)
print(align(u' [2]LD_AOUT_PRELOAD 后门检测', 30) + u'[ ', end='')
file_write(align(u' [2]LD_AOUT_PRELOAD 后门检测', 30) + u'[ ')
sys.stdout.flush()
suspicious, malice = self.check_LD_AOUT_PRELOAD()
if malice:
pringf(u'存在风险', malice=True)
elif suspicious and (not malice):
pringf(u'警告', suspicious=True)
else:
pringf(u'OK', security=True)
print(align(u' [3]LD_ELF_PRELOAD 后门检测', 30) + u'[ ', end='')
file_write(align(u' [3]LD_ELF_PRELOAD 后门检测', 30) + u'[ ')
sys.stdout.flush()
suspicious, malice = self.check_LD_ELF_PRELOAD()
if malice:
pringf(u'存在风险', malice=True)
elif suspicious and (not malice):
pringf(u'警告', suspicious=True)
else:
pringf(u'OK', security=True)
print(align(u' [4]LD_LIBRARY_PATH 后门检测', 30) + u'[ ', end='')
file_write(align(u' [4]LD_LIBRARY_PATH 后门检测', 30) + u'[ ')
sys.stdout.flush()
suspicious, malice = self.check_LD_LIBRARY_PATH()
if malice:
pringf(u'存在风险', malice=True)
elif suspicious and (not malice):
pringf(u'警告', suspicious=True)
else:
pringf(u'OK', security=True)
print(align(u' [5]ld.so.preload 后门检测', 30) + u'[ ', end='')
file_write(align(u' [5]ld.so.preload 后门检测', 30) + u'[ ')
sys.stdout.flush()
suspicious, malice = self.check_ld_so_preload()
if malice:
pringf(u'存在风险', malice=True)
elif suspicious and (not malice):
pringf(u'警告', suspicious=True)
else:
pringf(u'OK', security=True)
print(align(u' [6]PROMPT_COMMAND 后门检测', 30) + u'[ ', end='')
file_write(align(u' [6]PROMPT_COMMAND 后门检测', 30) + u'[ ')
sys.stdout.flush()
suspicious, malice = self.check_PROMPT_COMMAND()
if malice:
pringf(u'存在风险', malice=True)
elif suspicious and (not malice):
pringf(u'警告', suspicious=True)
else:
pringf(u'OK', security=True)
print(align(u' [7]crontab 后门检测', 30) + u'[ ', end='')
file_write(align(u' [7]crontab 后门检测', 30) + u'[ ')
sys.stdout.flush()
suspicious, malice = self.check_cron()
if malice:
pringf(u'存在风险', malice=True)
elif suspicious and (not malice):
pringf(u'警告', suspicious=True)
else:
pringf(u'OK', security=True)
print(align(u' [8]alias 后门检测', 30) + u'[ ', end='')
file_write(align(u' [8]alias 后门检测', 30) + u'[ ')
sys.stdout.flush()
suspicious, malice = self.check_SSH()
if malice:
pringf(u'存在风险', malice=True)
elif suspicious and (not malice):
pringf(u'警告', suspicious=True)
else:
pringf(u'OK', security=True)
print(align(u' [9]ssh 后门检测', 30) + u'[ ', end='')
file_write(align(u' [9]ssh 后门检测', 30) + u'[ ')
sys.stdout.flush()
suspicious, malice = self.check_SSH()
if malice:
pringf(u'存在风险', malice=True)
elif suspicious and (not malice):
pringf(u'警告', suspicious=True)
else:
pringf(u'OK', security=True)
print(align(u' [10]SSH wrapper 后门检测', 30) + u'[ ', end='')
file_write(align(u' [10]SSH wrapper 后门检测', 30) + u'[ ')
sys.stdout.flush()
suspicious, malice = self.check_SSHwrapper()
if malice:
pringf(u'存在风险', malice=True)
elif suspicious and (not malice):
pringf(u'警告', suspicious=True)
else:
pringf(u'OK', security=True)
print(align(u' [11]inetd.conf 后门检测', 30) + u'[ ', end='')
file_write(align(u' [11]inetd.conf 后门检测', 30) + u'[ ')
sys.stdout.flush()
suspicious, malice = self.check_inetd()
if malice:
pringf(u'存在风险', malice=True)
elif suspicious and (not malice):
pringf(u'警告', suspicious=True)
else:
pringf(u'OK', security=True)
print(align(u' [12]xinetd.conf 后门检测', 30) + u'[ ', end='')
file_write(align(u' [12]xinetd.conf 后门检测', 30) + u'[ ')
sys.stdout.flush()
suspicious, malice = self.check_xinetd()
if malice:
pringf(u'存在风险', malice=True)
elif suspicious and (not malice):
pringf(u'警告', suspicious=True)
else:
pringf(u'OK', security=True)
print(align(u' [13]系统启动项后门检测', 30) + u'[ ', end='')
file_write(align(u' [13]系统启动项后门检测', 30) + u'[ ')
sys.stdout.flush()
suspicious, malice = self.check_startup()
if malice:
pringf(u'存在风险', malice=True)
elif suspicious and (not malice):
pringf(u'警告', suspicious=True)
else:
pringf(u'OK', security=True)
if len(self.backdoor) > 0:
file_write('-' * 30 + '\n')
file_write(u'后门检查异常如下:\n')
for info in self.backdoor:
file_write(json.dumps(info, ensure_ascii=False) + '\n')
file_write('-' * 30 + '\n')
if __name__ == '__main__':
    # Standalone mode: run the full scan and echo findings to stdout.
    infos = Backdoor_Analysis()
    infos.run()
    print(u"后门检查异常如下:")
    for info in infos.backdoor:
        print(info)
|
987,776 | 73e662f02a4251eeb2d75d3867a0d22e1d14d968 | class Solution:
def addBoldTag(self, s: str, words: List[str]) -> str:
'''
transform to LC56
'''
def add_into_interval(start, end):
i = bisect.bisect_left(bold_range, start)
j = bisect.bisect_right(bold_range, end)
if i%2 == 0 and j%2 == 0:
bold_range[i:j] = [start, end]
elif i%2 == 0 and j%2 == 1:
bold_range[i:j] = [start]
elif i%2 == 1 and j%2 == 0:
bold_range[i:j] = [end]
else:
bold_range[i:j] = []
bold_range = []
for word in words:
for i in range(len(s)-len(word)+1):
if s[i: i+len(word)] == word:
add_into_interval(i, i+len(word))
ans = ''
cur_index = 0
for i in range(len(bold_range)//2):
start = bold_range[i*2]
end = bold_range[i*2+1]
if cur_index < start:
ans += s[cur_index: start]
ans += '<b>' + s[start: end] + '</b>'
cur_index = end
if cur_index < len(s):
ans += s[cur_index:]
return ans
|
987,777 | e153fdb04cf55b2a84b497a0eba5202c7865d935 | # -*- coding:utf8 -*-
class Person:
    """Toy class demonstrating a simple instance method."""

    def say_hi(self):
        """Print a fixed greeting; returns None."""
        greeting = 'Hello world ,how are you!'
        print(greeting)
# Demo: create a Person and print the greeting.
p = Person()
p.say_hi()
|
987,778 | 4b166b1f80ccae1a4b4f4b5176a72fe0ab1b718c | # 1. Write a variable for each of the five datatypes
# 2. Convert a string into a number
# 3. Print out the number you converted above
# 4. Change the number in the variable from exercises 2 and 3 and print it out again
# 5. Ask the user to enter their name, and print "Hello NAME", eg, if I entered
# "Matthew", it should print out "Hello Matthew"
# 6. Create a new variable called "second" which stores 5 times the value of first
first = 8  # base value for exercise 6: "second" should store 5 * first
# 7. Check if the value of "second" is greater than 30. If it is, print out "Yes"
|
987,779 | 222f2ed8b39e156a5ffd5f11d61aba0926931f43 | # Generated by Django 2.0.2 on 2018-03-02 06:03
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial schema for the 'matchmaking' app (do not edit
    by hand): shelters, dogs and their qualities, user profiles, messages
    and payments."""
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='ContactInfo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('phone_number', models.CharField(max_length=16, null=True)),
                ('email', models.CharField(max_length=256)),
                ('website', models.CharField(max_length=256, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Dog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=256)),
                ('sex', models.CharField(max_length=16)),
                ('age', models.IntegerField()),
                ('breed', models.CharField(max_length=256, null=True)),
                ('bio', models.TextField(null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Location',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('city', models.CharField(max_length=256)),
                # Two-letter US state/district codes.
                ('state', models.CharField(choices=[('AL', 'AL'), ('AK', 'AK'), ('AZ', 'AZ'), ('AR', 'AR'), ('CA', 'CA'), ('CO', 'CO'), ('CT', 'CT'), ('DC', 'DC'), ('DE', 'DE'), ('FL', 'FL'), ('GA', 'GA'), ('HI', 'HI'), ('ID', 'ID'), ('IL', 'IL'), ('IN', 'IN'), ('IA', 'IA'), ('KS', 'KS'), ('KY', 'KY'), ('LA', 'LA'), ('ME', 'ME'), ('MD', 'MD'), ('MA', 'MA'), ('MI', 'MI'), ('MN', 'MN'), ('MS', 'MS'), ('MO', 'MO'), ('MT', 'MT'), ('NE', 'NE'), ('NV', 'NV'), ('NH', 'NH'), ('NJ', 'NJ'), ('NM', 'NM'), ('NY', 'NY'), ('NC', 'NC'), ('ND', 'ND'), ('OH', 'OH'), ('OK', 'OK'), ('OR', 'OR'), ('PA', 'PA'), ('RI', 'RI'), ('SC', 'SC'), ('SD', 'SD'), ('TN', 'TN'), ('TX', 'TX'), ('UT', 'UT'), ('VT', 'VT'), ('VA', 'VA'), ('WA', 'WA'), ('WV', 'WV'), ('WI', 'WI'), ('WY', 'WY')], max_length=2)),
                ('zipcode', models.CharField(max_length=32)),
            ],
        ),
        migrations.CreateModel(
            name='Message',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('body', models.TextField()),
            ],
        ),
        migrations.CreateModel(
            name='Payment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('amount', models.DecimalField(decimal_places=2, max_digits=15)),
            ],
        ),
        migrations.CreateModel(
            name='PersonalityQualities',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('friendly', models.BooleanField(default=False)),
                ('kid_friendly', models.BooleanField(default=False)),
                ('likes_water', models.BooleanField(default=False)),
                ('likes_cars', models.BooleanField(default=False)),
                ('socialized', models.BooleanField(default=False)),
                ('rescue_animal', models.BooleanField(default=False)),
            ],
        ),
        migrations.CreateModel(
            name='PhysicalQualities',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('color', models.CharField(max_length=64, null=True)),
                ('height', models.FloatField(null=True)),
                ('weight', models.FloatField(null=True)),
                ('eye_color', models.CharField(max_length=64, null=True)),
                ('hypoallergenic', models.BooleanField(default=False)),
                ('shedding', models.BooleanField(default=True)),
            ],
        ),
        migrations.CreateModel(
            name='Shelter',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=256)),
                ('verified', models.BooleanField(default=False)),
                ('contact_info', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='matchmaking.ContactInfo')),
                ('location', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='matchmaking.Location')),
            ],
        ),
        migrations.CreateModel(
            name='UserProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('contact_info', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='matchmaking.ContactInfo')),
                ('location', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='matchmaking.Location')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Cross-model foreign keys added after the models exist.
        migrations.AddField(
            model_name='payment',
            name='shelter',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='matchmaking.Shelter'),
        ),
        migrations.AddField(
            model_name='payment',
            name='user_profile',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='matchmaking.UserProfile'),
        ),
        migrations.AddField(
            model_name='message',
            name='user_from',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_from', to='matchmaking.UserProfile'),
        ),
        migrations.AddField(
            model_name='message',
            name='user_to',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='user_to', to='matchmaking.UserProfile'),
        ),
        migrations.AddField(
            model_name='dog',
            name='location',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='matchmaking.Location'),
        ),
        migrations.AddField(
            model_name='dog',
            name='personality',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='matchmaking.PersonalityQualities'),
        ),
        migrations.AddField(
            model_name='dog',
            name='physical',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='matchmaking.PhysicalQualities'),
        ),
    ]
|
987,780 | fb275f40bf805a736eeb5e2887ed87f7b4841eef | # Generated by Django 2.0.2 on 2018-02-27 17:07
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (do not edit by hand): adds a UserKeys model
    holding per-user RSA key material, one-to-one with the auth user."""
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('users', '0009_remove_profile_rsa_n'),
    ]
    operations = [
        migrations.CreateModel(
            name='UserKeys',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('RSA_public_key', models.IntegerField(help_text='First Favorite Prime', null=True)),
                ('RSA_private_key', models.IntegerField(help_text='Second Favorite Prime', null=True)),
                # NOTE(review): help_text repeats 'First Favorite Prime' —
                # likely copy-paste from RSA_public_key; confirm before fixing.
                ('RSA_n', models.IntegerField(help_text='First Favorite Prime', null=True)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
987,781 | 071a3fe9179dd63343d507e8a094f61bdd8c6f4c | from pratical6.car import Car
def main():
    """Demo: drive two Car instances and report fuel and odometer readings."""
    my_car = Car("audio", 180)
    my_car.drive(30)
    print("fuel =", my_car.fuel)
    print("odo =", my_car.odometer)
    print(my_car)
    limo = Car("Toyota", 100)
    print("fuel =", limo.fuel)
    limo.drive(115)
    print("odo= ", limo.odometer)
    print(limo)


# BUG FIX: main() used to run unconditionally, so merely importing this
# module executed the whole demo. Guard the call behind the standard
# entry-point check.
if __name__ == '__main__':
    main()
|
987,782 | c49f1aea07ae3f082e389c64ddb5c8459eb9a318 | import os
import glob
from importlib import import_module
_cameras = {}  # cache of started Camera instances, keyed by "name/args/kwargs"
def get_camera_list():
    """Obtain the list of available camera drivers.

    Scans this package's directory for ``*_camera.py`` modules and returns
    one ``{'name', 'description'}`` dict per driver, where the description
    is the module's docstring.
    """
    found = []
    pattern = os.path.join(os.path.dirname(__file__), '*_camera.py')
    for module_path in glob.glob(pattern):
        filename = os.path.basename(module_path)
        driver = import_module('.' + filename[:-3], __name__)
        found.append({'name': filename[:-10], 'description': driver.__doc__})
    return found
def get_camera(name, *args, **kwargs):
    """Return a camera by name and optional arguments.
    If multiple clients request the same camera the same object is returned."""
    global _cameras
    # Cache key combines driver name with a textual dump of the arguments.
    positional = ','.join(str(a) for a in args)
    keyword = ','.join('{0}={1}'.format(k, v) for k, v in kwargs.items())
    key = name + '/' + positional + '/' + keyword
    if key not in _cameras:
        driver = import_module('.{0}_camera'.format(name), __name__)
        _cameras[key] = driver.Camera(*args, **kwargs)
        _cameras[key].start()
    return _cameras[key]
|
987,783 | e94c01ed4cadc600d71fb2d9dc2a438ee49f7261 | import requests
import os
from django.core.management.base import BaseCommand, CommandError
from bs4 import BeautifulSoup
homepage = "https://vhost.vn/"
urls = "https://vhost.vn/ten-mien/"
source = "VHost"
def get_dom(url):
    """Fetch *url* over HTTP and return its BeautifulSoup DOM (html5lib parser)."""
    response = requests.get(url)
    return BeautifulSoup(response.text, 'html5lib')
# tbody row offsets of each TLD inside VHost's price table (#ptiv8s0NDNac).
_TLD_ROW = {'com': 1, 'net': 2, 'org': 3, 'info': 5, 'vn': 7, 'comvn': 8}


def _scrape_price(row_index):
    """Fetch the VHost price table and return [origin_price, sale_price]
    for one table row.

    The site lists no discount, so the sale price repeats the origin price.
    Cell text like '750,000 VND' is normalised to '750.000'.
    Consolidates six previously copy-pasted getters that differed only in
    the row index.
    """
    dom = get_dom(urls)
    table = dom.find(attrs={"id": "ptiv8s0NDNac"})
    cell = table.tbody.contents[row_index].contents[2]
    # NOTE(review): .strip(" VND") strips the characters ' ', 'V', 'N', 'D'
    # from both ends (kept from the original) — confirm no price ever
    # begins/ends with those characters.
    price = cell.text.strip(" VND").replace(",", ".")
    return [price, price]


def get_vn():
    """Price pair for .vn domains."""
    return _scrape_price(_TLD_ROW['vn'])


def get_com():
    """Price pair for .com domains."""
    return _scrape_price(_TLD_ROW['com'])


def get_comvn():
    """Price pair for .com.vn domains."""
    return _scrape_price(_TLD_ROW['comvn'])


def get_net():
    """Price pair for .net domains."""
    return _scrape_price(_TLD_ROW['net'])


def get_org():
    """Price pair for .org domains."""
    return _scrape_price(_TLD_ROW['org'])


def get_info():
    """Price pair for .info domains."""
    return _scrape_price(_TLD_ROW['info'])
class Command(BaseCommand):
    """manage.py command printing the crawled [origin, sale] price pair for
    the single TLD selected by a boolean flag."""
    help = 'Crawl PriceList'

    # (kwarg name, crawler) in the same precedence order as the original
    # if/elif chain.
    _DISPATCH = (
        ('vn', get_vn),
        ('comvn', get_comvn),
        ('com', get_com),
        ('net', get_net),
        ('org', get_org),
        ('info', get_info),
    )

    def add_arguments(self, parser):
        """Register one store_true flag per supported TLD."""
        parser.add_argument('-vn', action='store_true', help='crawl .vn')
        parser.add_argument('-comvn', action='store_true', help='crawl .com.vn')
        parser.add_argument('-com', action='store_true', help='crawl .com')
        parser.add_argument('-net', action='store_true', help='crawl .net')
        parser.add_argument('-org', action='store_true', help='crawl .org')
        parser.add_argument('-info', action='store_true', help='crawl .info')

    def handle(self, *args, **kwargs):
        """Run the crawler for the first flag that is set."""
        for flag, crawler in self._DISPATCH:
            if kwargs[flag]:
                print(crawler())
                return
        print("Invalid options! Please type '-h' for help")
|
987,784 | 2418cebb24c9f158e10843744e493a413f0df3ad | import csv
import os
import sys
import numpy as np
import pandas as pd
import datetime
import time
import inspect
ERROR_CODE = 100  # sentinel return value used module-wide for failures
NORMAL_CODE = 0  # sentinel return value for success
def getKeyboadInput():
    """Read and return one line from standard input.

    (The 'Keyboad' spelling is kept — callers use this name.)
    """
    return input()


def execSleep(sec):
    """Block the current thread for *sec* seconds."""
    time.sleep(sec)
def left(str, amount):
    """Return the leftmost *amount* characters of *str*."""
    return str[:amount]


def right(str, amount):
    """Return the rightmost *amount* characters of *str*.

    BUG FIX: the original returned the ENTIRE string when amount == 0,
    because str[-0:] is the same as str[0:]. Zero now yields "".
    """
    return str[-amount:] if amount else ""


def mid(str, offset, amount):
    """Return *amount* characters of *str* starting at index *offset*."""
    return str[offset:offset + amount]
def max(a, b):
    """Return the larger of *a* and *b*.

    NOTE(review): shadows the builtin ``max`` module-wide; kept because
    callers depend on the name.
    """
    return b if a < b else a


def min(a, b):
    """Return the smaller of *a* and *b*.

    NOTE(review): shadows the builtin ``min`` module-wide; kept because
    callers depend on the name.
    """
    return b if a > b else a
def cutStrBeforeKey(key, str):
    """Return the part of *str* preceding the first occurrence of *key*."""
    pos = patternMatch(key, str)
    return left(str, pos - 1)


def cutStrAfterKey(key, str):
    """Return the part of *str* following the first occurrence of *key*."""
    pos = patternMatch(key, str) + (len(key) - 1)
    return right(str, len(str) - pos)


def patternMatch(key, str):
    """Return the 1-based position of *key* in *str*.

    Match    : > 0
    No match : == 0
    """
    return str.find(key) + 1
def judgeError(exit_code):
    """Terminate the process when *exit_code* signals an error; otherwise no-op."""
    if exit_code == ERROR_CODE:
        print("!!!!ERROR OCCURED!!!!11!!")
        sys.exit()


def convA2BinWord(word, a, b):
    """Return *word* with every occurrence of *a* replaced by *b*."""
    return word.replace(a, b)
def getScriptDir():
    """Absolute path of the directory containing this script."""
    return os.path.abspath(os.path.dirname(__file__))


def getFileNameFromPath(file_path):
    """Final path component (file name) of *file_path*."""
    return os.path.basename(file_path)


def getDateyyyymmdd():
    """Today's date as 'YYYY-MM-DD'."""
    return str(datetime.date.today())


def getTimeyyyymmddhhmmss():
    """Current local timestamp as 'YYYY-MM-DD HH:MM:SS.ffffff'."""
    return str(datetime.datetime.now())


def getTime():
    """Current epoch time in seconds (float)."""
    return time.time()
def getElapsedTime(base_time, unit="m"):
    """Time elapsed since *base_time*: minutes for "m" (default), hours for
    "h", raw seconds for any other unit."""
    elapsed = time.time() - base_time
    if unit == "h":
        return elapsed / 60 / 60
    if unit == "m":
        return elapsed / 60
    return elapsed
###########################################################
#
# varidation
#
###########################################################
def isNotNull(str):
    """True unless *str* is None or the empty string."""
    if str == None or str == "":
        return False
    return True


def isNull(str):
    """True when *str* is None or the empty string."""
    if str == None or str == "":
        return True
    return False
def isInt(val):
    """True only for exact ints (excludes bool, since type(True) is bool)."""
    return type(val) is int


def isStr(val):
    """True only for exact str instances."""
    return type(val) is str


def isTuple(target):
    """True for tuples (including subclasses, via isinstance)."""
    return isinstance(target, tuple)


def isList(target):
    """True for lists (including subclasses, via isinstance)."""
    return isinstance(target, list)
def isEvenNumber(val):
    """True when *val* is an exact int (bool excluded) and even."""
    if type(val) is not int:
        return False
    return val % 2 == 0


def isBigger(smaller, bigger):
    """True when *smaller* <= *bigger*."""
    return smaller <= bigger


def isSmaller(smaller, bigger):
    """True when *smaller* <= *bigger*.

    NOTE(review): intentionally(?) identical to isBigger in the original —
    both test the same relation with the same argument order.
    """
    return smaller <= bigger
###########################################################
#
# read and write for csv
#
###########################################################
class CsvWriter():
    """Thin CSV writer with open/write/close lifecycle.

    Methods return NORMAL_CODE (0) on success and ERROR_CODE (100) on
    failure; a file must be opened before any write.
    """
    def __init__(self):
        # Open file handle; "" until openFile/openFileForAdd succeeds.
        self.file = ""
    def openFile(self,file_path):
        """Open *file_path* for (over)writing."""
        if isNull(file_path):
            echoNullOfAValue(file_path,locals())
            return ERROR_CODE
        self.file = open( file_path , 'w')
        return NORMAL_CODE
    def openFileForAdd(self,file_path):
        """Open an EXISTING *file_path* in append mode."""
        if isNull(file_path):
            echoNullOfAValue(file_path,locals())
            return ERROR_CODE
        if not os.path.exists(file_path):
            echoNotExistThatFile(file_path)
            return ERROR_CODE
        self.file = open( file_path , 'a')
        return NORMAL_CODE
    def closeFile(self):
        """Close the handle. NOTE: returns None on success (unlike the other
        methods) and leaves self.file pointing at the closed handle."""
        if isNull(self.file):
            echoOpenAnyFile()
            return ERROR_CODE
        self.file.close()
    def writeOfVal(self,val):
        """Write a single value as a one-column CSV row."""
        if isNull(self.file):
            echoOpenAnyFile()
            return ERROR_CODE
        self.val_list = []
        self.val_list.append(val)
        self.writer = csv.writer(self.file, lineterminator='\n')
        self.writer.writerow(self.val_list)
        return NORMAL_CODE
    def writeOfList(self,var_list):
        """Write one CSV row from a flat list."""
        if isNull(self.file):
            echoOpenAnyFile()
            return ERROR_CODE
        self.writer = csv.writer(self.file, lineterminator='\n')
        self.writer.writerow(var_list)
        return NORMAL_CODE
    def writeOfArray2d(self,array_2d):
        """Write many CSV rows from a 2-D array (list of rows)."""
        if isNull(self.file):
            echoOpenAnyFile()
            return ERROR_CODE
        self.writer = csv.writer(self.file, lineterminator='\n')
        self.writer.writerows(array_2d)
        return NORMAL_CODE
class CsvReader():
    """Thin CSV reader that accumulates parsed rows in self.data.

    BUG FIX: readFile previously stringified each parsed row and re-split it
    on ",", wrapping fields in quotes/brackets (e.g. "['a'", " 'b']"); it
    now stores the row lists csv.reader already produces.
    """
    def __init__(self):
        # Open file handle; "" until openFile succeeds.
        self.file = ""
        # Parsed rows; the [[]] placeholder is removed by readFile.
        self.data = [[]]

    def openFile(self, file_path):
        """Open *file_path* for reading; ERROR_CODE for null/missing paths."""
        if isNull(file_path):
            echoNullOfAValue(file_path, locals())
            return ERROR_CODE
        if not os.path.exists(file_path):
            echoNotExistThatFile(file_path)
            return ERROR_CODE
        self.file = open(file_path, "r")
        return NORMAL_CODE

    def closeFile(self):
        """Close the handle (guarded when no file was ever opened)."""
        if isNull(self.file):
            echoOpenAnyFile()
            return ERROR_CODE
        self.file.close()

    def readFile(self):
        """Parse the opened file, appending each row (list of str) to self.data."""
        if isNull(self.file):
            echoOpenAnyFile()
            return ERROR_CODE
        for row in csv.reader(self.file):
            self.data.append(row)
        del self.data[0]  # drop the [[]] placeholder created in __init__
        return NORMAL_CODE

    def getData(self):
        """Return the accumulated rows."""
        return self.data
class CsvReaderViaNp():
    """CSV reader backed by numpy.genfromtxt.

    BUG FIX: readFile used to test ``self.file`` — an attribute this class
    never sets (its __init__ was commented out) — so every call raised
    AttributeError. It now validates the *file_path* argument it actually
    receives, mirroring CsvReader.openFile.
    """
    def readFile(self, file_path):
        """Load *file_path* into self.data as a numpy array."""
        if isNull(file_path):
            echoNullOfAValue(file_path, locals())
            return ERROR_CODE
        if not os.path.exists(file_path):
            echoNotExistThatFile(file_path)
            return ERROR_CODE
        self.data = np.genfromtxt(file_path, dtype=None, delimiter=",")
        return NORMAL_CODE

    def getData(self):
        """Return the loaded array."""
        return self.data
def getVarName(var, symboltable=locals(), error=None):
    """Return "( name1 name2 )" listing every key in *symboltable* whose
    value is the very object *var* (identity match, so aliases of the same
    object all appear)."""
    matches = [key for key in symboltable.keys() if id(symboltable[key]) == id(var)]
    return "(" + "".join(" " + name for name in matches) + " )"
def compareType(val1, val2):
    """True when both values have exactly the same type."""
    return type(val1) == type(val2)
###########################################################
#
# messages
#
###########################################################
#### layer 1 messages
def echoOpenAnyFile():
    """Prompt that a file must be opened first."""
    print(" open any file ")


def echoNotExistThatFile(file_path):
    """Report a missing file."""
    print(" not exist that file :" + file_path)


def echoNullOfAValue(var, symboltable=locals()):
    """Report a null value, naming the variable via getVarName."""
    print(" a value is null :" + getVarName(var, symboltable))


def echoBlank():
    """Print an empty line."""
    print("")


def echoStart(process=""):
    """Print a timestamped start-of-process marker."""
    print(str(getTimeyyyymmddhhmmss()) + "\t start process " + process)


def echoBar(length=50, mark="*"):
    """Print a horizontal bar of *mark* repeated *length* times.

    BUG FIX: the default used to be the STRING "50", silently corrected by
    the isInt guard on every default call; it is now the int 50. Non-int
    arguments are still coerced to 50, and the char-by-char concatenation
    loop is replaced by string repetition.
    """
    if not (isInt(length)):
        length = 50
    print(mark * length)


def echoList1d(x_list):
    """Print each element of a 1-D list on its own line."""
    for row in x_list:
        print(row)


#### layer 2 messages
def echoErrorOccured(detail=""):
    """Print a framed error banner with an optional detail line."""
    echoBlank()
    echoBar()
    print("error is occured !!!!!!!!")
    if (detail != ""):
        print("\t(detail) " + detail)
    echoBar()
    echoBlank()
|
987,785 | 34fdc3f0b1c0c9ca6414f0ee296e0230bbc5e935 | # Generated by Django 2.2.2 on 2019-12-22 14:10
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated initial schema for the invoice app (do not edit by
    hand): an Invoice with JSON line items, computed sums, and FKs to
    Company and Customer."""
    initial = True
    dependencies = [
        ('company', '0001_initial'),
        ('customer', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Invoice',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('invoice_number', models.CharField(max_length=15)),
                ('date_created', models.DateField(default=django.utils.timezone.now)),
                ('payment_due', models.DateField()),
                ('place', models.CharField(blank=True, max_length=20)),
                ('notes', models.CharField(blank=True, max_length=400)),
                # Line items stored as a Postgres JSONB blob.
                ('items', django.contrib.postgres.fields.jsonb.JSONField(blank=True)),
                ('invoice_sum', models.DecimalField(decimal_places=2, max_digits=5)),
                ('tax_sum', models.DecimalField(decimal_places=2, max_digits=5)),
                ('full_sum', models.DecimalField(decimal_places=2, max_digits=5)),
                ('paid_sum', django.contrib.postgres.fields.jsonb.JSONField(blank=True)),
                ('company', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='company.Company')),
                ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='customer.Customer')),
            ],
        ),
    ]
|
987,786 | 1c8cd24b1697f9fe47e6eadcad3f3577826e32fd | def addition(num1, num2):
return num1 + num2
def subtraction(num1, num2):
    """Return num1 - num2."""
    difference = num1 - num2
    return difference
def multiplication(num1, num2):
    """Return num1 * num2."""
    product = num1 * num2
    return product
def division(num1, num2):
    """Return num1 / num2 (raises ZeroDivisionError when num2 == 0)."""
    quotient = num1 / num2
    return quotient
print('Welcome to PyCalc!')
print('Select an option below:')
print('1. Add')
print('2. Subtract')
print('3. Multiply')
print('4. Divide')
operation = input("Enter choice(1/2/3/4): ")
# Bug fixes: input() returns str, so the menu choice must be compared
# against the string forms '1'..'4' (the original compared against ints and
# always fell through to "Invalid input"), and the operands must be
# converted to numbers before computing (otherwise addition would
# concatenate strings). Also fixed the "Subraction" typo.
x = float(input('number 1:'))
y = float(input('number 2:'))
if operation == '1':
    print('Addition equation=', addition(x, y))
elif operation == '2':
    print('Subtraction equation=', subtraction(x, y))
elif operation == '3':
    print('Multiplication Answer=', multiplication(x, y))
elif operation == '4':
    print('Division Answer=', division(x, y))
else:
    print('Invalid input')
987,787 | 1c890ac54dd5ba1218c12d6d01114dfae6ac2491 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 9 16:26:30 2021
@author: mike
"""
import os
import yaml
import numpy as np
import pandas as pd
from hilltoppy import web_service as ws
import requests
from time import sleep
# from sklearn.neighbors import LocalOutlierFactor
pd.options.display.max_columns = 10
###########################################################3
### Parameters
base_path = os.path.realpath(os.path.dirname(__file__))
with open(os.path.join(base_path, 'parameters.yml')) as param:
param = yaml.safe_load(param)
mtypes = param['source']['mtypes']
base_url = param['source']['api_endpoint']
hts = param['source']['hts']
std_factor = param['source']['std_factor']
iqr_factor = param['source']['iqr_factor']
date1 = pd.Timestamp.now().round('s').isoformat()
stats_csv = 'summary_stats_{date}.csv'.format(date=date1)
std_csv = 'std_outliers_{date}.csv'.format(date=date1)
iqr_csv = 'iqr_outliers_{date}.csv'.format(date=date1)
dtl_csv = 'detection_limit_outliers_{date}.csv'.format(date=date1)
min_max_csv = 'min_max_outliers_{date}.csv'.format(date=date1)
###########################################################
### Helper functions
def get_stations(base_url, hts, mtype):
    """
    Function to get the stations/sites associated with a particular measurement type.

    Parameters
    ----------
    base_url : str
        The endpoint url for the Hilltop server.
    hts : str
        The hts "file" that is added to the end of the base_url.
    mtype : str
        The measurement type to query.

    Returns
    -------
    DataFrame
    """
    # Hilltop quirk: an initial site list request WITHOUT a measurement is
    # required before the measurement-filtered request succeeds.
    ws.site_list(base_url, hts, location='LatLong')
    sites = ws.site_list(base_url, hts, location='LatLong', measurement=mtype)
    # Keep only sites with plausible New Zealand coordinates.
    in_nz = (sites.lat > -47.5) & (sites.lat < -34) & (sites.lon > 166) & (sites.lon < 179)
    filtered = sites[in_nz].dropna().copy()
    filtered.rename(columns={'SiteName': 'ref'}, inplace=True)
    return filtered
def get_results(base_url, hts, mtype, ref):
    """
    Function to get the time series results and associated stats from one or many sites associated with a particular measurement type.

    Parameters
    ----------
    base_url : str
        The endpoint url for the Hilltop server.
    hts : str
        The hts "file" that is added to the end of the base_url.
    mtype : str
        The measurement type to query.
    ref : list of str
        The reference ids of the sites.

    Returns
    -------
    Three DataFrames
        results, detection limits, and stats
    """
    ### Get data
    res_list = []
    for s in ref:
        timer = 5
        # Bug fix: `res` must be reset per site. Previously, a ValueError on
        # the first site left `res` unbound (NameError), and on later sites
        # it still held the PREVIOUS site's data, which was then appended as
        # a duplicate.
        res = None
        while timer > 0:
            try:
                res = ws.get_data(base_url, hts, s, mtype).Value
                break
            except requests.exceptions.ConnectionError as err:
                print(s + ' and ' + mtype + ' error: ' + str(err))
                timer = timer - 1
                sleep(30)
            except ValueError as err:
                # No usable data for this site/measurement; skip it.
                print(s + ' and ' + mtype + ' error: ' + str(err))
                break
            except Exception as err:
                print(str(err))
                timer = timer - 1
                sleep(30)
        if timer == 0:
            raise ValueError('The Hilltop request tried too many times...the server is probably down')
        if res is not None:
            res_list.append(res)
    if not res_list:
        # Clearer failure than pd.concat's "No objects to concatenate".
        raise ValueError('No results were returned for ' + mtype)
    res1 = pd.concat(res_list)
    ### Process DTLs (censored values flagged with '<' or '>')
    dtl1 = res1[res1.str.contains('<')]
    dtl1 = pd.to_numeric(dtl1.str.replace('<', '')).to_frame()
    dtl1['censored'] = '<'
    dtl2 = res1[res1.str.contains('>')]
    dtl2 = pd.to_numeric(dtl2.str.replace('>', '')).to_frame()
    dtl2['censored'] = '>'
    dtl3 = pd.concat([dtl1, dtl2])
    ### Remove DTLs from results
    res2 = res1.loc[~res1.index.isin(dtl3.index)]
    res2 = pd.to_numeric(res2, errors='coerce').dropna()
    ### Run stats per (Site, Measurement) group
    grp1 = res2.reset_index().groupby(['Site', 'Measurement'])
    dtl_count = dtl3.reset_index().groupby(['Site', 'Measurement']).Value.count()
    dtl_count.name = 'DTL count'
    data_count = grp1.Value.count()
    total_count = data_count.add(dtl_count, fill_value=0).astype(int)
    total_count.name = 'total count'
    mean1 = grp1.Value.mean().round(3)
    mean1.name = 'mean'
    median1 = grp1.Value.median().round(3)
    median1.name = 'median'
    max1 = grp1.Value.max().round(3)
    max1.name = 'max'
    min1 = grp1.Value.min().round(3)
    min1.name = 'min'
    q1 = grp1.Value.quantile(0.25).round(3)
    q1.name = 'Q1'
    q3 = grp1.Value.quantile(0.75).round(3)
    q3.name = 'Q3'
    std1 = grp1.Value.std().round(3)
    std1.name = 'standard deviation'
    from_date = grp1['DateTime'].min()
    from_date.name = 'start date'
    to_date = grp1['DateTime'].max()
    to_date.name = 'end date'
    ### Make stats df
    stats_df1 = pd.concat([total_count, dtl_count, from_date, to_date, min1, q1, median1, mean1, q3, max1, std1], axis=1)
    ### return
    return res2, dtl3, stats_df1
def std_outliers(res, stats, factor):
    """
    Function to assess outliers according to the number of standard deviations from the mean.

    Parameters
    ----------
    res : DataFrame
        the time series results from the get_results function.
    stats : DataFrame
        the stats results from the get_results function.
    factor : int, float
        The number of standard deviations to use.

    Returns
    -------
    DataFrame
    """
    upper_col = 'mean + std*' + str(factor)
    lower_col = 'mean - std*' + str(factor)
    upper = stats['mean'] + stats['standard deviation'] * factor
    upper.name = upper_col
    lower = stats['mean'] - stats['standard deviation'] * factor
    lower.name = lower_col
    # Negative concentrations are not physical; clamp the lower bound to 0.
    lower.loc[lower < 0] = 0
    bounds = pd.concat([upper, lower], axis=1)
    merged = pd.merge(res.reset_index(), bounds.reset_index(), on=['Site', 'Measurement'])
    too_high = merged[merged['Value'] > merged[upper_col]]
    too_low = merged[merged['Value'] < merged[lower_col]]
    return pd.concat([too_high, too_low])
def iqr_outliers(res, stats, factor):
    """
    Function to assess outliers according to the number of interquartile ranges (IQR) from the 3rd quartile.

    Parameters
    ----------
    res : DataFrame
        the time series results from the get_results function.
    stats : DataFrame
        the stats results from the get_results function.
    factor : int, float
        The number of IQRs to use.

    Returns
    -------
    DataFrame
    """
    upper_col = 'Q3 + IQR*' + str(factor)
    lower_col = 'Q3 - IQR*' + str(factor)
    iqr = stats['Q3'] - stats['Q1']
    upper = stats['Q3'] + iqr * factor
    upper.name = upper_col
    # NOTE: the lower bound is anchored on Q3 (not Q1), matching the
    # original implementation.
    lower = stats['Q3'] - iqr * factor
    lower.name = lower_col
    lower.loc[lower < 0] = 0
    bounds = pd.concat([upper, lower], axis=1)
    merged = pd.merge(res.reset_index(), bounds.reset_index(), on=['Site', 'Measurement'])
    too_high = merged[merged['Value'] > merged[upper_col]]
    too_low = merged[merged['Value'] < merged[lower_col]]
    return pd.concat([too_high, too_low])
def dtl_outliers(res, dtl):
    """
    Function to assess outliers using the logged detection limits from the samples.

    Values below the smallest '<' (lower) detection limit or above the
    largest '>' (upper) detection limit for their Site/Measurement group
    are returned as outliers.

    Parameters
    ----------
    res : DataFrame
        the time series results from the get_results function.
    dtl : DataFrame
        the dtl results from the get_results function.

    Returns
    -------
    DataFrame
    """
    col_name = 'detection limit'
    # Lower ('<' censored) limits: values below the per-group minimum limit.
    lower = dtl[dtl['censored'] == '<'].Value
    lower.name = col_name
    lower_min = lower.reset_index().groupby(['Site', 'Measurement'])[col_name].min()
    below = pd.merge(res.reset_index(), lower_min.reset_index(), on=['Site', 'Measurement'])
    below = below[below['Value'] < below[col_name]].copy()
    below['censored'] = '<'
    # Upper ('>' censored) limits: values above the per-group maximum limit.
    upper = dtl[dtl['censored'] == '>'].Value
    upper.name = col_name
    upper_max = upper.reset_index().groupby(['Site', 'Measurement'])[col_name].max()
    above = pd.merge(res.reset_index(), upper_max.reset_index(), on=['Site', 'Measurement'])
    above = above[above['Value'] > above[col_name]].copy()
    above['censored'] = '>'
    return pd.concat([below, above])
def min_max_outliers(res, min=None, max=None):
    """
    Function to assess outliers according to global minimum and maximum values.

    Parameters
    ----------
    res : DataFrame
        the time series results from the get_results function.
    min : int, float
        The minimum value.
    max : int, float
        The maximum value.

    Returns
    -------
    DataFrame

    Note: the parameter names shadow the builtins `min`/`max`, but they are
    kept because callers pass them by keyword (via **limits).
    """
    frames = []
    if isinstance(min, (int, float)):
        low = res[res < min].reset_index()
        low['limit type'] = 'minimum'
        low['limit'] = min
        frames.append(low)
    if isinstance(max, (int, float)):
        high = res[res > max].reset_index()
        high['limit type'] = 'maximum'
        high['limit'] = max
        frames.append(high)
    if not frames:
        # Bug fix: the original called pd.concat([]) here, which raises
        # ValueError. With no limits configured there are no outliers.
        return pd.DataFrame()
    return pd.concat(frames)
############################################
### The processing
# Accumulators: one entry per measurement type, concatenated at the end.
std_list = []
iqr_list = []
dtl_list = []
min_max_list = []
stats_list = []
# Run the full outlier assessment for every configured measurement type.
for mtype, limits in mtypes.items():
    print(mtype)
    ## Get the sites
    sites1 = get_stations(base_url, hts, mtype)
    ## Get the results
    res1, dtl1, stats1 = get_results(base_url, hts, mtype, sites1.ref.tolist())
    ## std
    std_out1 = std_outliers(res1, stats1, std_factor)
    ## IQR (comment was previously mislabelled "STD")
    iqr_out1 = iqr_outliers(res1, stats1, iqr_factor)
    ## DTL
    dtl_out1 = dtl_outliers(res1, dtl1)
    ## min/max — `limits` holds the per-mtype min/max keyword args
    min_max_out1 = min_max_outliers(res1, **limits)
    ## Package up results
    stats_list.append(stats1)
    std_list.append(std_out1)
    iqr_list.append(iqr_out1)
    dtl_list.append(dtl_out1)
    min_max_list.append(min_max_out1)
### Combine all results
stats = pd.concat(stats_list)
std_out = pd.concat(std_list)
iqr_out = pd.concat(iqr_list)
dtl_out = pd.concat(dtl_list)
min_max_out = pd.concat(min_max_list)
#############################################################
### Save results (one timestamped CSV per assessment, under ./results)
print('Saving results...')
if not os.path.exists(os.path.join(base_path, 'results')):
    os.mkdir(os.path.join(base_path, 'results'))
stats.to_csv(os.path.join(base_path, 'results', stats_csv))
std_out.to_csv(os.path.join(base_path, 'results', std_csv))
iqr_out.to_csv(os.path.join(base_path, 'results', iqr_csv))
dtl_out.to_csv(os.path.join(base_path, 'results', dtl_csv))
min_max_out.to_csv(os.path.join(base_path, 'results', min_max_csv))
|
987,788 | 64db285dc9150a38438e857f06e3749845df5203 | # coding: utf-8
"""
"""
__version__ = "0.0.1"
from .helpers import *
|
987,789 | 81878ee1e63609af976d2855dcc470df4e7186f4 | import HTTPCall
class TravelSeasonality(object):
    """Thin client for the /v1/historical/flights/JFK/seasonality endpoint."""
    def __init__(self):
        # Authenticate once up front; subsequent calls reuse this handle.
        self.HandleREST = HTTPCall.HTTPCall()
        self.HandleREST.request_authentication()
        # Presumably maps task name -> (query_fragment, enabled_flag),
        # populated by callers before call() — TODO confirm.
        self.tasks = {}
    def call(self):
        # NOTE(review): the URL literal ends with a space before the closing
        # quote, and the joined fragments are appended without a leading '?'
        # — this looks suspicious; confirm against the HTTPCall contract
        # before changing the string.
        self.response = self.HandleREST.request_content( '/v1/historical/flights/JFK/seasonality ' + \
            '&'.join([task[0] for task in self.tasks.values() if task[1]]))
        # Return JSON content
        return self.response
|
987,790 | 8b4da3e743b04bb1afde616ce49e6a86f06af78d | import sunck
# The triple-quoted string below is a no-op literal used as commentary.
# English translation: "__name__ attribute: a module is an executable .py
# file; when a module is imported by another program and we do not want some
# of its code to run, the __name__ attribute lets the program invoke only
# part of the module."
'''
__name__属性:
模块就是一个可执行的.py文件,一个模块被另一个程序引用。我们不想让模块中的某些代码执行,可以用__name__属性来使程序仅调用模块中的一部分
'''
sunck.sayGood()
|
# Read two floats and report whether X is at least V.
V, X = map(float, input().split())
# "X == V or X > V" is exactly "X >= V".
print('Yes' if X >= V else 'No')
# Given an unsorted array arr and a number n, delete all numbers in arr
# that are greater than n.
# Fixed: unresolved git merge-conflict markers (<<<<<<< / ======= / >>>>>>>)
# made this file a SyntaxError; they have been resolved.
def removeLarger(arr, n):
    """Remove every element greater than n from arr in place and return arr.

    The original removed items while iterating the same list, which skips
    the element following each removal; rebuilding via a slice assignment
    keeps the in-place contract without that bug.
    """
    arr[:] = [value for value in arr if value <= n]
    return arr


# Renamed from `list`, which shadowed the builtin.
nums = [3, 4, 5, 6, 8, 9, 10, 452, 65, 2452, 75, 3424, 24, 563, 32, 445, 76, 87, 90, 34, 49, 12, 34]
a = removeLarger(nums, 0)
print(a)
|
987,793 | 2bfb879dc266be5e21bd545b10a7986ad52c7598 | import subprocess
from time import sleep
# WARNING: unbounded loop that launches five GUI applications every 50 ms
# and never terminates. This rapidly exhausts system resources
# (fork-bomb-like behaviour) — do not run outside a disposable sandbox.
while True:
    subprocess.Popen('notepad') #use artificial multithreading
    subprocess.Popen('calc') #use artificial multithreading
    subprocess.Popen('mspaint') #use artificial multithreading
    subprocess.Popen('explorer') #use artificial multithreading
    subprocess.Popen('write') #use artificial multithreading
    sleep(.05)
|
987,794 | b61ee1534cb5468ff33b50b2e5895fd3e9b90750 | # [Classic]
# https://leetcode.com/problems/valid-number/
# 65. Valid Number
# History:
# Facebook
# 1.
# Apr 3, 2020
# 2.
# May 12, 2020
# Validate if a given string can be interpreted as a decimal number.
#
# Some examples:
# "0" => true
# " 0.1 " => true
# "abc" => false
# "1 a" => false
# "2e10" => true
# " -90e3 " => true
# " 1e" => false
# "e3" => false
# " 6e-1" => true
# " 99e2.5 " => false
# "53.5e93" => true
# " --6 " => false
# "-+3" => false
# "95a54e53" => false
#
# Note: It is intended for the problem statement to be ambiguous. You should gather all
# requirements up front before implementing one. However, here is a list of characters that can
# be in a valid decimal number:
#
# Numbers 0-9
# Exponent - "e"
# Positive/negative sign - "+"/"-"
# Decimal point - "."
# Of course, the context of these characters also matters in the input.
#
# Update (2015-02-10):
# The signature of the C++ function had been updated. If you still see your function signature
# accepts a const char * argument, please click the reload button to reset your code definition.
class Solution(object):
    def isNumber(self, s):
        """
        Return True if `s` (after stripping surrounding whitespace) can be
        interpreted as a decimal number, e.g. "0", " 0.1 ", "2e10", "-90e3".

        :type s: str
        :rtype: bool
        """
        s = s.strip()
        seen_digit = False      # at least one digit anywhere
        seen_exp = False        # an 'e' has appeared
        seen_dot = False        # a '.' has appeared (only valid before 'e')
        digit_after_exp = False # at least one digit after the 'e'
        for pos, ch in enumerate(s):
            if ch.isdigit():
                seen_digit = True
                if seen_exp:
                    digit_after_exp = True
                continue
            if ch == 'e':
                # 'e' must follow at least one digit and appear only once.
                if seen_exp or not seen_digit:
                    return False
                seen_exp = True
                continue
            if ch in ('+', '-'):
                # A sign is only valid at the start or right after 'e'.
                if pos > 0 and s[pos - 1] != 'e':
                    return False
                continue
            if ch == '.':
                # Only one dot, and never in the exponent part.
                if seen_dot or seen_exp:
                    return False
                seen_dot = True
                continue
            return False
        if not seen_digit:
            return False
        return not seen_exp or digit_after_exp
|
987,795 | 639c11001d30363b7c0c9dcde7abb84d0e989ff0 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron.api.rpc.agentnotifiers import dhcp_rpc_agent_api
from neutron.common import constants as const
from neutron.common import exceptions as n_exc
from neutron.extensions import l3
from neutron.extensions import securitygroup as ext_sg
from neutron import manager
from neutron.notifiers import nova
from neutron.openstack.common import log as logging
from neutron.plugins.common import constants as pconst
from oslo.config import cfg
from gbpservice.common import utils
from gbpservice.neutron.extensions import servicechain as sc_ext
from gbpservice.neutron.services.grouppolicy.common import exceptions as exc
LOG = logging.getLogger(__name__)
class LocalAPI(object):
    """API for interacting with the neutron Plugins directly.

    Wraps the core, L3, and servicechain plugins with thin CRUD helpers so
    group-policy code can create/update/delete Neutron resources in-process
    (with Nova and DHCP-agent notifications) instead of going through WSGI.
    """
    @property
    def _nova_notifier(self):
        # A fresh notifier per access; no caching here.
        return nova.Notifier()
    @property
    def _core_plugin(self):
        # REVISIT(rkukura): Need initialization method after all
        # plugins are loaded to grab and store plugin.
        return manager.NeutronManager.get_plugin()
    @property
    def _l3_plugin(self):
        # REVISIT(rkukura): Need initialization method after all
        # plugins are loaded to grab and store plugin.
        plugins = manager.NeutronManager.get_service_plugins()
        l3_plugin = plugins.get(pconst.L3_ROUTER_NAT)
        if not l3_plugin:
            LOG.error(_("No L3 router service plugin found."))
            raise exc.GroupPolicyDeploymentError()
        return l3_plugin
    @property
    def _servicechain_plugin(self):
        # REVISIT(rkukura): Need initialization method after all
        # plugins are loaded to grab and store plugin.
        plugins = manager.NeutronManager.get_service_plugins()
        servicechain_plugin = plugins.get(pconst.SERVICECHAIN)
        if not servicechain_plugin:
            LOG.error(_("No Servicechain service plugin found."))
            raise exc.GroupPolicyDeploymentError()
        return servicechain_plugin
    @property
    def _dhcp_agent_notifier(self):
        # REVISIT(rkukura): Need initialization method after all
        # plugins are loaded to grab and store notifier.
        # NOTE(review): `_cached_agent_notifier` is never initialized in this
        # class (there is no __init__) — presumably a subclass or mixin sets
        # it to None; confirm, otherwise the first access raises
        # AttributeError.
        if not self._cached_agent_notifier:
            agent_notifiers = getattr(self._core_plugin, 'agent_notifiers', {})
            self._cached_agent_notifier = (
                agent_notifiers.get(const.AGENT_TYPE_DHCP) or
                dhcp_rpc_agent_api.DhcpAgentNotifyAPI())
        return self._cached_agent_notifier
    def _create_resource(self, plugin, context, resource, attrs):
        """Create `resource` via `plugin` and fire Nova/DHCP notifications."""
        # REVISIT(rkukura): Do create.start notification?
        # REVISIT(rkukura): Check authorization?
        # REVISIT(rkukura): Do quota?
        with utils.clean_session(context.session):
            action = 'create_' + resource
            obj_creator = getattr(plugin, action)
            obj = obj_creator(context, {resource: attrs})
            self._nova_notifier.send_network_change(action, {},
                                                    {resource: obj})
            # REVISIT(rkukura): Do create.end notification?
            if cfg.CONF.dhcp_agent_notification:
                self._dhcp_agent_notifier.notify(context,
                                                 {resource: obj},
                                                 resource + '.create.end')
        return obj
    def _update_resource(self, plugin, context, resource, resource_id, attrs):
        """Update `resource_id` via `plugin` and fire Nova/DHCP notifications."""
        # REVISIT(rkukura): Do update.start notification?
        # REVISIT(rkukura): Check authorization?
        with utils.clean_session(context.session):
            obj_getter = getattr(plugin, 'get_' + resource)
            orig_obj = obj_getter(context, resource_id)
            action = 'update_' + resource
            obj_updater = getattr(plugin, action)
            obj = obj_updater(context, resource_id, {resource: attrs})
            self._nova_notifier.send_network_change(action, orig_obj,
                                                    {resource: obj})
            # REVISIT(rkukura): Do update.end notification?
            if cfg.CONF.dhcp_agent_notification:
                self._dhcp_agent_notifier.notify(context,
                                                 {resource: obj},
                                                 resource + '.update.end')
        return obj
    def _delete_resource(self, plugin, context, resource, resource_id):
        """Delete `resource_id` via `plugin` and fire Nova/DHCP notifications."""
        # REVISIT(rkukura): Do delete.start notification?
        # REVISIT(rkukura): Check authorization?
        with utils.clean_session(context.session):
            obj_getter = getattr(plugin, 'get_' + resource)
            obj = obj_getter(context, resource_id)
            action = 'delete_' + resource
            obj_deleter = getattr(plugin, action)
            obj_deleter(context, resource_id)
            self._nova_notifier.send_network_change(action, {},
                                                    {resource: obj})
            # REVISIT(rkukura): Do delete.end notification?
            if cfg.CONF.dhcp_agent_notification:
                self._dhcp_agent_notifier.notify(context,
                                                 {resource: obj},
                                                 resource + '.delete.end')
    def _get_resource(self, plugin, context, resource, resource_id):
        """Fetch a single `resource` by id via `plugin` (no notifications)."""
        with utils.clean_session(context.session):
            obj_getter = getattr(plugin, 'get_' + resource)
            obj = obj_getter(context, resource_id)
        return obj
    def _get_resources(self, plugin, context, resource, filters=None):
        """List `resource`s matching `filters` via `plugin` (no notifications)."""
        with utils.clean_session(context.session):
            obj_getter = getattr(plugin, 'get_' + resource + 's')
            obj = obj_getter(context, filters)
        return obj
    # The following methods perform the necessary subset of
    # functionality from neutron.api.v2.base.Controller.
    #
    # REVISIT(rkukura): Can we just use the WSGI Controller?  Using
    # neutronclient is also a possibility, but presents significant
    # issues to unit testing as well as overhead and failure modes.
    def _get_port(self, plugin_context, port_id):
        return self._get_resource(self._core_plugin, plugin_context, 'port',
                                  port_id)
    def _get_ports(self, plugin_context, filters=None):
        filters = filters or {}
        return self._get_resources(self._core_plugin, plugin_context, 'port',
                                   filters)
    def _create_port(self, plugin_context, attrs):
        return self._create_resource(self._core_plugin, plugin_context, 'port',
                                     attrs)
    def _update_port(self, plugin_context, port_id, attrs):
        return self._update_resource(self._core_plugin, plugin_context, 'port',
                                     port_id, attrs)
    def _delete_port(self, plugin_context, port_id):
        try:
            self._delete_resource(self._core_plugin,
                                  plugin_context, 'port', port_id)
        except n_exc.PortNotFound:
            # Deletion is idempotent: already-gone resources only warn.
            LOG.warn(_('Port %s already deleted'), port_id)
    def _get_subnet(self, plugin_context, subnet_id):
        return self._get_resource(self._core_plugin, plugin_context, 'subnet',
                                  subnet_id)
    def _get_subnets(self, plugin_context, filters=None):
        filters = filters or {}
        return self._get_resources(self._core_plugin, plugin_context, 'subnet',
                                   filters)
    def _create_subnet(self, plugin_context, attrs):
        return self._create_resource(self._core_plugin, plugin_context,
                                     'subnet', attrs)
    def _update_subnet(self, plugin_context, subnet_id, attrs):
        return self._update_resource(self._core_plugin, plugin_context,
                                     'subnet', subnet_id, attrs)
    def _delete_subnet(self, plugin_context, subnet_id):
        try:
            self._delete_resource(self._core_plugin, plugin_context, 'subnet',
                                  subnet_id)
        except n_exc.SubnetNotFound:
            LOG.warn(_('Subnet %s already deleted'), subnet_id)
    def _get_network(self, plugin_context, network_id):
        return self._get_resource(self._core_plugin, plugin_context, 'network',
                                  network_id)
    def _get_networks(self, plugin_context, filters=None):
        filters = filters or {}
        return self._get_resources(
            self._core_plugin, plugin_context, 'network', filters)
    def _create_network(self, plugin_context, attrs):
        return self._create_resource(self._core_plugin, plugin_context,
                                     'network', attrs)
    def _delete_network(self, plugin_context, network_id):
        try:
            self._delete_resource(self._core_plugin, plugin_context,
                                  'network', network_id)
        except n_exc.NetworkNotFound:
            LOG.warn(_('Network %s already deleted'), network_id)
    def _get_router(self, plugin_context, router_id):
        return self._get_resource(self._l3_plugin, plugin_context, 'router',
                                  router_id)
    def _get_routers(self, plugin_context, filters=None):
        filters = filters or {}
        return self._get_resources(self._l3_plugin, plugin_context, 'router',
                                   filters)
    def _create_router(self, plugin_context, attrs):
        return self._create_resource(self._l3_plugin, plugin_context, 'router',
                                     attrs)
    def _update_router(self, plugin_context, router_id, attrs):
        return self._update_resource(self._l3_plugin, plugin_context, 'router',
                                     router_id, attrs)
    def _add_router_interface(self, plugin_context, router_id, interface_info):
        self._l3_plugin.add_router_interface(plugin_context,
                                             router_id, interface_info)
    def _remove_router_interface(self, plugin_context, router_id,
                                 interface_info):
        self._l3_plugin.remove_router_interface(plugin_context, router_id,
                                                interface_info)
    def _add_router_gw_interface(self, plugin_context, router_id, gw_info):
        return self._l3_plugin.update_router(
            plugin_context, router_id,
            {'router': {'external_gateway_info': gw_info}})
    def _remove_router_gw_interface(self, plugin_context, router_id,
                                    interface_info):
        # `interface_info` is accepted for signature symmetry but unused;
        # clearing the gateway only needs the router id.
        self._l3_plugin.update_router(
            plugin_context, router_id,
            {'router': {'external_gateway_info': None}})
    def _delete_router(self, plugin_context, router_id):
        try:
            self._delete_resource(self._l3_plugin, plugin_context, 'router',
                                  router_id)
        except l3.RouterNotFound:
            LOG.warn(_('Router %s already deleted'), router_id)
    def _get_sg(self, plugin_context, sg_id):
        return self._get_resource(
            self._core_plugin, plugin_context, 'security_group', sg_id)
    def _get_sgs(self, plugin_context, filters=None):
        filters = filters or {}
        return self._get_resources(
            self._core_plugin, plugin_context, 'security_group', filters)
    def _create_sg(self, plugin_context, attrs):
        return self._create_resource(self._core_plugin, plugin_context,
                                     'security_group', attrs)
    def _update_sg(self, plugin_context, sg_id, attrs):
        return self._update_resource(self._core_plugin, plugin_context,
                                     'security_group', sg_id, attrs)
    def _delete_sg(self, plugin_context, sg_id):
        try:
            self._delete_resource(self._core_plugin, plugin_context,
                                  'security_group', sg_id)
        except ext_sg.SecurityGroupNotFound:
            LOG.warn(_('Security Group %s already deleted'), sg_id)
    def _get_sg_rule(self, plugin_context, sg_rule_id):
        return self._get_resource(
            self._core_plugin, plugin_context, 'security_group_rule',
            sg_rule_id)
    def _get_sg_rules(self, plugin_context, filters=None):
        filters = filters or {}
        return self._get_resources(
            self._core_plugin, plugin_context, 'security_group_rule', filters)
    def _create_sg_rule(self, plugin_context, attrs):
        try:
            return self._create_resource(self._core_plugin, plugin_context,
                                         'security_group_rule', attrs)
        except ext_sg.SecurityGroupRuleExists as ex:
            # Creating an already-existing rule is treated as a no-op.
            LOG.warn(_('Security Group already exists %s'), ex.message)
            return
    def _update_sg_rule(self, plugin_context, sg_rule_id, attrs):
        return self._update_resource(self._core_plugin, plugin_context,
                                     'security_group_rule', sg_rule_id,
                                     attrs)
    def _delete_sg_rule(self, plugin_context, sg_rule_id):
        try:
            self._delete_resource(self._core_plugin, plugin_context,
                                  'security_group_rule', sg_rule_id)
        except ext_sg.SecurityGroupRuleNotFound:
            LOG.warn(_('Security Group Rule %s already deleted'), sg_rule_id)
    def _get_fip(self, plugin_context, fip_id):
        return self._get_resource(
            self._l3_plugin, plugin_context, 'floatingip', fip_id)
    def _get_fips(self, plugin_context, filters=None):
        filters = filters or {}
        return self._get_resources(
            self._l3_plugin, plugin_context, 'floatingip', filters)
    def _create_fip(self, plugin_context, attrs):
        return self._create_resource(self._l3_plugin, plugin_context,
                                     'floatingip', attrs)
    def _update_fip(self, plugin_context, fip_id, attrs):
        return self._update_resource(self._l3_plugin, plugin_context,
                                     'floatingip', fip_id, attrs)
    def _delete_fip(self, plugin_context, fip_id):
        try:
            self._delete_resource(self._l3_plugin, plugin_context,
                                  'floatingip', fip_id)
        except l3.FloatingIPNotFound:
            LOG.warn(_('Floating IP %s Already deleted'), fip_id)
    def _get_servicechain_instance(self, plugin_context, sci_id):
        return self._get_resource(
            self._servicechain_plugin, plugin_context, 'servicechain_instance',
            sci_id)
    def _get_servicechain_instances(self, plugin_context, filters=None):
        filters = filters or {}
        return self._get_resources(
            self._servicechain_plugin, plugin_context, 'servicechain_instance',
            filters)
    def _create_servicechain_instance(self, plugin_context, attrs):
        return self._create_resource(
            self._servicechain_plugin, plugin_context,
            'servicechain_instance', attrs)
    def _update_servicechain_instance(self, plugin_context, sci_id, attrs):
        return self._update_resource(self._servicechain_plugin, plugin_context,
                                     'servicechain_instance', sci_id, attrs)
    def _delete_servicechain_instance(self, context, sci_id):
        # NOTE(review): unlike the other deleters, this takes a GBP context
        # and dereferences context._plugin_context — confirm callers pass
        # that type.
        try:
            self._delete_resource(self._servicechain_plugin,
                                  context._plugin_context,
                                  'servicechain_instance', sci_id)
        except sc_ext.ServiceChainInstanceNotFound:
            # SC could have been already deleted
            LOG.warn(_("servicechain %s already deleted"), sci_id)
    def _get_servicechain_spec(self, plugin_context, scs_id):
        return self._get_resource(
            self._servicechain_plugin, plugin_context, 'servicechain_spec',
            scs_id)
    def _get_servicechain_specs(self, plugin_context, filters=None):
        filters = filters or {}
        return self._get_resources(
            self._servicechain_plugin, plugin_context, 'servicechain_spec',
            filters)
    def _create_servicechain_spec(self, plugin_context, attrs):
        return self._create_resource(
            self._servicechain_plugin, plugin_context,
            'servicechain_spec', attrs)
    def _update_servicechain_spec(self, plugin_context, scs_id, attrs):
        return self._update_resource(self._servicechain_plugin, plugin_context,
                                     'servicechain_spec', scs_id, attrs)
    def _delete_servicechain_spec(self, context, scs_id):
        try:
            self._delete_resource(self._servicechain_plugin,
                                  context._plugin_context,
                                  'servicechain_spec', scs_id)
        except sc_ext.ServiceChainSpecNotFound:
            # SC could have been already deleted
            LOG.warn(_("servicechain spec %s already deleted"), scs_id)
|
# JSON-Schema (draft-04) describing a person record: firstName and lastName
# are required strings; age is an optional non-negative integer.
schema = {
    "$schema": "http://json-schema.org/draft-04/schema#",
    "title": "Example Schema",
    "type": "object",
    "properties": {
        "firstName": {"type": "string"},
        "lastName": {"type": "string"},
        "age": {
            "description": "Age in years",
            "type": "integer",
            "minimum": 0,
        },
    },
    "required": ["firstName", "lastName"],
}
class User(object):
    """Container for user fields with schema-matching defaults.

    Fixed: the original body used `name: "",` lines, which are JavaScript
    object-literal syntax and a SyntaxError in Python. Annotated class
    attributes with default values are the closest valid equivalent.
    """
    firstName: str = ""
    lastName: str = ""
    age: int = 0
    email: str = ""
    username: str = ""
    password: str = ""
|
987,797 | e236f00280c21171a5307dbdad22f504fd20dd5b | # Generated by Django 3.2.8 on 2021-10-18 14:39
from django.db import migrations, models
class Migration(migrations.Migration):
    """Allow a user's 'favs' many-to-many relation to rooms to be blank."""
    dependencies = [
        ('rooms', '0002_auto_20191216_0937'),
        ('users', '0002_alter_user_avatar_alter_user_first_name'),
    ]
    operations = [
        migrations.AlterField(
            model_name='user',
            name='favs',
            field=models.ManyToManyField(blank=True, related_name='favs', to='rooms.Room'),
        ),
    ]
|
987,798 | 9d5cd86bd4deb3007a7ad3214e1bb3c2bf5cbb70 | from django.apps import AppConfig
class GamepickerConfig(AppConfig):
    """Django application configuration for the "gamepicker" app."""
    name = 'gamepicker'
|
987,799 | 3a0370fca1faece049455f051da636d21c87ddda | from .models import Event
from django.utils import timezone
def next_events(request):
    """Template context processor exposing the next five upcoming events.

    Returns a dict with key 'events': the five Event rows whose start_date
    is now or later, ordered soonest first. `request` is required by the
    context-processor protocol but unused.
    """
    events = Event.objects.filter(
        start_date__gte=timezone.now()).order_by('start_date')[:5]
    return {'events': events}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.