index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
994,100 | bc18a228393c5e94b5de50e2b890cea60e5574cb | from django.db import models
from moderator.models import BaseTimestamp
class Doctor(BaseTimestamp):
    """
    Doctor model with OneToOne relation with custom user model.
    """
    # One doctor profile per user account; deleting the user cascades here.
    user = models.OneToOneField("accounts.user", on_delete=models.CASCADE)
    # Free-text medical specialty, e.g. "Cardiology".
    specialization = models.CharField(max_length=64)
    # Additional free-text information about the doctor.
    info = models.TextField()
    class Meta:
        # Oldest records first; `created_at` presumably comes from
        # BaseTimestamp — confirm in moderator.models.
        ordering = ['created_at']
    def __str__ (self):
        """
        Return doctor's email.
        """
        return self.user.email
|
994,101 | 455e83ce0816270521c066a6ddb8fa36f486c671 | import csv
# Print the first column of every CSV row, then dump the JSON payload.
with open("data.csv") as csv_file:
    csv_reader = csv.reader(csv_file, delimiter=',', quotechar='"')
    for row in csv_reader:
        # BUG FIX: a blank line yields an empty row and row[0] raised
        # IndexError; skip empty rows instead of crashing.
        if row:
            print(row[0])
import json
with open("data.json") as json_file:
    data = json.load(json_file)
    print(data)
994,102 | 6d8f2189d7b92b01336ee9d4ff87430db7d0e89e | import datetime
import re
import socket
from _thread import start_new_thread
import config
import db
import utils
infobool = False
def main():
    """Twitch IRC chat-bot entry point.

    Connects to Twitch IRC via `utils`, skips the login/NAMES preamble,
    then loops forever: answers server PINGs and forwards chat messages
    to the command dispatcher in `utils.isCommand`.
    """
    s = socket.socket()  # establish the connection
    utils.connection(s)
    print(s)
    # Log the timestamp of the current run.
    with open('log.txt', 'a') as file:
        file.write('\n\n' + datetime.datetime.now().strftime('%d:%m:%Y-[%I h:%M m: %S s]') + '\n')
    # utils.mess(s, "Hello")  # initial greeting
    chat_message = re.compile(r"^:\w+!\w+@\w+\.tmi\.twitch\.tv PRIVMSG #\w+ :")
    start_new_thread(utils.fillOpList, ())  # keep the moderator list up to date
    # start_new_thread(utils.info, (s, 600))  # periodic info spam
    while True:  # skip Twitch's "intro" up to the end of the NAMES list
        line = str(s.recv(1024))
        if "End of /NAMES list" in line:
            break
    while True:  # main loop: receive chat messages and dispatch them
        try:
            response = s.recv(1024).decode()  # receive a message packet
            # print(response)
        except Exception:
            print('smth wrong happened')
            # BUG FIX: previously fell through with `response` unbound
            # (NameError on the first failure) or stale; retry instead.
            continue
        if response == "PING :tmi.twitch.tv\r\n":  # answer keep-alives so the server does not drop us
            s.send("PONG :tmi.twitch.tv\r\n".encode())
            print("PONG SENT")
        else:
            try:
                username = re.search(r"\w+", response).group(0)  # extract the nickname
                message = chat_message.sub("", response)  # strip the IRC envelope
                message = message.strip()
                with open('log.txt', 'a') as f:
                    f.write('MSG### ' + username + ': ' + message + '\n')
                print('MSG### ' + username + ': ' + message)  # echo to the console log
                utils.isCommand(s, username, message)  # dispatch if the message is a bot command
            except Exception as msg:  # log errors for later debugging
                print('#ERROR:', msg)
                with open('log.txt', 'a') as f:
                    f.write('#ERROR: ' + str(msg) + '\n')


if __name__ == "__main__":
    main()
|
994,103 | d8d8229a99eeea28295c91c3d39e5d2413b81803 | import cv2
import os
import time
import numpy as np
from keras import backend as K
from keras.models import load_model
from yad2k.models.keras_yolo import yolo_eval, yolo_head
class YOLO(object):
    """YAD2K-based YOLO detector.

    Loads a Keras .h5 YOLO model plus anchor/class definition files,
    builds the TensorFlow evaluation graph once, and annotates BGR
    images (numpy arrays) in place with boxes and labels.
    """
    def __init__(self):
        # Paths are fixed relative to the working directory.
        self.model_path = 'model_data/yolo.h5'
        self.anchors_path = 'model_data/yolo_anchors.txt'
        self.classes_path = 'model_data/coco_classes.txt'
        self.score = 0.3  # minimum box confidence kept
        self.iou = 0.5  # IoU threshold for non-max suppression
        self.class_names = self._get_class()
        self.anchors = self._get_anchors()
        self.sess = K.get_session()
        # Output tensors are built once and reused for every image.
        self.boxes, self.scores, self.classes = self.generate()
    def _get_class(self):
        """Return the list of class names, one per line of the classes file."""
        classes_path = os.path.expanduser(self.classes_path)
        with open(classes_path) as f:
            class_names = f.readlines()
        class_names = [c.strip() for c in class_names]
        return class_names
    def _get_anchors(self):
        """Parse the anchors file (one comma-separated line) into an (N, 2) array."""
        anchors_path = os.path.expanduser(self.anchors_path)
        with open(anchors_path) as f:
            anchors = f.readline()
            anchors = [float(x) for x in anchors.split(',')]
            anchors = np.array(anchors).reshape(-1, 2)
        return anchors
    def generate(self):
        """Load the model, sanity-check it, and build box/score/class tensors."""
        model_path = os.path.expanduser(self.model_path)
        assert model_path.endswith('.h5'), 'Keras model must be a .h5 file.'
        self.yolo_model = load_model(model_path)
        # Verify model, anchors, and classes are compatible
        num_classes = len(self.class_names)
        num_anchors = len(self.anchors) # TODO: Assumes dim ordering is channel last
        model_output_channels = self.yolo_model.layers[-1].output_shape[-1]
        assert model_output_channels == num_anchors * (num_classes + 5),'Mismatch between model and given anchor and class sizes'
        print('{} model, anchors, and classes loaded.'.format(model_path))
        # Check if model is fully convolutional, assuming channel last order.
        self.model_image_size = self.yolo_model.layers[0].input_shape[1:3]
        self.is_fixed_size = self.model_image_size != (None, None) # Generate output tensor targets for filtered bounding boxes.
        # TODO: Wrap these backend operations with Keras layers.
        yolo_outputs = yolo_head(self.yolo_model.output, self.anchors, len(self.class_names))
        self.input_image_shape = K.placeholder(shape=(2, ))
        boxes, scores, classes = yolo_eval(yolo_outputs, self.input_image_shape, score_threshold=self.score, iou_threshold=self.iou)
        return boxes, scores, classes
    def detect_image(self, image):
        """Run detection on one image, draw boxes/labels on it, and return it."""
        start = time.time()
        y, x, _ = image.shape
        if self.is_fixed_size:
            # TODO: When resizing we can use minibatch input.
            resized_image = cv2.resize(image, tuple(reversed(self.model_image_size)), interpolation=cv2.INTER_CUBIC)
            image_data = np.array(resized_image, dtype='float32')
        else:
            image_data = np.array(image, dtype='float32')
        image_data /= 255.  # normalise pixels to [0, 1]
        image_data = np.expand_dims(image_data, 0)
        # Add batch dimension.
        out_boxes, out_scores,out_classes = self.sess.run([self.boxes, self.scores, self.classes], feed_dict={ self.yolo_model.input: image_data, self.input_image_shape: [image.shape[0], image.shape[1]], K.learning_phase(): 0 })
        print('Found {} boxes for {}'.format(len(out_boxes), 'img'))
        for i, c in reversed(list(enumerate(out_classes))):
            predicted_class = self.class_names[c]
            box = out_boxes[i]
            score = out_scores[i]
            label = '{} {:.2f}'.format(predicted_class, score)
            top, left, bottom, right = box
            # Round to integer pixels and clamp to the image bounds.
            top = max(0, np.floor(top + 0.5).astype('int32'))
            left = max(0, np.floor(left + 0.5).astype('int32'))
            bottom = min(y, np.floor(bottom + 0.5).astype('int32'))
            right = min(x, np.floor(right + 0.5).astype('int32'))
            print(label, (left, top), (right, bottom))
            cv2.rectangle(image, (left, top), (right, bottom), (255, 0, 0), 2)
            cv2.putText(image, label, (left, int(top - 4)), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 1, cv2.LINE_AA)
        end = time.time()
        print(end - start)  # elapsed detection time in seconds
        return image
    def close_session(self):
        """Release the underlying TensorFlow session."""
        self.sess.close()
def detect_vedio(video, yolo):
    """Run YOLO detection on every frame of *video* and display the result.

    Press ESC to stop early. Releases the capture, closes all windows and
    the YOLO session on exit. (Function name kept as-is for callers.)
    """
    capture = cv2.VideoCapture(video)
    cv2.namedWindow("detection", cv2.WINDOW_NORMAL)
    while True:
        ok, frame = capture.read()
        if not ok:
            break
        annotated = yolo.detect_image(frame)
        cv2.imshow("detection", annotated)
        if cv2.waitKey(110) & 0xff == 27:
            break
    capture.release()
    cv2.destroyAllWindows()
    yolo.close_session()
def detect_img(img, yolo):
    """Run YOLO detection on the image file *img* and display it until ESC."""
    frame = cv2.imread(img)
    annotated = yolo.detect_image(frame)
    cv2.namedWindow("detection")
    while True:
        cv2.imshow("detection", annotated)
        if cv2.waitKey(110) & 0xff == 27:
            break
    yolo.close_session()
if __name__ == '__main__':
    # Build the detector once, then run it over the hard-coded video file.
    yolo = YOLO()
    # Sample still-image input (uncomment detect_img below to use it).
    #img = 'E:\Documents\Downloads\YAD2K-master\YAD2K-master\images\horses.jpg'
    video = './1234.avi'
    #detect_img(img, yolo)
    detect_vedio(video, yolo)
|
994,104 | 57151c2867ff07aa1d9002d2491dca17fe188299 | from rest_framework import serializers
from fees.models import conductedFees
class conductedFeesSerializer(serializers.ModelSerializer):
    """DRF serializer for conductedFees, looked up by `user` instead of pk."""
    class Meta():
        model = conductedFees
        fields = ('user','room', 'totalFees','adminFees')
        # Detail routes use the related user as the URL keyword argument.
        lookup_field = 'user'
        extra_kwargs = {
            'url': {'lookup_field': 'user'}
        }
|
994,105 | 5e422d28ecafdedc046ad5b35934dd212173e324 | import json
from steppygraph.machine import Branch, Parallel
from steppygraph.states import Choice, ChoiceCase, Comparison, ComparisonType, Task, StateType, to_serializable, \
Pass, Catcher, ErrorType, State, BatchJob, EcsTask
from steppygraph.states import Resource, ResourceType
from steppygraph.test.testutils import read_json_test_case, write_json_test_case
def test_state_to_str():
    """StateType values stringify to their Step Functions state name."""
    choice_name = str(StateType.CHOICE)
    assert choice_name == "Choice"
def test_comparison_type_to_str():
    """ComparisonType values stringify to the ASL operator name."""
    op_name = str(ComparisonType.BOOLEAN_EQ)
    assert op_name == "BooleanEquals"
def test_resource_to_json():
    """A Lambda Resource serialises to its full ARN string."""
    res = Resource(name="foo-trigger", type=ResourceType.LAMBDA,
                   aws_ac=1234, region="eu-west-1")
    serialised = json.dumps(res, default=to_serializable)
    assert serialised == '"arn:aws:lambda:eu-west-1:1234:function:foo-trigger"'
def test_lambda_task_to_json():
    """A Lambda-backed Task serialises with its ARN and default timeout."""
    task = Task(name="sdfdsf", resource=Resource(type=ResourceType.LAMBDA, name="trigger"))
    expected = """{"Type": "Task", "Resource": "arn:aws:lambda:::function:trigger", "TimeoutSeconds": 600}"""
    assert json.dumps(task, default=to_serializable) == expected
def test_batch_task_to_json():
    """A BatchJob serialises to a submitJob.sync task with its parameters."""
    job = BatchJob(name="foo", definition="fooDef", queue="fooQueue", parameters="$.batchJob")
    expected = """{"Type": "Task", "Resource": "arn:aws:states:::batch:submitJob.sync", "TimeoutSeconds": 600, """ + \
        """"Parameters": {"JobDefinition": "fooDef", "JobName": "foo", "JobQueue": "fooQueue", "Parameters.$": "$.batchJob"}}"""
    assert json.dumps(job, default=to_serializable) == expected
def test_ecs_task_to_json():
    """An EcsTask serialises to a runTask.sync task with its parameters."""
    task = EcsTask(name="foo", cluster="cluster_arn", definition="task_definition", launch_type="FARGATE")
    expected = """{"Type": "Task", "Resource": "arn:aws:states:::ecs:runTask.sync", "TimeoutSeconds": 600, """ + \
        """"Parameters": {"Cluster": "cluster_arn", "LaunchType": "FARGATE", "TaskDefinition": "task_definition", "NetworkConfiguration": {}, "Overrides": {}}}"""
    assert json.dumps(task, default=to_serializable) == expected
def test_choice_case_to_json():
    """A ChoiceCase serialises with its variable, next state and comparison."""
    case = ChoiceCase("$.foo.field",
                      comparison=Comparison(ComparisonType.BOOLEAN_EQ, value=True),
                      next=Pass(name="thisistheend"))
    expected = """{"Variable": "$.foo.field", "Next": "thisistheend", "BooleanEquals": true}"""
    assert json.dumps(case, default=to_serializable) == expected
def test_choice_to_json():
    """A Choice with one case and a default serialises fully."""
    end_task = Task(
        name="endstate",
        resource=Resource(name="foo-trigger", type=ResourceType.LAMBDA)
    )
    cases = [ChoiceCase(variable="Foovar",
                        comparison=Comparison(comparison_type=ComparisonType.BOOLEAN_EQ, value=True),
                        next=end_task)]
    choice = Choice(name="Foochoice", choices=cases, default=end_task)
    expected = """{"Type": "Choice", "Choices": [{"Variable": "Foovar", "Next": "endstate", "BooleanEquals": true}], "Default": "endstate"}"""
    assert json.dumps(choice, default=to_serializable) == expected
def test_parallel():
    """A two-branch Parallel builds and serialises to the expected fixture."""
    t = Task(
        name="endstate",
        resource=Resource(name="foo-trigger", type=ResourceType.LAMBDA)
    )
    branch_a = Branch("branchA")
    branch_a.next(t)
    branch_b = Branch("branchB")
    branch_b.next(t)
    p = Parallel("ABTest", branches=[branch_a, branch_b])
    p.build()
    assert len(p.Branches) == 2
    # BUG FIX: this comparison was previously a bare expression, so the
    # JSON equality check never actually ran.
    assert read_json_test_case("parallel_simple_state") == p.to_json()
def test_catcher_to_json():
    """A Catcher serialises to the JSON stored in the test-case fixture."""
    fallback = Task("foo", resource=Resource("fooAct",
                                             type=ResourceType.ACTIVITY))
    catcher = Catcher(error_equals=[ErrorType.TASK_FAILED], next=fallback)
    assert json.dumps(catcher, default=to_serializable) == read_json_test_case(
        "catcher_to_json")
def test_setting_timeout_works():
    """An explicit timeout_seconds overrides the default on the Task."""
    task = Task("foo", timeout_seconds=7,
                resource=Resource("fooRes", type=ResourceType.LAMBDA))
    assert task.TimeoutSeconds == 7
def test_setting_catcher_on_task_works():
    """A Task constructed with a catch list serialises like the fixture."""
    handler = Task("foob",
                   resource=Resource("sfs", type=ResourceType.LAMBDA))
    task = Task(
        name="catachable",
        resource=Resource(name="foo-trigger", type=ResourceType.LAMBDA),
        catch=[Catcher(error_equals=[ErrorType.TASK_FAILED], next=handler)]
    )
    serialised = json.dumps(task, default=to_serializable)
    assert read_json_test_case("catcher_in_the_task") == serialised
|
994,106 | 2ac550ae9f2f81b11c348c1e6008f748d79bd3ad | from operator import add, sub
from _utils import *
# Puzzle input (AoC 2020 day 8): one instruction per line,
# e.g. "nop +0" / "acc +1" / "jmp -3".
inp = get_input(2020, 8)
tape = inp.strip().split("\n")
def step(i, acc):
    """Execute the instruction at index *i* of the module-level `tape`.

    Returns the next (i, acc) pair. NOTE: reads the *global* `tape`, not a
    parameter — part 2 below relies on rebinding the module-level name.
    Raises ValueError on an unknown opcode.
    """
    op = {"+": add, "-": sub}
    instr, arg = tape[i].split()
    sign, num = arg[:1], arg[1:]
    if instr == "nop":
        i += 1
    elif instr == "acc":
        i += 1
        acc = op[sign](acc, int(num))
    elif instr == "jmp":
        i = op[sign](i, int(num))
    else:
        raise ValueError(i)
    return i, acc
def evaluate(tape, return_state_at_repeated):
    """Run the program until it loops or runs off the end.

    Returns `acc` at the first repeated instruction when
    *return_state_at_repeated* is true (part 1); otherwise returns None
    for a looping program and the final `acc` for a terminating one.
    NOTE(review): the `i + 1 < len(tape)` bound exits *before* executing
    the last instruction — confirm this is intended (termination is
    normally detected at i == len(tape)).
    """
    seen, i, acc = set(), 0, 0
    while i + 1 < len(tape):
        seen.add(i)
        i, acc = step(i, acc)
        if i in seen:
            return acc if return_state_at_repeated else None
    return acc
# part 1
# Accumulator value at the first repeated instruction.
print(evaluate(tape, True))
# part 2
def swaps(tape):
    """Yield one candidate tape per instruction, with that instruction's
    jmp<->nop swapped; `acc` instructions are left untouched."""
    for idx, line in enumerate(tape):
        opcode, arg = line.split()
        if opcode == "jmp":
            line = f"nop {arg}"
        elif opcode == "nop":
            line = f"jmp {arg}"
        candidate = list(tape)
        candidate[idx] = line
        yield candidate
# part 2: try every jmp/nop swap until a program terminates.
for tape in swaps(tape):
    acc = evaluate(tape, False)
    # BUG FIX: `if acc:` silently dropped a legitimate result of 0;
    # evaluate() signals a looping program with None, so test for that.
    if acc is not None:
        print(acc)
|
994,107 | 78b96d5adbdde3e7433701663d9b5344ccaee955 | from django.template import Library, Node
from distribution.models import FoodNetwork
class FoodNet(Node):
    """Template node that injects the first FoodNetwork into the context."""

    def render(self, context):
        # There is normally a single FoodNetwork row; fall back to None
        # when the table is still empty.
        try:
            network = FoodNetwork.objects.all()[0]
        except IndexError:
            network = None
        context['food_network'] = network
        return ''
def do_get_food_network(parser, token):
    """Template-tag compiler: `{% food_network %}` takes no arguments."""
    return FoodNet()
# Register the tag so templates can use {% food_network %}.
register = Library()
register.tag('food_network', do_get_food_network)
|
994,108 | 6e7221bfbbb27e7d5207f6fa33b80c2d4d7801d3 | from __future__ import unicode_literals
from django.apps import AppConfig
class Schedule1Config(AppConfig):
    """Django application configuration for the `schedule1` app."""
    name = 'schedule1'
|
994,109 | 28dcc6b55b3a4470a519605d85e8dc7b64a898b3 | from hashlib import sha1 as sha1_oracle
from hypothesis import given
from hypothesis.strategies import binary, lists
from firmware import Sha1
def test_empty() -> None:
    """Digest of the empty message matches hashlib's sha1."""
    ours = Sha1().digest().hex()
    reference = sha1_oracle().digest().hex()
    assert ours == reference
def test_short() -> None:
    """Digest of a short message matches hashlib's sha1."""
    data = b"abc"
    ours = Sha1()
    reference = sha1_oracle()
    ours.update(data)
    reference.update(data)
    assert ours.digest().hex() == reference.digest().hex()
def test_padding_up_to_one_fits_in_first_block() -> None:
    """block_size - 1 bytes: the 0x80 padding byte still fits in block one."""
    ours = Sha1()
    reference = sha1_oracle()
    data = b"x" * (reference.block_size - 1)
    ours.update(data)
    reference.update(data)
    assert ours.digest().hex() == reference.digest().hex()
def test_padding_up_to_zeroes_fits_in_first_block() -> None:
    """block_size - 2 bytes: padding plus zero fill fits in block one."""
    ours = Sha1()
    reference = sha1_oracle()
    data = b"x" * (reference.block_size - 2)
    ours.update(data)
    reference.update(data)
    assert ours.digest().hex() == reference.digest().hex()
def test_padding_up_to_length_fits_in_same_block() -> None:
    """block_size - 9 bytes: the 8-byte length field fits in the same block."""
    ours = Sha1()
    reference = sha1_oracle()
    data = b"x" * (reference.block_size - 9)
    ours.update(data)
    reference.update(data)
    assert ours.digest().hex() == reference.digest().hex()
@given(lists(binary()))
def test_prop_comparing_to_python_version(chunks: list[bytes]) -> None:
    """Property test: incremental updates agree with hashlib for any chunking."""
    ours = Sha1()
    reference = sha1_oracle()
    for chunk in chunks:
        ours.update(chunk)
        reference.update(chunk)
    assert ours.digest().hex() == reference.digest().hex()
|
994,110 | efa0eeddaceb76c67d1d07f222be1c4c14d215b7 | # -*- coding: utf-8 -*-
# Odoo addon manifest for the Project Weekly Report module.
{
    'name': 'Project Weekly Report',
    'version': '1.0',
    'category': 'Project',
    'sequence': 1,
    'summary': ' create weekly report',
    'description': "This module will create project report weekly.",
    'website': 'http://www.hashmicro.com/',
    'author': 'Hashmicro / Niyas',
    'depends': ['project_issue_extension'],
    # Views and access rights loaded on install.
    'data': [
        'views/project_weekly_report_view.xml',
        'security/ir.model.access.csv',
    ],
    'qweb' : [
    ],
    'installable': True,
    'auto_install': False,
    'application': True,
}
|
994,111 | 98c565a141ca8d0b3d3664f7bf19a6582d359a05 |
# number two practice problem #
# all in m/s #
# NOTE(review): `a` is an acceleration (m/s^2), not m/s, and is unused below.
a = 9.8
ang = 45
v_i = 26.8
y = 2
# physics is wrong #
# NOTE(review): ang*v_i*y is not a physical formula (projectile range would
# be v_i**2*sin(2*theta)/a); kept as-is since the author flagged it above.
x = ang*v_i*y
print(x)
|
994,112 | a8eebd21b8453709c7dab6eaae93ff36893dfecd | from generators.lib.dates_generator import DatesGenerator
from generators.lib.hash_splitter import HashSplitter
from generators.lib.length_cutter import LengthCutter
from generators.lib.file_splitter import FileSplitter
import argparse
if __name__ == "__main__":
    # Command-line front end for the wordlist generator library.
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--file_in", required=False,
                        help="Specify a file containing words")
    parser.add_argument("-o", "--file_out", required=True,
                        help="Specify a output file")
    parser.add_argument("-g", "--generator", required=True,
                        help="Specify generator type")
    parser.add_argument("-s", "--date_start", required=False,
                        help="Specify start date in ddmmyyyy format")
    parser.add_argument("-e", "--date_end", required=False,
                        help="Specify end date in ddmmyyyy format")
    parser.add_argument("-f", "--format", required=False,
                        help="Specify format options")
    parser.add_argument("-d", "--delimiter", required=False,
                        help="Specify delimiter options")
    parser.add_argument("-min", "--minimum_length", required=False,
                        help="Specify minimum length of word")
    parser.add_argument("-max", "--maximum_length", required=False,
                        help="Specify maximum length or number of words")
    args = parser.parse_args()
    # Each constructor presumably performs its work as a side effect (the
    # instance is never used afterwards) — confirm in generators.lib.
    if args.generator == "date_generator":
        generator = DatesGenerator(start_date_ddmmyyyy=args.date_start,
                                   end_date_ddmmyyyy=args.date_end,
                                   _format=args.format,
                                   delimiter=args.delimiter,
                                   file_out=args.file_out
                                   )
    elif args.generator == "hash_splitter":
        generator = HashSplitter(file_in=args.file_in,
                                 file_out=args.file_out,
                                 separator=args.delimiter)
    elif args.generator == "word_cutter":
        generator = LengthCutter(file_in=args.file_in,
                                 file_out=args.file_out,
                                 minimum=args.minimum_length,
                                 maximum=args.maximum_length)
    elif args.generator == "file_splitter":
        generator = FileSplitter(file_in=args.file_in,
                                 file_out=args.file_out,
                                 maximum=args.maximum_length)
    else:
        # BUG FIX: an unrecognised generator name was previously ignored
        # silently; exit with a usage error instead.
        parser.error("unknown generator type: " + args.generator)
|
994,113 | 9101c47ec1e472d3dd5bc65f7fd75a8fe42eff42 | # coding: utf-8
from Tkinter import *
from pprint import *
from random import randint
import os.path
phase = "init"  # the other possible phases are: "in-game" / "end-game"
def initGrid():  # Returns a 10x10 list of lists
    """Return a fresh 10x10 grid of zeroes; each row is an independent list."""
    return [[0] * 10 for _ in range(10)]
def nb_lignes(name):  # returns the number of lines of a file
    """Count the lines in file *name*."""
    count = 0
    with open(name, "r") as handle:
        for _ in handle:
            count += 1
    return count
def txt_to_grid(name):  # Parse a specially-formatted file into a list of lists
    """Read the space-separated numeric grid stored in file *name*.

    Each row starts with a literal 0 (the lists are seeded with [0]); the
    file's tokens are appended after it. If the file is missing or empty,
    it is (re)created from the global `zeroGrid`, which is then returned.
    """
    if (os.path.exists(name)) and (os.path.getsize(name) > 0):
        n_lignes = nb_lignes(name)
        liste = [[0]]
        for i in range(n_lignes - 1):
            liste.append([0])
        file = open(name, "r")
        for i in range(n_lignes):
            temp = file.readline()
            stock = ""
            # Accumulate characters until a space, then convert the token.
            for car in temp:
                if car == " ":
                    if stock == "0":
                        liste[i].append(0)
                    else:
                        liste[i].append(float(stock))
                    stock = ""
                elif (car != "") and (car != "\n"):
                    stock += car
        file.close()
        return liste
    else:
        # Missing or empty file: initialise it with the zero grid.
        file = open(name, "w")
        file.write(grid_to_txt(zeroGrid))
        file.close()
        return zeroGrid
def determineTir():  # Choose the IA firing mode from its level and game state
    """Dispatch to tirIA with a mode based on the IA level and game state.

    NOTE(review): for level 3, the `to_follow` test below is not chained
    with `elif` after the Intelligent shot, so the IA may fire twice in
    one turn — confirm whether that is intended.
    """
    if IA_level.get() == 1:
        tirIA(ships, "Random")
    elif IA_level.get() == 2:
        if to_follow[0] != 0:
            tirIA(ships, "Following")
        else:
            tirIA(ships, "Random")
    elif IA_level.get() == 3:
        if len(possibleBoat) != 0:
            tirIA(ships, "Intelligent")
        if to_follow[0] != 0:
            tirIA(ships, "Following")
        else:
            tirIA(ships, "Random")
def tir_joueur(Grid, car, IA_boats):  # Player firing function
    """Resolve the player's shot given as a coordinate string like "A10".

    Grid -- the IA's occupation grid; car -- coordinate text typed by the
    player; IA_boats -- the IA's boat point lists ([x, y, alive] per point).
    Invalid coordinates are rejected via the AssertionError handler.
    """
    global TirPlayer
    if phase == "in-game":
        try:
            assert 2 <= len(car) <= 3
            lettre = car[0].upper()
            Nombre = car[1:]
            # Convert the letter part of e.g. "A10" into a column number.
            for i in range(10):
                if lettre == lettres[i]:
                    x = i
            Nombre_valid = 0
            for i in range(10):
                if Nombre == str(i):
                    Nombre_valid = 1
            assert Nombre_valid == 1
            y = int(Nombre) - 1
            assert 0 <= x <= 9
            assert 0 <= y <= 9
            Indic.config(text="Coordonées acceptées")
            ship_tag = ""
            TirPlayer[x][y] = 1
            if Grid[x][y] == 1:  # an IA boat occupies the targeted cell:
                boat_index = 0
                for b in range(len(IA_boats)):
                    for p in range(len(IA_boats[b])):
                        if (IA_boats[b][p][0] == x + 1) and (IA_boats[b][p][1] == y + 1):  # a boat point is present here
                            if IA_boats[b][p][2] == 1:  # the point was not already hit
                                boat_index = b
                                ship_tag = IAships_name[b]
                                IA_boats[b][p][2] = 0
                                display_case(Grilles, "IA", x + 1, y + 1, 3, ship_tag)
                                IA_f_ships.set(pformat(IA_boats))
                            else:
                                Indic.config(text="Vous avez déjà tiré ici et vous aviez touché !\nDommage, vous perdez un tour")
                                # IA's turn
                                determineTir()
                                return
                pts_coule = 0
                for p in range(len(IA_boats[boat_index])):
                    pts_coule += IA_boats[boat_index][p][2]
                if pts_coule == 0:
                    Grilles.itemconfig(ship_tag, fill="red")
                    Indic.config(text="Vous avez coulé le " + str(ship_tag)[2:] + " de votre adversaire !")
                    # Boat sunk
                    finDuJeu()
            else:
                display_case(Grilles, "IA", x + 1, y + 1, 1, nametag="fail")
                # IA's turn
                determineTir()
        except AssertionError:
            Indic.config(text="Les coordonées du tir ne sont pas valides\n(elles doivent être de la forme : LXX)\n(avec L une lettre et XX un nombre entre 1 et 10)")
        if car == "annihilation":  # total-destruction cheat: shoot every cell
            for l_item in lettres:
                for n_item in range(1, 11):
                    tir_joueur(IAGrid, l_item + str(n_item), IAships)
        if car == "r-annihilation":
            for l_item in reversed_Lettres:
                for n_item in range(1, 11):
                    tir_joueur(IAGrid, l_item + str(n_item), IAships)
    else:
        Indic.config(text='Vous êtes encore en phase de placement des bateaux.\nPour commencer la bataille veuillez cliquer sur le bouton :\n"Début du combat"')
def moveboat(canvas, Grid, vehicle, direction):  # Boat-moving function
    """Move or rotate the selected player boat during the placement phase.

    canvas -- the game canvas; Grid -- the player's occupation grid;
    vehicle -- list whose first item is the selected boat index;
    direction -- one of the `fleches` symbols (the last one = rotation).
    """
    if phase == "init":  # only allowed during the placement phase
        if len(vehicle) != 0:
            for i in range(len(ships_name)):
                if vehicle[0] == i:  # find which boat was selected
                    boat = ships[i]
            decal = []
            for i in range(5):
                if direction == fleches[i]:
                    decal = offset[i]
            if decal != "rotation":
                testx = boat[0][0] + decal[0]
                testy = boat[0][1] + decal[1]
                if (1 <= testx <= 10) and (1 <= testy <= 10):
                    if (1 <= boat[len(boat) - 1][0] + decal[0] <= 10) and (1 <= boat[len(boat) - 1][1] + decal[1] <= 10):
                        # Both ends stay on the map: apply the offset.
                        for i in range(len(boat)):
                            x1 = boat[i][0]
                            y1 = boat[i][1]
                            Grid[x1 - 1][y1 - 1] -= 1
                            boat[i][0] += decal[0]
                            boat[i][1] += decal[1]
                            # Move each point of the boat on the grid.
                            x2 = boat[i][0]
                            y2 = boat[i][1]
                            Grid[x2 - 1][y2 - 1] += 1
                        Indic.config(text="Déplacement autorisé")
                        canvas.delete(ships_name[vehicle[0]])
                        for p in range(len(boat)):
                            display_case(canvas, "Player", boat[p][0], boat[p][1], 2, ships_name[vehicle[0]])
                        boat_color("rien")
                    else:
                        Indic.config(text="Vous ne pouvez pas déplacer le bateau plus loin")
                        return
                else:
                    Indic.config(text="Vous ne pouvez pas déplacer le bateau plus loin")
                    return
            else:
                if boat[0][0] == boat[1][0]:
                    # VERTICAL -> HORIZONTAL rotation
                    newlastx = boat[len(boat) - 1][0] + len(boat) - 1
                    if (1 <= newlastx <= 10):
                        for p in range(len(boat)):
                            x1 = boat[p][0]
                            y1 = boat[p][1]
                            Grid[x1 - 1][y1 - 1] -= 1
                            if p != 0:
                                boat[p][0] = boat[p - 1][0] + 1
                                boat[p][1] = boat[0][1]
                            x2 = boat[p][0]
                            y2 = boat[p][1]
                            Grid[x2 - 1][y2 - 1] += 1
                        Indic.config(text="Déplacement autorisé")
                    else:
                        Indic.config(text="Impossible de tourner le bateau")
                        return
                else:
                    # HORIZONTAL -> VERTICAL rotation
                    newlasty = boat[len(boat) - 1][1] + len(boat) - 1
                    if (1 <= newlasty <= 10):
                        for p in range(len(boat)):
                            x1 = boat[p][0]
                            y1 = boat[p][1]
                            Grid[x1 - 1][y1 - 1] -= 1
                            if p != 0:
                                boat[p][1] = boat[p - 1][1] + 1
                                boat[p][0] = boat[0][0]
                            x2 = boat[p][0]
                            y2 = boat[p][1]
                            Grid[x2 - 1][y2 - 1] += 1
                        Indic.config(text="Déplacement autorisé")
                    else:
                        Indic.config(text="Impossible de tourner le bateau")
                        return
                canvas.delete(ships_name[vehicle[0]])
                for p in range(len(boat)):
                    display_case(canvas, "Player", boat[p][0], boat[p][1], 2, ships_name[vehicle[0]])
                boat_color("rien")
        P_f_ships.set(pformat(ships))
        P_f_Grid.set(pformat(playerGrid))
def validation(Grid, ships):  # Validate the end of the placement phase
    """Lock the player's boats and start the game if placement is valid.

    Rejects the start when no IA difficulty is selected or when boats
    overlap (a grid cell > 1), then places the IA fleet according to the
    chosen difficulty.
    """
    global possibleBoat, possibleSafe, IAGrid, phase, IAships
    if phase != "init":
        return
    count = 0
    if IA_level.get() == 0:
        Indic.config(text="Vous devez choisir une difficulté d'IA\nAvant de jouer")
        return
    # Count overlapping cells (a cell > 1 means two boats share it).
    for x in range(len(Grid)):
        for y in range(len(Grid[x])):
            if Grid[x][y] > 1:
                count += 1
    if count == 0:
        phase = "in-game"
        diff = ["facile", "intermédiaire", "difficile"]
        for i in range(len(diff)):
            if i == (IA_level.get() - 1):  # pick the chosen IA difficulty label
                difficulte = diff[i]
        Indic.config(text="Bateaux verouillés\nLa partie commence !" + "\n vous avez choisi l'IA " + difficulte)
        Grilles.itemconfig(Boatlist.get(ACTIVE), fill='blue')
        Boatlist.selection_clear(0, END)
        IAGrid = initGrid()  # place the IA boats according to the difficulty
        if IA_level.get() == 1:
            set_IA_Boats(IAships)
            init_ships_Grids(IAships, IAGrid)
        elif IA_level.get() == 2:
            for i in range(len(IAships)):
                randomAssign(IAGrid, IAships[i])
            init_ships_Grids(IAships, IAGrid)
        elif IA_level.get() == 3:
            possibleBoat = initializeQueue(old_Average_Pboat, 0.3, 1)
            IA_3_Queue.set(pformat(possibleBoat))
            # possibleSafe = initializeQueue(old_Average_Pshots, 0.3, 2)
            possibleSafe = []
            for i in range(len(IAships)):
                randomAssign(IAGrid, IAships[i], possibleSafe)
            init_ships_Grids(IAships, IAGrid)
        IA_f_Grid.set(pformat(IAGrid))
        IA_f_ships.set(pformat(IAships))
    else:
        Indic.config(text="Il y a " + str(count) + " points\nde superpositions")
def saut_ligne(canvas, y, nb):  # Draw `nb` grid rows (line + row-number label)
    """Advance *y* by 40 px per row, drawing each separator line and its
    row number; return the final y coordinate."""
    for i in range(nb):
        y += 40
        canvas.create_line(0, y, 430, y, tags="core")
        canvas.create_text(15, y - 20, font=("Times", 12), text=numbers[i], tags="core")
    return y
def trace_grid(canvas):  # Draw the grid
    """Draw both boards (player on top, IA below) with letter/number margins."""
    canvas.create_rectangle(2, 0, 30, 830, fill='lightskyblue', tags="core")
    canvas.create_rectangle(2, 400, 430, 430, fill='lightskyblue', tags="core")
    # Columns:
    x = 30
    for i in range(10):
        x += 40
        canvas.create_line(x, 0, x, 830, tags="core")
        canvas.create_text(x - 20, 415, text=lettres[i], font=("Times", 12), tags="core")
    # Rows:
    y = 0
    y += 70 + saut_ligne(canvas, y, 9)  # 70 = offset due to the margin
    canvas.create_text(15, 400 - 20, font=("Times", 12), text="10", tags="core")
    saut_ligne(canvas, y, 10)
def display_case(canvas, board, x, y, state, nametag=""):
    """Paint one 40x40 cell on the chosen board ("Player" or "IA").

    state index meaning: [empty, missed shot, intact boat, hit, sunk].
    """
    # state value index: [empty, missed shot, base boat, hit, sunk]
    color = ""
    values = [0, 1, 2, 3, 4]
    colors = ['white', 'grey', 'blue', 'orange', 'red']
    for i in range(5):
        if state == values[i]:  # pick the colour matching the cell state
            color = colors[i]
    x1, x2 = ((x - 1) * 40) + 1 + 30, (x * 40) - 1 + 30
    if board == "Player":
        y1, y2 = ((y - 1) * 40) + 1, (y * 40) - 1
        if nametag != "":
            canvas.create_rectangle(x1, y1, x2, y2, fill=color, tags=("case", nametag))
        else:
            canvas.create_rectangle(x1, y1, x2, y2, fill=color, tags="case")
    elif board == "IA":
        # The IA board is drawn 430 px below the player board.
        y1, y2 = 430 + ((y - 1) * 40) + 1, 430 + (y * 40) - 1
        if nametag != "":
            canvas.create_rectangle(x1, y1, x2, y2, fill=color, tags=("case", nametag))
        else:
            canvas.create_rectangle(x1, y1, x2, y2, fill=color, tags="case")
def boat_color(event):  # Highlights the selected boat in green
    """Colour the boat selected in the listbox green and all others blue."""
    if phase != "init":
        return
    ship = Boatlist.get(ACTIVE)
    for item in ships_name:
        if item == ship:
            Grilles.itemconfig(ship, fill='green')
        else:
            Grilles.itemconfig(item, fill='blue')
def init_ships_Grids(boat_tab, grid):  # Initialise the boat-position grids
    """Mark every boat point of *boat_tab* as 1 on *grid* (1-based coords)."""
    for boat in boat_tab:
        for point in boat:
            grid[point[0] - 1][point[1] - 1] = 1
def finDuJeu():  # End-of-game handling
    """Check whether either fleet is destroyed; if so, end the game.

    Saves the accumulated shot/boat statistics to text files and opens a
    small window announcing the result.
    """
    global phase
    global endWindow
    joueur = 0
    IA = 0
    # Sum the remaining life points of both fleets.
    for b in range(len(ships)):
        for p in range(len(ships[b])):
            joueur += ships[b][p][2]
            IA += IAships[b][p][2]
    if (IA == 0) or (joueur == 0):
        phase = "end-game"
        endWindow = Tk()
        endWindow.geometry("300x75")
        endWindow.title("Fin du jeu")
        if IA == 0:
            state = "gagné"
        else:
            state = "perdu"
        # NOTE(review): the two conditions below look swapped — Pboat is
        # compared before writing AveragePshots and vice versa; confirm.
        if old_Average_Pboat == zeroGrid:
            overwrite_file("AveragePshots.txt", TirPlayer)
        else:
            add_grids(TirPlayer, old_Average_Pshots)
            overwrite_file("AveragePshots.txt", TirPlayer)
        if old_Average_Pshots == zeroGrid:
            overwrite_file("AveragePboat.txt", playerGrid)  # persist results via overwrite_file
        else:
            add_grids(playerGrid, old_Average_Pboat)
            overwrite_file("AveragePboat.txt", playerGrid)
        EndLabel = Label(endWindow, text="La partie est finie, vous avez " + state + " !\nA bientôt pour une nouvelle partie !")
        EndLabel.pack()
        quitter = Button(endWindow, text="Quitter", command=detruire)
        quitter.pack()
def detruire():  # Interface teardown (debug helper)
    """Destroy the main, end-game and debug windows."""
    fenetre.destroy()
    endWindow.destroy()
    debugWindow.destroy()
def initializeQueue(grid, proba, mode):  # Build the probability-based queue
    """Collect [x, y] cells of *grid* filtered against threshold *proba*.

    mode 1 keeps cells >= proba (likely boat positions); mode 2 keeps
    cells <= proba (historically safe squares). Any other mode yields [].
    """
    selected = []
    for row_idx in range(len(grid)):
        for col_idx in range(len(grid[row_idx])):
            value = grid[row_idx][col_idx]
            keep = (mode == 1 and value >= proba) or (mode == 2 and value <= proba)
            if keep:
                selected.append([row_idx, col_idx])
    return selected
def add_grids(grid, to_Add_Grid):
    """Average *to_Add_Grid* into *grid* in place, cell by cell."""
    for x, row in enumerate(grid):
        for y in range(len(row)):
            row[y] = (row[y] + to_Add_Grid[x][y]) / 2.0
def overwrite_file(name, grid):  # File-writing helper
    """Replace the contents of file *name* with the text form of *grid*."""
    with open(name, "w") as handle:
        handle.write(grid_to_txt(grid))
def grid_to_txt(grid):  # Serialise the grid's numbers as text
    """Return *grid* as text: space-separated values per row, rows
    separated by newlines, with no trailing separator."""
    rows = []
    for row in grid:
        rows.append(" ".join(str(value) for value in row))
    return "\n".join(rows)
def set_IA_Boats(IAships):  # Pick one of the preset IA layouts (positions 1..10)
    """Load a random preset placement from Placements.txt into *IAships*.

    Each file line encodes all 5 boats as concatenated digit pairs; the
    slices below split one line into the 5 per-boat coordinate strings.
    """
    position = randint(1, 10)
    file = open("Placements.txt", "r")
    for i in range(position):
        ligne = file.readline()  # keep only the `position`-th line
    file.close()
    liste = [0, 0, 0, 0, 0]
    liste[0] = ligne[:10]
    liste[1] = ligne[10:18]
    liste[2] = ligne[18:24]
    liste[3] = ligne[24:30]
    liste[4] = ligne[30:]
    for b in range(len(IAships)):
        a = 0
        for p in range(len(IAships[b])):
            # Digits are 0-based in the file; boat coordinates are 1-based.
            IAships[b][p][0] = int(liste[b][a]) + 1
            a += 1
            IAships[b][p][1] = int(liste[b][a]) + 1
            a += 1
def coreTir(ships, x, y):  # IA firing core (Python 2: note the print statement)
    """Resolve an IA shot at (x, y) on the player's grid.

    Returns ("touched", remaining_points_of_hit_boat) or ("fail", 0) and
    records the shot in the global TirsIA grid.
    """
    global TirsIA
    print "Tir déclaré en x=", x, "et y=", y
    if playerGrid[x - 1][y - 1] == 1:
        # Hit: find and mark the boat point at these coordinates.
        for b in range(len(ships)):
            for p in range(len(ships[b])):
                if (ships[b][p][0] == x) and (ships[b][p][1] == y):
                    ships[b][p][2] = 0
                    P_f_ships.set(pformat(ships))
                    boat = b
        display_case(Grilles, "Player", x, y, 3, nametag=ships_name[boat])
        valid = 0
        for p in range(len(ships[boat])):
            valid += ships[boat][p][2]
        if valid == 0:
            # The whole boat is down: paint it red.
            Grilles.itemconfig(ships_name[boat], fill='red')
        full_valid = 0
        for b in range(len(ships)):
            for p in range(len(ships[b])):
                full_valid += ships[b][p][2]
        if full_valid == 0:
            finDuJeu()
        TirsIA[x - 1][y - 1] += 1
        IA_f_shots.set(pformat(TirsIA))
        return "touched", valid
    else:
        # Miss.
        display_case(Grilles, "Player", x, y, 1, nametag="fail")
        TirsIA[x - 1][y - 1] += 1
        IA_f_shots.set(pformat(TirsIA))
        return "fail", 0
def detect_dir(x, y, oldShots, border, nextHit, calcul):  # Validate a follow-up shot direction
    """Update the shared `to_follow` state after a hit at (x, y).

    oldShots/border/nextHit/calcul are flags selecting which pruning and
    recomputation steps to run. Returns a status string ("a verif",
    "Impossible", "Incomplet") or a direction slot index.
    """
    global to_follow
    if oldShots == 1:
        # Rule out directions whose neighbour is off-grid or already shot.
        for i in range(4):
            testX = x - 1 + offset[i][0]
            testY = y - 1 + offset[i][1]
            if not(0 <= testX <= 9) or not(0 <= testY <= 9) or TirsIA[testX][testY] == 1:
                to_follow[3 + i] = False
    if border == 1:  # board-edge detection
        if y == 1:
            to_follow[6] = False
        if y == 10:
            to_follow[5] = False
        if x == 1:
            to_follow[3] = False
        if x == 10:
            to_follow[4] = False
    if nextHit != 0:
        x = to_follow[1] + to_follow[7] * offset[nextHit][0]
        # BUG FIX: y was computed from to_follow[1] (the stored x) instead
        # of to_follow[2] (the stored y) — cf. the same formula in tirIA.
        y = to_follow[2] + to_follow[7] * offset[nextHit][1]
        if TirsIA[x - 1][y - 1] == 1 or not(1 <= x <= 10) or not(1 <= y <= 10):
            to_follow[nextHit + 3] = False
    if calcul == 1:
        count = []
        for i in range(4):
            if to_follow[3 + i] is True:  # a confirmed direction: apply its pattern
                for j in range(4):
                    to_follow[3 + j] = directions[i][j]
                return "a verif"
            elif to_follow[3 + i] is False:
                count.append(1)
            else:
                count.append(0)
        if sum(count) == 4:
            # Every direction ruled out: reset the follow state entirely.
            for i in range(len(to_follow)):
                to_follow[i] = 0
            IA_following.set(pformat(to_follow))
            return "Impossible"
        if sum(count) == 3:
            for i in range(4):
                if not to_follow[3 + i] is False:
                    to_follow[3 + i] = True
            to_follow[0] = 2
            # NOTE(review): `i` here is the last loop index (always 3), not
            # the single remaining direction — confirm the intended value.
            return 3 + i
        if sum(count) == 2:
            if count[0] + count[1] == 2:
                for i in range(4):
                    to_follow[3 + i] = directions[2][i]
                to_follow[0] = 2
            elif count[2] + count[3] == 2:
                for i in range(4):
                    to_follow[3 + i] = directions[0][i]
                to_follow[0] = 2
        # NOTE(review): this unconditionally overrides the `2` set above —
        # confirm whether it should only run when no case matched.
        to_follow[0] = 1
        return "Incomplet"
def tirIA(ships, mode, primX=0, primY=0):
    """AI shot dispatcher.

    mode selects the strategy: "Random" (uniform untried cell),
    "Intelligent" (queue of likely cells learned from previous games) or
    "Following" (finish off a boat that was just hit).  primX/primY carry
    the coordinates of the initial hit when entering "Following".
    Recurses while the AI is allowed to play again.
    """
    global possibleBoat
    global to_follow
    if mode == "Random":  # Take a uniformly random untried shot
        x = randint(1, 10)
        y = randint(1, 10)
        while TirsIA[x - 1][y - 1] == 1:
            x = randint(1, 10)
            y = randint(1, 10)
        state, boatLiving = coreTir(ships, x, y)
        if state == "touched":  # Depending on the AI level and whether the boat survives, decide how the AI replays
            if IA_level.get() == 1:
                tirIA(ships, "Random")
            elif boatLiving > 0:
                tirIA(ships, 'Following', x, y)
            elif IA_level.get() == 2:
                tirIA(ships, 'Random')
            elif len(possibleBoat) > 0:
                tirIA(ships, "Intelligent")
            else:
                # BUGFIX: mode strings are case-sensitive; "random" matched no
                # branch and silently skipped the AI's extra turn.
                tirIA(ships, "Random")
    elif mode == "Intelligent":  # Pull the most probable boat positions extracted from previous games
        x = -1
        while (x == -1) or (TirsIA[x - 1][y - 1] == 1):  # Skip coordinates that were already used
            x = possibleBoat[0][0] + 1
            y = possibleBoat[0][1] + 1
            possibleBoat.pop(0)
        IA_3_Queue.set(pformat(possibleBoat))
        state, boatLiving = coreTir(ships, x, y)
        if boatLiving > 0:
            tirIA(ships, "Following", x, y)
        elif state == "fail":
            return
        elif len(possibleBoat) > 0:
            tirIA(ships, "Intelligent")
        else:
            tirIA(ships, "Random")
    elif mode == "Following":
        if to_follow[0] == 0:
            scan_result = detect_dir(primX, primY, 1, 1, 0, 1)
            if scan_result != "Impossible":
                to_follow[1] = primX
                to_follow[2] = primY
                to_follow[7] = 1
                if scan_result == "Incomplet":
                    to_follow[0] = 1
                else:
                    to_follow[0] = 2
                IA_following.set(pformat(to_follow))
            else:  # The scan found no usable direction
                if IA_level.get() == 2:
                    tirIA(ships, "Random")
                elif IA_level.get() == 3:
                    if len(possibleBoat) > 0:
                        tirIA(ships, "Intelligent")
                    else:
                        tirIA(ships, 'Random')
                return
        if to_follow[0] == 1:
            # Direction still unknown: probe a random non-invalidated direction.
            direction = randint(0, 3)
            while to_follow[direction + 3] is False:
                direction = randint(0, 3)
            x = to_follow[1] + to_follow[7] * offset[direction][0]
            y = to_follow[2] + to_follow[7] * offset[direction][1]
            state, boatLiving = coreTir(ships, x, y)
            if state == "fail":
                to_follow[direction + 3] = False
                scan_result = detect_dir(0, 0, 0, 0, 0, 1)
                IA_following.set(pformat(to_follow))
            elif boatLiving > 0:
                to_follow[7] += 1
                to_follow[direction + 3] = True
                scan_result = detect_dir(x, y, 0, 1, direction, 1)
                to_follow[0] = 2
                IA_following.set(pformat(to_follow))
                tirIA(ships, "Following")
            else:
                # Boat sunk: reset follow state, then replay per AI level.
                for i in range(len(to_follow)):
                    to_follow[i] = 0
                if IA_level.get() == 2:
                    tirIA(ships, "Random")
                elif IA_level.get() == 3:
                    if len(possibleBoat) > 0:
                        tirIA(ships, "Intelligent")
                    else:
                        tirIA(ships, 'Random')
        elif to_follow[0] == 2:
            # Orientation known: continue along the confirmed (or "Maybe") axis.
            direction = -1
            for i in range(4):
                if to_follow[3 + i] is True:
                    direction = 3 + i
                    to_follow[8] = True
            if direction == -1:
                for i in range(4):
                    if to_follow[3 + i] == "Maybe":
                        direction = 3 + i
                if to_follow[8] is True:
                    to_follow[7] = 1
                    to_follow[8] = "Maybe"
            x = to_follow[1] + to_follow[7] * offset[direction - 3][0]
            y = to_follow[2] + to_follow[7] * offset[direction - 3][1]
            state, boatLiving = coreTir(ships, x, y)
            if state == "fail":
                to_follow[direction] = False
                IA_following.set(pformat(to_follow))
            elif boatLiving > 0:
                to_follow[7] += 1
                scan_result = detect_dir(x, y, 0, 1, 1, 0)
                IA_following.set(pformat(to_follow))
                tirIA(ships, "Following")
            else:
                for i in range(len(to_follow)):
                    to_follow[i] = 0
                if IA_level.get() == 2:
                    tirIA(ships, "Random")
                elif IA_level.get() == 3:
                    if len(possibleBoat) > 0:
                        tirIA(ships, "Intelligent")
                    else:
                        tirIA(ships, 'Random')
def randomAssign(boats, preciseBoat, liste=None):
    """Place ``preciseBoat`` at a random valid position on the grid ``boats``.

    boats:       10x10 occupancy grid (1 = cell already taken); mutated in place.
    preciseBoat: list of [x, y, verif] points; coordinates are rewritten.
    liste:       optional queue of candidate (x, y) anchors, consumed (popped)
                 in order before falling back to random anchors.
    Retries recursively when no direction fits from the chosen anchor.
    """
    # BUGFIX: the old signature used a mutable default (liste=[]), which is
    # shared across calls; use None as the sentinel instead.
    if liste is None:
        liste = []
    possible_dir = [0, 0, 0, 0]  # left/right/down/up
    mainx = randint(0, 9)
    mainy = randint(0, 9)
    while boats[mainx][mainy] == 1:
        mainx = randint(0, 9)
        mainy = randint(0, 9)
    if liste != []:
        mainx = liste[0][0]
        mainy = liste[0][1]
        liste.pop(0)
        while boats[mainx][mainy] == 1:
            mainx = liste[0][0]
            mainy = liste[0][1]
            liste.pop(0)
    # Mark which of the four directions keeps the whole boat on the board.
    if (0 <= mainx + len(preciseBoat) <= 9):
        possible_dir[0] = 1
    if (0 <= mainx - len(preciseBoat) <= 9):
        possible_dir[1] = -1
    if (0 <= mainy + len(preciseBoat) <= 9):
        possible_dir[2] = 1
    if (0 <= mainy - len(preciseBoat) <= 9):
        possible_dir[3] = -1
    # Invalidate any direction that would overlap an existing boat.
    for indent in range(len(preciseBoat)):
        for direct in range(4):
            if possible_dir[direct] != 0:
                if boats[mainx + (indent * r_offset[direct][0])][mainy + (indent * r_offset[direct][1])] == 1:
                    possible_dir[direct] = 0
    stock = [i for i in range(4) if possible_dir[i] != 0]
    if not stock:
        # No direction fits from this anchor: retry with a fresh anchor.
        randomAssign(boats, preciseBoat, liste)
    else:
        # BUGFIX: the old count bookkeeping could select a *blocked* direction
        # (or leave final_dir unset and crash) when exactly one direction
        # remained; always choose uniformly among the open ones.
        final_dir = stock[randint(0, (len(stock) - 1))]
        for i in range(len(preciseBoat)):
            preciseBoat[i][0] = mainx + (i * r_offset[final_dir][0]) + 1
            preciseBoat[i][1] = mainy + (i * r_offset[final_dir][1]) + 1
        for p in range(len(preciseBoat)):
            posx = preciseBoat[p][0]
            posy = preciseBoat[p][1]
            boats[posx - 1][posy - 1] = 1
# --- Static lookup tables shared by the board rendering and the AI ---
numbers = range(1, 11)
lettres = ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J"]
reversed_Lettres = ["J", "I", "H", "G", "F", "E", "D", "C", "B", "A"]
fleches = ['Left', 'Right', 'Down', 'Up', "Rotate"]
# (dx, dy) per direction; the trailing sentinel marks the rotation action.
offset = [[-1, 0], [1, 0], [0, 1], [0, -1], "rotation"]
r_offset = [[1, 0], [-1, 0], [0, 1], [0, -1]]
# Direction-flag patterns applied once the AI narrows a boat's orientation.
directions = [[True, "Maybe", False, False], ["Maybe", True, False, False], [False, False, True, "Maybe"], [False, False, "Maybe", True]]
ships_name = ["Carrier", "Battleship", "Cruiser", "Submarine", "Destroyer"]
IAships_name = ["IACarrier", "IABattleship", "IACruiser", "IASubmarine", "IADestroyer"]
zeroGrid = initGrid()
possibleSafe = 0
possibleBoat = 0
# Per-cell statistics accumulated over previous games (loaded from disk).
old_Average_Pboat = txt_to_grid("AveragePboat.txt")
old_Average_Pshots = txt_to_grid("AveragePshots.txt")
# [state, x, y, left, right, down, up, indent, lastdir]
to_follow = [0] * 9
"""
Début de def des données du joueur
Bateaux construits sous la forme Bateau = [Point 1, Point 2, ..., Point n],
Avec Point = [x, y, verif] | Si un bateau est touché alors "verif" = 0
"""
# Player boats: each boat is a list of [x, y, verif] points; verif drops to 0 when hit.
Carrier = [[1, 1, 1], [2, 1, 1], [3, 1, 1], [4, 1, 1], [5, 1, 1]]
Battleship = [[1, 3, 1], [2, 3, 1], [3, 3, 1], [4, 3, 1]]
Cruiser = [[1, 5, 1], [2, 5, 1], [3, 5, 1]]
Submarine = [[1, 7, 1], [2, 7, 1], [3, 7, 1]]
Destroyer = [[1, 9, 1], [2, 9, 1]]
ships = [Carrier, Battleship, Cruiser, Submarine, Destroyer]
playerGrid = initGrid()
TirPlayer = initGrid()
"""
Fin de def des données du joueur
Début de def des données de l'IA
"""
# AI boats mirror the player's initial layout.
IACarrier = [[1, 1, 1], [2, 1, 1], [3, 1, 1], [4, 1, 1], [5, 1, 1]]
IABattleship = [[1, 3, 1], [2, 3, 1], [3, 3, 1], [4, 3, 1]]
IACruiser = [[1, 5, 1], [2, 5, 1], [3, 5, 1]]
IASubmarine = [[1, 7, 1], [2, 7, 1], [3, 7, 1]]
IADestroyer = [[1, 9, 1], [2, 9, 1]]
IAships = [IACarrier, IABattleship, IACruiser, IASubmarine, IADestroyer]
IAGrid = initGrid()
TirsIA = initGrid()
# End of AI data definitions
init_ships_Grids(ships, playerGrid)
init_ships_Grids(IAships, IAGrid)
# --- UI construction ---
endWindow = 0
# Secondary window exposing the raw game state (grids, queues) for debugging.
debugWindow = Tk()
debugWindow.title("Debug window")
P_Frame = LabelFrame(debugWindow, text="Infos du Joueur", padx=5, pady=5)
P_Frame.grid(column=1, row=1)
P_f_Grid = StringVar(master=P_Frame, value=pformat(playerGrid))
P_f_ships = StringVar(master=P_Frame, value=pformat(ships))
P_Grid = Label(P_Frame, textvariable=P_f_Grid)
P_Grid.grid(column=1, row=1)
P_ships = Label(P_Frame, textvariable=P_f_ships, justify="left")
P_ships.grid(column=2, row=1)
IA_Frame = LabelFrame(debugWindow, text="Infos de l'IA", padx=5, pady=5)
IA_Frame.grid(column=1, row=2)
IA_f_Grid = StringVar(master=IA_Frame, value=pformat(IAGrid))
IA_f_ships = StringVar(master=IA_Frame, value=pformat(IAships))
IA_f_shots = StringVar(master=IA_Frame, value=pformat(TirsIA))
IA_Grid = Label(IA_Frame, textvariable=IA_f_Grid)
IA_Grid.grid(column=1, row=1)
IA_ships = Label(IA_Frame, textvariable=IA_f_ships, justify="left")
IA_ships.grid(column=2, row=1)
IA_shots = Label(IA_Frame, textvariable=IA_f_shots)
IA_shots.grid(column=1, columnspan=2, row=2)
IA_3_Queue = StringVar(master=IA_Frame, value=pformat(possibleBoat))
IA_3_disp_Queue = Label(master=IA_Frame, textvariable=IA_3_Queue)
IA_3_disp_Queue.grid(column=1, columnspan=2, row=3)
IA_following = StringVar(master=IA_Frame, value=pformat(to_follow))
IA_disp_following = Label(master=IA_Frame, textvariable=IA_following)
IA_disp_following.grid(column=1, columnspan=2, row=4)
# Hidden until the player presses left-Ctrl (bound at the bottom of the file).
debugWindow.withdraw()
# Main game window.
fenetre = Tk()
fenetre.geometry("1200x850")
fenetre.title("Bataille Navale")
Grilles = Canvas(fenetre, width=430, height=830)  # start of grid drawing
Grilles.place(x=400, y=10)
trace_grid(Grilles)
Grilles.create_line(1, 0, 1, 830, tags="core")
Grilles.create_line(30, 0, 30, 830, width=2, tags="core")
Grilles.create_line(0, 400, 430, 400, width=2, tags="core")
Grilles.create_line(0, 430, 430, 430, width=2, tags="core")
Grilles.create_rectangle(0, 400, 30, 430, fill='black', tags="core")  # end of grid drawing
Boat_title = LabelFrame(fenetre, text="Choisissez le bateau à déplacer\n(Double-cliquez sur son nom)", pady=5)
Boat_title.place(x=100, y=100)
Boatlist = Listbox(Boat_title, height=5)
Boatlist.pack()
Boatlist.bind("<Button-1>", boat_color)
# Movement controls for the currently selected boat.
Button_title = LabelFrame(fenetre, text="Déplacez votre bateau\nà l'aide de ces touches :", padx=5, pady=5)
Button_title.place(x=100, y=500)
B_up = Button(Button_title, text="Up", height=2, width=5, command=lambda: moveboat(Grilles, playerGrid, Boatlist.curselection(), "Up"))
B_up.grid(row=1, column=2)
B_down = Button(Button_title, text="Down", height=2, width=5, command=lambda: moveboat(Grilles, playerGrid, Boatlist.curselection(), "Down"))
B_down.grid(row=3, column=2)
B_left = Button(Button_title, text="Left", height=2, width=5, command=lambda: moveboat(Grilles, playerGrid, Boatlist.curselection(), "Left"))
B_left.grid(row=2, column=1)
B_right = Button(Button_title, text="Right", height=2, width=5, command=lambda: moveboat(Grilles, playerGrid, Boatlist.curselection(), "Right"))
B_right.grid(row=2, column=3)
B_rotate = Button(Button_title, text="Rotate", height=2, width=5, command=lambda: moveboat(Grilles, playerGrid, Boatlist.curselection(), "Rotate"))
B_rotate.grid(row=2, column=2)
Indic = Label(fenetre, text="Ici s'afficherons les restrictions\nauquelles vous serez potentiellement soumis.\nLe code couleur est :\nbleu = votre bateau\nvert = votre bateau séléctionné\norange = touché\nrouge = coulé")
Indic.place(x=50, y=300)
B_tir = Button(fenetre, text="TIRER", command=lambda: tir_joueur(IAGrid, FireCoord.get(), IAships))
B_tir.place(x=1000, y=400)
B_verif = Button(fenetre, text="Début du combat", command=lambda: validation(playerGrid, ships))
B_verif.place(x=100, y=700)
FireCoord = Entry(fenetre)
FireCoord.place(x=950, y=350)
# AI difficulty selection (radio buttons bound to IA_level).
IA_selector = LabelFrame(fenetre, text="Choisissez votre niveau d'IA", padx=5, pady=5)
IA_selector.place(x=1000, y=600)
IA_level = IntVar(master=fenetre)
infos_radioB = [["Facile", 1], ["Intermédiaire", 2], ["Difficile", 3]]
for text, level in infos_radioB:
    radioB = Radiobutton(IA_selector, text=text, variable=IA_level, value=level)
    radioB.pack(anchor=N)
# --- End of UI construction ---
for item in ships_name:
    Boatlist.insert(END, item)
# Draw the player's boats at their initial positions.
for b in range(len(ships)):
    for p in range(len(ships[b])):
        display_case(Grilles, "Player", ships[b][p][0], ships[b][p][1], 2, ships_name[b])
fenetre.bind('<Return>', lambda event: tir_joueur(IAGrid, FireCoord.get(), IAships))
fenetre.bind('<Control_L>', lambda event: debugWindow.deiconify())
fenetre.mainloop()
|
994,114 | c4c4b1872669693e704a0a29cad023acf929b671 | # Modules
from keras.layers import Dense
from keras.layers.core import Activation
from keras.layers.normalization import BatchNormalization
from keras.layers.convolutional import UpSampling2D
from keras.layers.core import Flatten
from keras.layers import Input
from keras.layers.convolutional import Conv2D, Conv2DTranspose
from keras.models import Model
from keras.layers.advanced_activations import LeakyReLU, PReLU
from keras.layers import add
# Residual block
def res_block_gen(inp, kernal_size=3, filters=64, strides=1):
    """SRGAN-style residual block: Conv-BN-PReLU-Conv-BN plus identity skip."""
    conv_args = dict(filters=filters, kernel_size=kernal_size, strides=strides, padding="same")
    x = Conv2D(**conv_args)(inp)
    x = BatchNormalization(momentum=0.5)(x)
    # Parametric ReLU, one slope per channel (shared over spatial axes).
    x = PReLU(alpha_initializer='zeros', alpha_regularizer=None, alpha_constraint=None, shared_axes=[1, 2])(x)
    x = Conv2D(**conv_args)(x)
    x = BatchNormalization(momentum=0.5)(x)
    return add([inp, x])
def up_sampling_block(inp, filters=256, kernal_size=3, strides=1):
    """2x nearest-neighbour upsampling followed by Conv2D and LeakyReLU."""
    x = UpSampling2D(size=2)(inp)
    x = Conv2D(filters=filters, kernel_size=kernal_size, strides=strides, padding="same")(x)
    return LeakyReLU(alpha=0.2)(x)
def discriminator_block(inp, filters, kernel_size, strides):
    """Conv-BN-LeakyReLU unit used throughout the discriminator."""
    x = Conv2D(filters=filters, kernel_size=kernel_size, strides=strides, padding="same")(inp)
    x = BatchNormalization(momentum=0.5)(x)
    return LeakyReLU(alpha=0.2)(x)
class Generator(object):
    """Builds the SRGAN generator: head conv, 16 residual blocks, global skip,
    two 2x upsampling stages and a tanh output conv."""

    def __init__(self, input_shape):
        # Shape of the low-resolution input image (H, W, C).
        self.input_shape = input_shape
        self.residual_blocks_num = 16

    def create_generator(self):
        gen_input = Input(shape=self.input_shape)
        x = Conv2D(filters=64, kernel_size=9, strides=1, padding="same")(gen_input)
        head = PReLU(alpha_initializer='zeros', alpha_regularizer=None, alpha_constraint=None, shared_axes=[1, 2])(x)
        # Residual trunk.
        x = head
        for _ in range(self.residual_blocks_num):
            x = res_block_gen(x)
        x = Conv2D(filters=64, kernel_size=3, strides=1, padding="same")(x)
        x = BatchNormalization(momentum=0.5)(x)
        # Global skip connection from the head activation.
        x = add([head, x])
        # Two 2x upsampling stages -> 4x overall super-resolution.
        for _ in range(2):
            x = up_sampling_block(x)
        x = Conv2D(filters=3, kernel_size=9, strides=1, padding="same")(x)
        out = Activation('tanh')(x)
        return Model(inputs=[gen_input], outputs=[out])
class Discriminator(object):
    """Builds the SRGAN discriminator: a stack of Conv-BN-LeakyReLU blocks with
    growing filter counts, then two dense layers and a sigmoid output."""

    def __init__(self, image_shape):
        # Shape of the (high-resolution) image being judged.
        self.image_shape = image_shape

    def create_discriminator(self):
        dis_input = Input(shape=self.image_shape)
        x = Conv2D(filters=64, kernel_size=3, strides=1, padding="same")(dis_input)
        x = LeakyReLU(alpha=0.2)(x)
        # (filters, strides) schedule from the SRGAN paper.
        for filt, stride in [(64, 2), (128, 1), (128, 2), (256, 1), (256, 2), (512, 1), (512, 2)]:
            x = discriminator_block(x, filt, 3, stride)
        x = Flatten()(x)
        x = Dense(1024)(x)
        x = LeakyReLU(alpha=0.2)(x)
        x = Dense(1)(x)
        out = Activation('sigmoid')(x)
        return Model(inputs=[dis_input], outputs=[out])
|
994,115 | 7031a2a8644c9f78c791c738105ae73957adcf67 | import os
import tempfile
import time
from mock import Mock, patch
import pytest
from clicast.cast import Cast, CastReader, url_content, _url_content_cache_file
CAST_URL = 'https://raw.githubusercontent.com/maxzheng/clicast/master/test/example.cast'
CAST_FILE = os.path.join(os.path.dirname(__file__), 'example.cast')
class TestCast(object):
    """Tests for Cast parsing, saving, message add/delete and filtering."""
    def test_from_file(self):
        # Parsing the example cast exposes the alert and the ordered messages.
        cast = Cast.from_file(CAST_FILE)
        assert cast.alert == 'We found a big bad bug. Please try not to step on it!! Icky...\nNo worries. It will be fixed soon! :)'
        assert cast.alert_exit
        assert [m.message for m in cast.messages] == [
            'Version 0.1 has been released! If you upgrade, you will get:\n'
            '1) Cool feature 1\n'
            '2) Cool feature 2\n'
            'So what are you waiting for? :)',
            'Version 0.2 has been released! Upgrade today to get cool features.',
            'There is a small bug over there, so watch out!',
            '[-f\\b] A bug that affects the -f option. (applies only if `clicast.filters.match_cli_args` filter is used)'
        ]
    def test_save(self):
        # Round-trip: parse then save must reproduce the file verbatim.
        from_content = open(CAST_FILE).read()
        cast = Cast.from_file(CAST_FILE)
        to_cast_file = os.path.join(tempfile.gettempdir(), 'clicast.to_file_test.cast')
        try:
            cast.save(to_cast_file)
            to_content = open(to_cast_file).read()
            assert from_content == to_content
        finally:
            if os.path.exists(to_cast_file):
                os.unlink(to_cast_file)
    def test_from_url(self):
        # NOTE: requires network access to GitHub.
        cast = Cast.from_url(CAST_URL)
        assert cast.messages
    def test_add_msg(self):
        cast = Cast()
        cast.add_msg('Message 1')
        cast.add_msg('Message Alert', alert=True)
        cast.add_msg('Message 2')
        # Alert messages are stored separately from the keyed message list.
        assert cast.alert == 'Message Alert'
        assert cast.alert_exit == False
        assert [(m.key, m.message) for m in cast.messages] == [
            ('1', 'Message 1'),
            ('2', 'Message 2')
        ]
        cast.add_msg('Message Alert Exit', alert_exit=True)
        assert cast.alert == 'Message Alert Exit'
        assert cast.alert_exit == True
    def test_del_msg(self):
        cast = Cast()
        cast.add_msg('Message 1')
        cast.add_msg('Message 2')
        cast.add_msg('Message Alert', alert_exit=True)
        # Deleting without arguments removes the oldest regular message only.
        cast.del_msg()
        assert cast.alert == 'Message Alert'
        assert cast.alert_exit == True
        assert [(m.key, m.message) for m in cast.messages] == [('2', 'Message 2')]
        # Requesting more deletions than messages deletes what exists.
        del_count = cast.del_msg(100)
        assert del_count == 1
        cast.add_msg('Message 3')
        cast.add_msg('Message 4')
        cast.add_msg('Message 5')
        cast.del_msg(2)
        cast.del_msg(alert=True)
        assert not cast.alert
        assert not cast.alert_exit
        assert [(m.key, m.message) for m in cast.messages] == [('5', 'Message 5')]
        # Key numbering continues after a save/load/delete cycle.
        cast_file = os.path.join(tempfile.gettempdir(), 'clicast.to_file_test.cast')
        try:
            cast.save(cast_file)
            cast = Cast.from_file(cast_file)
            cast.del_msg(100)
            cast.save(cast_file)
            cast = Cast.from_file(cast_file)
            cast.add_msg('Message 6')
            assert str(cast) == '[Messages]\n6: Message 6'
        finally:
            if os.path.exists(cast_file):
                os.unlink(cast_file)
    def test_filter(self):
        # A filter that keeps only messages mentioning "small bug".
        def msg_filter(msg, alert=False):
            if 'small bug' in msg:
                return msg
        cast = Cast.from_file(CAST_FILE, msg_filter)
        assert str(cast) == '[Messages]\n3: There is a small bug over there, so watch out!'
class TestCastReader(object):
    """Tests for CastReader's unread-message tracking."""
    def setup_class(cls):
        # Point the read-marker file at a scratch location and start clean.
        CastReader.READ_MSG_FILE = '/tmp/clicast.test.read'
        if os.path.exists(CastReader.READ_MSG_FILE):
            os.unlink(CastReader.READ_MSG_FILE)
    def test_new_messages(self):
        # With no read-marker file, every message (alert first) is "new".
        cast = Cast.from_file(CAST_FILE)
        reader = CastReader(cast)
        assert reader.new_messages() == [
            'We found a big bad bug. Please try not to step on it!! Icky...\nNo worries. It will be fixed soon! :)',
            'Version 0.1 has been released! If you upgrade, you will get:\n'
            '1) Cool feature 1\n'
            '2) Cool feature 2\n'
            'So what are you waiting for? :)',
            'Version 0.2 has been released! Upgrade today to get cool features.',
            'There is a small bug over there, so watch out!',
            '[-f\\b] A bug that affects the -f option. (applies only if `clicast.filters.match_cli_args` filter is used)'
        ]
def test_url_content():
    """Exercise url_content's fetch, per-URL caching and error fallback."""
    # Live fetch (requires network access).
    assert '[Messages]' in url_content(CAST_URL)
    with patch('requests.get') as requests_get:
        mock_response = Mock()
        mock_response.text = '[Messages]\n1: Test Message'
        requests_get.return_value = mock_response
        assert str(url_content('url1', cache_duration=1)) == mock_response.text
        cached_text = mock_response.text
        mock_response.text = '[Messages]\n1: Test Message Updated'
        # This should return cached content
        assert str(url_content('url1', cache_duration=1)) == cached_text
        requests_get.assert_called_once_with('url1')
        # A different URL has its own cache entry, so it fetches fresh content.
        assert str(url_content('url2', cache_duration=1)) == mock_response.text
        # After the cache duration elapses, url1 is re-fetched.
        time.sleep(1)
        assert str(url_content('url1', cache_duration=1)) == mock_response.text
        assert requests_get.call_count == 3
        # No content,it should raise
        cache_file = _url_content_cache_file('url3')
        if os.path.exists(cache_file):
            os.unlink(cache_file)
        requests_get.side_effect = Exception
        with pytest.raises(Exception):
            assert str(url_content('url3', from_cache_on_error=True)) == mock_response.text
        # A successful fetch populates the cache...
        requests_get.side_effect = None
        assert str(url_content('url3', from_cache_on_error=True)) == mock_response.text
        # ...which is then served when the next fetch fails.
        requests_get.side_effect = Exception
        assert str(url_content('url3', from_cache_on_error=True)) == mock_response.text
|
994,116 | c803314c381f7fcff937861b69e6bb263405a8ab | # Generated by Django 3.0.3 on 2020-02-10 08:03
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the custom `User` model (table `tb_user`) for the poll app."""

    dependencies = [
        ('poll', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('no', models.AutoField(primary_key=True, serialize=False, verbose_name='No.')),
                ('username', models.CharField(max_length=20, unique=True, verbose_name='Username')),
                # NOTE(review): max_length=32 suggests an MD5 hex digest is stored
                # here rather than a salted hash — confirm against the auth code.
                ('password', models.CharField(max_length=32, verbose_name='Password')),
                ('regdate', models.DateTimeField(auto_now_add=True, verbose_name='Registration Time')),
            ],
            options={
                'verbose_name_plural': 'Users',
                'db_table': 'tb_user',
            },
        ),
    ]
|
994,117 | 1c6bc93703c449e28cae303ed6532052d431fb12 | import requests
import base64
ak = 'QsPqs20yfvQ7QcdnYfdWC5Ei'
sk = 'EEMdjil0u1CW5uI3ts1mLD0VCQvTGYs6'
def get_at(api_key, secret_key):
    """Request a Baidu AIP OAuth access token for the given credentials.

    Returns the token string, or None when the HTTP request does not succeed
    (a falsy requests.Response, e.g. a 4xx/5xx status).
    """
    host = ("https://aip.baidubce.com/oauth/2.0/token?grant_type=client_credentials"
            "&client_id=" + api_key + "&client_secret=" + secret_key)
    response = requests.get(host)
    if response:
        return response.json()["access_token"]
def mask_detect(path):
    """Detect faces/masks in the image at `path` and identify registered students.

    Calls the Baidu face 'detect' endpoint (with the mask attribute), then the
    'search' endpoint against the "students" face group.
    Returns {'info': [{'is_mask', 'student_id'}, ...], 'num': N, 'msg': 'SUCCESS'}.
    NOTE(review): the no-face path returns a *list* wrapping a dict with a
    different key layout — callers must handle both shapes; verify intent.
    """
    return_data = {'info': [], 'num': 0, 'msg': ''}
    access_token = get_at(ak, sk)
    detect_request_url = "https://aip.baidubce.com/rest/2.0/face/v3/detect" + "?access_token=" + access_token
    search_request_url = "https://aip.baidubce.com/rest/2.0/face/v3/search" + "?access_token=" + access_token
    with open(path, 'rb') as f:
        p_data = f.read()
        p_data = base64.b64encode(p_data)
        data = str(p_data, "utf-8")
        f.close()  # redundant: the with-block already closes the file
    # Request bodies are hand-built JSON strings (kept as-is).
    params = "{\"image\":\"" + str(data) + "\",\"image_type\":\"BASE64\",\"face_field\":\"mask\"}"
    headers = {'content-type': 'application/json'}
    response1 = requests.post(detect_request_url, data=params, headers=headers)
    res_data = response1.json()
    face_num = res_data['result']['face_num']
    if face_num != 0:
        for n in range(face_num):
            mask_data = res_data['result']['face_list'][n]['mask']
            is_mask = mask_data['type']
            return_data['info'].append({'is_mask': 0, 'student_id': 0})
            if is_mask == 1:
                return_data['info'][n]['is_mask'] = 1
            elif is_mask == 0:
                # NOTE(review): an unmasked face is pre-labelled 'unknown' here and
                # only overwritten if the search below succeeds — confirm intent.
                return_data['info'][n]['student_id'] = 'unknown'
    else:
        return [{'msg': 'WRONG', 'num': 0, 'info': ""}]
    # ----------------------------
    params = "{\"image\":\"" + str(data) + "\",\"image_type\":\"BASE64\",\"group_id_list\":\"students\"," \
             "\"quality_control\":\"LOW\",\"liveness_control\":\"NORMAL\"} "
    headers = {'content-type': 'application/json'}
    response2 = requests.post(search_request_url, data=params, headers=headers)
    res_data2 = response2.json()
    if res_data2['error_msg'] == 'SUCCESS':
        for n in range(face_num):
            return_data['info'][n]['student_id'] = res_data2['result']['user_list'][n]['user_id']
    return_data['msg'] = 'SUCCESS'
    return_data['num'] = face_num
    return return_data
def face_register(path, trans_id):
    """Register the face image at `path` under user id `trans_id` in the
    "students" face group. Returns the API's error_msg string ('SUCCESS' on ok).
    """
    access_token = get_at(ak, sk)
    with open(path, 'rb') as f:
        p_data = f.read()
        p_data = base64.b64encode(p_data)
        data = str(p_data, "utf-8")
        f.close()  # redundant: the with-block already closes the file
    request_url = 'https://aip.baidubce.com/rest/2.0/face/v3/faceset/user/add' + "?access_token=" + access_token
    params = "{\"image\":\"" + data + "\",\"image_type\":\"BASE64\"," \
             "\"group_id\":\"students\",\"user_id\":\"" + trans_id + \
             "\",\"quality_control\":\"LOW\",\"liveness_control\":\"NORMAL\"} "
    headers = {'content-type': 'application/json'}
    response = requests.post(request_url, data=params, headers=headers)
    return response.json()['error_msg']
def user_delete(trans_id):
    """Delete the registered face of user `trans_id` from the "students" group.

    Returns {"msg": "SUCCESS"} or {"msg": "FAIL"}.
    NOTE(review): only the *first* face_token is deleted (user_get returns one);
    a user with several registered faces keeps the rest — confirm intent.
    """
    user_face_token = user_get(trans_id)
    if user_face_token == -1:
        return {"msg": "FAIL"}
    access_token = get_at(ak, sk)
    request_url = "https://aip.baidubce.com/rest/2.0/face/v3/faceset/face/delete"
    params = "{\"user_id\":\""+str(trans_id)+"\",\"group_id\":\"students\",\"face_token\":\""+str(user_face_token)+"\"}"
    request_url = request_url + "?access_token=" + access_token
    headers = {'content-type': 'application/json'}
    response = requests.post(request_url, data=params, headers=headers)
    if response.json()['error_msg'] == 'SUCCESS':
        return {"msg": "SUCCESS"}
    else:
        return {"msg": "FAIL"}
def user_get(trans_id):
    """Return the first registered face_token for user `trans_id`, or -1 on failure."""
    request_url = "https://aip.baidubce.com/rest/2.0/face/v3/faceset/face/getlist"
    params = "{\"user_id\":\"" + str(trans_id) + "\",\"group_id\":\"students\"}"
    access_token = get_at(ak, sk)
    request_url = request_url + "?access_token=" + access_token
    headers = {'content-type': 'application/json'}
    response = requests.post(request_url, data=params, headers=headers)
    if response.json()["error_msg"] == "SUCCESS":
        return response.json()['result']['face_list'][0]['face_token']
    else:
        return -1
# Manual smoke test: delete user 1's face registration.
if __name__ == "__main__":
    print(user_delete(1))
|
994,118 | 217a838b6cc58212f992a046b03345febb6dbdc1 | # Generated by Django 2.1 on 2018-09-01 23:15
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Add a `time` DateField to the Location model."""

    dependencies = [
        ('weatherApp', '0005_auto_20180901_1558'),
    ]

    operations = [
        migrations.AddField(
            model_name='location',
            name='time',
            # The default is the fixed timestamp captured when makemigrations
            # ran; it only backfills existing rows, it is not "now" at runtime.
            field=models.DateField(default=datetime.datetime(2018, 9, 1, 23, 15, 25, 511856, tzinfo=utc)),
        ),
    ]
|
994,119 | adb5945f1995e57bba817b7e8763d19fb9f81be5 | # -*- coding: utf-8 -*-
"""
Created on Sat Jul 11 17:45:07 2020
@author: sifan
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import gradients_impl as gradient_ops
from tensorflow.python.ops.parallel_for import control_flow_ops
from tensorflow.python.util import nest
def jacobian(output, inputs, use_pfor=True, parallel_iterations=None):
    """Computes jacobian of `output` w.r.t. `inputs`.
    Args:
      output: A tensor.
      inputs: A tensor or a nested structure of tensor objects.
      use_pfor: If true, uses pfor for computing the jacobian. Else uses
        tf.while_loop.
      parallel_iterations: A knob to control how many iterations and dispatched in
        parallel. This knob can be used to control the total memory usage.
    Returns:
      A tensor or a nested structure of tensors with the same structure as
      `inputs`. Each entry is the jacobian of `output` w.r.t. to the corresponding
      value in `inputs`. If output has shape [y_1, ..., y_n] and inputs_i has
      shape [x_1, ..., x_m], the corresponding jacobian has shape
      [y_1, ..., y_n, x_1, ..., x_m]. Note that in cases where the gradient is
      sparse (IndexedSlices), jacobian function currently makes it dense and
      returns a Tensor instead. This may change in the future.
    """
    flat_inputs = nest.flatten(inputs)
    output_tensor_shape = output.shape
    output_shape = array_ops.shape(output)
    # Flatten the output so the jacobian can be built one scalar element at a time.
    output = array_ops.reshape(output, [-1])
    def loop_fn(i):
        # Gradient of the i-th output element w.r.t. every input; unconnected
        # inputs yield zeros rather than None.
        y = array_ops.gather(output, i)
        return gradient_ops.gradients(y, flat_inputs, unconnected_gradients=tf.UnconnectedGradients.ZERO)
    try:
        output_size = int(output.shape[0])
    except TypeError:
        # Static shape unknown: fall back to the dynamic size.
        output_size = array_ops.shape(output)[0]
    if use_pfor:
        pfor_outputs = control_flow_ops.pfor(
            loop_fn, output_size, parallel_iterations=parallel_iterations)
    else:
        pfor_outputs = control_flow_ops.for_loop(
            loop_fn,
            [output.dtype] * len(flat_inputs),
            output_size,
            parallel_iterations=parallel_iterations)
    for i, out in enumerate(pfor_outputs):
        if isinstance(out, ops.Tensor):
            # Restore the original output dimensions in front of each input's shape.
            new_shape = array_ops.concat(
                [output_shape, array_ops.shape(out)[1:]], axis=0)
            out = array_ops.reshape(out, new_shape)
            out.set_shape(output_tensor_shape.concatenate(flat_inputs[i].shape))
            pfor_outputs[i] = out
    return nest.pack_sequence_as(inputs, pfor_outputs)
994,120 | 23cc26adad4bf94f316bea66dbf2535635921e2a | n=int(input("Enter a number:"))
sum=0
while(n>0):
rem=n%10
sum=sum+rem
n=(n//10)
print("The total sum of digits is:",sum) |
994,121 | 2e52ba3453c7b629f4b6c1862ffa78b63afec7f7 | import subprocess
import os
import getpass
def connection():
    # Run the login batch script to map/authenticate the network share.
    path = r"core\\login.bat"
    p = subprocess.Popen(path,stdout=subprocess.PIPE,shell=True)
    # NOTE(review): communicate()'s argument goes to the process's stdin, it is
    # not executed as a command; 'dir c:\\' is likely ignored -- confirm intent.
    stdout,stderr = p.communicate('dir c:\\')
    print stdout
def disconnect():
    # Run the cleanup batch script to remove the share mapping.
    path = r"core\\delete.bat"
    p = subprocess.Popen(path,stdout=subprocess.PIPE,shell=True)
    stdout,stderr = p.communicate('dir c:\\')
def create_file(enrl):
    # Mark the user as online by creating an empty marker file on the share.
    path = r"v:\\login\\"+enrl+".txt"
    # Use a context manager so the handle is closed even if the write fails.
    with open(path, "w"):
        pass
def delete_file(enrl):
    # Remove the online-marker file when the user logs out.
    path = r"v:\\login\\"+enrl+".txt"
    os.remove(path)
def online_users():
    # Every marker file in v:\login is an online user; strip the ".txt" suffix.
    path=r"v:\\login"
    user_list = os.listdir(path)
    for user in user_list:
        print user[:-4]
def check_localdb():
    # Ensure the local chat-history folder exists and list past chat partners.
    path = r"C:\\Users\\"+getpass.getuser()+"\\Documents\\yogitakijai"
    if (not os.path.exists(path)):
        os.makedirs(path)
    path = path + "\\urchats"
    if (not os.path.exists(path)):
        os.makedirs(path)
    chat_list = os.listdir(path)
    for user in chat_list:
        print user[:-4]
def menu():
    # Interactive session: register as online, show peers, chat, then clean up.
    enrl = raw_input("Enter enrollmant number")
    enr2 = raw_input("Who do u want to chat with")
    if(not os.path.exists(r"v:\\login")):
        os.makedirs(r"v:\\login")
    create_file(enrl)
    online_users()
    check_localdb()
    delete_file(enrl)
    chat(enrl,enr2)
    check_newmsg(enrl)
    # Keep the console open until the user presses Enter.
    raw_input()
def check_newmsg(enr1):
path=r"v:\\chat"
unread_list=os.listdir(path)
for users in unread_list:
if enr1 in users:
print users
path=path+"\\"+users
with open(path+"\\timeline.txt") as f:
msg=f.readlines()
print msg
def chat(enr1,enr2):
    # Ensure the shared conversation folder exists; the folder name is the two
    # enrollment numbers in sorted order so both peers derive the same path.
    path=r"v:\\chat"
    if(not os.path.exists(path)):
        os.makedirs(path)
    max_enr=max(enr1,enr2)
    min_enr=min(enr1,enr2)
    path = path+"\\"+min_enr+"_"+max_enr
    if(not os.path.exists(path)):
        os.makedirs(path)
connection()
try:
    # Ensure the share mapping is torn down even if the session raises.
    menu()
finally:
    disconnect()
|
994,122 | 09834ef9aa03abb6c2757e0774b804b49c84bbca | def product(*num):
result = 1
for i in num:
result *= i
print(result)
product(10, 20, 30)
def sum(num1, *arg):
result = 0
for i in arg:
result += i
print(result)
sum(5000, 20, 30)
|
994,123 | 6c814da40b377cc8539dce8e14893a46392dd656 | def solve():
N, M = map(int, input().split())
if N == 1 and M == 1:
print(1)
exit()
if M < N:
N, M = M, N
if N == 1:
print(M - 2)
exit()
print((N - 2) * (M - 2))
if __name__ == '__main__':
solve()
|
994,124 | bdd5e8208540a462359d09c04ea3d54d34017f68 | import carla
import erdos
# Pylot specific imports.
import pylot.utils
import pylot.simulation.utils
DEFAULT_VIS_TIME = 30000.0
class CanBusVisualizerOperator(erdos.Operator):
    """ CanBusVisualizerOperator visualizes the CanBus locations.
    This operator listens on the `can_bus` feed and draws the locations on the
    world simulation screen.
    Attributes:
        _world: A handle to the world to draw the locations on.
    """
    def __init__(self, can_bus_stream, name, flags, log_file_name=None):
        """ Initializes the CanBusVisualizerOperator with the given
        parameters.
        Args:
            can_bus_stream: Stream of CanBus messages to visualize.
            name: The name of the operator.
            flags: A handle to the global flags instance to retrieve the
                configuration (carla host/port/timeout are read from it).
            log_file_name: The file to log the required information to.
        Raises:
            ValueError: If the simulator connection cannot be established.
        """
        can_bus_stream.add_callback(self.on_can_bus_update)
        self._name = name
        self._logger = erdos.utils.setup_logging(name, log_file_name)
        self._flags = flags
        _, self._world = pylot.simulation.utils.get_world(
            self._flags.carla_host, self._flags.carla_port,
            self._flags.carla_timeout)
        if self._world is None:
            raise ValueError("Error connecting to the simulator.")
    @staticmethod
    def connect(can_bus_stream):
        # This operator only consumes; it produces no output streams.
        return []
    def on_can_bus_update(self, msg):
        """ The callback function that gets called upon receipt of the
        CanBus location to be drawn on the screen.
        Args:
            msg: CanBus message
        """
        self._logger.debug('@{}: {} received message'.format(
            msg.timestamp, self._name))
        vehicle_transform = msg.data.transform
        # Draw position. We add 0.5 to z to ensure that the point is above the
        # road surface.
        self._world.debug.draw_point(carla.Location(
            x=vehicle_transform.location.x,
            y=vehicle_transform.location.y,
            z=vehicle_transform.location.z + 0.5),
                                     size=0.2,
                                     life_time=DEFAULT_VIS_TIME,
                                     color=carla.Color(255, 0, 0))
994,125 | c6bf80e6001a27f353b8d94cc608998b811d0dc8 | #!/usr/bin/env python
# coding=utf-8
from __future__ import unicode_literals, print_function, division
import sys
import binascii
from diameterparser.decode_diameter import decode_diameter
def convertMac(octet):
    """Return the lowercase hex encoding of *octet* as a text string.

    Equivalent to hex-encoding each byte and concatenating, but portable:
    the old per-character loop (`b2a_hex(x) for x in list(octet)`) breaks on
    Python 3, where iterating bytes yields ints and joining bytes with a str
    separator raises TypeError.
    """
    return binascii.hexlify(octet).decode('ascii')
class DiameterConn:
    """Thin wrapper around decode_diameter for raw Diameter packets."""
    def __init__(self):
        self.diameter = decode_diameter()
    def decode(self, input_hex):
        """Decode a raw packet; returns (headerinfo, tree) from the parser."""
        # convertMac turns the raw bytes into the hex string the parser expects.
        headerinfo, tree = self.diameter.decode(convertMac(input_hex))
        return headerinfo, tree
|
994,126 | 99c5545cfa2923f80093b8f54b4105075f6a5e43 | import multiprocessing
bind = "0.0.0.0:8000"
workers = 2 # multiprocessing.cpu_count() * 2 + 1
worker_tmp_dir = '/tmp_gunicorn'
|
994,127 | ab174d5f62f09444e0caa82fac6ce6eef9885be8 | #!/usr/bin/env python3
import os
from pyblake2 import blake2b
from sapling_generators import SPENDING_KEY_BASE
from sapling_jubjub import Fr, Point, r_j
from sapling_key_components import to_scalar
from sapling_utils import cldiv, leos2ip
from tv_output import render_args, render_tv
def H(x):
    """BLAKE2b-512 hash of *x* with the Zcash RedJubjub personalization."""
    return blake2b(x, person=b'Zcash_RedJubjubH').digest()
def h_star(B):
    """Map the byte string *B* into the scalar field: Fr(LEOS2IP(H(B)))."""
    digest = H(B)
    return Fr(leos2ip(digest))
class RedJubjub(object):
    """RedDSA over the Jubjub curve (Zcash Sapling re-randomizable
    signature scheme): key generation, key re-randomization, signing
    and verification."""

    l_G = 256  # bit length of an encoded group element (l_J)
    l_H = 512  # bit length of the hash used for nonces/challenges

    # Type aliases for the scheme's key/randomness spaces.
    Public = Point
    Private = Fr
    Random = Fr

    def __init__(self, P_g, random=os.urandom):
        # P_g: generator point; random: entropy source (injectable for
        # deterministic test vectors).
        self.P_g = P_g
        self._random = random

    def gen_private(self):
        # Private key: a scalar derived from 64 random bytes.
        return to_scalar(self._random(64))

    def derive_public(self, sk):
        # Public key vk = sk * P_g.
        return self.P_g * sk

    def gen_random(self):
        # Randomizer alpha = H*(T) for a (l_H + 128)-bit random T.
        T = self._random((self.l_H + 128) // 8)
        return h_star(T)

    @staticmethod
    def randomize_private(sk, alpha):
        # Re-randomized private key.
        return sk + alpha

    def randomize_public(self, vk, alpha):
        # Matching re-randomized public key.
        return vk + self.P_g * alpha

    def sign(self, sk, M):
        """Sign message *M* with *sk*; returns Rbar || Sbar.

        r = H*(T || M), R = r * P_g, S = r + H*(Rbar || M) * sk.
        """
        T = self._random((self.l_H + 128) // 8)
        r = h_star(T + M)
        R = self.P_g * r
        Rbar = bytes(R)
        S = r + h_star(Rbar + M) * sk
        Sbar = bytes(S)  # TODO: bitlength(r_j)
        return Rbar + Sbar

    def verify(self, vk, M, sig):
        """Verify *sig* on *M* under *vk*.

        Accepts iff R decodes, S < r_j, and S*P_g == R + c*vk with
        c = H*(Rbar || M).
        """
        mid = cldiv(self.l_G, 8)
        (Rbar, Sbar) = (sig[:mid], sig[mid:])  # TODO: bitlength(r_j)
        R = Point.from_bytes(Rbar)
        S = leos2ip(Sbar)
        c = h_star(Rbar + M)
        return R and S < r_j and self.P_g * Fr(S) == R + vk * c
def main():
    """Generate RedJubjub signature test vectors and render them."""
    args = render_args()

    from random import Random
    prng = Random(0xabad533d)

    def randbytes(length):
        # Deterministic byte source so the vectors are reproducible.
        return bytes(prng.randrange(0, 256) for _ in range(length))

    rj = RedJubjub(SPENDING_KEY_BASE, randbytes)

    test_vectors = []
    for i in range(10):
        sk = rj.gen_private()
        vk = rj.derive_public(sk)
        alpha = rj.gen_random()
        rsk = rj.randomize_private(sk, alpha)
        rvk = rj.randomize_public(vk, alpha)
        M = bytes([i] * 32)
        sig = rj.sign(sk, M)
        rsig = rj.sign(rsk, M)

        # Each signature verifies only under its matching key.
        assert rj.verify(vk, M, sig)
        assert rj.verify(rvk, M, rsig)
        assert not rj.verify(vk, M, rsig)
        assert not rj.verify(rvk, M, sig)

        test_vectors.append({
            'sk': bytes(sk),
            'vk': bytes(vk),
            'alpha': bytes(alpha),
            'rsk': bytes(rsk),
            'rvk': bytes(rvk),
            'm': M,
            'sig': sig,
            'rsig': rsig,
        })

    render_tv(
        args,
        'sapling_signatures',
        (
            ('sk', '[u8; 32]'),
            ('vk', '[u8; 32]'),
            ('alpha', '[u8; 32]'),
            ('rsk', '[u8; 32]'),
            ('rvk', '[u8; 32]'),
            ('m', '[u8; 32]'),
            ('sig', '[u8; 64]'),
            ('rsig', '[u8; 64]'),
        ),
        test_vectors,
    )


if __name__ == '__main__':
    main()
|
994,128 | 1e62c472b562a30cf42f1be049425df9e35ce1c8 | # -*- coding: utf-8 -*-
class Frob(object):
    """A named node of a doubly linked list ordered by name."""

    def __init__(self, name):
        self.name = name
        self.before = None  # predecessor link
        self.after = None   # successor link

    def setBefore(self, before):
        self.before = before

    def setAfter(self, after):
        self.after = after

    def getBefore(self):
        return self.before

    def getAfter(self):
        return self.after

    def myName(self):
        return self.name

    def __str__(self):
        return self.name

    # repr and str render identically: just the node's name.
    __repr__ = __str__
def _splice(left, node, right):
    """Link left <-> node <-> right; either neighbour may be None."""
    if left is not None:
        left.setAfter(node)
        node.setBefore(left)
    if right is not None:
        node.setAfter(right)
        right.setBefore(node)


def insert(atMe, newFrob):
    """
    atMe: a Frob that is part of a doubly linked list
    newFrob: a Frob with no links
    This procedure appropriately inserts newFrob into the linked list that atMe is a part of.
    """
    prev = atMe.getBefore()
    nxt = atMe.getAfter()
    if atMe.name < newFrob.name:
        # newFrob sorts after atMe: splice here or recurse rightwards.
        if nxt is not None and not newFrob.name < nxt.name:
            insert(nxt, newFrob)
        else:
            _splice(atMe, newFrob, nxt)
    elif newFrob.name < atMe.name:
        # newFrob sorts before atMe: splice here or recurse leftwards.
        if prev is not None and not prev.name < newFrob.name:
            insert(prev, newFrob)
        else:
            _splice(prev, newFrob, atMe)
    else:
        # Equal names: insert immediately before atMe.
        _splice(prev, newFrob, atMe)
# Build a sorted doubly linked list by inserting nodes one at a time.
eric = Frob('eric')
andrew = Frob('andrew')
ruth = Frob('ruth')
fred = Frob('fred')
martha = Frob('martha')
danny = Frob('danny')
bob = Frob('bob')
jeff = Frob('jeff')
john = Frob('john')
jeb = Frob('jeb')
# BUGFIX: the original list contained `jeb` twice. Re-inserting a node that
# is already linked makes it its own before/after neighbour, so the
# traversal below printed 'jeb' forever. Each Frob now appears exactly once.
frobs = [eric, andrew, ruth, fred, martha, danny, bob, jeff, jeb, john]
for frob in frobs[1:]:
    insert(eric, frob)

# Walk the list from its head (alphabetically first name) and print it.
frob = andrew
while frob is not None:
    print(frob)
    frob = frob.getAfter()
|
994,129 | 1cbcef34ffd194a5e8fd8cb111d05423d76fa9ad | import pandas as pd
import numpy as np
import seaborn as sns
from tensorflow.python.ops.gen_array_ops import pad_eager_fallback
# Seaborn styling + load the synthetic regression dataset.
sns.set()
df = pd.read_csv("../data/fake_reg.csv")
sns.pairplot(df)

from sklearn.model_selection import train_test_split

# Features and target as numpy arrays (Keras works on arrays, not frames).
X = df[["feature1", "feature2"]].values
y = df["price"].values

# Hold out 30% of the rows for evaluation; fixed seed for reproducibility.
X_train, X_test, y_train, y_test = \
    train_test_split(
        X, y, test_size=0.3,
        random_state=42)
# train_test_split?
X_train.shape
X_test.shape

from sklearn.preprocessing import MinMaxScaler
# help(MinMaxScaler)
scaler = MinMaxScaler()
# Fit the scaler on the training split only, so no information from the
# test set leaks into preprocessing (data leakage).
scaler.fit(X_train)
X_train = scaler.transform(X_train)
X_test = scaler.transform(X_test)
# After scaling, the training features lie in [0, 1].
X_train.max()
X_train.min()

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
# read the documentation of Sequential and Dense
#%%
# Way 1: pass the layer list to the Sequential constructor.
model = Sequential([
    Dense(4, activation="relu"),
    Dense(2, activation="relu"),
    Dense(1)
])
# a Dense layer is fully connected to the nodes
# of the previous layer
#%%
# Way 2: add the layers one by one.
model = Sequential()
model.add(Dense(4, activation="relu"))
model.add(Dense(2, activation="relu"))
model.add(Dense(1))
#%%
# Architecture actually trained below: three hidden layers of 4 units and
# one linear output unit (regression).
model = Sequential()
model.add(Dense(4, activation="relu"))
model.add(Dense(4, activation="relu"))
model.add(Dense(4, activation="relu"))
model.add(Dense(1))
model.compile(optimizer="rmsprop",
              loss="mse")
#%%
model.fit(x=X_train, y=y_train, epochs=250)
#%%
# Training-loss curve.
df_loss = pd.DataFrame(model.history.history)
df_loss.plot()
#%%
# the last layer determines what type of
# model will be produced:
# in a regression problem we leave the
# last layer with an identity activation function;
# if the problem were a classification problem
# we would use more final neurons and
# a different activation function like
# a sigmoid
# what are the parameters for the "compile()" method?
# Reference examples (each compile() call replaces the previous setup):
# multi-class classification problem
model.compile(optimizer="rmsprop",
              loss="categorical_crossentropy",
              metrics=["accuracy"])
# for binary classification problem
model.compile(optimizer="rmsprop",
              loss="binary_crossentropy",
              metrics=["accuracy"])
# for a regression problem
model.compile(optimizer="rmsprop",
              loss="mse")
#%%
# some notes
"""
In the world of neural networks
and linear regression specially for machine learning
it is a good practive to scale the variables
there are some options: standar scaler, minmax scaler
this is important for models that use gradient descent
optimazition.
If the features are not scalled the hyper surface of
features and loss function will be distorted.
And it may cause difficulty for the optimization
algorithm to find the optimum point.
For tree methods this is not a problem.
Decision trees do not use gradient descent as
optimization algorithms.
For inference models (am I using the word 'inferece'
in the right context here?) the scalaing of the variables
may be not what we want to do.
I did not see in econometrics books scalling is not
mentioned. (find out books to read about)
That is because for inference we want to find how
explatory variables are affecting the explained variable
(target variable). Usually this is done in linear regression.
"""
"""
Another discussion:
The word inference.
Inferential statistics.
I think that inference does not mean causal.
For example inferential can refer to something
like using a sample to know how the population
behaves.
Usually there is a distinction between inferential
models and predictive models.
"""
"""
Make a discussion about the words
"Decision Trees"
"Regression Trees"
"Classification Trees"
What are the differences between those words?
"""
#%%
y_test
X_test
# Loss on train vs test data; similar values suggest no overfitting.
model.evaluate(X_train, y_train, verbose=0)
model.evaluate(X_test, y_test, verbose=0)
#%%
# Compare predictions against the held-out observations.
test_predictions = model.predict(X_test)
test_predictions = pd.Series(test_predictions.reshape(-1,))
y_test_series = pd.Series(y_test)
df_pred = pd.concat([y_test_series, test_predictions], axis=1)
df_pred.columns = ["observed","predicted"]
sns.pairplot(df_pred)
sns.scatterplot(data=df_pred, x="observed", y="predicted")
#%%
from sklearn.metrics import mean_absolute_error, mean_squared_error
mean_absolute_error(df_pred["observed"], df_pred["predicted"])
# RMSE (square root of the MSE), in the same units as the target.
mean_squared_error(df_pred["observed"], df_pred["predicted"])**.5
df.describe()
#%%
# Score a new gem; apply the SAME fitted scaler before predicting.
new_gem = [[998,1000]]
new_gem = scaler.transform(new_gem)
model.predict(new_gem)
#%%
# Persist the trained model and reload it to confirm it round-trips.
from tensorflow.keras.models import load_model
model.save('../models/my_gem_model.hdf5')
#%%
later_model = load_model('../models/my_gem_model.hdf5')
later_model.predict(new_gem)
994,130 | 4773586881229fa669e4516ae4e514105ca3e270 | def circulo():
r=int(input("Ingrese radio"))
import math
t=math.pi*pow(r,2)
print("Área= ",t,"\n")
def triangulo():
    """Ask for base and height and print the triangle's area."""
    base = int(input("Ingrese Base"))
    altura = int(input("Ingrese altura"))
    print("Área= ", (base * altura) / 2, "\n")
def rectangulo():
    """Ask for length and width and print the rectangle's area."""
    largo = int(input("Ingrese Largo"))
    ancho = int(input("Ingrese Ancho"))
    print("Área= ", largo * ancho, "\n")
x=""
while x!="s":
x=input("a. Circulo\nb.Triangulo\nc.Rectangulo\ns.Salir\nDigite la opcion: ")
def area(opcion):
if opcion=="a":
circulo()
elif opcion=="b":
triangulo()
elif opcion=="c":
rectangulo()
elif opcion=="s":
print("Gracias por usar nuestro programa")
else:
print("Ingrese opcion valida")
area(x)
|
994,131 | 9e2c318da890f30b8e8164a93b705e89329219a5 | s1=input()
l1=s1.split(' ')
d1={}
for i in l1:
d1[i]=l1.count(i)
for i,j in zip(d1.keys(),d1.values()):
print(i,":",j)
|
994,132 | 213fdaae53055fa2a46abc0e5f63ed989741ee91 | from django.contrib.auth.decorators import login_required
from django.urls import path
from . import views
# URL namespace used by {% url 'gamescoring:...' %} and reverse().
app_name = 'gamescoring'

# Every view requires an authenticated user; login_required is applied
# here so the view functions themselves stay undecorated.
urlpatterns = [
    path('ScoreConfirm/', login_required(views.ScoreConfirm), name = "ScoreConfirm" ),
    path('SaveScore/', login_required(views.SaveScore), name = "SaveScore" ),
    path('NewScore/', login_required(views.NewScore), name = "NewScore" ),
    # Catch-all category route: must stay last or it would shadow the above.
    path('<category>/',login_required(views.EnterScore), name = "EnterScore" ),
]
|
994,133 | ec2af862fe2bd0162e4a75fa9c581b9d308eb872 | from django.shortcuts import render, redirect
from django.views.decorators.cache import cache_control
from django.contrib.auth.decorators import login_required
from tests.models import TestInfo, QuestionInfo, TestingResults, QuestionAnswer, QuestionAnswerUser, HtmlBlocks
import datetime
from enum import Enum
from django.db import connection
class QuestionType(Enum):
    """Supported question kinds, matched against type_name elsewhere.

    NOTE(review): the values (3, 4, 6) presumably mirror ids in the
    question-type reference data -- confirm before changing them.
    """
    ONE = 3       # single selectable answer
    SEVERAL = 4   # several selectable answers
    MANUAL = 6    # manually entered (free-text) answer
@cache_control(must_revalidate=True, no_store=True)
@login_required(login_url='/login/')
def testing(request):
    """Show the current question of a running test, or the start page."""
    session = request.session
    if 'question' not in session:
        # No test in progress yet.
        return render(request, 'tests/start_page.html')
    question = session['question']
    # Answers previously stashed for this question, if any.
    stashed = None
    if 'temp_answers' in session:
        stashed = session['temp_answers'].get(str(question['id']))
    context = {
        'question': question,
        'answers': session.get('answers'),
        'time_to_solve': session.get('time_to_solve'),
        'has_prev': session.get('has_prev'),
        'has_next': session.get('has_next'),
        'temp_answers': stashed,
    }
    return render(request, 'tests/test.html', context=context)
@cache_control(must_revalidate=True, no_store=True)
@login_required(login_url='/login/')
def test_action(request):
    """Handle a navigation action posted from the testing page.

    `action-type` is one of: 'start' (begin the test posted in `test-id`),
    'next'/'prev' (stash current answers and move between questions) or
    'finish' (stash answers, persist the attempt, redirect to results).
    """
    action_type = request.POST.get('action-type')
    if action_type == 'start':
        # Fresh attempt: drop leftover state and remember the chosen test.
        request.session.clear()
        request.session.setdefault('temp_answers', dict())
        request.session['test-id'] = request.POST.get('test-id')
    test = TestInfo.objects.get(pk=request.session['test-id'])
    if action_type == 'start':
        question = test.questions.first()
    elif action_type == 'next':
        temp_save(request)
        question = test.questions.filter(ord__gt=request.session['ord']).first()
    elif action_type == 'prev':
        temp_save(request)
        question = test.questions.filter(ord__lt=request.session['ord']).last()
    elif action_type == 'finish':
        # Persist the attempt, then clear the per-question view state so
        # the start page is rendered on the next visit.
        temp_save(request)
        request.session['result-id'] = finish_testing(request)
        request.session.pop('question', None)
        request.session.pop('answers', None)
        request.session.pop('has_next', None)
        request.session.pop('has_prev', None)
        request.session.pop('time_to_solve', None)
        return redirect('tests:result')
    # NOTE(review): if the filters above match no question (stepping past
    # either end), `question` is None and the next line raises --
    # presumably the UI hides prev/next at the ends; confirm.
    request.session['ord'] = question.ord
    request.session['question'] = dict(id=question.id, question_text=question.question_text, question_type=question.question_type.type_name)
    # Render answer widgets only for known (ONE/SEVERAL/MANUAL) types,
    # using the HTML template attached to the question type.
    if question.question_type.type_name in [question_type.name for question_type in QuestionType]:
        request.session['answers'] = [
            question.question_type.html_block.html_block.format(name=f"ANSWER-{question.id}-{question.question_type.type_name}",
                                                                id=answer.id,
                                                                text=answer.answer_text)
            for answer
            in question.answers]
    # A fresh attempt gets the full time budget; navigation carries the
    # remaining time forward (the +1 presumably offsets the posting delay
    # -- confirm against the front-end timer).
    request.session['time_to_solve'] = test.time_to_solve if action_type == 'start' else int(
        request.POST.get('available-time')) + 1
    request.session['has_prev'] = True if test.questioninfo_set.all().filter(
        ord__lt=question.ord).last() is not None else False
    request.session['has_next'] = True if test.questioninfo_set.all().filter(
        ord__gt=question.ord).first() is not None else False
    return redirect('tests:testing')
def testing_result(request):
    """Render the outcome of the attempt whose id is stored in the session.

    NOTE(review): unlike the other views here, this one carries no
    login_required/cache_control decorators -- confirm that is intended.
    """
    count_correct, count_incorrect, ball = get_results(request.session['result-id'])
    template = HtmlBlocks.objects.get(pk=4).html_block
    result_block = template.format(count_correct=count_correct,
                                   count_incorrect=count_incorrect,
                                   ball=ball)
    return render(request, 'tests/testing_result.html', context={"result_block": result_block})
def temp_save(request):
    """Stash the answers posted for the current question in the session.

    Stores the list from the single 'ANSWER*' POST key under the current
    question id; an empty POST clears a previously stored entry by
    setting it to None.
    """
    posted = [request.POST.getlist(key) for key in request.POST if 'ANSWER' in key]
    stash = request.session['temp_answers']
    qid = str(request.session['question']['id'])
    if not posted:
        if qid in stash:
            stash[qid] = None
        return
    assert len(posted) == 1
    stash[qid] = posted[0]
def finish_testing(request):
    """Persist the finished attempt and its answers; return the result id."""
    result = TestingResults(test=TestInfo.objects.get(pk=request.session['test-id']),
                            user=request.user,
                            testing_date=datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"))
    result.save()
    for question_id in request.session['temp_answers']:
        temp_question = QuestionInfo.objects.get(pk=question_id)
        if temp_question.question_type.type_name == 'MANUAL':
            # NOTE(review): the same stashed value serves both as the
            # QuestionAnswer pk and as the entered text -- looks odd for a
            # free-text answer; confirm against the form field layout.
            QuestionAnswerUser(result=result,
                               question_answer=QuestionAnswer.objects.get(pk=request.session['temp_answers'][question_id]),
                               entered_text=request.session['temp_answers'][question_id]).save()
        else:
            # One QuestionAnswerUser row per selected choice.
            for selected_answer in request.session['temp_answers'][question_id]:
                QuestionAnswerUser(result=result,
                                   question_answer=QuestionAnswer.objects.get(pk=selected_answer),
                                   entered_text=None).save()
    return result.id
def get_results(result_id):
    """Call the get_results() database function for *result_id*.

    Returns a (count_correct, count_incorrect, ball) tuple.
    """
    with connection.cursor() as cursor:
        # Bind result_id as a query parameter instead of str.format():
        # same query, but immune to SQL injection and quoting issues.
        cursor.execute('select count_correct, count_incorrect, ball '
                       'from get_results(p_result_id => %s)', [result_id])
        count_correct, count_incorrect, ball = cursor.fetchone()
    return count_correct, count_incorrect, ball
|
994,134 | 445f68417a57e1bbe4b7c685a662f2354362dd1b | import unittest
from csvtools.test import ReaderWriter
import csvtools.unzip as m
class TestUnzip(unittest.TestCase):
    """Tests for csvtools.unzip: splitting a CSV stream into the selected
    ("spec") columns and the remaining ("unspec") columns, both keyed by a
    generated zip-id column."""

    def test_out_spec(self):
        # The selected column 'a' lands in the spec output, id-prefixed.
        csv_in = ReaderWriter()
        csv_in.writerow('a b c'.split())
        csv_in.writerow('a1 b1 c1'.split())
        csv_in.writerow('a2 b2 c2'.split())
        csv_out_spec = ReaderWriter()
        csv_out_unspec = ReaderWriter()
        m.unzip(csv_in, ['a'], csv_out_spec, csv_out_unspec)
        self.assertListEqual(
            ['id a'.split(),
             '0 a1'.split(),
             '1 a2'.split()],
            csv_out_spec.rows)

    def test_out_unspec(self):
        # The remaining columns 'b' and 'c' land in the unspec output.
        csv_in = ReaderWriter()
        csv_in.writerow('a b c'.split())
        csv_in.writerow('a1 b1 c1'.split())
        csv_in.writerow('a2 b2 c2'.split())
        csv_out_spec = ReaderWriter()
        csv_out_unspec = ReaderWriter()
        m.unzip(csv_in, ['a'], csv_out_spec, csv_out_unspec)
        self.assertListEqual(
            ['id b c'.split(),
             '0 b1 c1'.split(),
             '1 b2 c2'.split()],
            csv_out_unspec.rows)

    def test_zip_id_defaults_to_id(self):
        # Without zip_field, the generated column is named 'id'.
        csv_in = self.csv_header_a_b_c()
        csv_out_spec = ReaderWriter()
        csv_out_unspec = ReaderWriter()
        m.unzip(csv_in, ['a'], csv_out_spec, csv_out_unspec)
        self.assertListEqual(['id b c'.split()], csv_out_unspec.rows)

    def csv_header_a_b_c(self):
        # Helper: a CSV stream containing only the header row 'a,b,c'.
        csv = ReaderWriter()
        csv.writerow('a b c'.split())
        return csv

    def test_custom_zip_id_in_out_spec(self):
        # zip_field renames the generated column in the spec output...
        csv_in = self.csv_header_a_b_c()
        csv_out_spec = ReaderWriter()
        csv_out_unspec = ReaderWriter()
        m.unzip(
            csv_in, ['a'], csv_out_spec, csv_out_unspec,
            zip_field='zip_id')
        self.assertListEqual(['zip_id a'.split()], csv_out_spec.rows)

    def test_custom_zip_id_in_out_unspec(self):
        # ...and in the unspec output as well.
        csv_in = self.csv_header_a_b_c()
        csv_out_spec = ReaderWriter()
        csv_out_unspec = ReaderWriter()
        m.unzip(
            csv_in, ['a'], csv_out_spec, csv_out_unspec,
            zip_field='zip_id')
        self.assertListEqual(['zip_id b c'.split()], csv_out_unspec.rows)

    def test_input_contain_zip_field_exception(self):
        # A zip_field that clashes with an input column must be rejected.
        csv_in = self.csv_header_a_b_c()
        csv_out_spec = ReaderWriter()
        csv_out_unspec = ReaderWriter()
        with self.assertRaises(m.DuplicateFieldError):
            m.unzip(csv_in, ['a'], csv_out_spec, csv_out_unspec, zip_field='a')
|
994,135 | 0251ceec4bf5492529bb1076496524d014c70a90 | # -*- coding: utf-8 -*-
"""
Microsoft-Windows-Provisioning-Diagnostics-Provider
GUID : ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9
"""
from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct
from etl.utils import WString, CString, SystemTime, Guid
from etl.dtyp import Sid
from etl.parsers.etw.core import Etw, declare, guid
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=10, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_10_0(Etw):
pattern = Struct(
"Message1" / WString,
"Message2" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=11, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_11_0(Etw):
pattern = Struct(
"Message1" / WString,
"Message2" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=12, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_12_0(Etw):
pattern = Struct(
"Message1" / WString,
"HRESULT" / Int32ul,
"Message2" / WString,
"Message3" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=20, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_20_0(Etw):
pattern = Struct(
"Message1" / WString,
"Message2" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=21, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_21_0(Etw):
pattern = Struct(
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=22, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_22_0(Etw):
pattern = Struct(
"Message1" / WString,
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=40, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_40_0(Etw):
pattern = Struct(
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=42, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_42_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=45, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_45_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=60, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_60_0(Etw):
pattern = Struct(
"UInt1" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=61, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_61_0(Etw):
pattern = Struct(
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=62, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_62_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=63, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_63_0(Etw):
pattern = Struct(
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=64, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_64_0(Etw):
pattern = Struct(
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=65, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_65_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul,
"Message1" / WString,
"Uint1" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=66, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_66_0(Etw):
pattern = Struct(
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=67, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_67_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul,
"Message1" / WString,
"Uint1" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=68, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_68_0(Etw):
pattern = Struct(
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=69, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_69_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul,
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=70, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_70_0(Etw):
pattern = Struct(
"Int1" / Int32sl
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=71, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_71_0(Etw):
pattern = Struct(
"Int1" / Int32sl
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=72, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_72_0(Etw):
pattern = Struct(
"Int1" / Int32sl,
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=80, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_80_0(Etw):
pattern = Struct(
"UInt1" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=81, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_81_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=82, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_82_0(Etw):
pattern = Struct(
"UInt1" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=83, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_83_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=90, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_90_0(Etw):
pattern = Struct(
"Message1" / WString,
"Message2" / WString,
"Message3" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=91, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_91_0(Etw):
pattern = Struct(
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=92, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_92_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul,
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=93, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_93_0(Etw):
pattern = Struct(
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=94, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_94_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul,
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=100, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_100_0(Etw):
pattern = Struct(
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=101, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_101_0(Etw):
pattern = Struct(
"Message1" / WString,
"Int1" / Int32sl
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=102, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_102_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul,
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=103, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_103_0(Etw):
pattern = Struct(
"Message1" / WString,
"Message2" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=104, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_104_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul,
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=106, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_106_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=107, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_107_0(Etw):
pattern = Struct(
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=108, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_108_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=109, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_109_0(Etw):
pattern = Struct(
"Message1" / WString,
"Message2" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=110, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_110_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul,
"Message1" / WString
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=112, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_112_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=113, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_113_0(Etw):
pattern = Struct(
"Int1" / Int32sl
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=115, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_115_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=153, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_153_0(Etw):
pattern = Struct(
"InitialState" / Int32ul,
"UpdateState" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=154, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_154_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=155, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_155_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=157, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_157_0(Etw):
pattern = Struct(
"UInt1" / Int64ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=171, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_171_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=172, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_172_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=173, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_173_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=174, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_174_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=175, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_175_0(Etw):
pattern = Struct(
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=176, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_176_0(Etw):
pattern = Struct(
"Uint1" / Int32ul,
"Uint2" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=177, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_177_0(Etw):
pattern = Struct(
"Uint1" / Int32ul,
"Uint2" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=178, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_178_0(Etw):
pattern = Struct(
"Int1" / Int32sl
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=179, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_179_0(Etw):
pattern = Struct(
"State" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=180, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_180_0(Etw):
pattern = Struct(
"State" / Int32ul,
"HRESULT" / Int32ul
)
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=181, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_181_0(Etw):
pattern = Struct(
"State" / Int32ul,
"HRESULT" / Int32ul
)
# ETW (Event Tracing for Windows) event templates for the
# Microsoft-Windows-Provisioning-Diagnostics-Provider
# (provider GUID ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9).
# One generated class per (event_id, version) pair; `pattern` describes the
# event payload as a construct `Struct`, and @declare registers the template
# with the ETW parsing framework.  Field names/types mirror the provider's
# manifest -- do not rename them.
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=182, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_182_0(Etw):
    pattern = Struct(
        "UInt1" / Int32ul
    )
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=184, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_184_0(Etw):
    pattern = Struct(
        "HRESULT" / Int32ul
    )
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=300, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_300_0(Etw):
    pattern = Struct(
        "HRESULT" / Int32ul
    )
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=301, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_301_0(Etw):
    pattern = Struct(
        "State" / Int32ul,
        "HRESULT" / Int32ul
    )
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=302, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_302_0(Etw):
    pattern = Struct(
        "State" / Int32ul,
        "HRESULT" / Int32ul
    )
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=303, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_303_0(Etw):
    pattern = Struct(
        "State" / Int32ul
    )
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=310, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_310_0(Etw):
    pattern = Struct(
        "Message1" / WString,
        "Message2" / WString,
        "Message3" / WString
    )
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=311, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_311_0(Etw):
    pattern = Struct(
        "HRESULT" / Int32ul
    )
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=312, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_312_0(Etw):
    pattern = Struct(
        "HRESULT" / Int32ul
    )
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=313, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_313_0(Etw):
    pattern = Struct(
        "HRESULT" / Int32ul,
        "Message1" / WString
    )
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=1002, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_1002_0(Etw):
    pattern = Struct(
        "HRESULT" / Int32ul
    )
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=1005, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_1005_0(Etw):
    pattern = Struct(
        "HRESULT" / Int32ul,
        "File" / CString,
        "Line" / Int32sl,
        "Message" / WString
    )
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=1006, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_1006_0(Etw):
    pattern = Struct(
        "Message1" / WString
    )
@declare(guid=guid("ed8b9bd3-f66e-4ff2-b86b-75c7925f72a9"), event_id=1008, version=0)
class Microsoft_Windows_Provisioning_Diagnostics_Provider_1008_0(Etw):
    pattern = Struct(
        "HRESULT" / Int32ul
    )
|
994,136 | 7809922a9b4274cf738c0032193d287eb8c28f41 | """
Test of ABARES report_builder script
"""
#%load_ext autoreload
#%autoreload 2
#%cd "J:\ProductivityAndWaterAndSocial\Water\_Resources\ABARESdocs\\charts_and_tables"
import pandas as pd
import report_builder
import numpy as np
import os
# Project folder layout -- the script is run from the charts_and_tables
# directory (see the %cd magic above); paths are Windows-style on purpose.
home = os.getcwd()
tablesf = home + "\\output\\"
home_1 = os.path.dirname(home)
reportf = home_1 + "\\report\\"
datain = home + "\\input\\"
# =============================
#
# Create report builder instance for any chapter that has linked data
#
# =============================
rb2 = report_builder.ReportBuilder(tablesf, reportf, "2_text")
rb4 = report_builder.ReportBuilder(tablesf, reportf, "4_tables")
# =============================
#
# Read in some data
#
# =============================
data = pd.read_csv(datain + "raw_crop_data.csv")
data = data.set_index(['YEAR', 'SEASON', 'UNIT'])
# =============================
#
# Create some tables and stats
#
# =============================
# Winter-crop production/area by state; TAS and AUS columns are excluded.
# Fix: drop(..., 1) passed the axis positionally, which is rejected by
# pandas 2.x -- use the explicit columns= keyword.
table1 = data.xs(('winter', 'production'), level=('SEASON', 'UNIT'))
table1 = table1.drop(columns=['TAS', 'AUS'])
table1.to_csv(tablesf + 'winter_crop_prod_aus.csv')
table3 = data.xs(('winter', 'area'), level=('SEASON', 'UNIT'))
table3 = table3.drop(columns=['TAS', 'AUS'])
table3.to_csv(tablesf + 'winter_crop_area_aus.csv')
stats = {}
# Fix: .ix was deprecated in pandas 0.20 and removed in 1.0; these lookups are
# by label (YEAR index, state column), so .loc is the correct replacement.
stats['win_prod_vic'] = format(table1.loc[2015, 'VIC'], ',.1f')
# NOTE(review): this stat is named sum_crop_area but reads table1 (production);
# it may have been meant to read table3 (area) -- confirm against the report.
stats['sum_crop_area'] = format(table1.loc[2015, 'QLD'], ',.1f')
# =============================
#
# Write tables and stats to chapters
#
# =============================
rb2.insert_tables_stats(['winter_crop_prod_aus', 'winter_crop_area_aus'], [',.1f', ',.1f'], stats)
rb4.insert_tables_stats(['winter_crop_prod_aus', 'winter_crop_area_aus'], [',.1f', ',.1f'], stats)
# =============================
#
# Create chart and table index file
#
# =============================
rb2.make_chart_index('chart_table_index.csv', ['2_text.md', '3_charts.md', '4_tables.md'])
|
994,137 | 3f4a620e83fa8c8e6fd3ee3a51f9628964c80631 | from typing import List, Optional
from kubernetes.client import V1beta1CronJob
from streams_explorer.models.kafka_connector import KafkaConnector
from streams_explorer.models.sink import Sink
from streams_explorer.models.source import Source
class Extractor:
    """Base class for extractors that contribute sources and sinks.

    Subclasses override the ``on_*`` parsing hooks; the defaults do nothing.
    NOTE(review): ``sources`` and ``sinks`` are class-level lists, shared by
    all instances and subclasses -- confirm this registry behaviour is
    intentional.
    """

    sources: List[Source] = []
    sinks: List[Sink] = []

    def on_streaming_app_env_parsing(self, env, streaming_app_name: str):
        """Hook invoked while parsing a streaming app's environment; no-op."""
        pass

    def on_connector_info_parsing(
        self, info: dict, connector_name: str
    ) -> Optional[KafkaConnector]:
        """Hook invoked while parsing Kafka connector info; no-op."""
        pass

    def on_cron_job_parsing(self, cron_job: V1beta1CronJob):
        """Hook invoked for each Kubernetes CronJob; no-op."""
        pass

    @staticmethod
    def split_topics(topics: Optional[str]) -> List[str]:
        """Split a comma-separated topic string, stripping all whitespace."""
        if not topics:
            return []
        return topics.replace(" ", "").split(",")
|
994,138 | 3dc2073e149521a6993307dcb26bce9ebb3adbc3 | version https://git-lfs.github.com/spec/v1
oid sha256:1d4a7d60bede05345f4fc82cea2ada325e8cdd8be80f17c59a4930c26ae88a78
size 15392
|
994,139 | d6d6b765671a6f5d0750ab4044d732e6bb24c554 | #! python3
#### Builing a text classifier
import zipfile
import pandas as pd
import numpy as np
import pickle
import nltk
from nltk import FreqDist
import re
import sys
import sklearn
sys.path.append("C:\Python34\Scripts\Mike AI Job\Final")
#import tweepy2
from text_preprocessing import remove_stopwords
#from text_preprocessing import tokenise,preprocess,remove_stopwords
#zipfile_ref = zipfile.ZipFile("trainingandtestdata.zip","r")
#zipfile_ref.extractall()
#zipfile_ref.close()
##tweets_info = pd.read_csv("training.1600000.processed.noemoticon.csv",encoding = "ISO-8859-1",index_col = 1, header = None)
##
##tweets_info.columns = ["Label","Timestamp","Status","User","Text"]
###print(tweets_info)
##
##training_data = open("training_data","wb")
##pickle.dump(tweets_info,training_data)
# Load the pickled Sentiment140 DataFrame prepared by the commented-out code
# above.  NOTE(review): the file handle is never closed -- prefer `with open(...)`.
datastream = open("training_data","rb")
input_df = pickle.load(datastream)
# Keep only the label and tweet text, map numeric labels to words, shuffle,
# and take a small 200-row sample for training.
training_df = input_df[["Label","Text"]]
label_new = ["positive","negative","neutral"]
training_df["Label"] = training_df["Label"].map({4:"positive",2:"neutral",0:"negative"})
training_df = training_df.sample(frac = 1)
training_data = training_df.head(200)
#training_data["Lexicon"] = list(map(text_preprocess.tokenize(str(training_data["Text"])),str(training_data["Text"])))
#training_data["Lexicon"] = map(remove_stopwords(training_data["Text"]))
tweet_list = []
# NOTE(review): this double-for comprehension pairs EVERY tweet with EVERY
# label (cartesian product, 200x200 pairs) instead of zipping each tweet with
# its own label -- looks like a bug; confirm intended pairing.
tweet_list = [(remove_stopwords(str(x)),sentiment) for x in training_data["Text"] for sentiment in training_data["Label"]]
def get_words(tweet_list):
    """Flatten a list of (words, sentiment) pairs into one list of words.

    Order is preserved: words from earlier tweets come first.
    """
    return [word for words, _sentiment in tweet_list for word in words]
def get_word_features(words_list):
    """Return the distinct words of ``words_list`` as the feature vocabulary.

    Uses nltk's FreqDist for de-duplication; note the return value is a
    dict-keys view (not a list), in first-seen order.
    """
    word_freq = FreqDist(words_list)
    words_features = word_freq.keys()
    return words_features
def feature_extractor(document):
    """Map every vocabulary word to whether it occurs in ``document``.

    Reads the module-level ``words_features`` vocabulary; keys have the form
    ``contains(word)`` as expected by nltk classifiers.
    """
    return {'contains(%s)' % term: (term in document) for term in words_features}
# Build the vocabulary from all training words (feature_extractor reads it
# as a module-level global).
words_features = get_word_features(get_words(tweet_list))
# Rebuild (text, sentiment) pairs from the DataFrame.
# NOTE(review): again a cartesian product of texts x sentiments rather than a
# per-row zip -- same suspected bug as above.
training_set = training_data.values
text_list = list(training_set[:,1])
sentiment_list = list(training_set[:,0])
tweet_list = [(text,sentiment) for text in text_list for sentiment in sentiment_list]
# Lazily apply the feature extractor and train a Naive Bayes classifier.
training_set = nltk.classify.apply_features(feature_extractor,tweet_list)
classifier = nltk.NaiveBayesClassifier.train(training_set)
# Smoke test on a hand-written tweet.
tweet_try = "I'm happy and cheerful!"
print(classifier.classify(feature_extractor(tweet_try.split())))
|
994,140 | 3f9ac7d5f3b9b7c9a8f12cee2ace589c024375bf | class Timeline:
"""タイムラインの取得と ``since_id`` と ``max_id`` を保存、取得するクラス
Attributes
ーーーーーー
home_timeline_ids : TimelineIndex or None
ホームタイムラインの ``since_id`` と ``max_id`` を保持するオブジェクト
"""
def __init__(self, api, storage):
    """
    Parameters
    ----------
    api : tweepy.api.API
        Authenticated tweepy wrapper around the Twitter API.
    storage : TimelineIndexStorage
        Storage used to persist ``since_id`` and ``max_id``.
    """
    self._api = api
    self._storage = storage
def home_timeline(self, count, since_id=None, max_id=None):
"""ホームタイムライン上のツイートを取得する
Parameters
----------
count : int
取得するツイートの総数。最大は200
since_id : int, default None
タイムラインを取得し始めるツイートID
max_id : int, default None
タイムラインを取得し終えるツイートID
Returns
-------
tweets : tweepy.models.ResultSet
ホームタイムライン上のツイート
Notes
-----
``since_id`` で指定した値を超えるIDを持つツイートを取得する。
``max_id`` で指定した値以下のIDを持つツイートを取得する。
両方指定しなければ、最新のタイムラインを取得する。
"""
tweets = self._api.home_timeline(count=count, since_id=since_id,
max_id=max_id)
timeline_name = self.home_timeline.__name__
if tweets != []:
try:
self._storage.create_ids(timeline_name, tweets)
except ValueError:
self._storage.update_ids(timeline_name, tweets)
return tweets
@property
def home_timeline_ids(self):
"""前回の ``since_id`` と ``max_id`` を保持するオブジェクトを取得する
Returns
-------
TimelineIndex or None
``since_id`` と ``max_id`` を保持するオブジェクト。存在しなければ ``None``
"""
return self._storage.get_ids("home_timeline")
|
994,141 | abc4a581916abf8c87b4ade2dc8962a050bb8ebd | n = int(input())
li =[]
# Read n integers, one per line.
for _ in range(n):
    li.append(int(input()))
def gogo(li):
    """Print how many elements of ``li`` are running maxima when scanned
    left to right (i.e. elements >= every element before them).

    Stops early once the overall maximum has been counted.
    """
    if not li:
        # Robustness: the original indexed li[0] and crashed on empty input.
        print(0)
        return
    count = 1
    tallest = li[0]
    # Fix: the original read the module-level global ``n`` for the length and
    # recomputed max(li) on every iteration (O(n^2)); hoist it once.
    overall_max = max(li)
    for value in li[1:]:
        if value >= tallest:
            tallest = value
            count += 1
        if tallest == overall_max:
            break
    print(count)
gogo(li)  # count visible from the left end
li.reverse()
gogo(li)  # count visible from the right end
|
994,142 | 31f85fa991342fd8531451e21e43dbe8d2622f0c | import PyPDF2
class PDF2Txt():
    """Extract the full text of a PDF file with PyPDF2.

    Every page's text is concatenated into ``self.CompleteStr`` during
    construction; call :meth:`Read` to retrieve it.
    """

    def __init__(self, pdf):
        """Open ``pdf`` (a file path), extract all pages, then close the file."""
        self.pdf = pdf
        self.pdfObj = open(self.pdf, 'rb')
        self.CompleteStr = ""
        try:
            self.AllPages()
        finally:
            # Fix: the original never closed the handle (resource leak).  All
            # text is extracted eagerly above, so the stream is not needed
            # after construction.
            self.pdfObj.close()

    def Read(self):
        """Return the concatenated text of all pages."""
        return self.CompleteStr

    def AllPages(self):
        """Append each page's extracted text to ``self.CompleteStr``."""
        # NOTE: PdfFileReader/getPage/extractText is the legacy PyPDF2 1.x
        # API; it was removed in PyPDF2 3.x / pypdf (PdfReader replaces it).
        self.pdfRead = PyPDF2.PdfFileReader(self.pdfObj)
        numPages = self.pdfRead.numPages
        for i in range(numPages):
            page = self.pdfRead.getPage(i)
            self.CompleteStr = self.CompleteStr + page.extractText()
if __name__== "__main__":
    # NOTE(review): PDF2Txt(None) raises TypeError inside open(); this smoke
    # call cannot succeed -- pass a real PDF path to exercise the class.
    PDF2Txt(None)
|
994,143 | c5f677ab0db77914ee3572a236c292b0ce5e6395 | # -*- coding: utf-8 -*-
# Dictionaries exercise ("Sõnastikud")
# Author: Väino Tuisk
# Estonian -> English lookup; "janku" maps to miscellaneous demo data
# (a tuple, a colour and a filename) to show mixed value types.
sonastik = {"janku":[(100,250),"red","mjau.vaw"],"raamat":"book","auto":"car","meri":"sea"}
hinded ={"A":96,"B":90,"C":76,"D":66,"E":50}  # grade thresholds (unused below)
# Print all known words on one line as a menu.
for i in sonastik:
    print (i,end = " ")
print (" ")
# Prompt text is user-facing Estonian ("which word to translate?") -- keep as-is.
eesti = input("mis sõna tõlkida? ")
print (sonastik[eesti])
##print ("")
##print (sonastik["auto"])
##sonastik["hiir"] = "mouse"
##for i in sonastik:
##print (i,end = " ")
##del sonastik["raamat"]
##print ("")
##for i in sonastik:
##print (i,end = " ")
# Third element of the "janku" entry's list: prints "mjau.vaw".
print (sonastik["janku"][2])
|
994,144 | 0cb119f2d5528351ca3432e7f2d89b1307515e72 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2018 Cesar Sinchiguano <cesarsinchiguano@hotmail.es>
#
# Distributed under terms of the BSD license.
"""
Everthing done with open3d, pyntcloud, CloudCompare, meshLab and python_pcl binding(wrapper)
--------------------------------------------------------------------------------------------
#Python 2.7 anaconda in a virtual environment working with ros, and extra "tensorflow"
#In order to change the format .obj(CAD model) to .ply(triangle mesh) do the following:
1_open Cloud compare and save a copy as .ply extension
#From .ply (triangle mesh) to .pcd(sample points) run the following code(meshtopc.py),
# meshtopc.py use the a pyntcloud that only run in python3 where i used the method to
# sample point from the mesh data, also use open3D in order to save as .pcd the sample points.
#finally, for processing the data i am working with python_pcl
-----------------------------------------------------------------------------------------
"""
from thesis_class import prepare_dataset,draw_registration_result,preprocess_point_cloud
from thesis_class import camera, execute_global_registration
from thesis_library import *
import open3d
from open3d import PointCloud, Vector3dVector, write_point_cloud,read_triangle_mesh
import pcl
# Returns Downsampled version of a point cloud
# The bigger the leaf size the less information retained
def do_voxel_grid_filter(point_cloud, LEAF_SIZE = 0.01):
    """Downsample ``point_cloud`` on a cubic voxel grid of edge LEAF_SIZE."""
    grid = point_cloud.make_voxel_grid_filter()
    grid.set_leaf_size(LEAF_SIZE, LEAF_SIZE, LEAF_SIZE)
    return grid.filter()
# Returns only the point cloud information at a specific range of a specific axis
def do_passthrough_filter(point_cloud, name_axis = 'z', min_axis = 0.6, max_axis = 1.1):
    """Keep only points whose ``name_axis`` coordinate lies in [min_axis, max_axis]."""
    axis_filter = point_cloud.make_passthrough_filter()
    axis_filter.set_filter_field_name(name_axis)
    axis_filter.set_filter_limits(min_axis, max_axis)
    return axis_filter.filter()
# Use RANSAC plane segmentation to separate plane and not plane points
# Returns inliers (plane) and outliers (not plane)
def do_ransac_plane_segmentation(point_cloud, max_distance = 0.01):
    """RANSAC-fit a plane and split ``point_cloud`` into (plane, non-plane)."""
    segmenter = point_cloud.make_segmenter()
    segmenter.set_model_type(pcl.SACMODEL_PLANE)
    segmenter.set_method_type(pcl.SAC_RANSAC)
    segmenter.set_distance_threshold(max_distance)
    # Indices of points lying within max_distance of the fitted plane.
    plane_indices, _coefficients = segmenter.segment()
    plane_cloud = point_cloud.extract(plane_indices, negative = False)
    other_cloud = point_cloud.extract(plane_indices, negative = True)
    return plane_cloud, other_cloud
def main():
    """Segment table-top objects out of a saved point cloud.

    Loads 'data.pcd', voxel-downsamples it, crops the region of interest
    along z, RANSAC-splits the table plane from the objects, and writes the
    intermediate clouds to disk.  NOTE(review): exit() fires at the end of
    the first pass, so the while-loop body effectively runs only once.
    """
    counter=0
    flag=True
    while (True):
        counter+=1
        # #Get point PointCloud2
        # pc=camObj.get_point_cloud()
        #
        # if pc is None:
        # print('no PointCloud2!!!')
        # continue
        # print(type(pc))
        # print(pc.shape)
        # if flag:
        # pc=np.reshape(pc,(np.size(pc[:,:,0]),3))#I took the size of my x coordinates
        # print(pc.shape)
        # pc=np.nan_to_num(pc)
        #
        # #Pass xyz to Open3D.PointCloud and visualize
        # pcd = PointCloud()
        # print('In progress!!!')
        #
        # start_timer=time.time()
        #
        # pcd.points = Vector3dVector(pc)
        # write_point_cloud("data.pcd", pcd)
        #
        #
        # print('elapsed time:',time.time()-start_timer)
        #
        # flag=False
        ##################################################################################
        # The following lines of codes separate work with open3D, it is a good one in order to make
        # the local and global registration
        # """
        # Load!!!
        # """
        # print("Load a ply point cloud, print it, and visualize it")
        # source =read_point_cloud("data.pcd")
        # print(type(source))
        # tmp_source=np.asarray(source.points)
        # print('shape:',tmp_source.shape)
        # # Flip it, otherwise the pointcloud will be upside down
        # source.transform([[1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]])
        # #draw_geometries([source])
        # #exit()
        # """
        # Donwsample!!!
        # """
        # print("Downsample the point cloud with a voxel of 0.02")
        # voxel_down_source = voxel_down_sample(source, voxel_size =0.0008)#5mm
        # tmp_source=np.asarray(voxel_down_source.points)
        # print('shape:',tmp_source.shape)
        # # draw_geometries([voxel_down_source])
        # # exit()
        ##################################################################################
        # The following lines of codes separate the objects in the table from the given scene
        # and the job is done with the help of python_pcl, a binding python to the PCL
        # Load the point cloud from the memory
        cloud = pcl.load('data.pcd')
        # Downsample the cloud as high resolution which comes with a computation cost
        downsampled_cloud = do_voxel_grid_filter(point_cloud = cloud, LEAF_SIZE = 0.01)
        pcl.save(downsampled_cloud, 'downsampled_cloud.pcd')
        # Get only information in our region of interest, as we don't care about the other parts
        filtered_cloud = do_passthrough_filter(point_cloud = downsampled_cloud,name_axis = 'z', min_axis = 0.6, max_axis = 1.1)
        pcl.save(filtered_cloud, 'roi_tabletop.pcd')
        # Separate the table from everything else
        table_cloud, objects_cloud = do_ransac_plane_segmentation(filtered_cloud, max_distance = 0.01)
        pcl.save(table_cloud, 'table_only.pcd');
        pcl.save(objects_cloud, 'objects.pcd');
        # Terminates the whole process; everything below in this loop is unreachable.
        exit()
        # print("Downsample the point cloud with a voxel of 1")#voxel_size = 0.05 # means 5cm for the dataset
        # voxel_down_target = voxel_down_sample(target, voxel_size = 1)
        # tmp_target=np.asarray(voxel_down_target.points)
        # print('shape:',tmp_target.shape)
        # # Flip it, otherwise the pointcloud will be upside down
        # voxel_down_target.transform([[1, 0, 0, 0], [0, -1, 0, 0], [0, 0, -1, 0], [0, 0, 0, 1]])
        # draw_geometries([voxel_down_target])
        print('------------------')
        print('counter:',counter)
        # close any open windows
        #cv2.destroyAllWindows()
        # spin() simply keeps python from exiting until this node is stopped
        #rospy.spin()
if __name__ == '__main__':
    # camObj is created at module level so main() can reach it as a global.
    camObj=camera()
    main()
#help for the future
# # voxel_size = 0.05 # means 5cm for the dataset
# # source, target, source_down, target_down, source_fpfh, target_fpfh = prepare_dataset()
#
# # result_ransac = execute_global_registration(source_down, target_down,source_fpfh, target_fpfh, voxel_size)
# # print(result_ransac)
# # draw_registration_result(source_down, target_down,result_ransac.transformation)
|
994,145 | d52e2155960779ba0716af74d344f4e681d5552e | # -*- test-case-name: signing.test.test_persistence -*-
from twisted.internet import defer
class Persistence(object):
    """
    Simple deferred key:(field:value) store.
    Setting an existing field overwrites its value; get_all returns every
    field name stored under a key.  All methods return already-fired
    Deferreds.

    NOTE(review): ``keyvals`` is a class attribute, so the store is shared by
    every Persistence instance -- confirm that is intentional.
    """
    keyvals = {}

    def set(self, key, field, value):
        """Store ``value`` under (key, field); fires with None."""
        result = defer.Deferred()
        self.keyvals.setdefault(key, {})[field] = value
        result.callback(None)
        return result

    def get(self, key, field):
        """Fire with the stored value, or None when key or field is absent."""
        result = defer.Deferred()
        fields = self.keyvals.get(key)
        if fields is None or field not in fields:
            result.callback(None)
        else:
            result.callback(fields[field])
        return result

    def get_all(self, key):
        """Fire with the field names stored under ``key`` ([] when absent)."""
        result = defer.Deferred()
        if key in self.keyvals:
            result.callback(self.keyvals[key].keys())
        else:
            result.callback([])
        return result

    def delete(self, key, field = None):
        """Delete one field, or the whole key when ``field`` is None.

        The key itself is removed once its last field is gone; fires with
        None either way.
        """
        result = defer.Deferred()
        if key in self.keyvals:
            fields = self.keyvals[key]
            if field in fields:
                del fields[field]
            if field is None or len(fields) == 0:
                del self.keyvals[key]
        result.callback(None)
        return result
|
994,146 | c21d77989748fe9e49f93d38fa1e0d80b589f085 | import itertools
import numpy as np
import pytest
from chunkblocks.global_offset_array import GlobalOffsetArray
from chunkblocks.iterators import Iterator
from chunkblocks.models import Block, Chunk
class IdentityIterator(Iterator):
    """Trivial Iterator stub for tests: an index is its own neighbor set and
    iteration yields only the start coordinate (keeps tests single-chunk)."""
    def get_all_neighbors(self, index, max=None):
        return index
    def get(self, start, dimensions):
        yield start
class TestChunk:
    """Tests for Chunk border/core slice computation: the border slices plus
    the core slices must tile the chunk exactly (each voxel touched once when
    non-intersecting)."""

    def test_get_border_slices_2d(self):
        bounds = (slice(0, 50), slice(0, 50))
        chunk_shape = (30, 30)
        overlap = (10, 10)
        block = Block(bounds=bounds, chunk_shape=chunk_shape, overlap=overlap)
        chunk = Chunk(block, (0, 0))
        borders = list(itertools.product(range(0, len(bounds)), [-1, 1]))
        fake_data = np.zeros(chunk.shape)
        for slices in chunk.border_slices(borders):
            fake_data[slices] += 1
        fake_data[chunk.core_slices(borders)] += 1
        # Fix: np.product was deprecated and removed in NumPy 2.0 -- np.prod.
        assert fake_data.sum() == np.prod(fake_data.shape)

    def test_get_border_slices_3d(self):
        bounds = (slice(0, 70), slice(0, 70), slice(0, 70))
        chunk_shape = (30, 30, 30)
        overlap = (10, 10, 10)
        block = Block(bounds=bounds, chunk_shape=chunk_shape, overlap=overlap)
        chunk = Chunk(block, (0, 0, 0))
        borders = list(itertools.product(range(0, len(bounds)), [-1, 1]))
        fake_data = np.zeros(chunk.shape)
        for slices in chunk.border_slices(borders):
            fake_data[slices] += 1
        fake_data[chunk.core_slices(borders)] += 1
        assert fake_data.sum() == np.prod(fake_data.shape)

    def test_get_border_slices_3d_overlapping(self):
        # With nonintersecting=False the border slices overlap at edges and
        # corners, so counts reflect how many faces touch each voxel.
        bounds = (slice(0, 7), slice(0, 7), slice(0, 7))
        chunk_shape = (3, 3, 3)
        overlap = (1, 1, 1)
        block = Block(bounds=bounds, chunk_shape=chunk_shape, overlap=overlap)
        chunk = Chunk(block, (0, 0, 0))
        borders = list(itertools.product(range(0, len(bounds)), [-1, 1]))
        fake_data = np.zeros(chunk.shape)
        for slices in chunk.border_slices(borders, nonintersecting=False):
            fake_data[slices] += 1
        fake_data[chunk.core_slices(borders)] += 1
        assert np.array_equal(fake_data, [[[3, 2, 3],
                                           [2, 1, 2],
                                           [3, 2, 3]],
                                          [[2, 1, 2],
                                           [1, 1, 1],
                                           [2, 1, 2]],
                                          [[3, 2, 3],
                                           [2, 1, 2],
                                           [3, 2, 3]]])
class TestBlock:
    """Tests for Block construction, index<->slice conversion, iteration,
    overlap/core tiling, checkpointing and cross-block index mapping."""

    def test_init_wrong_size_no_overlap(self):
        # 70 is not evenly covered by 30-chunks without overlap -> ValueError.
        bounds = (slice(0, 70), slice(0, 70))
        chunk_shape = (30, 30)
        with pytest.raises(ValueError):
            Block(bounds=bounds, chunk_shape=chunk_shape)

    def test_init(self):
        # All the equivalent/invalid ways to specify a Block's geometry.
        bounds = (slice(0, 70), slice(0, 70))
        offset = (0, 0)
        num_chunks = (3, 3)
        overlap = (10, 10)
        chunk_shape = (30, 30)
        # test with bounds
        Block(bounds=bounds, chunk_shape=chunk_shape, overlap=overlap)
        # test with offset/num_chunks
        Block(offset=offset, num_chunks=num_chunks, chunk_shape=chunk_shape, overlap=overlap)
        # test with both offset/num_chunks
        Block(bounds=bounds, offset=offset, num_chunks=num_chunks, chunk_shape=chunk_shape, overlap=overlap)
        # test fail with neither block and offset offset/num_chunks
        with pytest.raises(ValueError):
            Block(chunk_shape=chunk_shape, overlap=overlap)
        # test fail with only offset no num_chunks
        with pytest.raises(ValueError):
            Block(offset=offset, chunk_shape=chunk_shape, overlap=overlap)
        # test fail with only num_chuks no offset
        with pytest.raises(ValueError):
            Block(num_chunks=num_chunks, chunk_shape=chunk_shape, overlap=overlap)
        # test incorrect matching bounds with offset/num_chunks
        with pytest.raises(Exception):
            Block(bounds=(slice(b.start, b.stop + 1) for b in bounds),
                  offset=offset, num_chunks=num_chunks, chunk_shape=chunk_shape, overlap=overlap)

    def test_init_wrong_size_overlap(self):
        # Same geometry mismatch as above, exercised on the overlap path.
        bounds = (slice(0, 70), slice(0, 70))
        chunk_shape = (30, 30)
        with pytest.raises(ValueError):
            Block(bounds=bounds, chunk_shape=chunk_shape)

    def test_index_to_slices(self):
        # Consecutive unit indices step by chunk_shape - overlap = 20.
        bounds = (slice(0, 70), slice(0, 70))
        chunk_shape = (30, 30)
        overlap = (10, 10)
        block = Block(bounds=bounds, chunk_shape=chunk_shape, overlap=overlap)
        assert block.unit_index_to_slices((0, 0)) == (slice(0, 30), slice(0, 30))
        assert block.unit_index_to_slices((0, 1)) == (slice(0, 30), slice(20, 50))
        assert block.unit_index_to_slices((1, 0)) == (slice(20, 50), slice(0, 30))

    def test_slices_to_index(self):
        # Inverse of unit_index_to_slices.
        bounds = (slice(0, 70), slice(0, 70))
        chunk_shape = (30, 30)
        overlap = (10, 10)
        block = Block(bounds=bounds, chunk_shape=chunk_shape, overlap=overlap)
        assert block.chunk_slices_to_unit_index((slice(0, 30), slice(0, 30))) == (0, 0)
        assert block.chunk_slices_to_unit_index((slice(0, 30), slice(20, 50))) == (0, 1)
        assert block.chunk_slices_to_unit_index((slice(20, 50), slice(0, 30))) == (1, 0)
        assert block.chunk_slices_to_unit_index((slice(20, 50), slice(20, 50))) == (1, 1)

    def test_iterator(self):
        # IdentityIterator yields only the start index -> exactly one chunk.
        bounds = (slice(0, 70), slice(0, 70))
        chunk_shape = (30, 30)
        overlap = (10, 10)
        start = (0, 0)
        block = Block(bounds=bounds, chunk_shape=chunk_shape, overlap=overlap, base_iterator=IdentityIterator())
        chunks = list(block.chunk_iterator(start))
        assert len(chunks) == 1
        assert chunks[0].unit_index == start

    def test_get_slices_2d(self):
        # Overlap + core slices over all chunks must tile the block once each.
        bounds = (slice(0, 7), slice(0, 7))
        chunk_shape = (3, 3)
        overlap = (1, 1)
        block = Block(bounds=bounds, chunk_shape=chunk_shape, overlap=overlap)
        fake_data = GlobalOffsetArray(np.zeros(block.shape), global_offset=(0, 0))
        assert block.num_chunks == (3, 3)
        for chunk in block.chunk_iterator((0, 0)):
            for edge_slice in block.overlap_slices(chunk):
                fake_data[edge_slice] += 1
            fake_data[block.core_slices(chunk)] += 1
        assert fake_data.sum() == np.product(fake_data.shape)

    def test_overlap_slices_3d(self):
        # Same tiling property in 3-D, starting from a non-origin chunk.
        bounds = (slice(0, 7), slice(0, 7), slice(0, 7))
        chunk_shape = (3, 3, 3)
        overlap = (1, 1, 1)
        block = Block(bounds=bounds, chunk_shape=chunk_shape, overlap=overlap)
        assert block.num_chunks == (3, 3, 3)
        fake_data = GlobalOffsetArray(np.zeros(block.shape), global_offset=(0, 0, 0))
        for chunk in block.chunk_iterator((1, 0, 1)):
            for edge_slice in block.overlap_slices(chunk):
                fake_data[edge_slice] += 1
            fake_data[block.core_slices(chunk)] += 1
        assert fake_data.sum() == np.product(fake_data.shape)

    def test_checkpoints(self):
        # Stage-0 checkpoints must all exist before any stage-1 checkpoint,
        # and all_neighbors_checkpointed reflects the completed stage.
        bounds = (slice(0, 7), slice(0, 7), slice(0, 7))
        chunk_shape = (3, 3, 3)
        overlap = (1, 1, 1)
        block = Block(bounds=bounds, chunk_shape=chunk_shape, overlap=overlap)
        for chunk in block.chunk_iterator((1, 0, 1)):
            block.checkpoint(chunk)
            assert block.is_checkpointed(chunk)
            assert block.is_checkpointed(chunk, stage=0)
        for chunk in block.chunk_iterator((1, 0, 1)):
            assert not block.is_checkpointed(chunk, stage=1)
            assert not block.checkpoint(chunk, stage=1)
            assert block.all_neighbors_checkpointed(chunk, stage=0)
            block.checkpoint(chunk, stage=1)
        stage = 0
        for chunk in block.chunk_iterator((1, 0, 1)):
            print(block.checkpoints[stage][chunk.unit_index])
            for c in block.get_all_neighbors(chunk):
                print(c.unit_index, block.checkpoints[stage][c.unit_index])
            assert block.all_neighbors_checkpointed(chunk, stage=0)

    def test_slices_to_indices(self):
        # Every coordinate of a chunk from one block (clipped to the other
        # block's bounds) must be covered by the unit indices that
        # slices_to_unit_indices maps it to -- in both directions, and with
        # open (None) slice ends.
        bounds_1 = (slice(0, 16), slice(0, 16), slice(0, 16))
        chunk_shape_1 = (4, 4, 4)
        overlap_1 = (1, 1, 1)
        block_1 = Block(bounds=bounds_1, chunk_shape=chunk_shape_1, overlap=overlap_1)
        bounds_2 = (slice(-1, 25), slice(-1, 25), slice(-1, 25))
        chunk_shape_2 = (6, 6, 6)
        overlap_2 = (1, 1, 1)
        block_2 = Block(bounds=bounds_2, chunk_shape=chunk_shape_2, overlap=overlap_2)
        index = 1
        for unit_index in range(0, block_2.num_chunks[index]):
            chunk_2 = Chunk(block_2, (0, unit_index))
            chunk_2_coords = set(filter(lambda x: x >= block_1.bounds[index].start and x < block_1.bounds[index].stop,
                                        range(chunk_2.slices[index].start, chunk_2.slices[index].stop)))
            print('expect:', chunk_2.slices, chunk_2_coords)
            for unit_index in block_1.slices_to_unit_indices(chunk_2.slices):
                chunk_1 = Chunk(block_1, unit_index)
                chunk_1_coords = set(filter(lambda x: x >= block_1.bounds[index].start and x < block_1.bounds[index].stop,
                                            range(chunk_1.slices[index].start, chunk_1.slices[index].stop)))
                print(chunk_1.slices, chunk_1_coords)
                chunk_2_coords.difference_update(chunk_1_coords)
                assert all(tuple(u >= 0 and u <= n for u, n in zip(unit_index, block_1.num_chunks)))
            print('left', chunk_2_coords)
            assert len(chunk_2_coords) == 0
        # Test reverse direction
        block_2_temp = block_2
        block_2 = block_1
        block_1 = block_2_temp
        index = 1
        for unit_index in range(0, block_2.num_chunks[index]):
            chunk_2 = Chunk(block_2, (0, unit_index))
            chunk_2_coords = set(filter(lambda x: x >= block_1.bounds[index].start and x < block_1.bounds[index].stop,
                                        range(chunk_2.slices[index].start, chunk_2.slices[index].stop)))
            print('expect:', chunk_2.slices, chunk_2_coords)
            for unit_index in block_1.slices_to_unit_indices(chunk_2.slices):
                chunk_1 = Chunk(block_1, unit_index)
                chunk_1_coords = set(filter(lambda x: x >= block_1.bounds[index].start and x < block_1.bounds[index].stop,
                                            range(chunk_1.slices[index].start, chunk_1.slices[index].stop)))
                print(chunk_1.slices, chunk_1_coords)
                chunk_2_coords.difference_update(chunk_1_coords)
                assert all(tuple(u >= 0 and u <= n for u, n in zip(unit_index, block_1.num_chunks)))
            print('left', chunk_2_coords)
            assert len(chunk_2_coords) == 0
        # Test None
        index = 1
        for unit_index in range(0, block_2.num_chunks[index]):
            chunk_2 = Chunk(block_2, (0, unit_index))
            # use fake slices with None here!
            chunk_2_slices = (slice(None, None),) + chunk_2.slices[1:]
            chunk_2_coords = set(filter(lambda x: x >= block_1.bounds[index].start and x < block_1.bounds[index].stop,
                                        range(chunk_2_slices[index].start, chunk_2_slices[index].stop)))
            print('expect:', chunk_2_slices, chunk_2_coords)
            for unit_index in block_1.slices_to_unit_indices(chunk_2_slices):
                chunk_1 = Chunk(block_1, unit_index)
                chunk_1_coords = set(filter(lambda x: x >= block_1.bounds[index].start and x < block_1.bounds[index].stop,
                                            range(chunk_1.slices[index].start, chunk_1.slices[index].stop)))
                print(chunk_1.slices, chunk_1_coords)
                chunk_2_coords.difference_update(chunk_1_coords)
                assert all(tuple(u >= 0 and u <= n for u, n in zip(unit_index, block_1.num_chunks)))
            print('left', chunk_2_coords)
            assert len(chunk_2_coords) == 0
|
994,147 | 6df2ceda44307f438ed54c15fb12913082fc1f90 | #!/usr/bin/env python
""" Constructs quantum system for ASE-internal water molecule descript in and
extracts ESP"""
from ase.io import read
from ase.io import write
from gpaw import GPAW
from gpaw import restart
from ase.build import molecule
from ase.optimize.bfgslinesearch import BFGSLineSearch #Quasi Newton
from ase.units import Bohr
from ase.units import Hartree
import numpy as np
import os.path
import sys
import argparse
# Command-line interface: optional charge plus up to four positional output
# file names, all with sensible defaults.
parser = argparse.ArgumentParser(description='Constructs quantum system'
    ' for ASE-internal water molecule desciption and extracts electrostatic'
    ' potential (ESP)')
#parser.add_argument('infile')
parser.add_argument('-c', '--charge',metavar='INTEGER_CHARGE',type=int,
    nargs='?', const=1, default=0)
parser.add_argument('outfile_cube', nargs='?', metavar='outfile.cube',
    default='esp.cube', help="Electrostatic potential in GAUSSIAN-native"
    " .cube format, default 'esp.cube'")
parser.add_argument('outfile_csv', nargs='?', metavar='outfile.csv',
    default='esp.csv', help="Electrostatic potential and x,y,z coordinates"
    " as four-valued lines of .8 digits precision mantissa"
    " notation, default 'esp.csv'")
parser.add_argument('outfile_rho_cube', nargs='?', metavar='outfile_rho.cube',
    default='rho.cube', help="All-electron density in GAUSSIAN-native .cube"
    " format, default 'rho.cube'")
parser.add_argument('outfile_rho_pseudo_cube', nargs='?',
    metavar='outfile_rho_pseudo.cube', default='rho_pseudo.cube',
    help="All-electron density in GAUSSIAN-native .cube format, default"
    "'rho_pseudo.cube'")
args = parser.parse_args()
charge = args.charge
# Build the ASE-internal H2O geometry in a 10 Angstrom cubic, non-periodic box.
struc = molecule('H2O')
struc.set_pbc([0,0,0])
struc.set_cell([10,10,10])
struc.center()
# NOTE(review): set_calculator is deprecated in newer ASE (use struc.calc = calc).
calc = GPAW(xc='PBE', h=0.2, charge=charge,
    spinpol=True, convergence={'energy': 0.001})
struc.set_calculator(calc)
# ESP from non-optimized H2O structure
Epot = struc.get_potential_energy()
# https://wiki.fysik.dtu.dk/gpaw/devel/electrostatic_potential.html tells us, the
# get_electrostatic_corrections() method will return an array of integrated
# corrections with the unit
# [corrections] = eV Angstrom^3
# However,
# https://wiki.fysik.dtu.dk/gpaw/tutorials/ps2ae/ps2ae.html?highlight=get_electrostatic_potential#gpaw.utilities.ps2ae.PS2AE.get_electrostatic_potential
# states, the interpolated ESP PS2AE.get_electrostatic_potential(ae=True,
# rcgauss=0.02) is given in
# [U_interpolated] = eV
# No unit information has been found on the
# gpaw.calculator.GPAW.get_electrostatic_potential() called here,
# however we assume homogeneous units throughout GPAW for now
phi = calc.get_electrostatic_potential()
# potential query comes from gpaw/hamiltonian.py
# def get_electrostatic_potential(self, dens):
#     self.update(dens)
#
#     v_g = self.finegd.collect(self.vHt_g, broadcast=True)
#     v_g = self.finegd.zero_pad(v_g)
#     if hasattr(self.poisson, 'correction'):
#         assert self.poisson.c == 2
#         v_g[:, :, 0] = self.poisson.correction
#     return v_g
#
# A comment from the same file ...
# The XC-potential and the Hartree potential are evaluated on the fine grid,
# and the sum is then restricted
# to the coarse grid.
# ... and a note from https://wiki.fysik.dtu.dk/gpaw/algorithms.html?highlight=fine%20grid ...
# Finite-difference (FD):
# Uniform real-space orthorhombic grids. Two kinds of grids are involved in
# the calculations: A coarse grid used for the wave functions and a fine
# grid (23=8 times higher grid point density) used for densities and
# potentials. The pseudo electron density is first calculated on the coarse
# grid from the wave functions, and then interpolated to the fine grid, where
# compensation charges are added for achieving normalization. The effective
# potential is evaluated on the fine grid (solve the Poisson equation and
# calculate the exchange-correlation potential) and then restricted to the
# coarse grid where it needs to act on the wave functions (also on the
# coarse grid).
# ... tell us: potential has twice as many grid points in each spatial
# dimension as the actual number of coarse grid points queried by
# "calc.get_number_of_grid_points()"
nX = phi.shape # = 2 * calc.get_number_of_grid_points()
X = struc.cell.diagonal()
x_grid = np.linspace(0,X[0],nX[0])
y_grid = np.linspace(0,X[1],nX[1])
z_grid = np.linspace(0,X[2],nX[2])
# NOTE(review): np.meshgrid defaults to indexing='xy', which swaps the first
# two axes relative to phi's (nx, ny, nz) layout -- for non-cubic grids the
# flattened coordinates would not line up with phi.flatten(); confirm whether
# indexing='ij' was intended (here nx == ny so the shapes still match).
x_grid3,y_grid3,z_grid3=np.meshgrid(x_grid,y_grid,z_grid)
# https://theochem.github.io/horton/2.1.0b3/lib/mod_horton_units.html?highlight=units#module-horton.units
# apparently, Horton internally uses atomic units.
# If this applies strictly, we have electron mass m_e = 1, electron charge e=1,
# reduced Planck's constant h_bar = 1 and Coulomb force constant
# k_e = 1 / (4 Pi eps_0 ) = 1 per definition
# Furthermore, it should expect
# length in Bohr (a_0) , defined as 4 Pi eps_0 h_bar^2 / (m_e e^2) = 1
# energy in Hartree (E_h), defined as m_e e^4 / (4 Pi eps_0 h_bar)^2 = 1
# electric potential, defined as E_h / e = 1
# thus, GPAW potential in units of "eV"
# are to be converted to units of "E_h / e = m_e e^3 / (4 Pi eps_0 h_bar)^2"
# U_hor = U_gpw * E_h / (e*eV)
# we use
# ase.units.Hartree = 27.211386024367243 (eV)
phi_hartree = phi / Hartree
# put potential in grid points and xyz-coordinates in csv-file format
# (four %.8e values, seperated by whitespace)
# as expected by resp FORTRAN code 2.1 (October 1994 Jim Caldwell)
dat = np.vstack( ( phi_hartree.flatten(), x_grid3.flatten()/Bohr,
    y_grid3.flatten()/Bohr, z_grid3.flatten()/Bohr ) ).T
# spatial units are converted to Bohr. What unit is the potential?
# Division (not multiplication) is necessary here, as ase.units.Bohr defined as
# u['Bohr'] = (4e10 * pi * u['_eps0'] * u['_hbar']**2 / u['_me'] / u['_e']**2) # Bohr radius
# with unit [ Bohr ] = Angstrom / Bohr
# ase.units.Bohr = 0.5291772105638411 (Ang)
write(args.outfile_cube, struc, data=phi_hartree)
# apparently the native GAUSSIAN format for ESP, readible by Horton
np.savetxt(args.outfile_csv,dat,fmt='%.8e',delimiter=' ')
#grid = calc.hamiltonian.gd.get_grid_point_coordinates()
#dat2 = np.vstack( ( phi.flatten(), np.concatenate(([0],grid[0,:,:,:].flatten().T),axis=0).T, \
#    np.concatenate(([0],grid[1,:,:,:].flatten().T),axis=0).T, \
#    np.concatenate(([0],grid[2,:,:,:].flatten().T),axis=0).T ) ).T
#np.savetxt(args.outfile_csv +'.compare',dat2,fmt='%.8e',delimiter=' ')
# https://wiki.fysik.dtu.dk/gpaw/tutorials/bader/bader.html#bader-analysis
rho_pseudo = calc.get_pseudo_density()
rho = calc.get_all_electron_density()
# https://wiki.fysik.dtu.dk/gpaw/tutorials/all-electron/all_electron_density.html:
# As the all-electron density has more structure than the pseudo-density, it is
# necessary to refine the density grid used to represent the pseudo-density.
# This can be done using the gridrefinement keyword of the
# get_all_electron_density method:
#
# >>> n = calc.get_all_electron_density(gridrefinement=2)
#
# Current only the values 1, 2, and 4 are supported (2 is default).
# https://wiki.fysik.dtu.dk/gpaw/tutorials/bader/bader.html
# gives an example on how to convert and extract the electron densities:
rho_pseudo_per_bohr_cube = rho_pseudo * Bohr**3
rho_per_bohr_cube = rho * Bohr**3
write(args.outfile_rho_cube, struc, data=rho_per_bohr_cube)
write(args.outfile_rho_pseudo_cube, struc, data=rho_pseudo_per_bohr_cube)
|
994,148 | cdba7675594ef20fc33e3d310fb8e828355ad0b7 | """
test_passwordless
~~~~~~~~~~~~~~~~~
Passwordless tests
"""
import re
import time
from urllib.parse import parse_qsl, urlsplit
import warnings
import pytest
from flask import Flask
from tests.test_utils import (
capture_flashes,
capture_passwordless_login_requests,
logout,
)
from flask_security import Security, UserMixin, login_instructions_sent
pytestmark = pytest.mark.passwordless()
def test_passwordless_flag(app, client, get_message):
    """End-to-end coverage of the passwordless login flow.

    Exercises: disabled accounts, JSON and form login requests, token
    delivery via both the signal and email, token consumption, token reuse
    while already authenticated, and invalid token / unknown user errors.
    """
    # Every user the login_instructions_sent signal fires for is collected
    # here so we can assert how many times instructions were actually sent.
    recorded = []

    @login_instructions_sent.connect_via(app)
    def on_instructions_sent(app, user, login_token):
        # Sanity-check the signal payload types before recording the user.
        assert isinstance(app, Flask)
        assert isinstance(user, UserMixin)
        assert isinstance(login_token, str)
        recorded.append(user)

    # Test disabled account
    response = client.post(
        "/login", data=dict(email="tiya@lp.com"), follow_redirects=True
    )
    assert get_message("DISABLED_ACCOUNT") in response.data

    # Test login with json and valid email
    data = dict(email="matt@lp.com")
    response = client.post(
        "/login", json=data, headers={"Content-Type": "application/json"}
    )
    assert response.status_code == 200
    assert len(recorded) == 1
    assert len(app.mail.outbox) == 1

    # Test login with json and invalid email
    data = dict(email="nobody@lp.com")
    response = client.post(
        "/login", json=data, headers={"Content-Type": "application/json"}
    )
    assert b"errors" in response.data

    # Test sends email and shows appropriate response
    with capture_passwordless_login_requests() as requests:
        response = client.post(
            "/login", data=dict(email="matt@lp.com"), follow_redirects=True
        )
    assert len(recorded) == 2
    assert len(requests) == 1
    assert len(app.mail.outbox) == 2
    assert "user" in requests[0]
    assert "login_token" in requests[0]
    user = requests[0]["user"]
    assert get_message("LOGIN_EMAIL_SENT", email=user.email) in response.data

    # Consuming the captured token should log the user in.
    token = requests[0]["login_token"]
    response = client.get("/login/" + token, follow_redirects=True)
    assert get_message("PASSWORDLESS_LOGIN_SUCCESSFUL") in response.data

    # Test already authenticated: reusing the token must NOT report a fresh login.
    response = client.get("/login/" + token, follow_redirects=True)
    assert get_message("PASSWORDLESS_LOGIN_SUCCESSFUL") not in response.data
    logout(client)

    # Test invalid token
    response = client.get("/login/bogus", follow_redirects=True)
    assert get_message("INVALID_LOGIN_TOKEN") in response.data

    # Test login request with invalid email
    response = client.post("/login", data=dict(email="bogus@bogus.com"))
    assert get_message("USER_DOES_NOT_EXIST") in response.data
def test_passwordless_template(app, client, get_message):
    """Verify the email template renders all expected context variables."""
    # Check contents of email template - this uses a test template
    # in order to check all context vars since the default template
    # doesn't have all of them.
    with capture_passwordless_login_requests() as requests:
        client.post("/login", data=dict(email="joe@lp.com"), follow_redirects=True)
    outbox = app.mail.outbox
    assert len(outbox) == 1
    # The test template emits one "name: value" line per context variable.
    matcher = re.findall(r"\w+:.*", outbox[0].body, re.IGNORECASE)
    # should be 4 - link, email, token, config item
    assert matcher[1].split(":")[1] == "joe@lp.com"
    assert matcher[2].split(":")[1] == requests[0]["login_token"]
    assert matcher[3].split(":")[1] == "True"  # register_blueprint
    # check link - split on the first ":" only, since the URL itself contains one.
    link = matcher[0].split(":", 1)[1]
    response = client.get(link, follow_redirects=True)
    assert get_message("PASSWORDLESS_LOGIN_SUCCESSFUL") in response.data
@pytest.mark.settings(login_within="1 milliseconds")
def test_expired_login_token(client, app, get_message):
    """A login token older than the configured window must be rejected."""
    e = "matt@lp.com"
    with capture_passwordless_login_requests() as requests:
        client.post("/login", data=dict(email=e), follow_redirects=True)
    token = requests[0]["login_token"]
    user = requests[0]["user"]
    # The validity window is 1 ms; sleeping a full second guarantees expiry.
    time.sleep(1)
    response = client.get("/login/" + token, follow_redirects=True)
    assert (
        get_message("LOGIN_EXPIRED", within="1 milliseconds", email=user.email)
        in response.data
    )
@pytest.mark.settings(
    redirect_host="localhost:8081",
    redirect_behavior="spa",
    post_login_view="/login-redirect",
)
def test_spa_get(app, client):
    """
    Test 'single-page-application' style redirects
    This uses json only.
    """
    # SPA mode must not flash messages - everything is carried in the redirect URL.
    with capture_flashes() as flashes:
        with capture_passwordless_login_requests() as requests:
            response = client.post(
                "/login",
                json=dict(email="matt@lp.com"),
                headers={"Content-Type": "application/json"},
            )
            assert response.headers["Content-Type"] == "application/json"
        token = requests[0]["login_token"]
        response = client.get("/login/" + token)
        assert response.status_code == 302
        # The redirect must target the configured SPA host/view, with the
        # identity passed as a query parameter.
        split = urlsplit(response.headers["Location"])
        assert "localhost:8081" == split.netloc
        assert "/login-redirect" == split.path
        qparams = dict(parse_qsl(split.query))
        assert qparams["email"] == "matt@lp.com"
    assert len(flashes) == 0
@pytest.mark.settings(
    login_within="1 milliseconds",
    redirect_host="localhost:8081",
    redirect_behavior="spa",
    login_error_view="/login-error",
)
def test_spa_get_bad_token(app, client, get_message):
    """Test expired and invalid token"""
    # SPA mode: errors are delivered via redirect query params, never flashes.
    with capture_flashes() as flashes:
        with capture_passwordless_login_requests() as requests:
            response = client.post(
                "/login",
                json=dict(email="matt@lp.com"),
                headers={"Content-Type": "application/json"},
            )
            assert response.headers["Content-Type"] == "application/json"
        token = requests[0]["login_token"]
        # Validity window is 1 ms, so the token is guaranteed expired by now.
        time.sleep(1)
        response = client.get("/login/" + token)
        assert response.status_code == 302
        split = urlsplit(response.headers["Location"])
        assert "localhost:8081" == split.netloc
        assert "/login-error" == split.path
        qparams = dict(parse_qsl(split.query))
        # An expired (but decodable) token still identifies the user.
        assert all(k in qparams for k in ["email", "error", "identity"])
        msg = get_message("LOGIN_EXPIRED", within="1 milliseconds", email="matt@lp.com")
        assert msg == qparams["error"].encode("utf-8")

        # Test mangled token
        token = (
            "WyIxNjQ2MzYiLCIxMzQ1YzBlZmVhM2VhZjYwODgwMDhhZGU2YzU0MzZjMiJd."
            "BZEw_Q.lQyo3npdPZtcJ_sNHVHP103syjM"
            "&url_id=fbb89a8328e58c181ea7d064c2987874bc54a23d"
        )
        response = client.get("/login/" + token)
        assert response.status_code == 302
        split = urlsplit(response.headers["Location"])
        assert "localhost:8081" == split.netloc
        assert "/login-error" == split.path
        qparams = dict(parse_qsl(split.query))
        # An undecodable token cannot identify anyone - only "error" is present.
        assert len(qparams) == 1
        assert all(k in qparams for k in ["error"])
        msg = get_message("INVALID_LOGIN_TOKEN")
        assert msg == qparams["error"].encode("utf-8")
    assert len(flashes) == 0
def test_deprecated(app, sqlalchemy_datastore):
    """Enabling the passwordless feature must emit a deprecation warning."""
    with warnings.catch_warnings(record=True) as caught:
        # Record every warning, even ones already seen in this process.
        warnings.simplefilter("always")
        security = Security()
        security.init_app(app, sqlalchemy_datastore)
    messages = [str(w.message) for w in caught]
    assert any("passwordless feature" in text for text in messages)
|
994,149 | b3b6fb5557c71c6d7b6fd053b455bc351f67f571 | from django.db import models
from django.utils.translation import gettext as _
from django.urls import reverse
from django.forms import ModelForm
# NOTE(review): this bare, top-level Meta class is never referenced by the
# model below (a Django model needs its own *nested* Meta) - it appears to be
# dead code left over from a refactor; confirm with the author before removing.
class Meta:
    managed = True
class Squirrels(models.Model):
    """One squirrel sighting record (NYC Squirrel Census style data).

    Primary key is the census-assigned unique squirrel ID. Behaviour flags
    (Running, Chasing, ...) mirror the raw CSV and are stored as the strings
    'TRUE' / 'FALSE' rather than BooleanFields.
    """

    # Sighting coordinates.
    X = models.FloatField(
        help_text=_('Longitude'),
    )
    Y = models.FloatField(
        help_text=_('Latitude'),
    )
    Unique_squirrel_id = models.CharField(
        max_length=100,
        help_text=_('Unique Squirrel ID'),
        primary_key=True,
        default=None,
    )

    # Observation shift (time of day).
    PM = 'PM'
    AM = 'AM'
    SHIFT_CHOICES = (
        (PM, 'PM'),
        (AM, 'AM'),
    )
    Shift = models.CharField(
        max_length=100,
        choices=SHIFT_CHOICES,
    )
    Date = models.DateField(
        help_text=_('Date'),
    )

    # Approximate age class; null when the observer could not tell.
    Adult = 'Adult'
    Juvenile = 'Juvenile'
    AGE_CHOICES = (
        (Adult, 'Adult'),
        (Juvenile, 'Juvenile'),
    )
    Age = models.CharField(
        max_length=100,
        choices=AGE_CHOICES,
        null=True)

    GRAY = 'Gray'
    CINNAMON = 'Cinnamon'
    BLACK = 'Black'
    COLOR_CHOICES = (
        (GRAY, 'Gray'),
        (CINNAMON, 'Cinnamon'),
        (BLACK, 'Black'),
    )
    Primary_Fur_Color = models.CharField(
        help_text=_('Primary Fur Color'),
        max_length=20,
        choices=COLOR_CHOICES,
        null=True,
    )

    # NOTE(review): the original class defined ``Location`` twice; the first
    # definition (max_length=20 with LOCATION_CHOICES) was silently shadowed
    # by the free-text definition below, so it never took effect. The dead
    # first definition has been removed to match the actual behaviour; the
    # choice constants are kept in case callers reference them or the
    # constrained field is ever reinstated (that would require a migration).
    GROUND_PLANE = 'Ground Plane'
    ABOVE_GROUND = 'Above Ground'
    LOCATION_CHOICES = (
        (GROUND_PLANE, 'Ground Plane'),
        (ABOVE_GROUND, 'Above Ground'),
    )
    Location = models.CharField(
        max_length=100,
        help_text=_('Location'),
        null=True
    )
    Specific_location = models.CharField(
        max_length=100,
        help_text=_('Specific Location'),
        null=True
    )

    # Shared choices for all behaviour/interaction flags below.
    TRUE = 'TRUE'
    FALSE = 'FALSE'
    CHOICES = (
        (TRUE, 'TRUE'),
        (FALSE, 'FALSE'),
    )
    Running = models.CharField(
        max_length=100,
        choices=CHOICES,
        help_text=_('Running'))
    Chasing = models.CharField(
        max_length=100,
        choices=CHOICES,
        help_text=_('Chasing'))
    Climbing = models.CharField(
        max_length=100,
        choices=CHOICES,
        help_text=_('Climbing'))
    Eating = models.CharField(
        max_length=100,
        choices=CHOICES,
        help_text=_('Eating'))
    Foraging = models.CharField(
        max_length=100,
        choices=CHOICES,
        help_text=_('Foraging'))
    Other_activities = models.CharField(
        max_length=100,
        help_text=_('Other Activities'),
        null=True)
    Kuks = models.CharField(
        max_length=100,
        choices=CHOICES,
        help_text=_('Kuks'))
    Quaas = models.CharField(
        max_length=100,
        choices=CHOICES,
        help_text=_('Quaas'))
    Moans = models.CharField(
        max_length=100,
        choices=CHOICES,
        help_text=_('Moans'))
    Tail_flags = models.CharField(
        max_length=100,
        choices=CHOICES,
        help_text=_('Tail flags'))
    Tail_twitches = models.CharField(
        max_length=100,
        choices=CHOICES,
        help_text=_('Tail twitches'))
    Approaches = models.CharField(
        max_length=100,
        choices=CHOICES,
        help_text=_('Approaches'))
    Indifferent = models.CharField(
        max_length=100,
        choices=CHOICES,
        help_text=_('Indifferent'))
    Runs_from = models.CharField(
        max_length=100,
        choices=CHOICES,
        help_text=_('Runs_from'))

    def __str__(self):
        """Display a sighting as its unique squirrel ID (the primary key)."""
        return self.Unique_squirrel_id

    def get_absolute_url(self):
        """Return the URL of this sighting's detail page."""
        return reverse('squirrels-detail', kwargs={'id': self.Unique_squirrel_id})
# Create your models here.
|
994,150 | b69cc5c07cc0be99471c1fa869d09c382749cae5 | __author__ = 'julian'
from agent import Agent
from house import House
import pandas as pd
class Dilemma():
    """One rental-market session: agents compete to rent houses over rounds.

    Prices rise after a successful rental and fall after a failed one; with
    ``treatment`` enabled the price is clamped between low_limit/high_limit.
    NOTE: this module uses Python 2 ``print`` statement syntax.
    """
    # Class-level counter used to assign sequential session ids.
    dilemma_count = 0

    def __init__(self, num_rounds, num_houses):
        self.id = Dilemma.dilemma_count + 1
        self.round = 0
        self.final_round = None # Final round with rented houses
        self.number_rented_house = 0 # Number of rented houses
        self.num_rounds = num_rounds
        self.num_houses = num_houses
        self.house = None
        self.agents = []
        self.houses = []
        self.treatment = None
        self.high_limit = None
        self.low_limit = None
        Dilemma.dilemma_count +=1

    # Running the dilemma
    def runDilemma(self, agents, house, price_diff, treatment, **kwargs):
        """Simulate num_rounds rounds, adjusting price by factor price_diff.

        kwargs: high_limit / low_limit - price clamps used when treatment is on.
        """
        self.high_limit = kwargs.get('high_limit', None)
        self.low_limit = kwargs.get('low_limit', None)
        self.house = house # first house of the session
        self.agents = agents # all agents in the session
        self.treatment = treatment # treatment of the session
        for r in range(self.num_rounds):
            # in each round do actions
            is_rented, renter = self.doActions(r)
            if is_rented: # increment o decrement function of the last action
                self.house.rounds_rented.append(1)
                # In case apply the treatment and the price get the limit we continue with the same price, else we
                # increment the price
                if self.treatment:
                    if self.house.price * price_diff > self.high_limit:
                        new_price = self.house.price
                    else:
                        new_price = self.house.price * price_diff
                else:
                    new_price = self.house.price * price_diff
                # Archive the rented house and put a fresh one on the market.
                self.house = self.house.updateRenting(renter, r+1)
                self.houses.append(self.house)
                # Create a new house
                self.house = House(new_price, r+1)
            else:
                self.house.rounds_rented.append(0)
                # In case apply the treatment and the price get the limit we continue with the same price, else we
                # decrease the price
                if self.treatment:
                    if self.house.price / price_diff < self.low_limit:
                        new_price = self.house.price
                    else:
                        new_price = self.house.price / price_diff
                else:
                    new_price = self.house.price / price_diff
                self.house.price = new_price
                self.house.prices.append(new_price)
                self.house.rounds.append(r+1)
        #print 'End dilemma'

    def doActions(self, round):
        """Resolve one round; returns (is_rented, renter_agent_or_None).

        The house goes to the fastest-deciding house-less agent that chose to
        rent. NOTE(review): parameter ``round`` shadows the builtin of the
        same name (harmless here, but worth renaming).
        """
        self.round = round # current round
        idx_renter = None # index_renter
        is_rented = False
        # print '---- Round %d ----' %(round+1)
        # actions for each agent
        for index, a in enumerate(self.agents):
            # agent want to rent and agent has no house
            if a.decisions[round] == 1 and a.house is None:
                # In case of first round
                if idx_renter is None:
                    idx_renter = index
                # In the other cases get the house the faster
                if self.agents[idx_renter].time_decisions[round] > a.time_decisions[round]:
                    idx_renter = index
        # house rented in this round
        if idx_renter is not None:
            is_rented = True # house is rented
            self.agents[idx_renter].house = self.house
            self.agents[idx_renter].endowment = self.agents[idx_renter].endowment - self.house.price
            self.number_rented_house += 1
            if self.number_rented_house == self.num_houses:
                self.final_round = self.round
            #print '<<<<<<<<<< Round %d >>>>>>>>>>' %(round+1)
            #self.agents[idx_renter].displayAgent()
            return is_rented, self.agents[idx_renter]
        else:
            # NOTE(review): the bare string below is a no-op statement -
            # presumably a leftover debug message that was meant to be printed.
            'Non-agent'
            return is_rented, None

    def displaySession(self):
        """Print the price evolution of every archived house in this session."""
        print 'Session %d' %(self.id)
        for h in self.houses:
            #h.displayHouse()
            h.displayPriceEvolution()

    def displaySessionById(self, id):
        """Like displaySession, but only when this session matches ``id``."""
        if self.id == id:
            print 'Session %d' %(self.id)
            for h in self.houses:
                #h.displayHouse()
                h.displayPriceEvolution()

    def priceHousesRented(self):
        """Return the list of prices of houses that actually found a renter."""
        prices = []
        for h in self.houses:
            if h.renter is not None:
                prices.append(h.price)
        return prices
|
994,151 | dcfa564e3b1b8bd22de06f483276f5c6ad983f0e | #! /usr/bin/env python
# ----------------------------------------------------------------
# @author: Shamail Tayyab
# @date: Thu Apr 4 12:33:00 IST 2013
#
# @desc: Redis/Python ORM for storing relational data in redis.
# ----------------------------------------------------------------
import inspect
import redis
r = redis.Redis ()
r.flushall ()
class classproperty(object):
    """Read-only property evaluated against the owning *class*.

    Unlike the builtin ``property``, the wrapped callable receives the class
    itself, so the attribute works on both the class and its instances.
    """

    def __init__(self, fget):
        self._fget = fget

    def __get__(self, obj, objtype):
        # The instance (obj) is deliberately ignored; dispatch on the class.
        return self._fget(objtype)
class RField ():
    """
    This class defined a field in Redis Database, similar to a column in a Relational DB.

    Keyword arguments:
        required -- whether callers must supply a value for this field.
        default  -- value used when no explicit value is provided.
    """

    # Class-level fallbacks; __init__ overrides them per instance from **kw.
    # If this field is mandatory.
    required = False
    # The default value of this field, if not provided.
    default = None

    def __init__ (self, *k, **kw):
        # dict.has_key() was removed in Python 3; the `in` operator is the
        # exact equivalent and also works on Python 2.
        if "required" in kw:
            self.required = kw['required']
        if "default" in kw:
            self.default = kw['default']
class StringField (RField):
    """
    @inherit RField
    Marker field type for storing a String value; all behaviour lives in RField.
    """
    pass
class IntField (RField):
    """
    @inherit RField
    Marker field type for storing an Integer value; all behaviour lives in RField.
    """
    pass
class ForeignKey (RField):
    """
    @inherit RField
    One-to-one relation field: stores a reference to a row of another RModel.
    """

    def __init__(self, *args, **kwargs):
        RField.__init__(self, *args, **kwargs)
        # The first positional argument names the related model class;
        # RModel.__setattr__/__getattribute__ use it to resolve the target.
        self.relation = args[0]
class RModel (object):
    """
    The actual Redis based model class implementation.

    Field access is intercepted via __getattribute__/__setattr__: reads on a
    saved object go straight to Redis, writes are buffered in ``keyvals``
    until save(). Keys follow the pattern ``rmodel:<class>:<seq>:<field>``.
    """
    # Every model row gets an auto-incremented integer id.
    id = IntField ()
    # NOTE(review): class-level mutable defaults; they are shadowed by
    # per-instance dicts/lists in __init__, so sharing is avoided there.
    keyvals = {}
    locals = []

    def __init__ (self, *k, **kw):
        """
        Stores the provided values.

        Raises Exception for a required field with no value and no default.
        """
        self.newobj = True
        self.keyvals = {}
        self.locals = []
        # reinit() touches every attribute, which (via __getattribute__)
        # populates self.locals with the names of all RField attributes.
        self.reinit ()
        for i in self.locals:
            fieldobj = object.__getattribute__(self, i)
            if kw.has_key (i):
                self.keyvals[i] = kw[i]
            else:
                if fieldobj.required == True:
                    if fieldobj.default is not None:
                        self.keyvals[i] = fieldobj.default
                    else:
                        raise Exception ("Need a default value for %s" % (i))

    def from_id (self, id):
        """
        Loads a model from its ID (lazily - fields are fetched on access).
        """
        self.seq = int(id)
        self.newobj = False
        return self

    def reinit (self):
        """
        Reloads the properties of this class from Database.

        getmembers() is called purely for its side effect: it triggers
        __getattribute__ on every attribute, registering RFields in locals.
        """
        #for name, obj in inspect.getmembers (self):
        ## if isinstance (obj, RField):
        # self.keyvals[name] = obj.default
        inspect.getmembers (self)

    def validate (self):
        """
        Validations for a Field.

        NOTE(review): this method references undefined names (kw, name, obj)
        and would raise NameError if called - it looks like dead code copied
        from an earlier version of __init__; confirm before relying on it.
        """
        if kw.has_key (name):
            self.keyvals[name] = kw[name]
        elif obj.default is not None:
            self.keyvals[name] = obj.default
        else:
            if obj.required:
                raise AttributeError ("This field is required")

    @property
    def classkey (self):
        """
        Generates the Key for this class, e.g. 'rmodel:user'.
        """
        return 'rmodel:%s' % (self.__class__.__name__.lower ())

    def sequence (self):
        """
        Sequence Generator, uses Redis's atomic operation (INCR).
        """
        seq_av_at = "%s:__seq__" % (self.classkey)
        seq = r.incr (seq_av_at)
        return seq

    def prepare_key (self, key, for_seq):
        """
        Prepares a key to be stored for this class: '<classkey>:<seq>:<field>'.
        """
        r_key = "%s:%d:%s" % (self.classkey, for_seq, key)
        return r_key

    def save (self):
        """
        Persist this object into the Redis Database.

        New objects get a fresh sequence id; the write buffer is flushed and
        cleared, and the object switches to "persisted" mode.
        """
        if self.newobj:
            using_sequence = self.sequence ()
            self.keyvals['id'] = using_sequence
            self.seq = using_sequence
        else:
            using_sequence = self.seq
        for key, val in self.keyvals.items ():
            r_key = self.prepare_key (key, using_sequence)
            r.set (r_key, val)
        self.keyvals = {}
        self.newobj = False

    @classproperty
    def objects (self):
        """
        Supports UserClass.objects.all () like stuff.
        (classproperty passes the class itself as ``self`` here.)
        """
        return InternalObjectList (self)

    def __getattribute__ (self, attr):
        """
        Getter for this class.

        Non-RField attributes pass straight through. For RFields: unsaved
        objects read the write buffer (or the field default); saved objects
        read Redis, resolving ForeignKeys back into model instances.
        """
        attrib = object.__getattribute__(self, attr)
        if not isinstance (attrib, RField):
            return attrib
        # Side effect relied upon by __init__/reinit: record field names.
        if attr not in self.locals:
            self.locals.append (attr)
        if self.newobj:
            if self.keyvals.has_key (attr):
                return self.keyvals[attr]
            else:
                fieldobj = object.__getattribute__(self, attr)
                return fieldobj.default
        answer = r.get (self.prepare_key (attr, self.seq))
        fieldobj = object.__getattribute__(self, attr)
        if answer == None:
            answer = fieldobj.default
        else:
            if isinstance (fieldobj, ForeignKey):
                # Resolve the stored id into an instance of the related class
                # (looked up by name in this module's globals).
                fkey = r.get (self.prepare_key ('__relationfor__', self.seq))
                cls = globals ()[fkey]
                return cls.objects.get (id = answer)
        return answer

    def __setattr__ (self, attr, val):
        """
        Setter for this class.

        Non-RField attributes are set normally; RField writes go into the
        keyvals buffer (ForeignKeys store the related object's id plus the
        relation's class name under '__relationfor__').
        """
        try:
            attrib = object.__getattribute__(self, attr)
        except AttributeError:
            object.__setattr__ (self, attr, val)
            return
        if not isinstance (attrib, RField):
            object.__setattr__ (self, attr, val)
            return
        if isinstance (attrib, ForeignKey):
            self.keyvals[attr] = val.id
            self.keyvals['__relationfor__'] = attrib.relation
        else:
            self.keyvals[attr] = val
class InternalObjectList (object):
    """
    The query object behind ``SomeModel.objects`` - supports get()/get_by_id().
    """

    def __init__(self, classfor):
        # The RModel subclass whose rows this query object produces.
        self.classfor = classfor

    def get_by_id(self, id):
        """
        Return a lazily-loaded model instance bound to the given row id.

        (The original version carried an unreachable for-loop after the
        return statement; that dead code has been removed.)
        """
        clsfor_obj = self.classfor()
        clsfor_obj.from_id(id)
        return clsfor_obj

    def get(self, *k, **kw):
        """
        Look an object up by one of its properties; currently only ``id``
        is supported. Returns None for any other lookup.
        """
        # dict.has_key() is Python-2-only; `in` is equivalent and portable.
        if 'id' in kw:
            return self.get_by_id(kw['id'])
if __name__ == "__main__":
# Lets define a Profile Class which is a Redis Based Model (inherits RModel).
class Profile (RModel):
fbid = StringField ()
# Again, lets define a User.
class User (RModel):
# A Field that can store a String.
username = StringField (required = True)
first_name = StringField (required = True)
last_name = StringField ()
password = StringField (required = True)
email = StringField (required = True)
# Lets now define a Table which will act as foreign key for another table.
class FK (RModel):
# Can store a String.
name = StringField ()
# Lets now define another Table Test that will have a property for ForeignKey
class Test (RModel):
username = StringField ()
# Stores a String
password = StringField ()
# Refers to another Table called 'FK'.
rel = ForeignKey ('FK')
# Stores a String with some default value.
defa = StringField (default = 'a')
# Stores a String with some validation.
req = StringField (required = True, default = 'abc')
# Creates an object of FK
fk = FK (name = 'abc')
fk.save ()
# See if the object is creates?
print "FKID:", fk.id
# Lets now create an object for Test
t = Test (username = "u", password = "p")
# Put the previous object as its relation reference.
t.rel = fk
# Save it.
t.save ()
print t.id
# See what we get back is the object itself!!
k= t.rel
print "Name:", k.name
#t.username = "new"
#t.save ()
#t = Test ()
#t.username = 22
# Lets see what keys were saved in the DB.
for i in r.keys ():
print i, r.get (i)
|
994,152 | b14e07c70375ae7e24e3bb60cec32b869746f50e | import tinychain as tc
import unittest
from testutils import start_host
class TestGraph(tc.graph.Graph):
    """Minimal e-commerce style graph (users, products, orders) for testing."""

    __uri__ = tc.URI("/test/graph")

    def _schema(self):
        """Define the three tables and the edges connecting them."""
        users = tc.table.Schema(
            [tc.Column("user_id", tc.U64)],
            [tc.Column("email", tc.String, 320), tc.Column("display_name", tc.String, 100)])
        products = tc.table.Schema(
            [tc.Column("sku", tc.U64)],
            [tc.Column("name", tc.String, 256), tc.Column("price", tc.U32)])
        # Orders are indexed by user and by product so edge traversals are cheap.
        orders = tc.table.Schema(
            [tc.Column("order_id", tc.U64)],
            [tc.Column("user_id", tc.U64), tc.Column("sku", tc.U64), tc.Column("quantity", tc.U32)]
        ).create_index("user", ["user_id"]).create_index("product", ["sku"])
        schema = (tc.graph.Schema(tc.chain.Block)
                  .create_table("users", users)
                  .create_table("products", products)
                  .create_table("orders", orders)
                  .create_edge("friends", tc.graph.edge.Schema("users.user_id", "users.user_id"))
                  .create_edge("order_products", tc.graph.edge.Schema("products.sku", "orders.sku"))
                  .create_edge("user_orders", tc.graph.edge.Schema("users.user_id", "orders.user_id")))
        return schema

    @tc.put_method
    def add_product(self, sku: tc.U64, data: tc.Map):
        """Insert a product row keyed by SKU."""
        return self.products.insert([sku], [data["name"], data["price"]])

    @tc.put_method
    def create_user(self, user_id: tc.U64, data: tc.Map):
        """Insert a user row keyed by user_id."""
        return self.users.insert([user_id], [data["email"], data["display_name"]])

    @tc.put_method
    def add_friend(self, user_id: tc.U64, friend: tc.U64):
        """Create a friendship; edges are added in both directions."""
        return self.add_edge("friends", user_id, friend), self.add_edge("friends", friend, user_id)

    @tc.post_method
    def place_order(self, user_id: tc.U64, sku: tc.U64, quantity: tc.U64):
        """Insert an order with the next sequential order id; returns that id."""
        order_id = self.orders.max_id() + 1
        return tc.After(self.orders.insert([order_id], [user_id, sku, quantity]), order_id)

    @tc.get_method
    def recommend(self, txn, user_id: tc.U64):
        """Recommend products bought by the user's friend network.

        Builds a one-hot user vector, expands it via the "friends" edge
        (match with argument 2 - presumably the traversal depth; confirm
        against the tc.graph edge API), then follows user->order->product
        edges and reads the matching product rows.
        """
        txn.vector = tc.tensor.Sparse.zeros([tc.I64.max()], tc.Bool)
        txn.user_ids = tc.After(txn.vector.write([user_id], True), txn.vector)
        txn.friend_ids = tc.If(
            user_id.is_some(),
            self.friends.match(txn.user_ids, 2),
            tc.error.BadRequest("invalid user ID: {{user_id}}", user_id=user_id))
        txn.order_ids = self.user_orders.forward(txn.friend_ids)
        txn.product_ids = self.order_products.forward(txn.order_ids)
        return self.products.read_vector(txn.product_ids)
# TODO: test replication between multiple hosts & interaction between multiple apps, including a18n
class GraphTests(unittest.TestCase):
    """Integration tests that exercise TestGraph against a live test host."""

    @classmethod
    def setUpClass(cls):
        # One shared host for the whole class, started fresh (overwrite=True).
        cls.host = start_host("test_app", [TestGraph], overwrite=True, cache_size="1G")

    def testTraversal(self):
        """Create users/products and an order, then check friend-based recommendations."""
        user1 = {"email": "user12345@example.com", "display_name": "user 12345"}
        self.host.put("/test/graph/create_user", 12345, user1)
        user2 = {"email": "user23456@example.com", "display_name": "user 23456"}
        self.host.put("/test/graph/create_user", 23456, user2)
        self.host.put("/test/graph/add_friend", 12345, 23456)
        product1 = {"name": "widget 1", "price": 399}
        self.host.put("/test/graph/add_product", 1, product1)
        product2 = {"name": "widget 2", "price": 499}
        self.host.put("/test/graph/add_product", 2, product2)
        # User 23456 (friend of 12345) orders widget 1 ...
        order = {"user_id": 23456, "sku": 1, "quantity": 5}
        _order_id = self.host.post("/test/graph/place_order", order)
        # ... so widget 1 should be recommended to user 12345.
        recommended = self.host.get("/test/graph/recommend", 12345)
        self.assertEqual(recommended, [[1, "widget 1", 399]])

    @classmethod
    def tearDownClass(cls):
        cls.host.stop()
if __name__ == "__main__":
unittest.main()
|
994,153 | 2ba219e22f32cda13de75827b39586633f7c6ddb | # -*- coding: utf-8 -*-
"""
Created on Fri Nov 03 14:59:03 2017
统计基因类型
@author: Administrator
"""
import numpy as np
#from pandas import DataFrame as df
import pandas as pd
import datetime
value12=[]
with open("Methlytion4.3-new.txt",'r') as f:
m=0
for line in f.readlines():
line = line.strip('\n')
data = line.split("\t")
coll=len(data)
m=m+1
value12.append(data)
file12=[]
with open("geneGTF_file.txt",'r') as f1:
for lines in f1.readlines():
lines=lines.strip('\n')
datas=lines.split('\t')
file12.append(datas)
print m,coll
rows=m
cols=coll+1
myList = [([0] * cols) for i in range(rows)]
def write_list_to_file(filename):
matrix_a = np.array(myList)
np.savetxt(filename,matrix_a,fmt=['%s']*matrix_a.shape[1],delimiter='\t',newline='\n')
if __name__ == '__main__':
s_time=datetime.datetime.now()
i=1
myList[0][:-1]=value12[0]
myList[0][-1]=file12[0][-2]
myList[-1][:-1]=value12[-1]
myList[-1][-1]='NA'
print myList[0]
print myList[-1]
del value12[0]
del file12[0]
del value12[-1]
del file12[-1]
type1=[]
for x in value12:
for xx in file12:
if x[0]==xx[1]:
myList[i][:-1]=x
myList[i][-1]=xx[-2]
type1.append(xx[-2])
print "正在处理第"+str(i)+"行......"
i=i+1
break
type2=list(set(type1))
for x in type2:
print "%s共同有%s个"%(str(x),str(type1.count(x)))
write_list_to_file("Methylation Beta Value9.txt")
data=pd.read_csv("Methylation Beta Value9.txt",sep='\t',index_col=0)
g = data['gene_type'].unique()
for i in list(g):
print str(i)+"包括"+str(list(data[data['gene_type']==i].index))
# data=data.T
# del data['0']
# data.T.to_csv("Methylation Beta Value1.3.txt",sep='\t',header=True,index=True)
##
print "保存完成。。。。。。。"
e_time=datetime.datetime.now()
time=e_time-s_time
print "程序执行完成,共用时"+str(time)
'''
unitary_pseudogene共同有1个
sense_intronic共同有1个
lincRNA共同有10个
unprocessed_pseudogene共同有1个
antisense共同有6个
protein_coding共同有46个
transcribed_unprocessed_pseudogene共同有1个
processed_transcript共同有2个
rRNA共同有1个
miRNA共同有2个
misc_RNA共同有1个
processed_pseudogene共同有5个
protein_coding包括['PTPN22', 'PCDHGB4', 'CCDC36', 'PLAT', 'LDHC', 'CST7', 'PM20D1',
'NYNRIN', 'CXCR3', 'KCNH8', 'CRB1', 'IL32', 'MPZ', 'LELP1', 'MOV10L1', 'C10orf71',
'HRNR', 'LIMCH1', 'OR6C1', 'LRRTM2', 'EVX2', 'S100A12', 'C1orf54', 'KLHL1', 'KIR3DX1',
'RUNX3', 'ZFP42', 'SULT1B1', 'CDH9', 'KLHDC8A', 'TCL1A', 'SDR42E2', 'FAP', 'CST9',
'TAGLN', 'DEGS1', 'COL5A2', 'HIST1H3E', 'C4BPA', 'TRIM77', 'OR7D4', 'PI15', 'PLCZ1', 'DEFB131', 'SCN3B', 'CBLN4']
lincRNA包括['RP11-367G6.3', 'LINC00908', 'LINC00606', 'RP11-56L13.1', 'RP11-495P10.6', 'LINC01114',
'LINC00486', 'LINC00977', 'RP11-25O3.1', 'RP11-744J10.3']
processed_transcript包括['AC016747.3', 'PCED1B-AS1']
processed_pseudogene包括['RP11-863K10.4', 'RP11-157G21.2', 'CASC4P1', 'RP11-69M1.3', 'TOMM20P2']
unitary_pseudogene包括['PRSS30P']
miRNA包括['MIR184', 'MIR16-2']
antisense包括['FAM83A-AS1', 'GRIK1-AS1', 'RP11-553L6.2', 'AC010127.3', 'RP11-1334A24.6', 'PABPC5-AS1']
misc_RNA包括['WT1-AS_7']
sense_intronic包括['RP11-103B5.4']
transcribed_unprocessed_pseudogene包括['PCDHB19P']
rRNA包括['RNA5SP77']
unprocessed_pseudogene包括['STK19B']
''' |
994,154 | 642b2c1a62421694d937981d9fb0d4d422c012d4 | import json
import requests
from .config_connect import config, confluence_url_request, con_request
# separate calls for data as one function
def dpl_title_list():
    """Collect deployment-plan pages from Confluence and their JIRA keys.

    Searches space DEP for 2018 pages titled WIP/Ready, then walks each
    result's child pages and scrapes embedded "jqlQuery=key in (...)" or
    "jqlQuery=key =" macros out of the rendered HTML.

    Returns a list of [page_title, page_url_or_'NA', issue_key, ...] lists.
    NOTE(review): parsing relies on exact macro text offsets (the +17/+15
    below are the lengths of the two search strings) - fragile if the macro
    markup changes; confirm against the Confluence REST body format.
    """
    url_root = confluence_url_request(config())
    url = url_root + "/rest/api/content/search?cql=(type=page and space=DEP and title ~ '2018' and (title ~ WIP or title ~ Ready))"
    auth = con_request(config())
    url2_Part3 = "/child?expand=page.body.view"
    response = requests.get(url, auth=auth)
    deploy_file = json.loads(response.text)
    info = deploy_file['results']
    dpl_plan_list = []
    for data in info:
        # Derive the page's own content URL from its history link.
        team_url_data = data['_expandable']['history']
        team_url_string = team_url_data.replace('/history', '')
        new_url = url_root + team_url_string + url2_Part3
        response = requests.get(new_url, auth=auth)
        deploy_file = json.loads(response.text)
        info2 = deploy_file['page']['results']
        for new_data in info2:
            # Locate either a multi-key "key in (...)" macro or a single
            # "key =" macro in the rendered page body; -1 means not found.
            startLine = new_data['body']['view']['value'].find("jqlQuery=key in (")
            endline = new_data['body']['view']['value'].find(")", startLine + 1) + 1
            startLine2 = new_data['body']['view']['value'].find("jqlQuery=key =")
            endline2 = new_data['body']['view']['value'].find("|", startLine2 + 1) - 1
            sub_list = []
            sub_list.append(new_data['title'])
            sub_list.append("NA")
            if startLine != -1:
                tmp = url_root + new_data['_links']['tinyui']
                sub_list[1] = tmp
                # Skip past "jqlQuery=key in (" (17 chars) to the key list.
                startLine = startLine + 17
                tmp = new_data['body']['view']['value'][startLine:endline]
                tmp = tmp.split(",")
                for i in tmp:
                    # Clean whitespace/tabs and the trailing ")" from each key.
                    i = i.strip()
                    i = i.strip("	")
                    i = i.strip(")")
                    sub_list.append(i)
                dpl_plan_list.append(sub_list)
            elif startLine2 != -1:
                # Skip past "jqlQuery=key =" (15 chars incl. the space) to the key.
                startLine2 = startLine2 + 15
                tmp = url_root + new_data['_links']['tinyui']
                sub_list[1] = tmp
                tmp = new_data['body']['view']['value'][startLine2:endline2]
                sub_list.append(tmp)
                dpl_plan_list.append(sub_list)
    return dpl_plan_list
|
994,155 | c3e27f35079b55818ce3b5463b7aab1d6ef0dc09 | """
Use a stack to check if a string has balanced usage of parenthesis.
Example:
(), ()(), (({[]})) <- Balanced.
((), {{)}], [][]]] <- Not Balanced.
"""
from stack_ds import Stack
def is_match(top, p):
    """Return True if opener *top* and closer *p* form a matching bracket pair."""
    return (top, p) in {("{", "}"), ("[", "]"), ("(", ")")}
def check_paranthesis(p_str):
    """Return True if every bracket in *p_str* is balanced and well-nested.

    Any character that is not an opener ('{', '[', '(') is treated as a
    closing attempt - exactly as in the original Stack-based version - so a
    stray closer or any foreign character makes the string unbalanced.

    A plain list serves as the LIFO stack; the hand-rolled Stack class from
    stack_ds is no longer needed.
    """
    # Maps each closer to the opener it must find on top of the stack.
    match_for = {"}": "{", "]": "[", ")": "("}
    openers = []
    for ch in p_str:
        if ch in "{[(":
            openers.append(ch)
        elif not openers or openers.pop() != match_for.get(ch):
            # Closer with no pending opener, or the wrong opener on top
            # (match_for.get returns None for non-bracket characters,
            # which never equals an opener).
            return False
    # Balanced only if no opener is still waiting for its partner.
    return not openers
# Manual smoke check (expected output: True, then False).
print(check_paranthesis("{{}}"))
print(check_paranthesis("{){(}}]"))
994,156 | 6bfa4fda3eb8dea650a7194bb104e37b9a442441 | import preprocess as prp
from sklearn.model_selection import train_test_split
from sklearn import metrics
from sklearn import svm
import preprocess as prp
from sklearn import metrics
import numpy as np
from sklearn.pipeline import make_pipeline
from sklearn import preprocessing
import matplotlib.pyplot as plt
from datetime import datetime
from joblib import dump, load
from sklearn.model_selection import GridSearchCV
from joblib import dump, load
# --- Poker-hand dataset --------------------------------------------------
# Load/clean the poker data and build train/test splits from the card and
# suit columns, predicting the 'hand' column.
pkr_data = prp.pkr_data()
pkr_data.clean()
pkr_data.target = 'hand'
pkr_data.features = pkr_data.all.columns[pkr_data.all.columns != pkr_data.target]
pkr_data.init_model_data(target =['hand'],features = ['suit1','card1','suit2','card2','suit3','card3','suit4','card4'])
# --- Airbnb dataset ------------------------------------------------------
# n=100 limits the sample size; target is the listing's room type.
ab_data = prp.ab_data(n=100)
# ab_data.encode = ['room_type']
ab_data.clean()
ab_data.target = 'room_type'
ab_data.features = ab_data.all.columns[ab_data.all.columns != ab_data.target]
ab_data.init_model_data(target=ab_data.target,features=ab_data.features)
print("Models Initiated")
# Per-kernel result containers: accuracy scores and ROC-AUC values for
# both the Airbnb (ab) and poker (pkr) datasets.
(ab_train_scores, ab_test_scores, ab_test_auc, ab_train_auc,
 pkr_train_scores, pkr_test_scores, pkr_test_auc, pkr_train_auc) = (
    {}, {}, {}, {}, {}, {}, {}, {})
# Train one SVC per kernel on each dataset, record train/test accuracy and
# ROC-AUC, and persist each fitted model with joblib.
for krn in ['linear', 'poly', 'rbf', 'sigmoid']:
    # probability=True enables predict_proba (needed for the AUC below).
    SVM_ab = svm.SVC(kernel=krn,verbose=False,probability=True)
    SVM_ab.fit(ab_data.x_train,ab_data.y_train)
    ab_data.y_predict=SVM_ab.predict(ab_data.x_test)
    # NOTE(review): y_prob, ab_acc_score and ab_roc_score are computed but
    # never used afterwards in this file chunk.
    y_prob = SVM_ab.predict_proba(ab_data.x_test)
    ab_acc_score = metrics.accuracy_score(ab_data.y_test,ab_data.y_predict)
    ab_roc_score = metrics.roc_auc_score(ab_data.y_test,ab_data.y_predict,multi_class='ovr',average='macro',max_fpr=1.0) #for ab data
    # SVM_ab.fit(ab_data.x_train, ab_data.y_train)
    ab_train_score = SVM_ab.score(ab_data.x_train, ab_data.y_train)
    ab_test_score = SVM_ab.score(ab_data.x_test, ab_data.y_test)
    ab_train_scores[krn] = round(ab_train_score,4)
    ab_test_scores[krn] = round(ab_test_score,4)
    # NOTE(review): this AUC uses hard labels (predict) while the train AUC
    # below uses predict_proba; roc_auc_score with multi_class='ovr' expects
    # per-class probability scores — confirm the test-AUC call is intended.
    ab_test_auc[krn] = metrics.roc_auc_score(ab_data.y_test,SVM_ab.predict(ab_data.x_test),average='macro',multi_class='ovr')
    ab_train_auc[krn] = metrics.roc_auc_score(ab_data.y_train, SVM_ab.predict_proba(ab_data.x_train), average='macro',multi_class='ovr')
    # Poker model: no probability estimates, label-based AUC only.
    SVM_pkr = svm.SVC(kernel=krn,verbose=False)
    SVM_pkr.fit(pkr_data.x_train, pkr_data.y_train)
    pkr_train_score = round(SVM_pkr.score(pkr_data.x_train, pkr_data.y_train),4)
    pkr_test_score = round(SVM_pkr.score(pkr_data.x_test, pkr_data.y_test),4)
    pkr_train_scores[krn] = pkr_train_score
    pkr_test_scores[krn] = pkr_test_score
    pkr_test_auc[krn] = round(metrics.roc_auc_score(pkr_data.y_test, SVM_pkr.predict(pkr_data.x_test), average='macro'),4)
    pkr_train_auc[krn] = round(metrics.roc_auc_score(pkr_data.y_train, SVM_pkr.predict(pkr_data.x_train), average='macro'),4)
    # Persist each fitted model, one file per kernel.
    dump(SVM_ab, 'SVM_ab_'+krn+'.joblib')
    dump(SVM_pkr, 'SVM_pkr_' + krn + '.joblib')
# Grouped bar chart of the per-kernel scores.
# NOTE(review): marker and barWidth are never referenced later in this
# chunk; ab_auc_train / ab_auc_test are computed but not plotted.
marker = 1
# https://python-graph-gallery.com/11-grouped-barplot/
barWidth = 0.1
# set height of bar
ab_train = list(ab_train_scores.values())
ab_test = list(ab_test_scores.values())
ab_auc_train = list(ab_train_auc.values())
ab_auc_test = list(ab_test_auc.values())
pkr_train = list(pkr_train_scores.values())
pkr_test = list(pkr_test_scores.values())
pkr_auc_train = list(pkr_train_auc.values())
pkr_auc_test = list(pkr_test_auc.values())
width = 0.1  # the width of the bars
# One x position per kernel; bars are offset around it in units of width.
x = np.arange(len(ab_train))
fig, ax = plt.subplots()
rects1 = ax.bar(x - 4*width, ab_train, width, label='ab_train',color='blue')
rects2 = ax.bar(x - 2.5*width, ab_test, width, label='ab_test',color='cyan')
rects3 = ax.bar(x, pkr_train, width, label='pkr_train',color='red')
rects4 = ax.bar(x + width, pkr_test, width, label='pkr_test',color='orange')
rects5 = ax.bar(x + 2.5*width, pkr_auc_train, width, label='pkr_auc_train',color='magenta')
rects6 = ax.bar(x + 4*width, pkr_auc_test, width, label='pkr_auc_test',color='black')
# Add some text for labels, title and custom x-axis tick labels, etc.
ax.set_ylabel('Score (%)')
ax.set_title('SVM Kernel Performance')
ax.set_xticks(x)
ax.set_xticklabels(list(ab_train_scores.keys()))
ax.legend(loc='lower left')
def autolabel(rects):
    """Attach a text label above each bar in *rects*, displaying its height."""
    for bar in rects:
        bar_height = bar.get_height()
        ax.annotate(
            '{}'.format(bar_height),
            xy=(bar.get_x() + bar.get_width() / 2, bar_height),
            xytext=(0, 3),  # 3 points vertical offset
            textcoords="offset points",
            ha='center',
            va='bottom',
        )
# Annotate every bar group with its numeric height.
autolabel(rects1)
autolabel(rects2)
autolabel(rects3)
autolabel(rects4)
autolabel(rects5)
autolabel(rects6)
fig.tight_layout()
# The figure is closed without being saved or shown here.
plt.close()
|
994,157 | 7e0ca838a84511d2b6d9d2c2daaea7ad57b4f531 | # -*- coding:utf8 -*-
# ***************************************************************************
# Create on 2015-12-10
#
# @Author:sunlf
#
# ***************************************************************************
import os
import logging
from config.dbconfig import *
# Deployment environment key; selects the Redis config from REDIS_LIST.
ENV = "beta"
# Project root: two levels above this config module.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
LOG_PATH = os.path.join(BASE_DIR, "logs/server.log")
LOG_CONSOLE = True
LOG_LEVEL = logging.INFO
# Ice RPC server configuration file.
ICE_CONFIG_PATH = os.path.join(BASE_DIR, "config/config.server")
INTERFACE_DIR = os.path.join(BASE_DIR, "interface")
# Dotted paths of servant classes loaded by the server.
INSTALLED_SERVANTS = (
    "libs.api.smsI.SmsSendI",
)
REDIS_STORE = REDIS_LIST[ENV]
|
994,158 | ffc4d9d9fa1a77464b386ded277578c14df5eff0 | # -*- coding: utf-8 -*-
from infra_scraper.input.saltstack import SaltStackInput
from infra_scraper.utils import setup_logger
logger = setup_logger('input.reclass')
class SaltReclassInput(SaltStackInput):
    """SaltStack scraper input that augments resources with reclass data."""

    def __init__(self, **kwargs):
        super(SaltReclassInput, self).__init__(**kwargs)
        self.kind = 'salt'

    @staticmethod
    def _service_key(host, service):
        # Composite resource id of the form "<host>|<service>".
        return '{}|{}'.format(host, service)

    def _create_relations(self):
        # Tie each job's results back to the minions that produced them.
        jobs = self.resources.get('salt_job', {})
        for job_id, job in jobs.items():
            for minion_id, _result in job['metadata'].get('Result', {}).items():
                self._scrape_relation(
                    'on_salt_minion',
                    job_id,
                    minion_id)

    def scrape_all_resources(self):
        self.scrape_minions()
        self.scrape_resources()
        self.scrape_jobs()
        # self.scrape_services()

    def scrape_resources(self):
        """Scrape reclass graph data from the salt master into resources."""
        reply = self.api.low([{
            'client': 'local',
            'expr_form': 'compound',
            'tgt': 'I@salt:master',
            'fun': 'reclass.graph_data'
        }]).get('return')[0]
        for minion_name, minion_data in reply.items():
            for svc in minion_data['graph']:
                svc_id = self._service_key(svc['host'], svc['service'])
                self._scrape_resource(svc_id,
                                      svc['service'],
                                      'salt_service', None,
                                      metadata=svc)
                self._scrape_relation(
                    'on_salt_minion',
                    svc_id,
                    svc['host'])
                for dep in svc['relations']:
                    # Create placeholder resources for hosts/services that
                    # were not scraped yet before relating to them.
                    if dep['host'] not in self.resources['salt_minion']:
                        self._scrape_resource(dep['host'],
                                              dep['host'],
                                              'salt_minion', None,
                                              metadata={})
                    dep_id = self._service_key(dep['host'], dep['service'])
                    if dep_id not in self.resources['salt_service']:
                        self._scrape_resource(dep_id,
                                              dep['service'],
                                              'salt_service', None,
                                              metadata={})
                    self._scrape_relation(
                        'on_salt_minion',
                        dep_id,
                        dep['host'])
                    self._scrape_relation(
                        'requires_salt_service',
                        svc_id,
                        dep_id)

    def scrape_jobs(self):
        """Scrape state.apply / state.sls jobs, including their results."""
        reply = self.api.low([{
            'client': 'runner',
            'fun': 'jobs.list_jobs',
            'arg': "search_function='[\"state.apply\", \"state.sls\"]'"
        }]).get('return')[0]
        for job_id, job in reply.items():
            if job['Function'] in ('state.apply', 'state.sls'):
                job['Result'] = self.api.lookup_jid(job_id).get('return')[0]
                self._scrape_resource(job_id,
                                      job['Function'],
                                      'salt_job', None, metadata=job)
|
994,159 | 14a1b7f31044929330f5de54df0c474f3846b48b | from sqlite3 import Connection as SQLite3Connection
from datetime import datetime
from sqlalchemy import event
from sqlalchemy.engine import Engine
from flask import Flask, request, jsonify
from flask_sqlalchemy import SQLAlchemy
import linked_list
import hash_table
import binary_search_tree
import custom_queue
import stack
import random
# App
# Flask application backed by a local SQLite file.
app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///sqlitedb.file"
# Falsy value disables SQLAlchemy's modification-tracking overhead.
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = 0
# Configure sqlite3 to enforce foreign key constraints
@event.listens_for(Engine, "connect")
def _set_sqlite_pragma(dbapi_connection, connection_record):
    """Turn on foreign-key enforcement for every new SQLite connection."""
    if not isinstance(dbapi_connection, SQLite3Connection):
        return
    cursor = dbapi_connection.cursor()
    cursor.execute("PRAGMA foreign_keys=ON;")
    cursor.close()
db = SQLAlchemy(app)
# NOTE(review): evaluated once at import time, so any blog post stamped with
# this value gets the server start time rather than the request time —
# confirm whether that is intended.
now = datetime.now()
# Models
class User(db.Model):
    """A registered user; owns blog posts which are deleted in cascade."""
    __tablename__ = "user"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50))
    email = db.Column(db.String(50))
    address = db.Column(db.String(200))
    phone = db.Column(db.String(50))
    # Deleting a user also deletes all of their blog posts.
    posts = db.relationship("BlogPost", cascade="all, delete")
class BlogPost(db.Model):
    """A blog post belonging to exactly one user (FK enforced by pragma)."""
    __tablename__ = "blog_post"
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(50))
    body = db.Column(db.String(200))
    date = db.Column(db.Date)
    user_id = db.Column(db.Integer, db.ForeignKey("user.id"), nullable=False)
# Routes
# Create the user
@app.route("/user", methods=["POST"])
def create_user():
    """Create a user from the JSON request body and persist it."""
    payload = request.get_json()
    user = User(
        name=payload["name"],
        email=payload["email"],
        address=payload["address"],
        phone=payload["phone"],
    )
    db.session.add(user)
    db.session.commit()
    return jsonify({"message": "User created"}), 200
# Linked List
# Get all users in a descending order
@app.route("/user/descending_id", methods=["GET"])
def get_all_users_descending():
    """Return all users in reverse of query order (LinkedList prepend)."""
    users_ll = linked_list.LinkedList()
    for user in User.query.all():
        record = {
            "id": user.id,
            "name": user.name,
            "email": user.email,
            "address": user.address,
            "phone": user.phone,
        }
        users_ll.insert_beginning(record)
    return jsonify(users_ll.to_list()), 200
# Linked List
# Get all users in a ascending order
@app.route("/user/ascending_id", methods=["GET"])
def get_all_users_ascending():
    """Return all users in query order (LinkedList append preserves it)."""
    users_ll = linked_list.LinkedList()
    for user in User.query.all():
        record = {
            "id": user.id,
            "name": user.name,
            "email": user.email,
            "address": user.address,
            "phone": user.phone,
        }
        users_ll.insert_at_end(record)
    return jsonify(users_ll.to_list()), 200
# Linked List
# Get one user
@app.route("/user/<user_id>", methods=["GET"])
def get_one_user(user_id):
    """Look up one user by id via a linked-list scan.

    NOTE(review): user_id arrives as a string from the route while the
    stored ids are integers — verify LinkedList.get_user_by_id handles
    the comparison.
    """
    users_ll = linked_list.LinkedList()
    for user in User.query.all():
        record = {
            "id": user.id,
            "name": user.name,
            "email": user.email,
            "address": user.address,
            "phone": user.phone,
        }
        users_ll.insert_beginning(record)
    found = users_ll.get_user_by_id(user_id)
    return jsonify(found), 200
# Delete user
@app.route("/user/<user_id>", methods=["DELETE"])
def delete_user(user_id):
    """Delete the user with *user_id* (posts cascade-delete with it).

    Returns a 400 message when no such user exists; previously this
    crashed with a 500 because ``db.session.delete(None)`` raises.
    """
    user = User.query.filter_by(id=user_id).first()
    if not user:
        # Same message style as create_blog_post's missing-user branch.
        return jsonify({"message": "user does not exist!"}), 400
    db.session.delete(user)
    db.session.commit()
    return jsonify({}), 200
# Hash Table
# Create a blog post
@app.route("/blog_post/<user_id>", methods=["POST"])
def create_blog_post(user_id):
    """Create a new blog post for *user_id* and add it to the database.

    Args:
        user_id (str): user id taken from the URL.

    Returns:
        JSON: success message, or a 400 error when the user is missing.
    """
    data = request.get_json()
    # Check if the user is in the database
    user = User.query.filter_by(id=user_id).first()
    if not user:
        return jsonify({"message": "user does not exist!"}), 400
    # Stage the post fields in a HashTable (data-structures demo).
    ht = hash_table.HashTable(10)
    ht.add_key_value("title", data["title"])
    ht.add_key_value("body", data["body"])
    # Fix: use the request time instead of the module-level `now`, which is
    # captured once at import and stamps every post with server start time.
    ht.add_key_value("date", datetime.now())
    ht.add_key_value("user_id", user_id)
    # Add the blog post to the database.
    new_blog_post = BlogPost(
        title=ht.get_value("title"),
        body=ht.get_value("body"),
        date=ht.get_value("date"),
        user_id=ht.get_value("user_id"),
    )
    db.session.add(new_blog_post)
    db.session.commit()
    return jsonify({"message": "new blog post created"}), 200
# Binary Search
# Get blog post id
@app.route("/blog_post/<blog_post_id>", methods=["GET"])
def get_one_blog_post(blog_post_id):
    """Find one blog post by id with a binary search tree.

    Args:
        blog_post_id (str): blog post id taken from the URL.

    Returns:
        JSON: the post record, or a not-found message.
    """
    # Fix: route parameters are strings, but the tree stores integer ids
    # (post.id); convert so the BST comparisons are int-vs-int.
    try:
        post_id = int(blog_post_id)
    except ValueError:
        return jsonify({"message": "post not found"})
    # Query all the blog post data
    blog_posts = BlogPost.query.all()
    # Shuffle so the insertion order yields a reasonably balanced tree.
    random.shuffle(blog_posts)
    bst = binary_search_tree.BinarySearchTree()
    for post in blog_posts:
        bst.insert({
            "id": post.id,
            "title": post.title,
            "body": post.body,
            "user_id": post.user_id,
        })
    post = bst.search(post_id)
    if not post:
        return jsonify({"message": "post not found"})
    return jsonify(post)
# Queue
# Get numeric body of the blog post
@app.route("/blog_post/numeric_body", methods=["GET"])
def get_numeric_post_bodies():
    """Return all posts with each body replaced by its character-code sum.

    Posts pass through a FIFO queue (data-structures demo) before the
    transformation is applied.
    """
    blog_posts = BlogPost.query.all()
    # Enqueue in query order, dequeue in the same order (FIFO).
    q = custom_queue.Queue()
    for post in blog_posts:
        q.enqueue(post)
    transformed = []
    for _ in range(len(blog_posts)):
        node = q.dequeue()
        # Replace the text body with the sum of its character codes.
        node.data.body = sum(ord(ch) for ch in node.data.body)
        transformed.append(
            {
                "id": node.data.id,
                "title": node.data.title,
                "body": node.data.body,
                "user_id": node.data.user_id,
            }
        )
    return jsonify(transformed)
# Stack
# Delete the number of last blog posts from the database
@app.route("/blog_post/delete_last_n_posts/<int:n_posts>", methods=["DELETE"])
def delete_last_n_posts(n_posts):
    """Delete the *n_posts* blog posts with the highest ids.

    Posts are pushed onto a stack in query order so the most recently
    added ones come off the top first.
    """
    blog_posts = BlogPost.query.all()
    s = stack.Stack()
    for post in blog_posts:
        s.push(post)
    # Pop and delete one post per commit.
    for _ in range(n_posts):
        top = s.pop()
        db.session.delete(top.data)
        db.session.commit()
    return jsonify({
        "message" : f"{n_posts} last blog posts was successfully deleted, " +
        f"the current number of blog posts in the database is {s.size}"
    })
if __name__ == "__main__":
    # Development server only; disable debug mode in production.
    app.run(debug=True)
994,160 | f2cc7628e19e30ddae49e2b4774ef0efa943448c | from django.shortcuts import render, redirect
from .forms import ContactForm
from django.core.mail import send_mail
from django.template.loader import render_to_string
from django.conf import settings
# Create your views here.
def index(request):
    """Landing page; also accepts and e-mails the contact form."""
    form = ContactForm()
    if request.method == 'POST':
        form = ContactForm(request.POST)
        if form.is_valid():
            cleaned = form.cleaned_data
            body = render_to_string('frontend/contact_mail.html', {
                'name': cleaned['name'],
                'email': cleaned['email'],
                'message': cleaned['message'],
            })
            send_mail(subject=cleaned['subject'],
                      message=body,
                      from_email=settings.EMAIL_HOST_USER,
                      recipient_list=['amilkarms@outlook.com'],
                      fail_silently=False)
            return redirect('frontend:thankyou')
    # Invalid or non-POST requests fall through and re-render the form.
    return render(request, 'frontend/index.html', {'form': form})
def thank_you(request):
    """Render the post-submission thank-you page."""
    return render(request, 'frontend/thankyou.html')
def portfolio(request):
    """Render the portfolio page."""
    return render(request, 'frontend/portfolio.html')
def contact(request):
    """Contact page; processes the same contact form as index().

    NOTE(review): the form-handling logic duplicates index() — consider
    extracting a shared helper.
    """
    form = ContactForm()
    if request.method == 'POST':
        form = ContactForm(request.POST)
        if form.is_valid():
            cleaned = form.cleaned_data
            body = render_to_string('frontend/contact_mail.html', {
                'name': cleaned['name'],
                'email': cleaned['email'],
                'message': cleaned['message'],
            })
            send_mail(subject=cleaned['subject'],
                      message=body,
                      from_email=settings.EMAIL_HOST_USER,
                      recipient_list=['amilkarms@outlook.com'],
                      fail_silently=False)
            return redirect('frontend:thankyou')
    return render(request, 'frontend/contact.html', {'form': form})
def next(request):
    """Render the 'next' page.

    NOTE(review): shadows the builtin ``next`` within this module; the
    name is kept because the URLconf may reference it.
    """
    return render(request, 'frontend/next.html')
|
994,161 | 0012c12553a71e5f40bc38786abb157eb8a7b1b4 | from ._components import * # noqa: F403, F401
from ._contents import * # noqa: F403, F401
from ._core import * # noqa: F403, F401
from ._deletion import * # noqa: F403, F401
from ._referrer import * # noqa: F403, F401
from ._tokens import * # noqa: F403, F401
|
994,162 | 9e2bca3f51203718d2927d7c109531bee7b7f819 | __author__ = 'pmagdon'
import sys
import os
import time
import argparse
from im import *
# Python 2 script: orthorectify a RapidEye L1B band using RPC correction
# against a DEM, timing the whole run.
start = time.time()
parser = argparse.ArgumentParser(description='Process to orthorectify RapidEye L1B data based on RPC correction.')
parser.add_argument("InFileName",action='store', help="Path to the corresponding MetaData.xml file")
parser.add_argument("OutFileName",action='store', help="L1B output file name")
parser.add_argument("InDEMName", action='store', help="DEM file name")
inputs=parser.parse_args()
# NOTE(review): these normalized paths are never used below — the raw
# inputs.* values are passed to the processing calls instead; OutFileName
# is not used at all in this chunk. Confirm which is intended.
InFileName = os.path.normpath(inputs.InFileName)
OutFileName = os.path.normpath(inputs.OutFileName)
InDemName = os.path.normpath(inputs.InDEMName)
# Import metadata file and create the metadata object.
metadata = MetaData()
MetaDataRapidEye.import_rapideye_metadata(metadata ,inputs.InFileName)
print metadata
band=1
EPSG=32632
# Orthorectify and import the L1B band.
inband=ImportL1B.ConvertFileName(inputs.InFileName,band)
ortho_band=ImportL1B.OrthoRectifyRaster(inband,inputs.InDEMName,EPSG)
band=ImportL1B.ReadRaster(ortho_band)
# Disabled: 6S atmospheric-model parametrization.
#s=SixS()
#print metadata.Acquisitiondate
#s.geometry.solar_z = metadata.SunZenith
#s.geometry.solar_a = metadata.SunElevation
#s.geometry.view_a = metadata.SensorAzimuth
#s.geometry.view_z = metadata.SensorZenith
#s.geometry.month=metadata.Month
#s.geometry.day = metadata.Day
#s.geometry.latitude = metadata.CenterLatidue
#s.geometry.longitude = metadata.CenterLongitude
#print inputs.InAERONETName
#s.atmos_profile = SixSHelpers.Aeronet.import_aeronet_data(s,inputs.InAERONETName, "03/12/2014 11:18:27.38")
#s.atmos_profile = SixSHelpers.Aeronet.import_aeronet_data(s,inputs.InAERONETName)
#s.run()
end = time.time()
elapsed = end - start
print "Time taken: ", elapsed, "seconds."
|
994,163 | 77da71ae2711559b7dab525ccdc05896f7fa6a06 | ''''
from math import sqrt
multisqrt = vectorize(sqrt)
multisqrt(4.0, 25.0, 1.0, 10.0)
[2.0, 5.0, 1.0, 3.1622776601683795]
'''
from math import sqrt
def vectorize(func):
    """Wrap *func* so it maps over all positional arguments.

    The wrapped function accepts any number of positional arguments,
    applies ``func`` to each one, and returns the results as a list.

    >>> vectorize(abs)(-1, 2, -3)
    [1, 2, 3]
    """
    from functools import wraps

    @wraps(func)  # preserve func's name/docstring on the wrapper
    def inner(*args):
        return [func(x) for x in args]
    return inner
if __name__ == "__main__":
    # Demo: element-wise square root over several arguments.
    multisqrt = vectorize(sqrt)
    print (multisqrt(4.0, 25.0, 1.0, 10.0))
|
994,164 | 2130057edb2507505e485508bf53e1b130ce7a89 | import tensorflow as tf
from net import load_data
import matplotlib.pyplot as plt
import matplotlib.image as Image
from net import U_Net
def loss_func(v_xs, v_ys):
    """Mean over the batch of each sample's mean squared error."""
    per_sample_mse = tf.reduce_mean(tf.square(v_xs - v_ys), axis=[1, 2, 3])
    return tf.reduce_mean(per_sample_mse)
def main(_):
    """Train the U-Net for 900 steps on one TFRecord batch and save a
    checkpoint plus a sample prediction image (TF1 graph/session API)."""
    config = tf.ConfigProto()
    # Grow GPU memory on demand instead of grabbing it all up front.
    config.gpu_options.allow_growth = True
    # config.gpu_options.per_process_gpu_memory_fraction = 0.8
    with tf.name_scope('inputs'):
        xs = tf.placeholder(tf.float32, [None, 672, 672, 256], name='x_inputs')
        ys = tf.placeholder(tf.float32, [None, 672, 672, 1], name='y_inputs')
    prediction = U_Net.U_Net(xs)
    with tf.name_scope('loss_function'):
        loss = loss_func(prediction, ys)
        tf.summary.scalar('loss', loss)
    with tf.name_scope('train'):
        train_step = tf.train.AdamOptimizer(1e-3).minimize(loss)
    img, label = load_data.get_data('data/train.tfrecords', batch=1)  # load the training data
    sess = tf.Session(config=config)
    # merged = tf.summary.merge_all()
    # writer = tf.summary.FileWriter("logs/", sess.graph)
    saver = tf.train.Saver()
    sess.run(tf.global_variables_initializer())
    for i in range(900):
        sess.run(train_step, feed_dict={xs: img, ys: label})
        # Report the loss every 10 steps.
        if i % 10 == 0:
            print(sess.run(loss, feed_dict={xs: img, ys: label}))
            # result = sess.run(merged, feed_dict={xs: img, ys: label})
            # writer.add_summary(result, i)
    saver.save(sess, "train_dir/U_net_barch1_1000.ckpt")
    # Save one predicted image (directory name means "training results").
    img11 = sess.run(prediction, feed_dict={xs:img,ys:label})
    img11 = img11.reshape([672, 672])
    Image.imsave('训练结果/23.png', img11, cmap='gray')
    # plt.imshow(img11,cmap='gray')
    # plt.show()
if __name__ == '__main__':
    # tf.app.run parses flags and invokes main(_).
    tf.app.run()
|
994,165 | aeb51a268b99b57c6784395f33bf4e1f71d3c91e | class Solution:
def reverse(self, x):
b = ''
a = str(x)
if int(x) < 0:
a = a[1:]
b = -int(a[::-1])
if int(x) > 0:
b = int(a[::-1])
if -2 ** 31 < b < 2 ** 31 - 1:
return b
else:
return 0
# Smoke test: prints 19345867.
print(Solution().reverse(76854391))
|
994,166 | c5cfe653c1060688c1df993e002289ec0dddd247 | def getAltitudeLatLon(self,lat,lon):
row = int(round((lat - self.__latBias)*self.__delAlevation))
col = int(round((lon - self.__lonBias)*self.__delAlevation))
sz = self.__altidata.shape
if row >= sz[0]:
row = sz[0]-1
if col >= sz[1]:
col = sz[1]-1
# print(self.__altidata[row][col])
return self.__altidata[row][col]
def getAltitude(self,location):
    """Return the terrain altitude at a Location3D-like object, clamping
    out-of-range grid indices to the last cell."""
    grid = self.__altidata
    rows, cols = grid.shape[0], grid.shape[1]
    row = int(round((location.get_Latitude() - self.__latBias)*self.__delAlevation))
    col = int(round((location.get_Longitude() - self.__lonBias)*self.__delAlevation))
    # Clamp to the grid edge when the index runs past it.
    row = min(row, rows - 1)
    col = min(col, cols - 1)
    return grid[row][col]
def turn(self,veicleid,left):
    """Rotate *veicleid*'s stored heading by 15 degrees and send it.

    left == 0 turns counter-clockwise (wrapping below 15 back to 345);
    left == 1 turns clockwise modulo 360.
    NOTE(review): shadowed by the later ``turn(self, veicleid)`` defined
    below, so this version is unreachable via the bound name — confirm
    which definition is intended.
    """
    delang = 15
    if left == 0:
        if self.__currentHeadingAngleUAV[veicleid] < delang:
            self.__currentHeadingAngleUAV[veicleid] = 360 - delang
        else:
            self.__currentHeadingAngleUAV[veicleid] -= delang
    elif left == 1:
        self.__currentHeadingAngleUAV[veicleid] = (self.__currentHeadingAngleUAV[veicleid]+delang)%360
    self.sendHeadingAngleCommand(veicleid,self.__currentHeadingAngleUAV[veicleid])
def turn(self,veicleid):
    """Advance *veicleid*'s 4-phase patrol state machine and send a heading.

    __currentHeadingAngleUAV[veicleid] holds a phase counter (0..3), not an
    angle: each call advances it 0->1->2->3->0 and emits a heading that
    depends on the vehicle id and its __keepoutOption entry. This
    definition shadows the earlier ``turn(self, veicleid, left)`` above.
    """
    headingAngle = 0
    if veicleid == 1:
        if self.__currentHeadingAngleUAV[veicleid] == 0:
            self.__currentHeadingAngleUAV[veicleid] = 1
            # Keepout option flips between east (90) and west (270) legs.
            if self.__keepoutOption[veicleid-1] == 0:
                headingAngle = 90
            else:
                headingAngle = 270
        elif self.__currentHeadingAngleUAV[veicleid] == 1:
            self.__currentHeadingAngleUAV[veicleid] = 2
            headingAngle = 180
        elif self.__currentHeadingAngleUAV[veicleid] == 2:
            self.__currentHeadingAngleUAV[veicleid] = 3
            if self.__keepoutOption[veicleid-1] == 0:
                headingAngle = 90
            else:
                headingAngle = 270
        elif self.__currentHeadingAngleUAV[veicleid] == 3:
            self.__currentHeadingAngleUAV[veicleid] = 0
            headingAngle = 0
    elif veicleid == 2 :
        # Vehicle 2 mirrors vehicle 1 (270/90 swapped).
        if self.__currentHeadingAngleUAV[veicleid] == 0:
            self.__currentHeadingAngleUAV[veicleid] = 1
            if self.__keepoutOption[veicleid-1] == 0:
                headingAngle = 270
            else:
                headingAngle = 90
        elif self.__currentHeadingAngleUAV[veicleid] == 1:
            self.__currentHeadingAngleUAV[veicleid] = 2
            headingAngle = 180
        elif self.__currentHeadingAngleUAV[veicleid] == 2:
            self.__currentHeadingAngleUAV[veicleid] = 3
            if self.__keepoutOption[veicleid-1] == 0:
                headingAngle = 270
            else:
                headingAngle = 90
        elif self.__currentHeadingAngleUAV[veicleid] == 3:
            self.__currentHeadingAngleUAV[veicleid] = 0
            headingAngle = 0
    elif veicleid == 3:
        # Vehicles 3/4 patrol along the other axis (180/0 legs, 270 turn).
        if self.__currentHeadingAngleUAV[veicleid] == 0:
            self.__currentHeadingAngleUAV[veicleid] = 1
            if self.__keepoutOption[veicleid-1] == 0:
                headingAngle = 180
            else:
                headingAngle = 0
        elif self.__currentHeadingAngleUAV[veicleid] == 1:
            self.__currentHeadingAngleUAV[veicleid] = 2
            headingAngle = 270
        elif self.__currentHeadingAngleUAV[veicleid] == 2:
            self.__currentHeadingAngleUAV[veicleid] = 3
            if self.__keepoutOption[veicleid-1] == 0:
                headingAngle = 180
            else:
                headingAngle = 0
        elif self.__currentHeadingAngleUAV[veicleid] == 3:
            self.__currentHeadingAngleUAV[veicleid] = 0
            headingAngle = 90
    elif veicleid == 4:
        if self.__currentHeadingAngleUAV[veicleid] == 0:
            self.__currentHeadingAngleUAV[veicleid] = 1
            if self.__keepoutOption[veicleid-1] == 0:
                headingAngle = 0
            else:
                headingAngle = 180
        elif self.__currentHeadingAngleUAV[veicleid] == 1:
            self.__currentHeadingAngleUAV[veicleid] = 2
            headingAngle = 270
        elif self.__currentHeadingAngleUAV[veicleid] == 2:
            self.__currentHeadingAngleUAV[veicleid] = 3
            if self.__keepoutOption[veicleid-1] == 0:
                headingAngle = 0
            else:
                headingAngle = 180
        elif self.__currentHeadingAngleUAV[veicleid] == 3:
            self.__currentHeadingAngleUAV[veicleid] = 0
            headingAngle = 90
    print('turning',veicleid,' heading', headingAngle)
    self.sendHeadingAngleCommand(veicleid,headingAngle)
def isLeft(self,location,center):
    """Return True when *location* and *center* differ with the same sign
    on both axes (NE or SW quadrant), False otherwise.

    Fixes the original implicit ``None`` return when either offset was
    exactly zero — that boundary now returns an explicit False, which is
    backward compatible for truthiness checks. The unused constant R and
    the commented projection math were removed.
    """
    lat_off = location.get_Latitude() - center.get_Latitude()
    lon_off = location.get_Longitude() - center.get_Longitude()
    if lat_off == 0 or lon_off == 0:
        # On-axis points previously fell through and returned None.
        return False
    # Same sign on both axes -> True; opposite signs -> False.
    return (lat_off > 0) == (lon_off > 0)
def getFourcenter(self,arbitararyCenter): # have to work
    """Find the zone nearest *arbitararyCenter* and compute the midpoints
    between its corner points and the center.

    NOTE(review): appears unfinished (matching the "have to work" tag) —
    destipoint is computed but never used or returned, getClosestPoint is
    called as a free function with self, the loops iterate the
    __zoneCenter dict's keys while getdistance is given the key (or, in
    the inner loop, a value independent of the loop variable). Confirm
    intended behavior before relying on this method.
    """
    zid = 0
    mind = 10e10
    i=0
    # Nearest zone to the arbitrary center.
    for zcenter in self.__zoneCenter:
        d = self.getdistance(arbitararyCenter,zcenter)
        i += 1
        if d < mind:
            mind = d
            zid = i
    [x,y] = self.convertLatLonToxy(arbitararyCenter.get_Latitude(),arbitararyCenter.get_Longitude())
    cornerpoints = self.__zoneboundaryPoints[zid]
    # Midpoints between each zone corner and the given center.
    xc1,yc1 = (cornerpoints[0][0]+x)/2,(cornerpoints[0][1]+y)/2
    xc2,yc2 = (cornerpoints[1][0]+x)/2,(cornerpoints[1][1]+y)/2
    xc3,yc3 = (cornerpoints[2][0]+x)/2,(cornerpoints[2][1]+y)/2
    xc4,yc4 = (cornerpoints[3][0]+x)/2,(cornerpoints[3][1]+y)/2
    points = [[xc1,yc1],[xc2,yc2],[xc3,yc3],[xc4,yc4]]
    # For each searching (not surveying) UAV in the same zone, pick the
    # closest midpoint as a destination.
    for vid in self.__currentLocationofUAV.keys():
        if vid in self.__uavsInSearch and not vid in self.__uavsInSarvey:
            mind = 10e10
            i=0
            zid1 = 0
            for zcenter in self.__zoneCenter:
                d = self.getdistance(arbitararyCenter,self.__currentLocationofUAV[vid])
                i += 1
                if d < mind:
                    mind = d
                    zid1 = i
            if zid1 == zid:
                refPoint = self.convertLatLonToxy(self.__currentLocationofUAV[vid].get_Latitude(),self.__currentLocationofUAV[vid].get_Longitude())
                destipoint = getClosestPoint(self,points,refPoint)
def calculateGridCoordinate(self):
    """Build a lawn-mower waypoint route and a zone center for each of the
    __noOfZone search zones.

    Zones are laid out on a sqrt(noOfZone) x sqrt(noOfZone) grid whose
    centers alternate around the origin (delxp/delxn, delyp/delyn). Each
    zone's route starts and ends at the origin; long legs step by
    __minidel along y and short cross-legs step along x. Altitude per
    waypoint is max(normal search altitude, terrain + safe height).
    Results go to self.__waypoints and self.__zoneCenter keyed by zone id.
    """
    self.__zoneCenter = {}
    self.__allGridLocation = []
    self.__waypoints = {}
    w = self.__searchAreaWidth
    h = self.__searchAreaHeight
    A = (w*h*4)/self.__noOfZone
    a = sqrt(A)
    zone = int(sqrt(self.__noOfZone))
    minidel = self.__minidel
    maxdel = self.__maxdel
    rowseg = int(maxdel/minidel)
    row = int(round(a/maxdel))*2+1
    col = int(round(a/minidel))
    # print(row,col,a)
    delta = w/zone
    delxp = 0
    delxn = 0
    for z1 in range(zone):
        # Alternate zone centers east/west of the origin.
        if z1%2==0:
            delxp += delta
            zx = delxp
        else:
            delxn -= delta
            zx = delxn
        delyp = 0
        delyn = 0
        for z2 in range(zone):
            # Alternate zone centers north/south of the origin.
            if z2%2==0:
                delyp += delta
                zy = delyp
            else:
                delyn -= delta
                zy = delyn
            waypoints = []
            waypointNumber = 1
            x = 0
            y = 0
            # First waypoint: the origin.
            [lat,lon] = self.convertxyToLatLon(x,y)
            waypoint = Waypoint()
            waypoint.set_Latitude(lat)
            waypoint.set_Longitude(lon)
            alti = self.getAltitudeLatLon(lat,lon)
            if alti < self.__normalSearchAltitude:
                waypoint.set_Altitude(self.__normalSearchAltitude)
            else:
                waypoint.set_Altitude(alti + self.__safeHeight)
            waypoint.set_AltitudeType(AltitudeType.MSL)
            waypoint.set_Number(waypointNumber)
            waypoint.set_NextWaypoint(waypointNumber+1)
            waypoint.set_Speed(35)
            waypoint.set_SpeedType(SpeedType.Airspeed)
            waypoint.set_ClimbRate(15)
            waypoint.set_TurnType(TurnType.TurnShort)
            waypoint.set_ContingencyWaypointA(0)
            waypoint.set_ContingencyWaypointB(0)
            waypoints.append(waypoint)
            waypointNumber += 1
            for i in range(row):
                if i%2 == 0:
                    # Long leg along y; direction depends on zone row and
                    # current y so successive legs sweep back and forth.
                    if z2 == 0:
                        if y == 0:
                            option = 1
                        else:
                            option = 2
                    else:
                        if y == 0:
                            option = 2
                        else:
                            option = 1
                    for j in range(col):
                        if option == 1:
                            y += minidel
                        else:
                            y -= minidel
                        [lat,lon] = self.convertxyToLatLon(x,y)
                        waypoint = Waypoint()
                        waypoint.set_Latitude(lat)
                        waypoint.set_Longitude(lon)
                        alti = self.getAltitudeLatLon(lat,lon)
                        if alti < self.__normalSearchAltitude:
                            waypoint.set_Altitude(self.__normalSearchAltitude)
                        else:
                            waypoint.set_Altitude(alti + self.__safeHeight)
                        waypoint.set_AltitudeType(AltitudeType.MSL)
                        waypoint.set_Number(waypointNumber)
                        # else:
                        waypoint.set_NextWaypoint(waypointNumber+1)
                        waypoint.set_Speed(35)
                        waypoint.set_SpeedType(SpeedType.Airspeed)
                        waypoint.set_ClimbRate(15)
                        waypoint.set_TurnType(TurnType.TurnShort)
                        waypoint.set_ContingencyWaypointA(0)
                        waypoint.set_ContingencyWaypointB(0)
                        waypoints.append(waypoint)
                        # After the very last cell, route back to the origin.
                        if i == row-1 and j == col-1:
                            wpoints = self.getBetweenLatLonwithoutVID(x,y,0,0,waypointNumber,1)
                            waypoints = waypoints + wpoints
                        else:
                            waypointNumber += 1
                else:
                    # Short cross-leg along x between sweeps.
                    for j in range(rowseg):
                        if z1 == 0:
                            x += minidel
                        else:
                            x -= minidel
                        [lat,lon] = self.convertxyToLatLon(x,y)
                        waypoint = Waypoint()
                        waypoint.set_Latitude(lat)
                        waypoint.set_Longitude(lon)
                        alti = self.getAltitudeLatLon(lat,lon) + self.__safeHeight
                        if alti < self.__normalSearchAltitude:
                            waypoint.set_Altitude(self.__normalSearchAltitude)
                        else:
                            waypoint.set_Altitude(alti)
                        waypoint.set_AltitudeType(AltitudeType.MSL)
                        waypoint.set_Number(waypointNumber)
                        waypoint.set_NextWaypoint(waypointNumber+1)
                        waypoint.set_Speed(35)
                        waypoint.set_SpeedType(SpeedType.Airspeed)
                        waypoint.set_ClimbRate(15)
                        waypoint.set_TurnType(TurnType.TurnShort)
                        waypoint.set_ContingencyWaypointA(0)
                        waypoint.set_ContingencyWaypointB(0)
                        waypoints.append(waypoint)
                        if i == row-1 and j == rowseg-1:
                            wpoints = self.getBetweenLatLonwithoutVID(x,y,0,0,waypointNumber,1)
                            waypoints += wpoints
                        else:
                            waypointNumber += 1
            # x = w-i*self.__resulationOfGrid - z1*(w)
            # self.__allGridLocation.append([])
            # for j in range(col):
            #     if i%2 == 0:
            #         y = h-j*self.__resulationOfGrid - z2*(h)
            #     else:
            #         y = j*self.__resulationOfGrid - z2*(h)
            #     # print(x,y)
            #     location = Location3D()
            #     [lat,lon] = self.convertxyToLatLon(x,y)
            #     location.set_Latitude(lat)
            #     location.set_Longitude(lon)
            #     location.set_Altitude(1000)
            #     self.__allGridLocation[i].append(location)
            self.__waypoints[z1*zone+z2+1] = waypoints
            # Record this zone's center at a fixed 450 altitude.
            zlocation = Location3D()
            [lat,lon] = self.convertxyToLatLon(zx,zy)
            zlocation.set_Latitude(lat)
            zlocation.set_Longitude(lon)
            zlocation.set_Altitude(450)
            self.__zoneCenter[z1*zone+z2+1] = zlocation
def calculateGridCoordinateAlt(self):
    """Alternative zone layout: four fixed quadrant zones around the origin.

    Each zone's route is a bow-tie of four straight segments (built by
    getBetweenLatLonwithoutVIDAlt) starting from a shared origin waypoint;
    zone ids 1..4 map to the NE, NW, SW and SE quadrants with side length
    a = sqrt(4*w*h/noOfZone). Zone centers (altitude 450) and corner
    points are stored alongside the waypoint lists.
    """
    self.__zoneCenter = {}
    self.__allGridLocation = []
    self.__waypoints = {}
    w = self.__searchAreaWidth
    h = self.__searchAreaHeight
    A = (w*h*4)/self.__noOfZone
    a = sqrt(A)
    waypointNumber = 1
    x = 0
    y = 0
    # Shared first waypoint at the origin.
    [lat,lon] = self.convertxyToLatLon(x,y)
    waypoint = Waypoint()
    waypoint.set_Latitude(lat)
    waypoint.set_Longitude(lon)
    alti = self.getAltitudeLatLon(lat,lon)
    if alti < self.__normalSearchAltitude:
        waypoint.set_Altitude(self.__normalSearchAltitude)
    else:
        waypoint.set_Altitude(alti + self.__safeHeight)
    waypoint.set_AltitudeType(AltitudeType.MSL)
    waypoint.set_Number(waypointNumber)
    waypoint.set_NextWaypoint(waypointNumber+1)
    waypoint.set_Speed(30)
    waypoint.set_SpeedType(SpeedType.Airspeed)
    waypoint.set_ClimbRate(15)
    waypoint.set_TurnType(TurnType.TurnShort)
    waypoint.set_ContingencyWaypointA(0)
    waypoint.set_ContingencyWaypointB(0)
    # zone 1 (NE quadrant)
    waypoints = []
    waypoints.append(waypoint)
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(0,0,a,a,2,0)
    waypoints = waypoints + wpoints
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(a,a,0,a,waypointNumber,0)
    waypoints = waypoints + wpoints
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(0,a,a,0,waypointNumber,0)
    waypoints = waypoints + wpoints
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(a,0,0,0,waypointNumber,1)
    waypoints = waypoints + wpoints
    self.__waypoints[1] = waypoints
    zlocation = Location3D()
    [lat,lon] = self.convertxyToLatLon(a/2,a/2)
    zlocation.set_Latitude(lat)
    zlocation.set_Longitude(lon)
    zlocation.set_Altitude(450)
    self.__zoneCenter[1] = zlocation
    self.__zoneboundaryPoints[1] = [[0,0],[a,a],[0,a],[a,0]]
    # zone 2 (NW quadrant)
    waypoints = []
    waypoints.append(waypoint)
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(0,0,-a,a,2,0)
    waypoints = waypoints + wpoints
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(-a,a,-a,0,waypointNumber,0)
    waypoints = waypoints + wpoints
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(-a,0,0,a,waypointNumber,0)
    waypoints = waypoints + wpoints
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(0,a,0,0,waypointNumber,1)
    waypoints = waypoints + wpoints
    self.__waypoints[2] = waypoints
    zlocation = Location3D()
    [lat,lon] = self.convertxyToLatLon(-a/2,a/2)
    zlocation.set_Latitude(lat)
    zlocation.set_Longitude(lon)
    zlocation.set_Altitude(450)
    self.__zoneCenter[2] = zlocation
    self.__zoneboundaryPoints[2] = [[0,0],[-a,a],[0,a],[-a,0]]
    # zone 3 (SW quadrant)
    waypoints = []
    waypoints.append(waypoint)
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(0,0,-a,-a,2,0)
    waypoints = waypoints + wpoints
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(-a,-a,-a,0,waypointNumber,0)
    waypoints = waypoints + wpoints
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(-a,0,0,-a,waypointNumber,0)
    waypoints = waypoints + wpoints
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(0,-a,0,0,waypointNumber,1)
    waypoints = waypoints + wpoints
    self.__waypoints[3] = waypoints
    zlocation = Location3D()
    [lat,lon] = self.convertxyToLatLon(-a/2,-a/2)
    zlocation.set_Latitude(lat)
    zlocation.set_Longitude(lon)
    zlocation.set_Altitude(450)
    self.__zoneCenter[3] = zlocation
    self.__zoneboundaryPoints[3] = [[0,0],[-a,-a],[0,-a],[-a,0]]
    # zone 4 (SE quadrant)
    waypoints = []
    waypoints.append(waypoint)
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(0,0,a,-a,2,0)
    waypoints = waypoints + wpoints
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(a,-a,a,0,waypointNumber,0)
    waypoints = waypoints + wpoints
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(a,0,0,-a,waypointNumber,0)
    waypoints = waypoints + wpoints
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(0,-a,0,0,waypointNumber,1)
    waypoints = waypoints + wpoints
    self.__waypoints[4] = waypoints
    zlocation = Location3D()
    [lat,lon] = self.convertxyToLatLon(a/2,-a/2)
    zlocation.set_Latitude(lat)
    zlocation.set_Longitude(lon)
    zlocation.set_Altitude(450)
    self.__zoneCenter[4] = zlocation
    self.__zoneboundaryPoints[4] = [[0,0],[a,-a],[0,-a],[a,0]]
def getNextLoiterCenter(self,veicleid,refLocation):
    """Return the next loiter center for a vehicle, shifted from
    *refLocation* toward the unvisited half of the search area.

    Vehicle 2 shifts along +x, vehicle 3 along -y, vehicle 4 along +y;
    any other vehicle id gets *refLocation* back unchanged.  Altitude is
    carried over from the reference location.
    """
    if veicleid not in (2, 3, 4):
        return refLocation
    xr, yr = self.convertLatLonToxy(refLocation.get_Latitude(), refLocation.get_Longitude())
    nx, ny = xr, yr
    if veicleid == 2:
        # Move halfway into the remaining width on the +x side.
        nx = xr + (self.__searchAreaWidth - abs(xr)) / 2
    elif veicleid == 3:
        # Move halfway into the remaining height on the -y side.
        ny = yr - (self.__searchAreaHeight - abs(yr)) / 2
    else:  # veicleid == 4: remaining height on the +y side
        ny = yr + (self.__searchAreaHeight - abs(yr)) / 2
    lat, lon = self.convertxyToLatLon(nx, ny)
    loc = Location3D()
    loc.set_Latitude(lat)
    loc.set_Longitude(lon)
    loc.set_Altitude(refLocation.get_Altitude())
    return loc
def isLeavingFireZone(self,veicleid,location):
    """Return True when *veicleid* has moved far from its last recorded
    fire-zone location (squared planar distance above 250000, i.e. 500 m)."""
    if not self.__lastfireZonelocation or veicleid not in self.__lastfireZonelocation:
        return False
    last = self.__lastfireZonelocation[veicleid]
    lx, ly = self.convertLatLonToxy(last.get_Latitude(), last.get_Longitude())
    cx, cy = self.convertLatLonToxy(location.get_Latitude(), location.get_Longitude())
    # Compare squared distances; avoids a sqrt call.
    return (cx - lx) ** 2 + (cy - ly) ** 2 > 250000
def getBetweenLatLonwithoutVID(self,xs,ys,xe,ye,startwaypointId,connectingwaypointId):
    """Generate evenly spaced intermediate waypoints (~500 m apart) along the
    straight local-x/y segment (xs, ys) -> (xe, ye).

    Waypoints are numbered from ``startwaypointId + 1``; each links to the
    next, and the last links to ``connectingwaypointId``.  Altitude is the
    normal search altitude, or terrain + safety margin when terrain is higher.

    Fixes: the original computed a slope ``dely/delx`` (ZeroDivisionError for
    vertical segments) and divided the step by a count that is zero for
    segments shorter than ~750 m.  Interpolation is now parametric, and short
    segments simply yield no intermediate waypoints.

    NOTE(review): the original offset every interpolated y by +300 m; this is
    preserved but its intent is unverified from here.
    """
    delx = xe - xs
    dely = ye - ys
    d = delx ** 2 + dely ** 2
    n = int(round(sqrt(d) / 500))  # number of ~500 m sub-segments
    waypoints = []
    waypointNumber = startwaypointId + 1
    for k in range(1, n):
        # Parametric interpolation: no division by delx, works for any slope.
        x = xs + delx * k / n
        y = ys + dely * k / n + 300  # legacy +300 m offset, kept as-is
        [lat, lon] = self.convertxyToLatLon(x, y)
        waypoint = Waypoint()
        waypoint.set_Latitude(lat)
        waypoint.set_Longitude(lon)
        alti = self.getAltitudeLatLon(lat, lon)
        if alti < self.__normalSearchAltitude:
            waypoint.set_Altitude(self.__normalSearchAltitude)
        else:
            waypoint.set_Altitude(alti + self.__safeHeight)
        waypoint.set_AltitudeType(AltitudeType.MSL)
        waypoint.set_Number(waypointNumber)
        # Last intermediate waypoint links back to the caller-supplied id.
        if k == n - 1:
            waypoint.set_NextWaypoint(connectingwaypointId)
        else:
            waypoint.set_NextWaypoint(waypointNumber + 1)
        waypoint.set_Speed(35)
        waypoint.set_SpeedType(SpeedType.Airspeed)
        waypoint.set_ClimbRate(15)
        waypoint.set_TurnType(TurnType.TurnShort)
        waypoint.set_ContingencyWaypointA(0)
        waypoint.set_ContingencyWaypointB(0)
        waypoints.append(waypoint)
        waypointNumber += 1
    return waypoints
elif isinstance(lmcpObject, EntityState):
self.__currentEntityState[lmcpObject.ID] = lmcpObject
print(self.__currentEntityState)
print('entity state')
self.__timethreshold = [1.3,2.5]
####experimental
self.__NewSTGt1 = [0,0,0,0,0,0,0,0]
self.__NewSTGt2 = [0,0,0,0,0,0,0,0]
self.__NewSTGt3 = [0,0,0,0,0,0,0,0]
self.__NewSTGt4 = [0,0,0,0,0,0,0,0]
self.__NewSTGoption = [0,0,0,0,0,0,0,0]
self.__NewSTGdt = [2,2,2,2,2,2,2,2]
self.__NewSTGleft = [0,0,0,0,0,0,0,0]
self.__NewSTGforward = [0,0,0,0,0,0,0,0]
self.__NewSTGdtaction = [5,5,5,5,5,5,5,5]
self.__NewSTGheadingangle = [0,0,0,0,0,0,0,0]
self.__NewSTGfirst = [0,0,0,0,0,0,0,0]
self.__NewSTGrefHeading = [0,0,0,0,0,0,0,0]
def surveyNewStrategy(self,airVehicleState):
    # State machine driving one UAV's in-survey camera sweep and heading
    # choice.  Per-vehicle state lives in the parallel self.__NewSTG* arrays,
    # indexed by vehicle id - 1:
    #   __NewSTGfirst    - 0 until the initial 90-degree turn is commanded
    #   __NewSTGoption   - sweep phase: 0 look forward, 1 look to the side,
    #                      2 decide the next heading
    #   __NewSTGforward/__NewSTGleft - sensor verdict per look direction;
    #                      0 = pending, 2 = clear; presumably set to 1 on
    #                      detection elsewhere - TODO confirm
    #   __NewSTGt1..t3   - phase timestamps; __NewSTGdt dwell per phase;
    #                      __NewSTGdtaction re-send period for the heading
    vid = airVehicleState.ID
    currentlocation = airVehicleState.Location
    leftstg = self.__veicleStrategiId[vid-1]  # 0 = sweep left, 1 = sweep right
    print("survey new strategy", vid)
    if leftstg == 0:#left
        if self.__NewSTGfirst[vid-1] == 0:
            # First call: remember the reference heading and turn 90 deg right.
            self.__NewSTGfirst[vid-1] = 1
            self.__NewSTGrefHeading[vid-1] = airVehicleState.Heading
            self.__NewSTGheadingangle[vid-1] = (airVehicleState.Heading + 90)%360
            self.sendHeadingAngleCommandwithcurrentlocation(vid,self.__NewSTGheadingangle[vid-1],currentlocation)
        else:
            # Smallest angular difference between commanded and actual heading.
            headingangleError = abs(self.__NewSTGheadingangle[vid-1]-airVehicleState.Heading)
            headingangleError = headingangleError if headingangleError < 180 else (360-headingangleError)
            print("survey new strategy left", vid, headingangleError)
            if headingangleError < 15:
                # Only advance the sweep once the turn has (nearly) completed.
                if self.__NewSTGoption[vid-1] == 0 and (self.__simulationTimeSeconds-self.__NewSTGt3[vid-1]) > self.__NewSTGdt[vid-1]:
                    print('#look forward',vid)
                    self.__NewSTGforward[vid-1] = 0
                    self.sendGimbleCommand(vid,0,-45)
                    self.__NewSTGoption[vid-1] = 1
                    self.__NewSTGt1[vid-1] = self.__simulationTimeSeconds
                elif self.__NewSTGoption[vid-1] == 1 and (self.__simulationTimeSeconds-self.__NewSTGt1[vid-1]) > self.__NewSTGdt[vid-1]:
                    print('#look left',vid)
                    # Still 0 after the forward dwell -> nothing seen; mark clear.
                    if self.__NewSTGforward[vid-1] == 0:
                        self.__NewSTGforward[vid-1] = 2
                    self.__NewSTGleft[vid-1] = 0
                    self.sendGimbleCommand(vid,-90,-45)
                    self.__NewSTGoption[vid-1] = 2
                    self.__NewSTGt2[vid-1] = self.__simulationTimeSeconds
                elif self.__NewSTGoption[vid-1] == 2 and (self.__simulationTimeSeconds-self.__NewSTGt2[vid-1]) > self.__NewSTGdt[vid-1]:
                    print('# take action',vid)
                    # Still 0 after the side dwell -> nothing seen; mark clear.
                    if self.__NewSTGleft[vid-1] == 0:
                        self.__NewSTGleft[vid-1] = 2
                    self.sendGimbleCommand(vid,0,-45)
                    self.__NewSTGoption[vid-1] = 0
                    self.__NewSTGt3[vid-1] = self.__simulationTimeSeconds
                    # Pick the next heading from the (left, forward) verdicts
                    # (1 = detection, 2 = clear).
                    if self.__NewSTGleft[vid-1] == 1 and self.__NewSTGforward[vid-1] == 1:
                        print('#take right',vid)
                        self.__NewSTGheadingangle[vid-1] = (airVehicleState.Heading + 90)%360
                        self.__NewSTGdtaction[vid-1] = 7
                        self.sendHeadingAngleCommandwithcurrentlocation(vid,self.__NewSTGheadingangle[vid-1],currentlocation)
                    elif self.__NewSTGleft[vid-1] == 1 and self.__NewSTGforward[vid-1] == 2:
                        print('#go straignt',vid)
                        # NOTE(review): no heading command is sent in this branch,
                        # unlike its siblings - relies on the periodic re-send
                        # below; confirm this is intentional.
                        self.__NewSTGheadingangle[vid-1] = airVehicleState.Heading
                        self.__NewSTGdtaction[vid-1] = 7
                    elif self.__NewSTGleft[vid-1] == 2 and self.__NewSTGforward[vid-1] == 1:
                        print('#take hard right',vid)
                        self.__NewSTGheadingangle[vid-1] = (airVehicleState.Heading + 135)%360
                        self.__NewSTGdtaction[vid-1] = 7
                        self.sendHeadingAngleCommandwithcurrentlocation(vid,self.__NewSTGheadingangle[vid-1],currentlocation)
                    elif self.__NewSTGleft[vid-1] == 2 and self.__NewSTGforward[vid-1] == 2:
                        print('# take hard left',vid)
                        # Normalize a possibly negative heading into [0, 360).
                        self.__NewSTGheadingangle[vid-1] = (airVehicleState.Heading - 30)
                        self.__NewSTGheadingangle[vid-1] = self.__NewSTGheadingangle[vid-1] if self.__NewSTGheadingangle[vid-1] >= 0 else (self.__NewSTGheadingangle[vid-1] + 360)
                        self.__NewSTGdtaction[vid-1] = 10
                        self.sendHeadingAngleCommandwithcurrentlocation(vid,self.__NewSTGheadingangle[vid-1],currentlocation)
                # Periodically re-send the current heading (also refreshes the
                # terrain-following altitude inside the send helper).
                if (self.__simulationTimeSeconds - self.__NewSTGt3[vid-1]) > self.__NewSTGdtaction[vid-1]:
                    self.__NewSTGt3[vid-1] = self.__simulationTimeSeconds
                    self.sendHeadingAngleCommandwithcurrentlocation(vid,self.__NewSTGheadingangle[vid-1],currentlocation)
    elif leftstg == 1: #right
        # Mirror image of the left sweep: initial 90 deg left turn, camera
        # looks to the right, and the turn directions are reversed.
        if self.__NewSTGfirst[vid-1] == 0:
            self.__NewSTGfirst[vid-1] = 1
            self.__NewSTGrefHeading[vid-1] = airVehicleState.Heading
            self.__NewSTGheadingangle[vid-1] = (airVehicleState.Heading - 90)
            self.__NewSTGheadingangle[vid-1] = self.__NewSTGheadingangle[vid-1] if self.__NewSTGheadingangle[vid-1] >= 0 else (self.__NewSTGheadingangle[vid-1] + 360)
            self.sendHeadingAngleCommandwithcurrentlocation(vid,self.__NewSTGheadingangle[vid-1],currentlocation)
        else:
            headingangleError = abs(self.__NewSTGheadingangle[vid-1]-airVehicleState.Heading)
            headingangleError = headingangleError if headingangleError < 180 else (360-headingangleError)
            print("survey new strategy right", vid, headingangleError)
            if headingangleError < 15:
                if self.__NewSTGoption[vid-1] == 0 and (self.__simulationTimeSeconds-self.__NewSTGt3[vid-1]) > self.__NewSTGdt[vid-1]:
                    print('#look forward',vid)
                    self.__NewSTGforward[vid-1] = 0
                    self.sendGimbleCommand(vid,0,-45)
                    self.__NewSTGoption[vid-1] = 1
                    self.__NewSTGt1[vid-1] = self.__simulationTimeSeconds
                elif self.__NewSTGoption[vid-1] == 1 and (self.__simulationTimeSeconds-self.__NewSTGt1[vid-1]) > self.__NewSTGdt[vid-1]:
                    print('#look right',vid)
                    if self.__NewSTGforward[vid-1] == 0:
                        self.__NewSTGforward[vid-1] = 2
                    self.__NewSTGleft[vid-1] = 0
                    self.sendGimbleCommand(vid,90,-45)
                    self.__NewSTGoption[vid-1] = 2
                    self.__NewSTGt2[vid-1] = self.__simulationTimeSeconds
                elif self.__NewSTGoption[vid-1] == 2 and (self.__simulationTimeSeconds-self.__NewSTGt2[vid-1]) > self.__NewSTGdt[vid-1]:
                    print('# take action',vid)
                    if self.__NewSTGleft[vid-1] == 0:
                        self.__NewSTGleft[vid-1] = 2
                    self.sendGimbleCommand(vid,0,-45)
                    self.__NewSTGoption[vid-1] = 0
                    self.__NewSTGt3[vid-1] = self.__simulationTimeSeconds
                    if self.__NewSTGleft[vid-1] == 1 and self.__NewSTGforward[vid-1] == 1:
                        print('#take left',vid)
                        self.__NewSTGheadingangle[vid-1] = (airVehicleState.Heading - 90)
                        self.__NewSTGheadingangle[vid-1] = self.__NewSTGheadingangle[vid-1] if self.__NewSTGheadingangle[vid-1] >= 0 else (self.__NewSTGheadingangle[vid-1] + 360)
                        self.__NewSTGdtaction[vid-1] = 7
                        self.sendHeadingAngleCommandwithcurrentlocation(vid,self.__NewSTGheadingangle[vid-1],currentlocation)
                    elif self.__NewSTGleft[vid-1] == 1 and self.__NewSTGforward[vid-1] == 2:
                        print('#go straignt',vid)
                        self.__NewSTGheadingangle[vid-1] = airVehicleState.Heading
                        self.__NewSTGdtaction[vid-1] = 7
                    elif self.__NewSTGleft[vid-1] == 2 and self.__NewSTGforward[vid-1] == 1:
                        print('#take hard left',vid)
                        self.__NewSTGheadingangle[vid-1] = (airVehicleState.Heading - 135)
                        self.__NewSTGheadingangle[vid-1] = self.__NewSTGheadingangle[vid-1] if self.__NewSTGheadingangle[vid-1] >= 0 else (self.__NewSTGheadingangle[vid-1] + 360)
                        self.__NewSTGdtaction[vid-1] = 7
                        self.sendHeadingAngleCommandwithcurrentlocation(vid,self.__NewSTGheadingangle[vid-1],currentlocation)
                    elif self.__NewSTGleft[vid-1] == 2 and self.__NewSTGforward[vid-1] == 2:
                        print('# take hard right',vid)
                        self.__NewSTGheadingangle[vid-1] = (airVehicleState.Heading + 30)%360
                        self.__NewSTGdtaction[vid-1] = 10
                        self.sendHeadingAngleCommandwithcurrentlocation(vid,self.__NewSTGheadingangle[vid-1],currentlocation)
                if (self.__simulationTimeSeconds - self.__NewSTGt3[vid-1]) > self.__NewSTGdtaction[vid-1]:
                    self.__NewSTGt3[vid-1] = self.__simulationTimeSeconds
                    self.sendHeadingAngleCommandwithcurrentlocation(vid,self.__NewSTGheadingangle[vid-1],currentlocation)
# for vid in range(1,noOfUAVs):
# vState[vid],sensorStateFront[vid] = smpleHazardDetector.getAirVeicleState(vid)
# smpleHazardDetector.sendGimbleCommand(vid,-sensorRotationAngle,-45)
# time.sleep(dt)
# for vid in range(1,noOfUAVs):
# vState[vid],sensorStateLeft[vid] = smpleHazardDetector.getAirVeicleState(vid)
# smpleHazardDetector.sendGimbleCommand(vid,sensorRotationAngle,-45)
# time.sleep(dt)
# for vid in range(1,noOfUAVs):
# vState[vid],sensorStateRight[vid] = smpleHazardDetector.getAirVeicleState(vid)
# smpleHazardDetector.sendGimbleCommand(vid,0,-45)
# for vid in range(1,noOfUAVs):
# if smpleHazardDetector.getSurveyStatus(vid):
# if not sensorStateLeft[vid] and not sensorStateFront[vid] and not sensorStateRight[vid]:
# print('hard left')
# headingangle = (vState[vid].Heading - 90)
# headingangle = headingangle if headingangle>0 else headingangle+360
# smpleHazardDetector.sendHeadingAngleCommandwithcurrentlocation(vid,headingangle,vState[vid].Location)
# elif (sensorStateLeft[vid] and sensorStateFront[vid] and not sensorStateRight[vid]):
# print('soft right')
# headingangle = (vState[vid].Heading + 45) % 360
# smpleHazardDetector.sendHeadingAngleCommandwithcurrentlocation(vid,headingangle,vState[vid].Location)
# elif (not sensorStateLeft[vid] and sensorStateFront[vid] and not sensorStateRight[vid]):
# print('right')
# headingangle = (vState[vid].Heading + 90) % 360
# smpleHazardDetector.sendHeadingAngleCommandwithcurrentlocation(vid,headingangle,vState[vid].Location)
# elif sensorStateLeft[vid] and sensorStateFront[vid] and sensorStateRight[vid]:
# print('hard right')
# headingangle = (vState[vid].Heading + 135) % 360
# smpleHazardDetector.sendHeadingAngleCommandwithcurrentlocation(vid,headingangle,vState[vid].Location)
# else:
# print('straight')
# time.sleep(5*dt)
# if sensorState:
# print(vid,sensorState)
# smpleHazardDetector.sendGimbleCommand(vid,-45,-45)
# time.sleep(0.5)
# vState,sensorState = smpleHazardDetector.getAirVeicleState(vid)
# if sensorState:
# smpleHazardDetector.sendGimbleCommand(vid,0,-45)
# headingangle = (vState.Heading + 45) % 360
# smpleHazardDetector.sendHeadingAngleCommandwithcurrentlocation(vid,headingangle,vState.Location)
# time.sleep(0.1)
def surveyStrategy(self,veicleid,airVehicleState,veicleLocation): # need works
    # Legacy survey steering (superseded by surveyNewStrategy; see
    # checkSurveyStatus).  While __changedirection[veicleid] is set, the UAV
    # zig-zags with alternating +/-145 degree turns gated at 20 s; otherwise
    # it makes a 45 degree "hard" turn every 60 s, and the current desired
    # heading is re-sent at least every 5 s.
    if self.__veicleStrategiId[veicleid-1] == 0:
        # # Right direction strategy
        if veicleid in self.__changedirection and self.__changedirection[veicleid] and (self.__simulationTimeSeconds - self.__lasttime[veicleid-1])>20:
            self.__desiredheading[veicleid-1] = airVehicleState.Heading
            if self.__counter[veicleid-1] == 0:
                # First half of the zig-zag: turn 145 deg right.
                self.__lasttime[veicleid-1] = self.__simulationTimeSeconds
                self.__lasttime1[veicleid-1] = self.__simulationTimeSeconds
                self.__desiredheading[veicleid-1] = (airVehicleState.Heading + 145)%360
                self.__counter[veicleid-1] = 1
            elif self.__counter[veicleid-1] == 1:
                # Second half: turn 145 deg left and clear the change flag.
                self.__desiredheading[veicleid-1] = (airVehicleState.Heading - 145)
                self.__desiredheading[veicleid-1] = self.__desiredheading[veicleid-1] if self.__desiredheading[veicleid-1] > 0 else self.__desiredheading[veicleid-1]+360
                self.__changedirection[veicleid] = False
                self.__counter[veicleid-1] = 0
            self.sendHeadingAngleCommandwithcurrentlocation(veicleid,self.__desiredheading[veicleid-1],veicleLocation)
        elif veicleid in self.__changedirection and not self.__changedirection[veicleid] and (self.__simulationTimeSeconds - self.__lasttime1[veicleid-1])>60:
            print('hard turn Left vid', veicleid)
            self.__lasttime1[veicleid-1] = self.__simulationTimeSeconds
            self.__desiredheading[veicleid-1] = (airVehicleState.Heading - 45)
            self.__desiredheading[veicleid-1] = self.__desiredheading[veicleid-1] if self.__desiredheading[veicleid-1] > 0 else self.__desiredheading[veicleid-1]+360
            self.sendHeadingAngleCommandwithcurrentlocation(veicleid,self.__desiredheading[veicleid-1],veicleLocation)
        elif (self.__simulationTimeSeconds - self.__LastheadingAngleSendtime[veicleid-1]) > 5 and veicleid in self.__changedirection:
            # Keep-alive: re-send the last desired heading every 5 s.
            self.__LastheadingAngleSendtime[veicleid-1] = self.__simulationTimeSeconds
            self.sendHeadingAngleCommandwithcurrentlocation(veicleid,self.__desiredheading[veicleid-1],veicleLocation)
    elif self.__veicleStrategiId[veicleid-1] == 1:
        # #Left direction strategy: mirror image of the branch above.
        if veicleid in self.__changedirection and self.__changedirection[veicleid] and (self.__simulationTimeSeconds - self.__lasttime[veicleid-1])>20:
            self.__desiredheading[veicleid-1] = airVehicleState.Heading
            if self.__counter[veicleid-1] == 0:
                self.__lasttime[veicleid-1] = self.__simulationTimeSeconds
                self.__lasttime1[veicleid-1] = self.__simulationTimeSeconds
                self.__desiredheading[veicleid-1] = (airVehicleState.Heading - 145)
                self.__desiredheading[veicleid-1] = self.__desiredheading[veicleid-1] if self.__desiredheading[veicleid-1] > 0 else self.__desiredheading[veicleid-1]+360
                self.__counter[veicleid-1] = 1
            elif self.__counter[veicleid-1] == 1:
                self.__desiredheading[veicleid-1] = (airVehicleState.Heading + 145)%360
                self.__changedirection[veicleid] = False
                self.__counter[veicleid-1] = 0
            self.sendHeadingAngleCommandwithcurrentlocation(veicleid,self.__desiredheading[veicleid-1],veicleLocation)
        elif veicleid in self.__changedirection and not self.__changedirection[veicleid] and (self.__simulationTimeSeconds - self.__lasttime1[veicleid-1])>60:
            print('hard turn Right vid', veicleid)
            self.__lasttime1[veicleid-1] = self.__simulationTimeSeconds
            self.__desiredheading[veicleid-1] = (airVehicleState.Heading + 45)%360
            self.sendHeadingAngleCommandwithcurrentlocation(veicleid,self.__desiredheading[veicleid-1],veicleLocation)
        elif (self.__simulationTimeSeconds - self.__LastheadingAngleSendtime[veicleid-1]) > 5 and veicleid in self.__changedirection:
            self.__LastheadingAngleSendtime[veicleid-1] = self.__simulationTimeSeconds
            self.sendHeadingAngleCommandwithcurrentlocation(veicleid,self.__desiredheading[veicleid-1],veicleLocation)
def checkSurveyStatus(self,vstate):
    # Dispatch survey work for one vehicle state update:
    #  - vehicles already surveying continue their sweep strategy;
    #  - otherwise, route idle vehicles toward known fire-zone hint locations,
    #    capping each zone at 3 vehicles and alternating the sweep direction
    #    (odd vehicle count in a zone -> strategy 1).
    airVehicleState = vstate
    veicleid = vstate.ID
    veicleLocation = vstate.Location
    if veicleid in self.__uavsInSarvey and self.__uavsInSarvey[veicleid]:
        #self.surveyStrategy(veicleid,airVehicleState,veicleLocation)
        self.surveyNewStrategy(airVehicleState)
    elif not veicleid in self.__uavisHeadingtoSurveylocation:
        zid = self.getZoneIdLocation(veicleLocation)
        if self.__firezoneHintLocation:
            if zid in self.__firezoneHintLocation:
                # This vehicle is already in a zone with a fire hint: send it
                # straight to the hint location with the camera pitched down.
                self.sendGimbleCommand(veicleid,0,-45)
                self.sendWaypoint(veicleid,veicleLocation,self.__firezoneHintLocation[zid])
                self.__uavisHeadingtoSurveylocation[veicleid] = True
                self.__UAVSurvayingZoneId[veicleid] = zid
                self.__NoofUAVinZone[zid-1] += 1
                if self.__NoofUAVinZone[zid-1]%2 != 0:
                    self.__veicleStrategiId[veicleid-1] = 1
            else:
                # Otherwise assign up to 3 of the closest unassigned vehicles
                # to each hinted zone.
                for zid in self.__firezoneHintLocation.keys():
                    for i in range(3):
                        if list(self.__UAVSurvayingZoneId.values()).count(zid) >= 3:
                            continue
                        # Find the nearest vehicle that is neither searching,
                        # en route, nor already surveying.
                        minLoc = Location3D()
                        mind = 10e20
                        minvid = 10e20
                        for vid in self.__currentLocationofUAV.keys():
                            if not vid in self.__uavsInSearch and not vid in self.__uavisHeadingtoSurveylocation and not vid in self.__uavsInSarvey:
                                loc = self.__firezoneHintLocation[zid]
                                d = self.getdistance(loc,self.__currentLocationofUAV[vid])
                                if d < mind:
                                    mind = d
                                    minLoc = loc
                                    minvid = vid
                        # mind unchanged means no candidate vehicle was found.
                        if mind != 10e20:
                            self.sendGimbleCommand(minvid,0,-45)
                            self.sendWaypoint(minvid,self.__currentLocationofUAV[minvid],minLoc)
                            self.__uavisHeadingtoSurveylocation[minvid] = True
                            self.__UAVSurvayingZoneId[minvid] = zid
                            self.__NoofUAVinZone[zid-1] += 1
                            if self.__NoofUAVinZone[zid-1]%2 != 0:
                                self.__veicleStrategiId[minvid-1] = 1
def sendLoiterCommand(self, veicleid, location, radius, speed):
    """Command *veicleid* to fly a clockwise circular loiter of the given
    radius (m) and airspeed around *location*, for a very long duration."""
    cmd = VehicleActionCommand()
    cmd.set_VehicleID(veicleid)
    cmd.set_Status(CommandStatusType.Pending)
    cmd.set_CommandID(1)
    # Build the loiter action itself.
    loiter = LoiterAction()
    loiter.set_LoiterType(LoiterType.Circular)
    loiter.set_Radius(radius)
    loiter.set_Axis(0)
    loiter.set_Length(0)
    loiter.set_Direction(LoiterDirection.Clockwise)
    loiter.set_Duration(100000)
    loiter.set_Airspeed(speed)
    loiter.set_Location(location)
    # Attach the action and hand the command to AMASE.
    cmd.get_VehicleActionList().append(loiter)
    self.__client.sendLMCPObject(cmd)
def gotoWaypoint(self,veicleid):
    """Redirect *veicleid* to its precomputed closest waypoint
    (``self.__closesrWaypointID``) on the active mission.

    Fix: removed an unused ``FlightDirectorAction`` local (and its stray
    semicolon) that was instantiated but never attached to the command.
    """
    vehicleActionCommand = VehicleActionCommand()
    vehicleActionCommand.set_VehicleID(veicleid)
    vehicleActionCommand.set_Status(CommandStatusType.Pending)
    vehicleActionCommand.set_CommandID(1)
    gotoWaypointAction = GoToWaypointAction()
    gotoWaypointAction.set_WaypointNumber(self.__closesrWaypointID[veicleid])
    vehicleActionCommand.get_VehicleActionList().append(gotoWaypointAction)
    self.__client.sendLMCPObject(vehicleActionCommand)
def sendHeadingAngleCommand(self,veicleid,headingangle):
    """Steer *veicleid* onto *headingangle* at its maximum airspeed,
    holding a fixed 100 m MSL altitude with zero climb rate."""
    cmd = VehicleActionCommand()
    cmd.set_VehicleID(veicleid)
    cmd.set_Status(CommandStatusType.Pending)
    cmd.set_CommandID(1)
    director = FlightDirectorAction()
    director.set_Speed(self.__maxSpeedofUAV[veicleid])
    director.set_SpeedType(SpeedType.Airspeed)
    director.set_Heading(headingangle)
    director.set_Altitude(100)
    director.set_AltitudeType(AltitudeType.MSL)
    director.set_ClimbRate(0)
    cmd.get_VehicleActionList().append(director)
    self.__client.sendLMCPObject(cmd)
def sendHeadingAngleCommandwithcurrentlocation(self,veicleid,headingangle,currentlocation):
    """Steer *veicleid* onto *headingangle* at maximum airspeed, at an MSL
    altitude of terrain height at *currentlocation* plus the survey safety
    margin, with zero climb rate."""
    cmd = VehicleActionCommand()
    cmd.set_VehicleID(veicleid)
    cmd.set_Status(CommandStatusType.Pending)
    cmd.set_CommandID(1)
    director = FlightDirectorAction()
    director.set_Speed(self.__maxSpeedofUAV[veicleid])
    director.set_SpeedType(SpeedType.Airspeed)
    director.set_Heading(headingangle)
    # Terrain-following: local ground altitude plus the survey safety margin.
    director.set_Altitude(self.getAltitude(currentlocation)+self.__surveySafeHeight)
    director.set_AltitudeType(AltitudeType.MSL)
    director.set_ClimbRate(0)
    cmd.get_VehicleActionList().append(director)
    self.__client.sendLMCPObject(cmd)
def sendGimbleCommand(self, veicleid, azimuthangle, elevationangle):
    """Point *veicleid*'s gimbal (payload 1) to the given azimuth/elevation
    in degrees; azimuth is clamped to the vehicle's configured limits.

    Fix: replaced a three-way branch whose first arm was the no-op
    ``azimuthangle = azimuthangle`` with an explicit min/max clamp.
    """
    vehicleActionCommand = VehicleActionCommand()
    vehicleActionCommand.set_VehicleID(veicleid)
    vehicleActionCommand.set_Status(CommandStatusType.Pending)
    vehicleActionCommand.set_CommandID(1)
    # Clamp azimuth into [min, max] for this vehicle.
    azimuthangle = max(self.__minAzimuthangle[veicleid],
                       min(azimuthangle, self.__maxAzimuthangle[veicleid]))
    gimbleAngleAction = GimbalAngleAction()
    gimbleAngleAction.set_PayloadID(1)
    gimbleAngleAction.set_Azimuth(azimuthangle)
    gimbleAngleAction.set_Elevation(elevationangle)
    gimbleAngleAction.set_Rotation(0)
    vehicleActionCommand.get_VehicleActionList().append(gimbleAngleAction)
    self.__client.sendLMCPObject(vehicleActionCommand)
def MergeFireZones(self,Zones):
    """Merge fire zones whose fire-point centroids are closer together than
    the corresponding zone centers are.

    ``Zones`` maps zone id -> list of [x, y] fire points.  Returns a new dict
    keyed by the absorbing zone ids with the merged point lists (zones that
    merge with nothing are omitted, matching the original behaviour).

    Fixes: ``Zones.keys()`` is a non-subscriptable view in Python 3, so it is
    materialised into a list before indexing; and the merged point list is no
    longer re-initialised (losing earlier partners) when a zone absorbs more
    than one neighbour.
    """
    Zids = list(Zones.keys())
    ZoneCenters = []
    for zcenter in self.__zoneCenter:
        ZoneCenters.append(self.convertLatLonToxy(zcenter.get_Latitude(), zcenter.get_Longitude()))
    Nz = len(Zids)
    NewZones = {}
    Checked = []
    for i in range(Nz):
        if i not in Checked:
            CurrentZoneFirePoints = Zones[Zids[i]]
            CurrentZoneCenter = ZoneCenters[i]
            CurrentFireCenter = np.mean(CurrentZoneFirePoints,axis=0)
            for j in range(Nz):
                if j != i:
                    NextZoneCenter = ZoneCenters[j][:]
                    NextZoneFirePoints = Zones[Zids[j]]
                    NextFireCenter = np.mean(NextZoneFirePoints,axis=0)
                    # Squared distance between the two fire centroids ...
                    D = (CurrentFireCenter[0]-NextFireCenter[0])**2 + (CurrentFireCenter[1]-NextFireCenter[1])**2
                    # ... versus squared distance between the zone centers.
                    ThresholdD = (CurrentZoneCenter[0]-NextZoneCenter[0])**2 + (CurrentZoneCenter[1]-NextZoneCenter[1])**2
                    if D < ThresholdD:
                        # Initialise once, then keep accumulating partners.
                        NewZones.setdefault(Zids[i], list(CurrentZoneFirePoints))
                        NewZones[Zids[i]] += Zones[Zids[j]]
                        Checked.append(j)
    return NewZones
def smokeZoneMission(self,vstate): # needs to be debugged
    # Build a four-leg waypoint lap inside the vehicle's current zone, using
    # midpoints derived from the vehicle position and heading, then return
    # (waypoints, id of the waypoint to start from).  Author-flagged as
    # needing debugging; the geometry below is taken at face value.
    [x,y] = self.convertLatLonToxy(vstate.Location.get_Latitude(),vstate.Location.get_Longitude())
    zid = self.getZoneIdLocation(vstate.Location)
    zboundary = self.__zoneboundaryPoints[zid]
    print(zboundary)
    # Fourth boundary point of the zone, used as the far corner (a, b).
    a = zboundary[3][0]
    b = zboundary[3][1]
    # Small heading offset so tan(theta) is never exactly 0 (avoids the
    # divisions by tn below blowing up at cardinal headings).
    eps = 10e-5
    theta = radians(vstate.Heading+eps) ## needs to be checked latter
    tn = tan(theta)
    # Intersections of the heading line with the axes, then midpoints
    # (xs1..xs4, ys1..ys4) that become the lap's corner waypoints.
    # NOTE(review): mixing of +/- with tn suggests the formulas assume a
    # particular quadrant - confirm against the zone layout.
    x1 = x + y*tn
    y1 = y + x/tn
    xs1 = (x+x1)/2
    ys1 = y/2
    xs2 = x/2
    ys2 = (y+y1)/2
    x2 = xs2+(b-ys2)/tn
    y2 = ys1+(a-xs1)*tn
    xs3 = (xs2+x2)/2
    ys3 = (ys2+b)/2
    xs4 = (xs1+a)/2
    ys4 = (ys1+y2)/2
    # First waypoint of the lap, at (xs1, ys1).
    waypointNumber = 1
    x = xs1
    y = ys1
    [lat,lon] = self.convertxyToLatLon(x,y)
    waypoint = Waypoint()
    waypoint.set_Latitude(lat)
    waypoint.set_Longitude(lon)
    alti = self.getAltitudeLatLon(lat,lon)
    # Normal search altitude, or terrain plus safety margin if higher.
    if alti < self.__normalSearchAltitude:
        waypoint.set_Altitude(self.__normalSearchAltitude)
    else:
        waypoint.set_Altitude(alti + self.__safeHeight)
    waypoint.set_AltitudeType(AltitudeType.MSL)
    waypoint.set_Number(waypointNumber)
    waypoint.set_NextWaypoint(waypointNumber+1)
    waypoint.set_Speed(30)
    waypoint.set_SpeedType(SpeedType.Airspeed)
    waypoint.set_ClimbRate(15)
    waypoint.set_TurnType(TurnType.TurnShort)
    waypoint.set_ContingencyWaypointA(0)
    waypoint.set_ContingencyWaypointB(0)
    waypoints = []
    waypoints.append(waypoint)
    # Fill in the four legs of the lap; the last leg closes back on waypoint 1.
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(xs1,ys1,xs4,ys4,2,0)
    waypoints = waypoints + wpoints
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(xs4,ys4,xs2,ys2,waypointNumber,0)
    waypoints = waypoints + wpoints
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(xs2,ys2,xs3,ys3,waypointNumber,0)
    waypoints = waypoints + wpoints
    wpoints,waypointNumber = self.getBetweenLatLonwithoutVIDAlt(xs3,ys3,xs1,ys1,waypointNumber,1)
    waypoints = waypoints + wpoints
    print('smokexonemission p2')
    # Find the lap waypoint closest to the vehicle's current position.
    minima = 1e10
    minLocid = 1
    minLoc = Location3D()
    for i in range(len(waypoints)):
        loc = waypoints[i]
        d = self.getdistance(loc,vstate.Location)
        if d < minima:
            minima = d
            minLoc = loc
            minLocid = i+1
    # Close enough (< 1 km): start the lap directly at the nearest waypoint.
    if sqrt(minima) < 1000:
        return waypoints,minLocid
    # Otherwise prepend a connecting leg from the vehicle to the lap.
    waypoints1,minLocid = self.getBetweenLatLon(vstate.Location,minLoc,waypointNumber,minima,minLocid,vstate.ID)
    waypoints = waypoints1 + waypoints
    return waypoints,minLocid
def recursiveSearch(self,i,j,w,h):
    """Recursively scan the coverage grid in w-by-h steps, tracking in
    ``self.__boundaryparameterFornewMission`` the quadrant around a visited
    cell with the largest unexplored fraction, and stopping the whole
    recursion once a quadrant is more than 60% unexplored.

    ``self.__dgrid`` marks visited cells; ``self.__globalMap`` holds the
    coverage values averaged below.

    Fix: the quadrant selection compared ``p`` (a maximum of the complements
    ``1 - p_k``) against ``p_k`` itself, so it almost always fell through to
    the last quadrant; it now compares against the complements.
    """
    gridw = self.__globalMap.shape[0]
    gridh = self.__globalMap.shape[1]
    # Out of bounds or already visited: nothing to do.
    if i >= gridw or j >= gridh or i < 0 or j < 0:
        return
    if self.__dgrid[i,j] == 1:
        return
    self.__dgrid[i,j] = 1
    # Clipped corners of the four w-by-h quadrants around (i, j).
    di1 = min(gridw,i+w)
    di2 = max(0,i-w)
    dj1 = min(gridh,j+h)
    dj2 = max(0,j-h)
    area1 = self.__globalMap[i:di1,j:dj1]
    area2 = self.__globalMap[i:di1,dj2:j]
    area3 = self.__globalMap[di2:i,j:dj1]
    area4 = self.__globalMap[di2:i,dj2:j]
    # Mean coverage of each non-empty quadrant (p_k in [0, 1]).
    p1,p2,p3,p4 = 0,0,0,0
    if area1.size != 0:
        p1 = sum(sum(area1))/float(area1.shape[0]*area1.shape[1])
    if area2.size != 0:
        p2 = sum(sum(area2))/float(area2.shape[0]*area2.shape[1])
    if area3.size != 0:
        p3 = sum(sum(area3))/float(area3.shape[0]*area3.shape[1])
    if area4.size != 0:
        p4 = sum(sum(area4))/float(area4.shape[0]*area4.shape[1])
    # Largest *unexplored* fraction among the quadrants.
    p = max(1-p1,1-p2,1-p3,1-p4)
    if p > self.__glopbalmaxforpercentarea:
        self.__glopbalmaxforpercentarea = p
        # Record the boundary of the quadrant that achieved the maximum.
        if p == 1-p1:
            self.__boundaryparameterFornewMission = [i,di1,j,dj1]
        elif p == 1-p2:
            self.__boundaryparameterFornewMission = [i,di1,dj2,j]
        elif p == 1-p3:
            self.__boundaryparameterFornewMission = [di2,i,j,dj1]
        else:
            self.__boundaryparameterFornewMission = [di2,i,dj2,j]
    if p > 0.6:
        self.__stopRecursion = True
        return
    # Recurse into the four neighbouring grid positions unless stopped.
    if not self.__stopRecursion:
        self.recursiveSearch(i+w,j,w,h)
    if not self.__stopRecursion:
        self.recursiveSearch(i-w,j,w,h)
    if not self.__stopRecursion:
        self.recursiveSearch(i,j+h,w,h)
    if not self.__stopRecursion:
        self.recursiveSearch(i,j-h,w,h)
# print(x1,y1,x2,y2,x3,y3,x4,y4)
# self.__estimatedHazardZone = Polygon()
# [lat,lon]=self.convertxyToLatLon(x1,y1)
# locationpoint = Location3D()
# locationpoint.set_Latitude(lat)
# locationpoint.set_Longitude(lon)
# self.__estimatedHazardZone.get_BoundaryPoints().append(locationpoint)
# [lat,lon]=self.convertxyToLatLon(x2,y2)
# locationpoint = Location3D()
# locationpoint.set_Latitude(lat)
# locationpoint.set_Longitude(lon)
# self.__estimatedHazardZone.get_BoundaryPoints().append(locationpoint)
# [lat,lon]=self.convertxyToLatLon(x3,y3)
# locationpoint = Location3D()
# locationpoint.set_Latitude(lat)
# locationpoint.set_Longitude(lon)
# self.__estimatedHazardZone.get_BoundaryPoints().append(locationpoint)
# [lat,lon]=self.convertxyToLatLon(x4,y4)
# locationpoint = Location3D()
# locationpoint.set_Latitude(lat)
# locationpoint.set_Longitude(lon)
# self.__estimatedHazardZone.get_BoundaryPoints().append(locationpoint)
# self.sendEstimateReport(vstate.ID)
# self.__estimatedHazardZone = Polygon()
#print('before secondary merge')
#print(data.keys())
#loop = len(data.keys())-1
#for lp in range(loop):
# if newKey != 0:
# data = dict(newData)
# print(data)
# print("**********************")
# print('data after secondary merge done phase 1')
# while True:
# keys = list(data.keys())
# flag = True
# for i in range(len(keys)):
# for j in range(i+1,len(keys)):
# if self.checksubset(data[keys[j]],data[keys[i]]):
# flag = False
# del data[keys[j]]
# print('***************deleting data*******************')
# break
# if not flag:
# break
# if flag:
# break
#print('data after secondary merge')
#print(data.keys()) |
994,167 | 58006def2b6ff38b88c8f697f23140fdbc2a0c6a | import sqlite3
import projectcommands as project
import taskcommands as task
def initial_options():
    """Print the top-level menu and return the user's choice as a string.

    Python 2 module (print statements, raw_input).
    """
    print " 1. Select a Project"
    print " 2. Add a Project"
    print " 3. View Task Due this Week"
    print " 4. View Task Due this Month"
    print " 5. View Critical Task"
    print " 6. View All Incomplete Task"
    print " 7. Quit "
    ans = raw_input("Please Choose an option ")
    return ans
def project_options():
    """Print the per-project menu and return the user's choice as a string."""
    print " 1. View Incomplete Tasks"
    print " 2. View Complete Tasks"
    print " 3. Add task to project"
    print " 4. Update task"
    print " 5. View All tasks"
    print " 6. Quit "
    ans = raw_input("Please Choose an option ")
    return ans
def task_options():
    """Print the per-task menu and return the user's choice as a string."""
    print " 1. View Notes"
    print " 2. Update Status"
    print " 3. Update Notes"
    print " 4. Update Due Date"
    print " 5. Quit "
    ans = raw_input("Please Choose an option ")
    return ans
def main():
    """Run the nested menu loops: top level -> project -> task.

    NOTE(review): top-level options 3, 4 and 5 are printed by
    initial_options() but have no handler here; only "1", "2", "6" and "7"
    do anything.  Likewise project options 1, 2 and 4 are unhandled.
    """
    while True:
        choice = initial_options()
        if choice == "1":
            # Select a project, then loop on the project menu.
            project_title = project.view_all_projects()
            while True:
                choice2 = project_options()
                if choice2 == "6":
                    break
                if choice2 == "5":
                    # View all tasks, select one, then loop on the task menu.
                    task_title = project.view_tasks_in_project(project_title)
                    while True:
                        choice3 = task_options()
                        if choice3 == "5":
                            break
                        if choice3 == "2":
                            task.update_status(task_title, project_title)
                        if choice3 == "3":
                            task.update_notes(task_title, project_title)
                        if choice3 == "4":
                            task.update_due_date(task_title,project_title)
                        if choice3 == "1":
                            task.view_notes(task_title, project_title)
                if choice2 == "3":
                    task.add_task(project_title)
        if choice == "2":
            project.add_project()
        if choice == "6":
            task.show_all_pending_task()
        if choice == "7":
            break
if __name__ == "__main__":
main()
|
994,168 | e469f0f0e14ae5b9e16ce6ee22cc0f6ff130d2fb | """
UnitAPI
Edit the variables with your API Token and API Server. You can create an API token in Unit Dashboard. # noqa: E501
The version of the OpenAPI document: dfec2411-22b5-4a3b-8d43-fcf778bd42a5
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from openapi_client.api_client import ApiClient, Endpoint as _Endpoint
from openapi_client.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from openapi_client.model.model200 import Model200
class ApplicationsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __create_business(
self,
content_type,
body,
**kwargs
):
"""Create Business # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_business(content_type, body, async_req=True)
>>> result = thread.get()
Args:
content_type (str):
body (str):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Model200
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['content_type'] = \
content_type
kwargs['body'] = \
body
return self.call_with_http_info(**kwargs)
self.create_business = _Endpoint(
settings={
'response_type': (Model200,),
'auth': [
'bearer'
],
'endpoint_path': '/applications',
'operation_id': 'create_business',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'content_type',
'body',
],
'required': [
'content_type',
'body',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'content_type':
(str,),
'body':
(str,),
},
'attribute_map': {
'content_type': 'Content-Type',
},
'location_map': {
'content_type': 'header',
'body': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/vnd.api+json; charset=utf-8'
],
'content_type': [
'application/vnd.api+json'
]
},
api_client=api_client,
callable=__create_business
)
# NOTE(review): auto-generated OpenAPI endpoint (GET /applications/8) — only
# comments are added here; regenerate rather than hand-edit logic.
def __get(
    self,
    **kwargs
):
    """Get  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get(async_req=True)
    >>> result = thread.get()
    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        None
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in framework defaults for any request option the caller omitted.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor: parameterless GET with bearer auth.
self.get = _Endpoint(
    settings={
        'response_type': None,
        'auth': [
            'bearer'
        ],
        'endpoint_path': '/applications/8',
        'operation_id': 'get',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
        ],
        'required': [],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
        },
        'attribute_map': {
        },
        'location_map': {
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [],
        'content_type': [],
    },
    api_client=api_client,
    callable=__get
)
# NOTE(review): auto-generated OpenAPI endpoint (GET /applications, paginated)
# — only comments are added here; regenerate rather than hand-edit logic.
def __getall(
    self,
    page_limit,
    page_offset,
    include,
    **kwargs
):
    """Get all  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.getall(page_limit, page_offset, include, async_req=True)
    >>> result = thread.get()
    Args:
        page_limit (float):
        page_offset (float):
        include (str):
    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        None
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in framework defaults for any request option the caller omitted.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    kwargs['page_limit'] = \
        page_limit
    kwargs['page_offset'] = \
        page_offset
    kwargs['include'] = \
        include
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor: pagination params map to JSON:API-style query keys
# (page[limit] / page[offset]) plus an `include` expansion parameter.
self.getall = _Endpoint(
    settings={
        'response_type': None,
        'auth': [
            'bearer'
        ],
        'endpoint_path': '/applications',
        'operation_id': 'getall',
        'http_method': 'GET',
        'servers': None,
    },
    params_map={
        'all': [
            'page_limit',
            'page_offset',
            'include',
        ],
        'required': [
            'page_limit',
            'page_offset',
            'include',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'page_limit':
                (float,),
            'page_offset':
                (float,),
            'include':
                (str,),
        },
        'attribute_map': {
            'page_limit': 'page[limit]',
            'page_offset': 'page[offset]',
            'include': 'include',
        },
        'location_map': {
            'page_limit': 'query',
            'page_offset': 'query',
            'include': 'query',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [],
        'content_type': [],
    },
    api_client=api_client,
    callable=__getall
)
# NOTE(review): auto-generated OpenAPI endpoint (PUT multipart file upload) —
# only comments are added here; regenerate rather than hand-edit logic.
def __upload_document(
    self,
    file,
    **kwargs
):
    """Upload Document  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.upload_document(file, async_req=True)
    >>> result = thread.get()
    Args:
        file (file_type):
    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        None
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in framework defaults for any request option the caller omitted.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    kwargs['file'] = \
        file
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor: file travels as a multipart/form-data form field.
self.upload_document = _Endpoint(
    settings={
        'response_type': None,
        'auth': [
            'bearer'
        ],
        'endpoint_path': '/applications/5/documents/2',
        'operation_id': 'upload_document',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        'all': [
            'file',
        ],
        'required': [
            'file',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'file':
                (file_type,),
        },
        'attribute_map': {
            'file': 'file',
        },
        'location_map': {
            'file': 'form',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [],
        'content_type': [
            'multipart/form-data'
        ]
    },
    api_client=api_client,
    callable=__upload_document
)
# NOTE(review): auto-generated OpenAPI endpoint (PUT multipart upload of a
# document's back side) — only comments are added here.
def __upload_document_back(
    self,
    file,
    **kwargs
):
    """Upload Document Back  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.upload_document_back(file, async_req=True)
    >>> result = thread.get()
    Args:
        file (file_type):
    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously
    Returns:
        None
        If the method is called asynchronously, returns the request
        thread.
    """
    # Fill in framework defaults for any request option the caller omitted.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    kwargs['_host_index'] = kwargs.get('_host_index')
    kwargs['file'] = \
        file
    return self.call_with_http_info(**kwargs)

# Endpoint descriptor: same shape as upload_document, but targets the
# /back sub-resource of the document.
self.upload_document_back = _Endpoint(
    settings={
        'response_type': None,
        'auth': [
            'bearer'
        ],
        'endpoint_path': '/applications/5/documents/2/back',
        'operation_id': 'upload_document_back',
        'http_method': 'PUT',
        'servers': None,
    },
    params_map={
        'all': [
            'file',
        ],
        'required': [
            'file',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'file':
                (file_type,),
        },
        'attribute_map': {
            'file': 'file',
        },
        'location_map': {
            'file': 'form',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [],
        'content_type': [
            'multipart/form-data'
        ]
    },
    api_client=api_client,
    callable=__upload_document_back
)
|
994,169 | 063a7e686045567b529ecf0aafc9a30c57b46f10 | import pytest
import json
from name.models import Location, Name
from django.urls import reverse
# Give all tests access to the database.
pytestmark = pytest.mark.django_db
def test_entry_detail_returns_ok(client, name_fixture):
response = client.get(
reverse('name:detail', args=[name_fixture.name_id]))
assert 200 == response.status_code
def test_entry_detail_returns_gone(client, name_fixture):
name_fixture.record_status = 1
name_fixture.save()
response = client.get(
reverse('name:detail', args=[name_fixture.name_id]))
assert 410 == response.status_code
def test_entry_detail_returns_not_found(client, name_fixture):
name_fixture.record_status = 2
name_fixture.save()
response = client.get(
reverse('name:detail', args=[name_fixture.name_id]))
assert 404 == response.status_code
def test_merged_entry_detail_returns_ok(client, merged_name_fixtures):
merged, primary = merged_name_fixtures
response = client.get(
reverse('name:detail', args=[primary.name_id]))
assert 200 == response.status_code
def test_merged_entry_detail_returns_redirect(client, merged_name_fixtures):
merged, primary = merged_name_fixtures
response = client.get(
reverse('name:detail', args=[merged.name_id]))
assert 302 == response.status_code
def test_label_returns_redirected(client, name_fixture):
response = client.get(
reverse('name:label', args=[name_fixture.name]))
assert 302 == response.status_code
def test_label_returns_not_found_without_query(client):
    """Test label returns Not Found without a query.
    This will fail if label does not return with a status
    code of 404.
    """
    response = client.get(
        reverse('name:label', args=['']))
    assert 404 == response.status_code
    # NOTE(review): the sibling test (non-matching query) asserts this message
    # IS present; here the empty-arg 404 is expected to arrive WITHOUT it.
    # Confirm the `not in` is intentional and not a copy/paste inversion.
    assert b'No matching term found' not in response.content
def test_label_returns_not_found_with_query(client):
"""Test label returns Not Found with a query that does not
match anything.
This will fail if label does not return with a status
code of 404.
"""
response = client.get(
reverse('name:label', args=['&&&&&&&&']))
assert 404 == response.status_code
assert b'No matching term found' in response.content
def test_label_returns_not_found_multiple_names_found(client):
    # Two Names with the identical name string make the label lookup
    # ambiguous; the view is expected to answer 404 and say why.
    name_name = "John Smith"
    Name.objects.create(name=name_name, name_type=Name.PERSONAL)
    Name.objects.create(name=name_name, name_type=Name.PERSONAL)
    response = client.get(
        reverse('name:label', args=[name_name]))
    assert 404 == response.status_code
    assert b'There are multiple Name objects with' in response.content
def test_export(client, name_fixture):
    # The export view streams tab-separated lines:
    # "<name_type>\t<name>\t<absolute url>\r\n" — one per Name.
    response = client.get(reverse('name:export'))
    expected = 'personal\ttest person\thttp://testserver{}\r\n'.format(
        name_fixture.get_absolute_url())
    assert expected == response.content.decode()
    assert 200 == response.status_code
def test_export_json(client, name_fixture):
response = client.get(reverse('name:export_json'))
expected = [
{
'i': name_fixture.name_id,
'l': 'http://testserver{}'.format(name_fixture.get_absolute_url()),
't': name_fixture.name,
},
]
assert expected == json.loads(response.content.decode())
assert 200 == response.status_code
def test_opensearch(client):
response = client.get(reverse('name:opensearch'))
assert 200 == response.status_code
def test_feed(client):
response = client.get(reverse('name:feed'))
assert 200 == response.status_code
def test_about(client):
response = client.get(reverse('name:about'))
assert 200 == response.status_code
def test_stats_returns_ok(client, name_fixture):
response = client.get(reverse('name:stats'))
assert 200 == response.status_code
def test_stats_returns_ok_with_no_names(client):
response = client.get(reverse('name:stats'))
assert 200 == response.status_code
def test_get_names_returns_ok(client):
response = client.get(reverse('name:search-json'))
assert 200 == response.status_code
def test_get_names_xhr_returns_ok(client):
response = client.get(
reverse('name:search-json'),
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert 200 == response.status_code
def test_get_names_xhr_returns_only_10_names(client, twenty_name_fixtures):
response = client.get(
reverse('name:search-json'),
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
names = json.loads(response.content)
assert len(names) == 10
def test_get_names_has_cors_headers(client):
response = client.get(reverse('name:search-json'))
assert response.has_header('Access-Control-Allow-Origin')
assert response.has_header('Access-Control-Allow-Headers')
assert response['Access-Control-Allow-Origin'] == '*'
def test_landing(client):
response = client.get(reverse('name:landing'))
assert 200 == response.status_code
def test_landing_does_not_count_inactive_names(client, status_name_fixtures):
    """Checks that only active names are counted.
    The status_name_fixture supplies this test with 3 Name objects of each
    Name type, where only one of each Name type is active.
    """
    response = client.get(reverse('name:landing'))
    # `counts` is taken from the last context dict rendered by the landing view.
    context = response.context[-1]['counts']
    assert 1 == context['personal']
    assert 1 == context['building']
    assert 1 == context['event']
    assert 1 == context['organization']
    assert 1 == context['software']
    assert 5 == context['total']
def test_name_json_returns_ok(client, name_fixture):
response = client.get(reverse('name:detail-json', args=[name_fixture]))
assert 200 == response.status_code
def test_name_json_handles_unknown_name(client):
response = client.get(reverse('name:detail-json', args=[0]))
assert 404 == response.status_code
def test_map_returns_ok(client):
response = client.get(reverse('name:map'))
assert 200 == response.status_code
def test_map_json_xhr_returns_payload(client):
name = Name.objects.create(name="Test", name_type=Name.PERSONAL)
Location.objects.create(
status=0,
latitude=33.210241,
longitude=-97.148857,
belong_to_name=name)
response = client.get(
reverse('name:locations-json'),
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert name.name_id in response.content.decode()
assert json.loads(response.content)
def test_map_json_xhr_returns_with_no_locations(client, twenty_name_fixtures):
response = client.get(
reverse('name:locations-json'),
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
assert response.context is None
assert response.status_code == 200
def test_map_json_returns_not_found(client, twenty_name_fixtures):
response = client.get(reverse('name:locations-json'))
assert response.status_code == 404
def test_stats_json_returns_ok_with_no_names(client):
response = client.get(reverse('name:stats-json'))
assert response.status_code == 200
def test_stats_json_returns_ok(client, search_fixtures):
response = client.get(reverse('name:stats-json'))
assert response.status_code == 200
def test_stats_json_json_data(client, search_fixtures):
response = client.get(reverse('name:stats-json'))
data = json.loads(response.content)
assert data.get('created', False)
assert data.get('modified', False)
assert data.get('name_type_totals', False)
|
994,170 | 99fa356d934f6b19bf8958fbcdaffa0128db931d | import os
import config
import base64
import requests
import mysql.connector
import telebot
import time
import json
from PIL import Image
from mysql.connector import errorcode
DB_HOST = os.environ.get('DB_HOST')
DB_NAME = os.environ.get('DB_NAME')
DB_USER = os.environ.get('DB_USER')
DB_PASSWORD = os.environ.get('DB_PASSWORD')
YandexAPI = os.environ.get('YandexAPI')
Telebot_token = os.environ.get('Telebot_token')
def get_adress_by_coordinates(coordinates):
    """Reverse-geocode a "lon,lat" string via the Yandex Geocoder API.

    Args:
        coordinates (str): "longitude,latitude" as accepted by the API's
            `geocode` parameter.

    Returns:
        str: one-line address (house precision), or "" on any failure
        (network error, unexpected payload shape, empty result set).
    """
    params = {
        "apikey":YandexAPI,
        "format":"json",
        "lang":"ru_RU",
        "kind":"house",
        "geocode": coordinates
    }
    try:
        url = 'https://geocode-maps.yandex.ru/1.x/'
        response = requests.get(url, params=params)
        json_data = response.json()
        # First featureMember is the best match; AddressLine is the
        # pre-formatted single-line address.
        address_str = json_data["response"]["GeoObjectCollection"]["featureMember"][0]["GeoObject"]["metaDataProperty"]["GeocoderMetaData"]["AddressDetails"]["Country"]["AddressLine"]
        return address_str
    except Exception as e:
        # Broad catch is deliberate: geocoding is best-effort and callers
        # treat "" as "no address available".
        print("Some troubles with YandexAPI.")
        template = "An exception of type {} occured. Arguments:\n{!r}"
        mes = template.format(type(e).__name__, e.args)
        print(mes)
        return ""
# --- Module-level setup: MySQL connection, Telegram bot, in-flight drafts ---
try:
    mydb = mysql.connector.connect(
        host=DB_HOST,
        user=DB_USER,
        password=DB_PASSWORD,
        port="3306",
        database=DB_NAME
    )
except mysql.connector.Error as err:
    if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
        print("Something is wrong with your user name or password")
    elif err.errno == errorcode.ER_BAD_DB_ERROR:
        print("Database does not exist")
    else:
        print(err)
except Exception as e:
    template = "An exception of type {} occured. Arguments:\n{!r}"
    mes = template.format(type(e).__name__, e.args)
    print(mes)
# NOTE(review): if the connect above failed, `mydb` is unbound and the next
# line raises NameError — the error branches only print, they don't abort.
mycursor = mydb.cursor()
token = Telebot_token
bot = telebot.TeleBot(token, threaded=False)
# user_id -> Place draft being filled in by the multi-step /add conversation.
data_place = {}
class Place:
    """Mutable draft of a saved place, filled in step-by-step during /add."""

    def __init__(self, user_id):
        # Only the owner is known up-front; the conversation handlers set
        # the remaining fields one message at a time.
        self.user_id = user_id
        self.name = ''
        self.lon = self.lat = None
        self.photo = None
@bot.message_handler(commands=['add'])
def add_location(message):
    """Start the /add conversation: ask for a place name, then chain steps."""
    msg = bot.send_message(message.chat.id, "Enter place name, please.")
    bot.register_next_step_handler(msg, process_placename_step)
def process_placename_step(message):
    """Step 2 of /add: record the name, ensure the user row exists, ask for location."""
    try:
        user_id = message.from_user.id
        place = Place(user_id)
        data_place[user_id] = place
        place.name = message.text
        query = ("SELECT user_id FROM user "
                 "WHERE user_id LIKE %s ")
        value = (place.user_id,)
        mycursor.execute(query, value)
        user = mycursor.fetchone()
        if not user:
            # First contact with this user: create their row.
            sql = ("INSERT INTO ""user (user_id) ""VALUES (%s) ")
            val = (place.user_id,)
            mycursor.execute(sql, val)
            mydb.commit()
        msg = bot.send_message(message.chat.id, 'Send your location, please.')
        bot.register_next_step_handler(msg, process_location_step)
    except Exception as e:
        # Report the exception type back to the user; full details go to stdout.
        template = "An exception of type {} occured. Arguments:\n{!r}"
        mes = template.format(type(e).__name__, e.args)
        print(mes)
        bot.reply_to(message, type(e).__name__)
def process_location_step(message):
    """Step 3 of /add: store the shared location, then ask for a photo."""
    try:
        user_id = message.from_user.id
        place = data_place[user_id]
        place.lon = message.location.longitude
        place.lat = message.location.latitude
        msg = bot.send_message(message.chat.id, 'Send a photo of the place, please.')
        bot.register_next_step_handler(msg, process_placephoto_step)
    except Exception as e:
        # Report the exception type back to the user; full details go to stdout.
        template = "An exception of type {} occured. Arguments:\n{!r}"
        mes = template.format(type(e).__name__, e.args)
        print(mes)
        bot.reply_to(message, type(e).__name__)
def process_placephoto_step(message):
    """Final /add step: download the photo, persist the place, drop the draft."""
    try:
        user_id = message.from_user.id
        place = data_place[user_id]
        # The last element of message.photo is the largest-resolution variant.
        photo_id = message.photo[-1].file_id
        photo_info = bot.get_file(photo_id)
        photo_downloaded = bot.download_file(photo_info.file_path)
        # Stored base64-encoded; place_list decodes it before sending.
        place.photo = base64.b64encode(photo_downloaded)
        sql = "INSERT INTO place (name,lon,lat,photo,user_id) VALUES (%s,%s,%s, %s ,%s) "
        val = (place.name, place.lon, place.lat, place.photo, user_id)
        mycursor.execute(sql, val)
        mydb.commit()
        bot.send_message(message.chat.id, 'Place has been saved!')
        data_place.pop(user_id)
    except Exception as e:
        # Report the exception type back to the user; full details go to stdout.
        template = "An exception of type {} occured. Arguments:\n{!r}"
        mes = template.format(type(e).__name__, e.args)
        print(mes)
        bot.reply_to(message, type(e).__name__)
@bot.message_handler(commands=['list'])
def place_list(message):
    """Handle /list: send the user's 10 most recent places.

    For each place sends its name, photo, the reverse-geocoded address
    (when available) and a map location.
    """
    try:
        user_id = message.from_user.id
        query = ("SELECT name, lon, lat, photo FROM place "
                 "WHERE user_id LIKE %s "
                 "ORDER BY place_id DESC LIMIT 10")
        value = (user_id,)
        mycursor.execute(query, value)
        results = mycursor.fetchall()
        if results == []:
            bot.send_message(message.chat.id, 'Place_List is empty!')
        else:
            bot.send_message(message.chat.id, 'Your Place_List:')
            for counter, (name, lon, lat, photo_res) in enumerate(results, 1):
                # Photos are stored base64-encoded (see process_placephoto_step).
                photo_b = photo_res.encode('utf-8')
                photo = base64.b64decode(photo_b)
                bot.send_message(message.chat.id, '{}. {}'.format(counter, name))
                bot.send_photo(message.chat.id, photo)
                coordinates = '{},{}'.format(lon, lat)
                # Geocode ONCE per place — the original called the paid Yandex
                # API twice per row (once for the truthiness check, once for
                # the message text).
                address = get_adress_by_coordinates(coordinates)
                if address:
                    bot.send_message(message.chat.id, '{}: {}'.format('Adress', address))
                bot.send_location(message.chat.id, lat, lon)
            bot.send_message(message.chat.id, 'Done!')
    except Exception as e:
        # Report the exception type back to the user; full details go to stdout.
        template = "An exception of type {} occured. Arguments:\n{!r}"
        mes = template.format(type(e).__name__, e.args)
        print(mes)
        bot.reply_to(message, type(e).__name__)
@bot.message_handler(commands=['reset'])
def delete_placelist(message):
    """Handle /reset: delete every saved place belonging to this user."""
    try:
        user_id = message.from_user.id
        query = ("DELETE FROM place "
                 "WHERE user_id LIKE %s ")
        value = (user_id,)
        mycursor.execute(query, value)
        mydb.commit()
        bot.send_message(message.chat.id, 'Your Place_List has been deleted!')
    except Exception as e:
        # Report the exception type back to the user; full details go to stdout.
        template = "An exception of type {} occured. Arguments:\n{!r}"
        mes = template.format(type(e).__name__, e.args)
        print(mes)
        bot.reply_to(message, type(e).__name__)
@bot.message_handler()
def handler_message(message):
    """Fallback for any non-command message: log it and show a help hint."""
    print(message.text)
    bot.send_message(message.chat.id, text='This BestBot will help you with your Place_List.')
# Enable saving next step handlers to file "./.handlers-saves/step.save".
# Delay=2 means that after any change in next step handlers (e.g. calling register_next_step_handler())
# saving will happen after delay 2 seconds.
bot.enable_save_next_step_handlers(delay=2)
# Load next_step_handlers from save file (default "./.handlers-saves/step.save")
# WARNING It will work only if enable_save_next_step_handlers was called!
bot.load_next_step_handlers()
if __name__ == '__main__':
    # Poll forever; on any error, log and retry after a cooldown so a
    # transient network/Telegram failure does not kill the bot.
    while True:
        try:
            bot.polling(none_stop=True, timeout=30)
        except Exception as e:
            template = "An exception of type {} occured. Arguments:\n{!r}"
            mes = template.format(type(e).__name__, e.args)
            print(mes)
            time.sleep(15)
994,171 | 7a3cd2f8a2bce933c4022e4f70ebf86af3220a22 | import logging
import argparse
import yaml
import os
import subprocess
import re
import datetime
import pickle
import sklearn
import xgboost
import pandas as pd
import numpy as np
from src.load_data import load_data
from src.helpers import Timer, fillin_kwargs
from src.generate_features import choose_features, get_target
from sklearn.linear_model import LogisticRegression, LinearRegression
logger = logging.getLogger(__name__)
methods = dict(logistic=LogisticRegression,
linear_regression=LinearRegression,
xgboost=xgboost.XGBClassifier)
train_model_kwargs = ["split_data", "params", "fit", "compile"]
def split_data(X, y, train_size=1, test_size=0, validate_size=0, random_state=24, save_split_prefix=None):
    """Split features and target into train/test/validate sets.

    Args:
        X: Feature data (any sequence sklearn can split).
        y: Target data aligned with `X`, or None when there is no target.
        train_size (float or int): Fraction (all three sizes sum to 1) or
            row count (sizes sum to len(X)) for the training set. Default 1.
        test_size (float or int): Fraction or row count for the test set.
        validate_size (float or int): Fraction or row count for validation.
        random_state (int): Seed for reproducible splits.
        save_split_prefix (str): If given, each split is written to
            "<prefix>-<split>-features.csv" (and "...-targets.csv" when a
            target was supplied).

    Returns:
        (dict, dict): X and y dicts keyed by "train" plus, when non-empty,
        "test" and "validate". When y was None, the y dict is {"train": None}.

    Raises:
        ValueError: if the three sizes sum to neither 1 nor len(X).
    """
    if y is not None:
        assert len(X) == len(y)
        include_y = True
    else:
        # Dummy target so the sklearn splitters keep rows aligned.
        y = [0] * len(X)
        include_y = False
    if train_size + test_size + validate_size == 1:
        prop = True
    elif train_size + test_size + validate_size == len(X):
        prop = False
    else:
        raise ValueError("train_size + test_size + validate_size "
                         "must equal 1 or equal the number of rows in the dataset")
    if prop:
        # Convert proportions into absolute row counts.
        train_size = int(np.round(train_size * len(X)))
        validate_size = int(np.round(validate_size * len(X)))
        test_size = int(len(X) - train_size - validate_size)
    if train_size == len(X):
        # Everything goes to training; nothing left to split.
        # BUGFIX: the original compared `train_size == 1` AFTER the
        # proportion-to-count conversion above, so this path was unreachable
        # for train_size=1 and X_test/X_validate were then referenced below
        # before assignment (NameError).
        X_train, y_train = X, y
        X_test, y_test = [], []
        X_validate, y_validate = [], []
    else:
        X_train, X_remain, y_train, y_remain = sklearn.model_selection.train_test_split(X, y, train_size=train_size,
                                                                                        random_state=random_state)
        X_validate, X_test, y_validate, y_test = sklearn.model_selection.train_test_split(X_remain, y_remain,
                                                                                          test_size=test_size,
                                                                                          random_state=random_state+1)
    X = dict(train=X_train)
    y = dict(train=y_train)
    if len(X_test) > 0:
        X["test"] = X_test
        y["test"] = y_test
    if len(X_validate) > 0:
        X["validate"] = X_validate
        y["validate"] = y_validate
    if save_split_prefix is not None:
        for split in X:
            pd.DataFrame(X[split]).to_csv("%s-%s-features.csv" % (save_split_prefix, split))
            if include_y:
                pd.DataFrame(y[split]).to_csv("%s-%s-targets.csv" % (save_split_prefix, split))
            logger.info("X_%s and y_%s saved to %s-%s-features.csv and %s-%s-targets.csv",
                        split, split,
                        save_split_prefix, split,
                        save_split_prefix, split)
    if not include_y:
        y = dict(train=None)
    return X, y
def train_model(df, method=None, save_tmo=None, **kwargs):
    """Train a model of the configured `method` on features/target from `df`.

    Args:
        df: Input dataframe; features/target are selected from it via the
            optional "choose_features" / "get_target" kwargs.
        method (str): Key into the module-level `methods` dict
            (logistic / linear_regression / xgboost).
        save_tmo (str): If given, pickle the trained model object here.
        **kwargs: Sub-dicts for "choose_features", "get_target", and the
            stages listed in `train_model_kwargs` (split_data, params, fit,
            compile); missing ones are filled in as empty by fillin_kwargs.

    Returns:
        The fitted estimator.
    """
    assert method in methods.keys()
    if "choose_features" in kwargs:
        X = choose_features(df, **kwargs["choose_features"])
    else:
        X = df
    if "get_target" in kwargs:
        y = get_target(df, **kwargs["get_target"])
    else:
        y = None
    # Ensure every expected stage key exists before indexing into kwargs.
    kwargs = fillin_kwargs(train_model_kwargs, kwargs)
    X, y = split_data(X, y, **kwargs["split_data"])
    model = methods[method](**kwargs["params"])
    if "validate" in X and "validate" in y:
        # Pass the validation split through to fit (e.g. xgboost eval_set).
        kwargs["fit"]["eval_set"] = [(X["validate"], y["validate"])]
    with Timer("model training", logger) as t:
        model.fit(X["train"], y["train"], **kwargs["fit"])
    if save_tmo is not None:
        with open(save_tmo, "wb") as f:
            pickle.dump(model, f)
        logger.info("Trained model object saved to %s", save_tmo)
    return model
def run_training(args):
    """Load data per CLI args / YAML config, train the model, optionally save it.

    Args:
        args: argparse namespace with `config` (path to YAML configuration),
            `csv` (optional input CSV path, overrides config) and `save`
            (optional path for the pickled trained-model object).

    Raises:
        ValueError: if no CSV path is given and the config lacks `load_data`.
    """
    with open(args.config, "r") as f:
        # safe_load: the config is plain data; yaml.load without an explicit
        # Loader is deprecated and can construct arbitrary Python objects.
        config = yaml.safe_load(f)
    if args.csv is not None:
        df = load_data(how="csv", csv=dict(path=args.csv))
    elif "load_data" in config:
        df = load_data(**config["load_data"])
    else:
        raise ValueError("Path to CSV for input data must be provided through --csv or "
                         "'load_data' configuration must exist in config file")
    tmo = train_model(df, **config["train_model"])
    if args.save is not None:
        with open(args.save, "wb") as f:
            pickle.dump(tmo, f)
        logger.info("Trained model object saved to %s", args.save)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Train model")
    parser.add_argument('--config', help='path to yaml file with configurations')
    # BUGFIX: was parser.add_argumemt(...), which raises AttributeError at
    # startup; also closed the paren in the --save help text.
    parser.add_argument('--csv', default=None, help="Path to CSV for input to model training")
    parser.add_argument('--save', default=None, help='Path to where the dataset should be saved to (optional)')
    args = parser.parse_args()
    run_training(args)
|
994,172 | c8118cbe560c792b14afcba647d2c18b8cad232d | from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
from selenium.webdriver.support.ui import Select
import time

# Scrape the CBSE affiliated-schools directory, region by region.
url = 'http://cbseaff.nic.in/cbse_aff/schdir_Report/userview.aspx'
driver = webdriver.Chrome(ChromeDriverManager().install())
driver.get(url)
driver.implicitly_wait(10)
# Select the search-by-region radio option.
driver.find_element_by_id('optlist_3').click()
drp = driver.find_element_by_id('ddlitem')
a=drp.text
# The dropdown's text is newline-separated; index 0 is the placeholder entry.
a = list(a.split('\n'))
reg = []
for i in range(1,len(a)):
    reg.append(a[i])
drp = Select(drp)
for i in range(1,len(reg)+1):
    drp.select_by_index(i)
    driver.find_element_by_id('search').click()
    a = []
    # NOTE(review): this inner loop has no exit condition — it pages via the
    # "next" button (Button1) until the button disappears and the find_element
    # call raises, so only the first region is ever fully processed.
    while True:
        time.sleep(1)
        element = driver.find_element_by_xpath('//*[@id="Button1"]')
        a.append(driver.find_element_by_xpath('//*[@id="T1"]/tbody/tr/td').text)
        print(a)
        driver.execute_script("arguments[0].click();", element)
driver.maximize_window()
994,173 | b43b6f8e880706551bad0f8fdb85b4ab8728e25b | from django import forms
from django.urls import path, reverse
from django.views import generic
from ryzom_django.html import template
from ryzom_django_mdc import html
class ExampleDocument(html.Html):
    # Page-level document wrapper; `title` becomes the rendered page title.
    title = 'Secure Elections with homomorphic encryption'
# Serves to demonstrate template composition based on multi level nesting
class ExampleCard(html.Div):
    """Narrow centered card demonstrating multi-level template nesting."""
    def __init__(self, *content, **context):
        # NOTE(review): **context is accepted but not forwarded; only the
        # content plus the fixed inline style reach the Div.
        super().__init__(*content, style='max-width: 20em; margin: auto')
# in general, you wouldn't go through a template_name, but since this is useful
# to design views that you import from external packages, we have this example
# here, it also shows how you can compose by nesting different layout objects
@html.template('form.html', ExampleDocument, ExampleCard)
class ExampleFormViewComponent(html.Div):
    """Python-side template registered for 'form.html', nested inside
    ExampleDocument and ExampleCard."""
    title = 'Example form view'
    def to_html(self, view, form, **context):
        # view and form come from the default context, we're spreading them as
        # nice, required variables for this template.
        # As you can imagine, having templates in Python not only gives you all
        # the programing power, but it also allows you to use a debugger
        # breakpoint() which was not possible with traditionnal templates.
        content = []
        if view.request.method == 'POST' and form.is_valid():
            # Successful POST: echo the cleaned data back as an MDC list.
            content += [
                html.Div(
                    html.H3('Form post success!'),
                    html.MDCList(*[
                        html.MDCListItem(f'{key}: {value}')
                        for key, value in form.cleaned_data.items()
                    ])
                )
            ]
        # The form itself (with CSRF token and submit button) is always shown.
        content += [
            html.Form(
                html.CSRFInput(view.request),
                form,
                html.MDCButton(form.submit_label),
                method='POST',
            )
        ]
        return super().to_html(*content, **context)
class ExampleForm(forms.Form):
    """Kitchen-sink form exercising every common widget/MDC rendering path."""
    submit_label = 'Send'
    char = forms.CharField(
        help_text='Non required example text input',
    )
    email = forms.EmailField(
        help_text='Valid email required',
    )
    boolean = forms.BooleanField(
        help_text='Required boolean!',
    )
    checkboxes = forms.MultipleChoiceField(
        choices=(('a', 'a'), ('b', 'b')),
        widget=forms.CheckboxSelectMultiple,
        help_text='Required checkbox multiple',
    )
    datetime = forms.SplitDateTimeField(
        # Separate native date and time inputs.
        widget=forms.SplitDateTimeWidget(
            date_attrs=dict(type='date'),
            time_attrs=dict(type='time'),
        ),
        help_text='Required help text',
    )
    textarea = forms.CharField(
        widget=forms.Textarea,
        help_text='This is the help text'
    )
    document = forms.FileField(
        widget=forms.FileInput,
        help_text='Choose a file'
    )
    # Grouped choices exercise <optgroup> rendering.
    select = forms.ChoiceField(
        widget=forms.Select,
        choices=(
            ('Test', (
                (1, 'the first one'),
                (2, 'the second'),
                (3, 'the thirf')
            )),
            ('Re', (
                (4, '444'),
                (5, '555')
            )),
        ),
        initial=5,
        help_text='Some help text',
    )
    select_multiple = forms.MultipleChoiceField(
        widget=forms.SelectMultiple,
        choices=(
            ('Test', (
                (1, 'the first one'),
                (2, 'the second'),
                (3, 'the thirf')
            )),
            ('Re', (
                (4, '444'),
                (5, '555')
            )),
        ),
        initial=5,
        help_text='Some more help text',
    )
# Finally, a Django FormView, there's nothing to see here because of how well
# Ryzom integrates with Django. Of course you're free to make views that do
# some crazy Ryzom rendering, this merely shows how you would hook in the
# default Django rendering on external views that you include and do not want
# to fork: you can just override the default template with @html.template
# instead of by creating html templates.
class ExampleFormView(generic.FormView):
    """Plain Django FormView; its template is overridden via @html.template."""
    template_name = 'form.html'
    form_class = ExampleForm
    def form_valid(self, form):
        # we don't have a success url, render again on form_valid
        return super().get(self.request)
urlpatterns = [path('', ExampleFormView.as_view())]
|
994,174 | 806836765bceafef6d465664c0546e1b32584c20 | class Node(object):
def __init__(self,item):
self.item=item
self.next=None
class SinCycLinkedlist(object):
    """单向循环链表 (singly linked *circular* list).

    Invariant: in a non-empty list the tail node's ``next`` always points
    back at ``self._head``, so every traversal must stop when it wraps
    around to the head again.
    """

    def __init__(self):
        # An empty list is represented by a None head.
        self._head = None

    def is_empty(self):
        """Return True if the list contains no nodes."""
        return self._head == None

    def length(self):
        """Return the number of nodes in the list."""
        if self.is_empty():
            return 0
        count = 1
        cur = self._head
        while cur.next != self._head:
            count = count + 1
            cur = cur.next
        return count

    def travel(self):
        """Print every item on one line, separated by spaces."""
        if self.is_empty():
            return
        cur = self._head
        print(cur.item, end=' ')
        while cur.next != self._head:
            cur = cur.next
            print(cur.item, end=' ')

    def add(self, item):
        """Insert a new node holding ``item`` at the head of the list."""
        node = Node(item)
        if self.is_empty():
            self._head = node
            node.next = self._head
        else:
            node.next = self._head
            cur = self._head
            # Walk to the tail so it can be re-pointed at the new head.
            while cur.next != self._head:
                cur = cur.next
            cur.next = node
            self._head = node

    def append(self, item):
        """Append a new node holding ``item`` at the tail of the list."""
        node = Node(item)
        if self.is_empty():
            # BUGFIX: the original did ``node = self._head`` (a no-op
            # rebinding) and left node.next as None, which broke the
            # circular invariant and made length()/travel() crash later.
            self._head = node
            node.next = node
        else:
            cur = self._head
            while cur.next != self._head:
                cur = cur.next
            cur.next = node
            node.next = self._head

    def insert(self, pos, item):
        """Insert ``item`` at index ``pos`` (clamped to head/tail)."""
        if pos <= 0:
            self.add(item)
        elif pos > (self.length() - 1):
            self.append(item)
        else:
            count = 0
            node = Node(item)
            cur = self._head
            # Stop at the node just before the insertion point.
            while count < (pos - 1):
                cur = cur.next
                count = count + 1
            node.next = cur.next
            cur.next = node

    def remove(self, item):
        """Remove the first node whose payload equals ``item`` (no-op if absent)."""
        if self.is_empty():
            return
        cur = self._head
        pre = None
        if cur.item == item:
            # Deleting the head: re-link the tail to the new head first.
            if cur.next != self._head:
                while cur.next != self._head:
                    cur = cur.next
                cur.next = self._head.next
                self._head = self._head.next
            else:
                # Single-node list becomes empty.
                self._head = None
        else:
            pre = self._head
            while cur.next != self._head:
                if cur.item == item:
                    pre.next = cur.next
                    return
                else:
                    pre = cur
                    cur = cur.next
            # cur now points at the tail node.
            if cur.item == item:
                pre.next = cur.next

    def search(self, item):
        """Return True if some node holds ``item``."""
        if self.is_empty():
            return False
        cur = self._head
        while cur.next != self._head:
            if cur.item == item:
                return True
            cur = cur.next
        # BUGFIX: the original loop exited before examining the tail node,
        # so an item stored in the last position was never found (the demo
        # in __main__ even expects search(3) on the tail to be True).
        return cur.item == item
if __name__ == "__main__":
ll = SinCycLinkedlist()
ll.add(1)
ll.add(2)
ll.append(3)
ll.insert(2, 4)
ll.insert(4, 5)
ll.insert(0, 6)
print ("length:",ll.length())
ll.travel()
print (ll.search(3))
print (ll.search(7))
ll.remove(1)
print ("length:",ll.length())
ll.travel()
|
994,175 | 2c52f470a229ed9eeb8fe6f692e010a7017a492a | class Solution:
def searchMatrix(self, matrix: [[int]], target: int) -> bool:
    """Binary-search ``target`` in a row-major flattening of ``matrix``."""
    flat = [value for row in matrix for value in row]
    lo, hi = 0, len(flat) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        if flat[mid] == target:
            return True
        if flat[mid] < target:
            lo = mid + 1
        else:
            hi = mid - 1
    return False
def searchMatrix2(self, matrix: [[int]], target: int) -> bool:
    """Binary-search ``target`` by mapping a flat index onto rows/columns
    with divmod, without materializing a flattened copy of the matrix.
    """
    if not matrix or not matrix[0]:
        return False
    # Quick reject when target lies outside the matrix's value range.
    if target < matrix[0][0] or target > matrix[-1][-1]:
        return False
    rows, cols = len(matrix), len(matrix[0])
    lo, hi = 0, rows * cols - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        r, c = divmod(mid, cols)
        value = matrix[r][c]
        if value == target:
            return True
        if value < target:
            lo = mid + 1
        else:
            hi = mid - 1
    return False
matrix = [
[1, 3, 5, 7],
[10, 11, 16, 20],
[23, 30, 34, 50]
]
solution = Solution()
print(solution.searchMatrix2(matrix, 3))
|
994,176 | 4efaa5985d6f130edd1e713a890c18e33288dab9 | import json
import urllib2
import math
import pyaudio
import sys
import time
def getRate():
    """Fetch the current 15-minute average USD/BTC rate from blockchain.info.

    NOTE(review): Python 2 only (urllib2); no error handling for network
    failures or malformed JSON — verify before reuse.
    """
    content = urllib2.urlopen("http://blockchain.info/de/ticker").read()
    data = json.loads(content)
    # The ticker exposes several USD fields; '15m' is the 15-minute average.
    rate = data['USD']['15m']
    return(float(rate))
def playBeep(RATE, WAVE):
    """Play a ~2 second sine beep of frequency WAVE at sample rate RATE.

    NOTE(review): Python 2 only (xrange, byte-string samples) and requires
    the pyaudio package.
    """
    PyAudio = pyaudio.PyAudio
    p = PyAudio()
    # One second of 8-bit unsigned samples: sine scaled to the 1..255 range.
    data = ''.join([chr(int(math.sin(x/((RATE/WAVE)/math.pi))*127+128)) for x in xrange(RATE)])
    stream = p.open(format =
        p.get_format_from_width(1),
        channels = 1,
        rate = RATE,
        output = True)
    # Write the same one-second buffer twice for a two-second beep.
    for DISCARD in xrange(2):
        stream.write(data)
    stream.stop_stream()
    stream.close()
    p.terminate()
def main():
    """Poll the BTC rate once; beep when it changed since the last poll.

    Uses a higher-pitched beep (1000 Hz) for a rise and a lower one
    (600 Hz) otherwise.  Relies on the module-level ``lastRate`` being
    initialized before the first call (done at script bottom).
    """
    global lastRate
    rate = getRate()
    freq = 600
    if rate > lastRate:
        freq = 1000
    print("last: %.2f current: %.2f" % (lastRate, rate))
    # Only beep on an actual change, in either direction.
    if rate != lastRate:
        playBeep(8000, freq)
    lastRate = rate
lastRate = getRate()
while True:
main()
time.sleep(60)
|
994,177 | d295206c82596ebb005e1810c7e593b1ac629a85 | import numpy as np
import os
import pickle
def get_baseline_estimates(ratings):
    """Compute (and cache) baseline rating estimates for a user x movie matrix.

    For every observed rating (non-zero cell ``[i, x]``) the baseline is
    ``avg_movie[x] + avg_user[i] - global_mean`` where the averages are taken
    over observed ratings only; unobserved cells stay 0.  The result is
    cached in ``baseline_estimates.pkl`` and reloaded on subsequent calls.

    Args:
        ratings: 2-D array-like of shape (users, movies); 0 marks "unrated".

    Returns:
        numpy.ndarray of the same shape holding the baseline estimates.
    """
    if not os.path.isfile("baseline_estimates.pkl"):
        ratings = np.asarray(ratings, dtype=float)
        observed = ratings != 0
        user_counts = observed.sum(axis=1)
        movie_counts = observed.sum(axis=0)
        row_sums = ratings.sum(axis=1)
        col_sums = ratings.sum(axis=0)
        # Per-user / per-movie means over observed ratings; users/movies with
        # no ratings get a mean of 0, matching the original loop version.
        avg_user = np.divide(row_sums, user_counts,
                             out=np.zeros_like(row_sums), where=user_counts > 0)
        avg_movie = np.divide(col_sums, movie_counts,
                              out=np.zeros_like(col_sums), where=movie_counts > 0)
        total = observed.sum()
        # BUGFIX: the original divided by the rating count unconditionally
        # and crashed (ZeroDivisionError) on an all-zero matrix.
        global_mean = ratings.sum() / total if total else 0.0
        baseline_est = np.where(
            observed,
            avg_movie[np.newaxis, :] + avg_user[:, np.newaxis] - global_mean,
            0.0,
        )
        # BUGFIX: the original leaked the file handle opened for pickling.
        with open("baseline_estimates.pkl", "wb") as pickle_file:
            pickle.dump(baseline_est, pickle_file)
    else:
        with open('baseline_estimates.pkl', 'rb') as pickle_file:
            baseline_est = pickle.load(pickle_file)
    return baseline_est
994,178 | 6e774a89f9d0c05939def00d1779130b4757fdec | import socket
import hashlib
from Crypto.Cipher import AES
from Crypto.PublicKey import RSA
from Crypto import Random
from Crypto.Random.random import getrandbits
import registrar
import sys
from charm.toolbox.integergroup import RSAGroup
from charm.schemes.pkenc import pkenc_paillier99
from charm.core.math import integer as specialInt
from charm.core.engine.util import objectToBytes,bytesToObject
signedRandomBitsSize=320
voteSize=128
signedBlindedVoteSize=1232
def get_vote():
    """Prompt for a numeric vote and return it Paillier-encrypted, serialized
    and space-padded to the fixed message size ``voteSize``.

    NOTE(review): Python 2 only (raw_input/long/print statement); depends on
    the charm-crypto toolkit and reads the election public key from disk.
    """
    vote = raw_input("Enter your vote: ") #get vote from user
    vote = long(vote)
    #Initial setup for Paillier
    group = RSAGroup()
    pai = pkenc_paillier99.Pai99(group)
    #Get public voting public key
    # NOTE(review): this file handle is never closed.
    f=open("./pyssss/VotingPublic")
    data = f.read()
    public_key = bytesToObject(data,group)
    #Encryption with Paillier
    vote = pai.encode(public_key['n'],vote)
    ciphervote = pai.encrypt(public_key,vote)
    # ciphervotestr = str(str(ciphervote['c']).split('mod')[0])
    # print ciphervote['c']
    ciphervote = specialInt.serialize(ciphervote['c'])
    #pad vote with trailing spaces up to the fixed wire size (voteSize)
    while (len(ciphervote) < voteSize):
        ciphervote += " "
    print ciphervote
    return ciphervote
def connect_to_server():
registrar.Register()
serv_sock = socket.socket() #create a socket
host = socket.gethostname() #get the host name of the socket
serv_port = int(sys.argv[1]) #initialize the port for the socket
serv_sock.connect((host, serv_port)) #connect the socket
#generate AES key, iv, encryptor and decryptor
serv_AES_key = Random.new().read(16)
serv_AES_iv = Random.new().read(16)
serv_AES_encryptor = AES.new(serv_AES_key, AES.MODE_CBC, serv_AES_iv)
#encrypt AES data with server's RSA public key
f = open("serverpubkey.pem",'r')
serv_pub_key = RSA.importKey(f.read())
enc_AES_key = serv_pub_key.encrypt(serv_AES_key,32)
enc_AES_iv = serv_pub_key.encrypt(serv_AES_iv,32)
serv_sock.send(enc_AES_key[0]) #send the AES key
serv_sock.send(enc_AES_iv[0]) #send the AES iv
#get notary public key
f.close()
f = open('NotaryKey.pem','r')
not_pub_key = RSA.importKey(f.read())
f.close()
#get voter private key
#Could add more input verification
password = raw_input('Please enter your password: ')
while len(password) %16 != 0:
password += '0'
f = open('CurrentVoter.pem','r')
enc_x = f.read()
f.close()
cipher = AES.new(password, AES.MODE_ECB)
x = cipher.decrypt(enc_x)
x = x.rstrip('0')
voter_priv_key = RSA.importKey(x)
#generate AES key and iv
not_AES_key = Random.new().read(16)
not_AES_iv = Random.new().read(16)
not_AES_encryptor = AES.new(not_AES_key, AES.MODE_CBC, not_AES_iv)
#encrypt AES info with notary's public key
enc_AES_key = not_pub_key.encrypt(not_AES_key,32)
enc_AES_iv = not_pub_key.encrypt(not_AES_iv,32)
#initialize network connection to notary
not_sock = socket.socket() #create a socket
not_port = int(sys.argv[2]) #initialize the port for the socket
not_sock.connect((host, not_port)) #connect the socket
#send encrypted AES data to notary
not_sock.send(enc_AES_key[0]) #send the AES key
not_sock.send(enc_AES_iv[0]) #send the AES iv
#get random bits from notary over socket
not_rand_bits = not_AES_encryptor.decrypt(not_sock.recv(16))
#sign random bits from notary with private key
k = getrandbits(64)
signed_rand_bits = str(voter_priv_key.sign(not_rand_bits,k)[0])
#get vote from user and blind
vote = get_vote()
k = getrandbits(64)
blinded_vote = str(not_pub_key.blind(vote,k))
#pad length of s_r_b to bufSize
while (len(signed_rand_bits) < signedRandomBitsSize):
signed_rand_bits+=" "
#send signed random bits and blinded vote back to notary
not_sock.send(not_AES_encryptor.encrypt(signed_rand_bits))
not_sock.send(not_AES_encryptor.encrypt(blinded_vote))
#receive blind signed vote from notary and unblind
blinded_signed_vote = not_AES_encryptor.decrypt(not_sock.recv(signedBlindedVoteSize))
blinded_signed_vote = blinded_signed_vote.strip()
signed_vote = str(not_pub_key.unblind(long(blinded_signed_vote),k))
#send vote to server
serv_sock.send(serv_AES_encryptor.encrypt(vote))
#pad signed vote to 320
while (len(signed_vote) < signedBlindedVoteSize):
signed_vote += " "
#send signed vote
serv_sock.send(serv_AES_encryptor.encrypt(signed_vote))
#print rec'd confirmation message
print serv_sock.recv(1024)
serv_sock.close #close the socket
if __name__ == "__main__":
if (len(sys.argv) != 3):
print("Usage: python voter.py server_port notary_port")
sys.exit(0)
connect_to_server()
|
994,179 | 4564de10ad98465c0b79e78b1b6e02ebaf142259 | from internet_usage import get_usage
from flask import Flask, render_template, Response
import time
app = Flask(__name__)
@app.route('/')
def home():
    """Render the home page with freshly queried internet-usage figures.

    NOTE(review): stores the result in a module-level global ``usage`` as
    well as passing it to the template — the global looks intended for other
    routes; confirm it is actually read elsewhere.
    """
    global usage
    usage = get_usage()
    return render_template('home.html', usage=usage)
|
994,180 | 7cf4c799271168dd8cf53b68603c9b88ab6cb03d | from collections import defaultdict
from aocd import get_data
from dotenv import load_dotenv
load_dotenv()
t = """mxmxvkd kfcds sqjhc nhms (contains dairy, fish)
trh fvjkl sbzzf mxmxvkd (contains dairy)
sqjhc fvjkl (contains soy)
sqjhc mxmxvkd sbzzf (contains fish)"""
def parse_foods(input: str):
    """Parse Advent-of-Code day-21 food lines.

    Each line looks like ``"a b c (contains dairy, fish)"``.

    Returns:
        foods: list of (ingredient_set, allergen_set) per line.
        all_components: set of every ingredient name seen.
        all_allergens: dict mapping each allergen to the union of
            ingredient sets of the foods that list it.
    """
    foods = []
    all_components, all_allergens = set(), defaultdict(set)
    for food in input.splitlines():
        components, alergens = food.split(" (contains ")
        alergens = alergens.strip(")").split(", ")
        components = components.split()
        # BUGFIX: the original did set(components) on the *unsplit* string,
        # so all_components collected individual characters instead of
        # ingredient names.  Split first, then take the union.
        all_components |= set(components)
        for alergen in alergens:
            all_allergens[alergen] |= set(components)
        foods += [(set(components), set(alergens))]
    return foods, all_components, all_allergens
def finalize(allergens, allergen):
    """Propagate a resolved allergen through the candidate table.

    Removes ``allergens[allergen]`` from every other allergen's candidate
    set that is still ambiguous, recursing whenever that subtraction pins
    another allergen down to a single candidate.  Mutates ``allergens``
    in place.
    """
    resolved = allergens[allergen]
    for other in allergens:
        if other == allergen or len(allergens[other]) == 1:
            continue
        allergens[other] -= resolved
        if len(allergens[other]) == 1:
            finalize(allergens, other)
if __name__ == "__main__":
input = get_data(day=21, year=2020)
foods, components, allergens = parse_foods(input)
probable = {}
for allergen in allergens:
for food in foods:
if allergen in food[1]:
allergens[allergen] &= set(food[0])
if len(allergens[allergen]) == 1:
finalize(allergens, allergen)
count = 0
for ingredients, _ in foods:
for ingredient in ingredients:
if ingredient not in [allergen for types in allergens.values() for allergen in types]:
count +=1
print(count)
print(allergens)
cannonical = []
for allergen, ingredient in allergens.items():
cannonical += [(allergen, ingredient.copy().pop())]
cannonical.sort()
print(",".join([ingredient[1] for ingredient in cannonical]))
|
994,181 | 51d9a0bcd8d26e4444d3a4ff41dd62b00ef93afc | import sys
sys.path.append("..")
import struct
import pickle
from time import ctime
import tcp.tcpclisock as tcp
from PyQt5 import QtCore
HOST = '106.15.225.249'
PORT = 21567
BUFSIZE = 1024
ADDR=(HOST,PORT)
class Loginbackend(QtCore.QObject):
    """Qt backend for the login screen: talks to the server over the project
    TCP wrapper (``tcp.tcpCliSock``) and reports results back to the UI
    through pyqtSignals.
    """
    # Emitted with the server's integer reply to a login attempt.
    loginresult = QtCore.pyqtSignal(int)
    # 1 means success, 0 means the user is already registered.
    feedback = QtCore.pyqtSignal(int)
    def __init__(self):
        super(Loginbackend,self).__init__()
        self.link = tcp.tcpCliSock()
        self.userstate = False;
        self.link.link()
    def login(self,username,password):
        """Send credentials (command 1) and emit the reply via loginresult."""
        command = self.link.commandHandle(1)
        data = {'username':username,'password':password,'state':self.userstate}
        packages = self.link.packagesHandle(data)
        self.link.send(command+packages)
        receive = self.link.receiveCommand()
        self.loginresult.emit(receive)
    def changeLink(self):
        """Tell the server this link is done (command 0) and close the socket."""
        command = self.link.commandHandle(0)
        self.link.send(command)
        self.link.close()
    def RegistertoServer(self,dicts):
        """Send registration info (command 9) to the server."""
        command = 9
        command =self.link.commandHandle(command)
        packages =self.link.packagesHandle(dicts)
        self.link.send(command+packages)
        # Receive whether registration succeeded and forward it to the UI.
        feedback = self.link.receiveCommand()
        self.feedback.emit(feedback)
    def close(self):
        self.link.close()
|
994,182 | 222a5cdb31c3df73fbf26c137d303536c2b22572 | #!/usr/bin/env python
import sqlite3
import os
import logging
from modules import utils
class Database(object):
    """SQLite-backed index of media items found under ``root``.

    Wraps a single connection to ``data/main.db`` and keeps the ``items``
    table in sync with the files on disk (see scan/indexItems).
    """
    def __init__(self, root):
        # root: directory whose entries are scanned and indexed.
        self.root = root
        self.conn = sqlite3.connect("data/main.db")
        self.cursor = self.conn.cursor()
        self._checkItemTable()
    def _checkItemTable(self):
        """Create the ``items`` table on first run (probe with a SELECT)."""
        try:
            c = self.cursor.execute("""
                SELECT id FROM items
                LIMIT 1
            """)
            result = c.fetchone()
        # NOTE(review): bare except — ANY sqlite error (not only a missing
        # table) triggers a table-creation attempt.
        except:
            self._createItemTable()
    def _createItemTable(self):
        """Create the ``items`` schema and populate it from disk."""
        self.cursor.execute("""
            CREATE TABLE items
            (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                path TEXT,
                typeID INTEGER,
                title TEXT,
                qualityID INTEGER,
                added DATETIME DEFAULT CURRENT_TIMESTAMP,
                mtime REAL,
                length REAL
            )
        """)
        self.conn.commit()
        self.indexItems()
    def removeItem(self, ID):
        """Delete the row with the given primary key."""
        logging.info("Removing item: {0}".format(ID))
        self.cursor.execute("""
            DELETE FROM items
            WHERE id=?
        """, (ID, ))
        self.conn.commit()
    def insertItem(self, path, typeID, title, qualityID, mtime, length):
        """Insert a row and return it re-read as an Item."""
        logging.info("Inserting item: {0}".format(path))
        ins = self.cursor.execute("""
            INSERT INTO items
            (path, typeID, title, qualityID, mtime, length)
            VALUES (?, ?, ?, ?, ?, ?)
        """, (path, typeID, title, qualityID, mtime, length))
        self.conn.commit()
        return self.getItemByID(ins.lastrowid)
    def getItemByID(self, itemID):
        """Return the Item with this id, or None implicitly if absent."""
        query = self.cursor.execute("""
            SELECT * FROM items
            WHERE id=?
        """, (itemID,))
        result = query.fetchone()
        if result:
            return Item(raw=result, root=self.root)
    def getItems(self):
        """Return all rows as Items, skipping ones whose file disappeared
        (Item construction raises OSError in that case)."""
        query = self.cursor.execute("""
            SELECT * FROM items
        """)
        items = []
        for pot in query.fetchall():
            try:
                items.append(Item(raw=pot, root=self.root))
            except OSError:
                pass
        return items
    def getItemsByType(self, typeID):
        """Return all Items of one media type (see convertTypeID)."""
        query = self.cursor.execute("""
            SELECT * FROM items
            WHERE typeID = ?
        """, (typeID, ))
        items = []
        for pot in query.fetchall():
            try:
                items.append(Item(raw=pot, root=self.root))
            except OSError:
                pass
        return items
    def getItemByPath(self, path):
        """Return the Item stored under this absolute path, if any."""
        query = self.cursor.execute("""
            SELECT * FROM items
            WHERE path = ?
        """, (path,))
        result = query.fetchone()
        if result:
            return Item(raw=result, root=self.root)
    def scan(self):
        """Drop rows whose files vanished, then index files newer than the
        most recent known mtime.

        NOTE(review): max() raises ValueError when the table is empty —
        confirm scan is only called after an initial indexItems run.
        """
        items = self.cursor.execute("""
            SELECT id, mtime, path FROM items
        """).fetchall()
        lastmtime = max(items, key=lambda x: x[1])[1]
        for ID, mtime, path in items:
            if not os.path.exists(path):
                self.removeItem(ID)
        files = os.listdir(self.root)
        chk = lambda x: (os.stat(os.path.join(self.root, x)).st_mtime > lastmtime)
        newfiles = filter(chk, files) # = generator
        self.indexItems(files=newfiles)
    def indexItems(self, files=None):
        """Insert a row for every not-yet-indexed file in ``files``
        (defaults to all entries under root), classifying each one via
        utils.parseFilePath."""
        if not files:
            files = os.listdir(self.root)
        for fn in files:
            path = os.path.join(self.root, fn)
            check = self.getItemByPath(path)
            if check:
                continue
            mtime = os.stat(path).st_mtime
            length = utils.getLength(path)
            result = utils.parseFilePath(fn)
            if result.isTv:
                # Map the parsed resolution onto a qualityID (see
                # convertQualityID): 720p -> 2, 1080p -> 3, else SD.
                if result.hd and result.hd == "720p":
                    qualityID = 2
                elif result.hd and result.hd == "1080p":
                    qualityID = 3
                else:
                    qualityID = 1
                self.insertItem(path, 1, result.title.replace(".", " "), qualityID, mtime, length)
            elif result.isFilm:
                if result.hd and result.hd == "720p":
                    qualityID = 2
                elif result.hd and result.hd == "1080p":
                    qualityID = 3
                else:
                    qualityID = 1
                self.insertItem(path, 2, result.title.replace(".", " "), qualityID, mtime, length)
            elif result.isEbook:
                self.insertItem(path, 3, result.title.replace(".", " "), 1, mtime, length)
            else:
                # Unrecognized file: keep the raw filename, unknown type/quality.
                self.insertItem(path, 0, fn, 0, mtime, length)
def convertTypeID(typeID):
    """Map a numeric media-type id to its display name.

    Accepts anything int() can parse (ints or numeric strings), matching
    the original behaviour; unknown ids yield "unknown".
    """
    types = {
        1: "tv",           # icon-facetime-video
        2: "film",         # icon-film
        3: "ebook",        # icon-book
        4: "application",  # icon-hdd
        5: "music",        # icon-music
        0: "unknown",      # icon-question-sign
    }
    # Single lookup with a default instead of membership test + re-index.
    return types.get(int(typeID), "unknown")
def convertQualityID(qualityID):
    """Map a numeric quality id to its display name.

    Accepts anything int() can parse (ints or numeric strings), matching
    the original behaviour; unknown ids yield "unknown".
    """
    qualities = {
        1: "standard definition",
        2: "high definition (720p)",
        3: "ultra-high definition (1080p)",
        4: "DVD",
        5: "BluRay",
        6: "N/A",
        0: "unknown",
    }
    # Single lookup with a default instead of membership test + re-index.
    return qualities.get(int(qualityID), "unknown")
class Item(object):
    """In-memory view of one ``items`` row plus derived filesystem metadata
    (size, file list, playability, media info).
    """
    def __init__(self, ID=None, path=None, typeID=None, title=None, qualityID=None, added=None, mtime=None, length=None, raw=None, root=None):
        # ``raw`` is a full DB row (as selected by Database); when given it
        # takes precedence over the individual keyword arguments.
        if raw:
            self.ID = raw[0]
            path = raw[1]
            self.typeID = raw[2]
            self.type_str = convertTypeID(self.typeID)
            self.title = raw[3]
            self.qualityID = raw[4]
            self.quality_str = convertQualityID(self.qualityID)
            self.added = raw[5]
            self.mtime = raw[6]
            self.length = raw[7]
        else:
            self.ID = ID
            path = path
            self.typeID = typeID
            self.type_str = convertTypeID(self.typeID)
            self.title = title
            self.qualityID = qualityID
            self.quality_str = convertQualityID(self.qualityID)
            self.added = added
            self.mtime = mtime
            self.length = length
        # Derived data; the utils helpers stat the path, so this raises
        # OSError if the file vanished (Database.getItems skips those).
        self.size = utils.getSize(path)
        # Stored path is made relative to the library root.
        self.path = path.replace(root, "")
        self.files = utils.identifyFiles(path)
        self.playable = utils.identifyPlayable(self.files)
        self.filetree = utils.filetree(self.files, toplevel=True)
        self.mediainfo = utils.parseFilePath(self.path)
    def __repr__(self):
        # NOTE(review): format string lacks the closing ">" of the "<Item..."
        # bracket — cosmetic, but worth fixing at the next behaviour change.
        return "<Item - ID: {ID}, title: {title}, type: {type_str}, quality: {quality_str}".format(**self.__dict__)
|
994,183 | eb929c6a12cefdd3914ce742cea6a6f169a3ae9d | # -*- coding: utf-8 -*-
"""
Created on Fri Mar 12 12:37:17 2021
@author: Administrator
"""
import sys
#from PyQT5.QtWidgets import QApplication,QWidget
#from ui_Widget import Ui_Widget
#class QmyWidget(QWidget):
# def __init__(self,parent=None):
# super().__init__(parent)
# self.ui=Ui_Widget()
# self.ui.setupUi(self)
from PyQt5.QtWidgets import QApplication,QMainWindow
from ui_MainWindow import Ui_MainWindow
class QmyMainWindow(QMainWindow):
    """Main application window; wires up the Qt Designer-generated UI."""
    def __init__(self,parent=None):
        super().__init__(parent)
        # Ui_MainWindow is generated by pyuic from the Designer .ui file.
        self.ui=Ui_MainWindow()
        self.ui.setupUi(self)
if __name__=="__main__":
app=QApplication(sys.argv)
form=QmyMainWindow()
form.show()
sys.exit(app.exec_()) |
994,184 | b687fed66e6cdd09f16046bf03dc5396405ba854 | from fpdf import FPDF
from datetime import date
import pandas as pd
class PDF(FPDF):
    """FPDF subclass that stamps the corporate header/footer on every page.

    NOTE(review): ``resources`` is a module-level global assigned in
    __main__ — instantiating this class elsewhere needs it set first.
    """
    def header(self):
        # Full-width banner image across the top of each page.
        self.image(resources+'header.png', 0, 0, 300,10)
        self.ln(10)
    def footer(self):
        # Footer strip plus centered page counter ("Página n/total").
        self.image(resources+'footer.PNG', 0,190,282,20.5)
        self.set_y(-7)
        self.set_font('Arial', '', 8)
        self.set_text_color(0,75,126)
        self.cell(0, 0, 'Página ' + str(self.page_no()) + '/{nb}', 0, 0, 'C')
def create_analytics_report(day,filename="../report/PPT.pdf"):
pdf = PDF('L','mm','A4')
pdf.alias_nb_pages()
''' First Page '''
pdf.add_page()
# Imagens
pdf.image(resources+'logo.PNG',w=300)
pdf.image(resources+'divider_line.PNG',w=300)
## Título
pdf.ln(h = 10)
pdf.set_text_color(0,75,126)
pdf.set_font('Arial', 'B', 30)
pdf.cell(0,0,"Relatório Mensal", 0, 0, 'C')
## Sub-Título
pdf.ln(h = 20)
pdf.set_text_color(192,80,77)
pdf.set_font('Arial', 'B', 16)
pdf.cell(0,0,f'{month}',0,0,'C')
''' Second Page '''
pdf.add_page()
## Título
pdf.ln(60)
pdf.set_font('Arial', 'B', 24)
pdf.write(5, f"Agradecimentos")
## Data
pdf.ln(10)
pdf.set_font('Arial', '', 16)
pdf.write(4, f'{day}')
''' Third Page '''
pdf.add_page()
## Block 0 - Header
pdf.set_text_color(221,23,23)
pdf.set_font('Arial', 'B', 30)
pdf.cell(32.5, 10,'Título')
pdf.set_font('Arial', '', 28)
pdf.set_text_color(0,75,126)
pdf.write(10, f"Sub-Título")
pdf.ln(12)
pdf.image(resources+'divider_line.PNG',w=300)
## Block 1 - title
pdf.ln(h = 0)
pdf.set_font('Arial', 'U', 15)
pdf.cell(30,5,'Distribution:',align='C')
ybefore = pdf.get_y()
## Block 1 - image
pdf.ln(h = 5)
pdf.image(resources+'age_Dist.png',w=pdf.w/2.5, h=pdf.h/2.5)
## Block 2 - title
pdf.ln(h = 5)
pdf.set_font('Arial', 'U', 15)
pdf.cell(40,5,'Statistical Tests:',align='L')
## Block 2 - table
pdf.ln(h = 10)
values=pd.read_csv(resources+'pres_stats_tab.csv')
pdf.set_font('Arial', 'B', 10)
pdf.set_fill_color(0,75,126)
pdf.set_text_color(255,255,255)
cell_width=[65,25,25]
pdf.cell(cell_width[0],5,' ',1,0,align='L',fill=True)
pdf.cell(cell_width[1],5,values.columns[1],1,0,align='L',fill=True)
pdf.cell(cell_width[2],5,values.columns[2],1,0,align='L',fill=True)
pdf.ln(h = 5)
pdf.set_font('Arial', '', 10)
pdf.set_text_color(0,0,0)
pdf.cell(cell_width[0],5,values.loc[0,'Name'],1,0,align='L')
pdf.cell(cell_width[1],5,str(round(values.loc[0,'Value'],3)) if(values.loc[0,'Value']!='') else values.loc[0,'Value'] ,1,0,align='L')
pdf.cell(cell_width[2],5,str(round(values.loc[0,'p Value'],3)) if(values.loc[0,'p Value']!='') else values.loc[0,'p Value'] ,1,0,align='L')
pdf.ln(h = 5)
pdf.set_font('Arial', '', 10)
pdf.set_text_color(0,0,0)
pdf.cell(cell_width[0],5,values.loc[1,'Name'],1,0,align='L')
pdf.cell(cell_width[1],5,str(round(values.loc[1,'Value'],3)) if(values.loc[1,'Value']!='') else values.loc[1,'Value'],1,0,align='L')
pdf.cell(cell_width[2],5,str(round(values.loc[1,'p Value'],3)) if(values.loc[1,'p Value']!='') else values.loc[1,'p Value'],1,0,align='L')
pdf.ln(h = 5)
pdf.set_font('Arial', '', 10)
pdf.set_text_color(0,0,0)
pdf.cell(cell_width[0],5,values.loc[2,'Name'],1,0,align='L')
pdf.cell(cell_width[1],5,str(round(values.loc[2,'Value'],3)) if(values.loc[2,'Value']!='') else values.loc[2,'Value'],1,0,align='L')
pdf.cell(cell_width[2],5,str(round(values.loc[2,'p Value'],3)) if(values.loc[2,'p Value']!='') else values.loc[2,'p Value'],1,0,align='L')
## Block 3 - title
effective_page_width = pdf.w - 2*pdf.l_margin
posicao_titulo = effective_page_width/1.93 + pdf.r_margin # ((276.9975833333333/1.93)+ 10.001249999999999) == 153.5233139032815
## Block 3 - title
pdf.ln(h = 0)
pdf.set_xy(posicao_titulo, ybefore)
pdf.set_text_color(0,75,126)
pdf.set_font('Arial', 'U', 15)
pdf.cell(30,5,'Segments with Distinctly Different Outcomes:',align='L')
## Block 3 - image
pdf.ln(h = 5)
pdf.image(resources+"age_IV_based_BP.png",x=150,y=50,w=pdf.w/2.5, h=pdf.h/2.5)
# Block 4 - Title
pdf.ln(h = 89)
pdf.set_x(posicao_titulo)
pdf.set_font('Arial', 'U', 15)
pdf.cell(30,5,'Inference:',align='L')
## Block 4 - content
txt_data = pd.read_csv(resources+'age_inference.txt', header=None,sep='\t')
pdf.ln(h = 5)
pdf.set_x(2.5 + posicao_titulo)
pdf.set_text_color(0,0,0)
pdf.set_font('Arial', '', 12)
# # # # # pdf.cell(0,10,txt_data.loc[2,0],0,0,align='L')
''' END '''
pdf.output(filename, 'F')
if __name__ == '__main__':
resources = "../resources/"
day = date.today()
month = day.strftime('%m-%d-%Y')
create_analytics_report(day)
# WIDTH = 190.5
# HEIGHT = 338.67 |
994,185 | 1b1f14fdefb660de046f5680133a3ee0b2398b0c | def encontra_maximo (matriz):
lista0 = matriz[0]
lista1 = matriz[1]
lista2 = matriz[2]
maximo = lista0[0]
for e in lista0:
if e > maximo:
maximo = e
for e1 in lista1:
if e1 > maximo:
maximo = e1
for e2 in lista2:
if e2 > maximo:
maximo = e2
return maximo |
994,186 | d7c86b17a6dacdd8730aad0f8760cabdb5510b53 | import math
import sys
# hundred 3
# thousand 4
# million 7
# billion 10
def execute(arr):
    """Sliding-window test over six consecutive values of a numeric series.

    Maintains a window (a, b, c, d, e, f) of the six most recent values
    (zero-padded at the start) and returns True as soon as one of two
    threshold conditions on the newest values holds; otherwise False.
    NOTE(review): the conditions resemble the geometry checks of the
    classic "self-crossing path" problem (do the segments described by the
    step lengths in ``arr`` intersect?) — confirm against the original
    problem statement before relying on that interpretation.
    """
    if len(arr) < 4: return False
    a, b, c, (d, e, f) = 0, 0, 0, arr[:3]
    for i in range(3, len(arr)):
        # Shift the window left by one and pull in the next element.
        a, b, c, d, e, f = b, c, d, e, f, arr[i]
        if e < c - a and f >= d: return True
        if c - a <= e <= c and f >= (d if d - b < 0 else d - b): return True
    return False
Input = []
for line in sys.stdin:
if line.strip() == '':
break
Input.append(line)
li = Input[0].split(",")
arr = []
for i in range(0,len(li)):
arr.append(int(li[i]))
print(execute(arr))
|
994,187 | 3fdac8bfb9d6dec44aea7d11983874540b231eec | # -*- coding: utf-8 -*-
token = '499870364:AAEsRg6v6kAi9fdUiP-efrt7VakemN4CScI'
|
994,188 | 6609c9094a9ef87a68e7524ced7507353ba80e34 | import pretty_errors
import pandas as pd
import os
import numpy as np
import openpyxl as op
import datetime
# 部门列表
DEPARTS = ["销-2部", "销-3部", "销-5部", "销-6部", "销-8部", "销-9部", "市场部", "国际部", "资-香槟组", "资-Bgo无底薪", "资-Bgo有底薪"]
MAIN_DEPARTS = ["销-2部", "销-3部", "销-5部", "销-6部", "销-8部", "销-9部", "市场部", "国际部", "资源部"]
# 桌面路径
DIR_DESKTOP = os.path.join(os.path.expanduser("~"), 'Desktop').replace("\\", "/")+"/"
# ? 输出文件
week_report = pd.ExcelWriter(DIR_DESKTOP + "/稽核/隆回/周报/周报.xlsx")
main = pd.read_excel(DIR_DESKTOP + "/稽核/隆回/仓库/业绩汇总表.xlsx", sheet_name='汇总')
detail = pd.read_excel(DIR_DESKTOP + "/稽核/隆回/落单明细(检查用).xlsx", sheet_name='Sheet1')
# 周报
weeknum = main['周数'].max()
# 部门 周基本数据:台数 台类型 营业额 业绩 任务 完成率 赠送 部门个人数据
# * 周部门数据对比
week_depart = pd.pivot_table(main.query('周数 in [@weeknum, @weeknum-1] & 部门 in @DEPARTS'), index='部门', columns='周数', values=['房台','实际业绩','营业总收入'], aggfunc={'房台':'count', '实际业绩':np.sum,'营业总收入':np.sum})
# ! 周个人数据对比
week_person = pd.pivot_table(main.query('周数 in [@weeknum, @weeknum-1] & 主部门 in @MAIN_DEPARTS'), index=['主部门','订台人'], columns='周数', values=['房台','实际业绩','营业总收入'], aggfunc={'房台':'count', '实际业绩':np.sum,'营业总收入':np.sum}).reset_index()
# // 赠送数据
donate = pd.pivot_table(detail.query('主部门 in @MAIN_DEPARTS & 类型 =="经理赠送" & (落单人部门 in @MAIN_DEPARTS | 落单人 in ["王秀军2","卢涛","李文"])'), index='主部门', values='金额', aggfunc={'金额':np.sum})
# ? 周完成率
week_rate = pd.pivot_table(main.query('周数==@weeknum & 主部门 in @MAIN_DEPARTS'), index='主部门', values=['周业绩任务', '实际业绩', '周完成率'], aggfunc={'周业绩任务':np.mean, '实际业绩':np.sum, '周完成率':np.sum})
# ? 月完成率
month_rate = pd.pivot_table(main.query('主部门 in @MAIN_DEPARTS'), index='主部门', values=['月业绩任务', '实际业绩', '月完成率'], aggfunc={'月业绩任务':np.mean, '实际业绩':np.sum, '月完成率':np.sum})
# ? 每日营业额
day_data = pd.pivot_table(main, index= ['日期'],values=['实际业绩','主营业务收入', '营业外收入','营业总收入'], aggfunc={'实际业绩':np.sum, '主营业务收入':np.sum, '营业外收入':np.sum,'营业总收入':np.sum}).reset_index()[['日期','实际业绩','主营业务收入', '营业外收入','营业总收入']]
week_depart.to_excel(week_report, sheet_name='周部门数据对比')
week_person.to_excel(week_report, sheet_name='周个人数据对比')
donate.to_excel(week_report, sheet_name='部门赠送数据')
week_rate.to_excel(week_report, sheet_name='周完成率')
month_rate.to_excel(week_report, sheet_name='月完成率')
day_data.to_excel(week_report, sheet_name='每日营业额', index=False)
week_report.save()
week_report.close()
# 部门 月基本数据:台数 台类型 营业额 业绩
# 门店 : 售出酒数量 消耗酒数量 营收 业绩 开台 收支差(资源, 礼宾, 楼面) |
994,189 | 49326e1294c8f303db50c33b173131e4381b27b1 | #simple linear regression to model the relationship between methylation ratio and mutation frequency in various cancers
#goal is to compare the models derived from coding regions vs. all other regions
#normal WGBS data from various tissues is used in conjunction with mutation data of cancers originating in same tissue
import pybedtools
methylation_files = ['liver', 'esophagus', 'breast', 'lung', 'ovary', 'pancreas', 'thymus']
mutation_files = ['LIHC-US', 'ESAD-UK', 'LUAD-US', 'OV', 'PAEN-AU', ]
cds_files = [] #normal WGBS intersected with cds
# cds = pybedtools.BedTool(cds.bed)
for meth, mut, cds_meth in zip(methylation_files, mutation_files, cds_files):
a = pybedtools.BedTool(meth)
b = pybedtools.BedTool(cds_meth)
a.intersect(mut).saveas('' + 'methylation_mutation.bed')
b.intersect(mut).saveas('' + 'methylation_mutation_cds.bed')
|
994,190 | 10527cd9b836dc5e563d3c35fdf7c686773ec47c | import sys
print (__name__, 'path=', sys.path)
import sub.spam # <== Works if move modules to pkg below main file
|
994,191 | 4f3a9f61e4b64862ddbf93dd340b60ada16df306 | from django.contrib import admin
from django.urls import path
# from .import views
from .views import product_list, product_detail, Product_list1, post_list2, displaydata, createpost
urlpatterns = [
path('product/', product_list),
path('product/<int:pk>', product_detail),
path('classproduct/', Product_list1.as_view()),
path('parser/', post_list2),
path('a/', displaydata),
path('', createpost),
]
|
994,192 | 07e7c65fc5550a274827f5079681f6935c85cf96 | #kata link: https://www.codewars.com/kata/52efefcbcdf57161d4000091
#Instruction : The main idea is to count all the occurring characters in a string. If you have a string like aba, then the result should be {'a': 2, 'b': 1}.
#What if the string is empty? Then the result should be empty object literal, {}.
#Code:
def count(string):
    """Count occurrences of every character in ``string``.

    Returns a dict mapping each character to its frequency, e.g.
    count("aba") == {'a': 2, 'b': 1}; the empty string yields {}.
    """
    counts = {}
    # Single O(n) pass; the original called str.count once per distinct
    # letter, which is O(n^2) on strings with many unique characters.
    for letter in string:
        counts[letter] = counts.get(letter, 0) + 1
    return counts
994,193 | 630875f504a2334f135946524d237060b0894565 | from django.contrib.auth import get_user_model
from django.urls import reverse
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Category, Article
# from recipe.serializers import ArticleSerializer
ARTICLES_URL = reverse('recipe:article-list')
ARTICLES_URL_ME = '{}{}'.format(ARTICLES_URL, '?me=true')
def detail_url(id=1):
return reverse('recipe:article-detail', args=[id])
class PublicArticleApiTest(TestCase):
def setUp(self):
self.user = get_user_model().objects.create_user(
'test1@hotmail.com',
'123qwe'
)
self.client = APIClient()
# self.client.force_authenticate(self.user)
def test_list_not_login_requried(self):
res = self.client.get(ARTICLES_URL)
self.assertEqual(res.status_code, status.HTTP_200_OK)
def test_404_article_login_not_required(self):
url = detail_url()
res = self.client.get(url)
self.assertEqual(res.status_code, status.HTTP_404_NOT_FOUND)
def test_dont_create_article(self):
# self.client.force_authenticate(self.user)
category = Category.objects.create(
user=self.user, name="veritabani mysql", short_name='mysql'
)
content = {
'categories': [category.id],
'title': 'How to update for dictionary',
'title_h1': 'Upade for dictionary on Python Programming Language',
'description': 'Bla bla bla',
'content': '............... bla bla ... bla ........',
'user': [self.user.id]
}
res = self.client.post(ARTICLES_URL, content)
self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
def test_get_article(self):
category = Category.objects.create(
user=self.user, name="veritabani mysql", short_name='mysql'
)
article = Article.objects.create(
title='How to update for dictionary',
title_h1='Upade for dictionary on Python Programming Language',
description='Bla bla bla',
content='............... bla bla ... bla ........',
is_active=True
)
article.categories.add(category)
url = detail_url(article.id)
res = self.client.get(url)
self.assertEquals(res.status_code, status.HTTP_200_OK)
class PrivateArticleApiTest(TestCase):
    """Tests for the article API endpoints that require an authenticated user."""

    def setUp(self):
        # Every test in this class runs as this authenticated user.
        self.user = get_user_model().objects.create_user(
            'test12@hotmail.com',
            '123qwe'
        )
        self.client = APIClient()
        self.client.force_authenticate(self.user)

    def test_create_article(self):
        """POST to the list endpoint creates an article (201)."""
        # NOTE(review): this sets an attribute on the TestCase instance, not
        # on the user.  If staff rights are actually required it should be
        # ``self.user.is_staff = True`` + ``self.user.save()`` -- confirm.
        self.is_staff = True
        category = Category.objects.create(
            user=self.user, name="veritabani mysql", short_name='mysql'
        )
        content = {
            'categories': [category.id],
            'title': 'Deneme',
            'title_h1': 'Upade for dictionary on Python Programming Language',
            'description': 'Bla bla bla',
            'content': '............... bla bla ... bla ........',
            'user': [self.user.id]
        }
        res = self.client.post(ARTICLES_URL, content)
        # assertEqual: ``assertEquals`` is a deprecated alias that was removed
        # from unittest in Python 3.12.
        self.assertEqual(res.status_code, status.HTTP_201_CREATED)

    def test_update_article(self):
        """PUT on the detail endpoint updates an existing article (200)."""
        self.is_staff = True
        category = Category.objects.create(
            user=self.user, name="veritabani mysql", short_name='mysql'
        )
        article = Article.objects.create(
            title='How to update for dictionary',
            title_h1='Upade for dictionary on Python Programming Language',
            description='Bla bla bla',
            content='............... bla bla ... bla ........',
            is_active=True,
            user=self.user
        )
        content = {
            'id': article.id,
            'categories': [category.id],
            'title': 'Deneme',
            'title_h1': 'Upade for dictionary on Python Programming Language',
            'description': 'Bla bla bla',
            'content': '............... bla bla ... bla ........',
            'user': [self.user.id]
        }
        url = detail_url(article.id)
        res = self.client.put(url, content)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(res.data['title'], content['title'])

    def test_dont_delete_article(self):
        """DELETE is rejected on the article detail endpoint (405)."""
        self.is_staff = True
        category = Category.objects.create(
            user=self.user, name="veritabani mysql", short_name='mysql'
        )
        article = Article.objects.create(
            title='How to update for dictionary',
            title_h1='Upade for dictionary on Python Programming Language',
            description='Bla bla bla',
            content='............... bla bla ... bla ........',
            is_active=True,
            user=self.user
        )
        article.categories.add(category)
        url = detail_url(article.id)
        res = self.client.delete(url)
        self.assertEqual(res.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)

    def test_filter_me_get(self):
        """The "me" endpoint returns only the requesting user's articles."""
        self.is_staff = True
        different_user = get_user_model().objects.create_user(
            'differentuser@hotmail.com',
            '123qwe'
        )
        category = Category.objects.create(
            user=self.user, name="veritabani mysql", short_name='mysql'
        )
        # Two articles owned by self.user, one by different_user.
        article = Article.objects.create(
            title='How to update for dictionary',
            title_h1='Upade for dictionary on Python Programming Language',
            description='Bla bla bla',
            content='............... bla bla ... bla ........',
            is_active=True,
            user=self.user
        )
        article.categories.add(category)
        article = Article.objects.create(
            title='Python for and wihle syntax',
            title_h1='Bla bla',
            description='Bla bla bla',
            content='............... bla bla ... bla ........',
            is_active=True,
            user=different_user
        )
        article.categories.add(category)
        article = Article.objects.create(
            title='Mongoos and Nodejs',
            title_h1='Bla bla bla',
            description='Bla bla bla',
            content='............... bla bla ... bla ........',
            is_active=True,
            user=self.user
        )
        article.categories.add(category)
        res = self.client.get(ARTICLES_URL_ME)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        # Only the two articles owned by the authenticated user come back.
        self.assertEqual(len(res.data), 2)
|
994,194 | ba4f4ed873afa4038c26d92f1760e3a9bedab053 | from .forcast import PostForcastInteractor
from .prize import GetPrizeInteractor
from .rdb import CreateTableInteractor, DropTableInteractor, ITableInteractor
from .user import CreateUserInteractor, GetUserInteractor
# Public API of this package module: keep in sync with the imports above so
# ``from <package> import *`` exposes exactly these interactor classes.
__all__ = [
    "CreateTableInteractor",
    "DropTableInteractor",
    "ITableInteractor",
    "PostForcastInteractor",
    "GetPrizeInteractor",
    "CreateUserInteractor",
    "GetUserInteractor",
]
|
994,195 | c2090d8c90b91d87ace17a0ffdeda8640929908b | # ------------------------------------------------------
# profile.py
# ------------------------------------------------------
# handles the profile functionality for each dog walker or dog owner
# ------------------------------------------------------
# Last updated - 2019-01-12
# ------------------------------------------------------
# Created by: Tal Eylon, Avihoo Menahem, Amihai Kalev
# ------------------------------------------------------
# import the required libraries & code
import db_handler
import logging
###########################
## This global function receives: email, owner_type
## The function will return all relevant data for
## the main profile page of the dog owner.
###########################
def owner_main_page(email, owner_type):
    """Collect the data shown on a dog owner's main profile page.

    Parameters:
        email: the owner's e-mail address (key into the Person table).
        owner_type: 1 for a "Regular" account; any other value is "Premium".

    Returns:
        dict with keys dog_owner_name, total_number_of_dogs, dog_owner_type,
        dogs (rows of name/gender/age/species) and num_of_dogs, or False if
        any database step raised.
    """
    data = {}
    try:
        #### DATABASE CONNECTION ####
        db_connection = db_handler.DbHandler()
        db_connection.connectDb()
        #### DEFINE CURSOR OBJECT ####
        cursor = db_connection.getCursor()
        #### STEP 1: FIND DOG OWNER'S NAME ####
        sql = """ SELECT p_name FROM Person WHERE email = %s """
        cursor.execute(sql, (email,))
        data['dog_owner_name'] = cursor.fetchone()[0]  ## Dog Owner's Name
        #### STEP 2: FIND DOG OWNER'S TOTAL NUMBER OF DOGS ####
        sql = """ SELECT COUNT(Dog_ID) FROM Has_Dogs WHERE email = %s """
        cursor.execute(sql, (email,))
        data['total_number_of_dogs'] = cursor.fetchone()[0]  ## Total number of dogs that belong to the dog owner
        #### STEP 3: GET DOG OWNER'S ACCOUNT TYPE ####
        if owner_type == 1:
            data['dog_owner_type'] = "Regular"
        else:
            data['dog_owner_type'] = "Premium"
        #### STEP 4: GET DOG OWNER'S LIST OF DOGS ####
        sql = """ SELECT Dog_Name, Dog_Gender, Dog_Age, s.Dog_Type
                  FROM Has_Dogs as hd JOIN Dogs as d ON hd.Dog_ID = d.Dog_ID
                  JOIN Species as s ON d.Type_ID = s.Type_ID
                  WHERE hd.email = %s
                  GROUP BY Dog_Name, Dog_Gender, Dog_Age, s.Dog_Type """
        # NOTE(review): relies on the DB driver's execute() returning the row
        # count (MySQLdb-style); psycopg2 would return None here -- confirm.
        row_count = cursor.execute(sql, (email,))
        data['dogs'] = cursor.fetchall()
        data['num_of_dogs'] = row_count  # number of rows received --> number of dogs the owner has
    except Exception as e:  # in case of an error, catch it and print it to the log
        logging.info("Error: " + str(e))
        return False
    finally:  #### IMPORTANT ####
        if db_connection.status():  # Make sure to disconnect from DB no matter what
            db_connection.disconnectDb()  ## DISCONNECT ##
    return data
###########################
## This global function receives: email
## The function will return all relevant data for
## the main profile page of the dog walker.
###########################
def walker_main_page(email):
    """Collect the data shown on a dog walker's main profile page.

    Parameters:
        email: the walker's e-mail address (key into the Person table).

    Returns:
        dict with the walker's name, weekly revenue, accepted species,
        working days, distinct customer list and the global day-name list,
        or False if any database step raised.
    """
    data = {}
    try:
        #### DATABASE CONNECTION ####
        db_connection = db_handler.DbHandler()
        db_connection.connectDb()
        #### DEFINE CURSOR OBJECT ####
        cursor = db_connection.getCursor()
        #### STEP 1: FIND DOG WALKER'S NAME ####
        sql = """ SELECT p_name FROM Person WHERE email = %s """
        cursor.execute(sql, (email,))
        data['dog_walker_name'] = cursor.fetchone()[0]  ## Dog Walker's Name
        #### STEP 2: FIND DOG WALKER'S TOTAL WEEK REVENUE ####
        # One day_price per booked walk; SUM is NULL (None) with no bookings.
        sql = """ SELECT SUM(dw.day_price)
                  FROM Taking_Dogs as td LEFT JOIN Dog_Walker as dw ON dw.email = td.email
                  WHERE td.email = %s """
        cursor.execute(sql, (email,))
        data['total_week_revenue'] = cursor.fetchone()[0]  ## summed day prices for this walker
        #### STEP 3: GET THE SPECIES THE DOGWALKER IS WILLING TO TAKE ####
        sql = """ SELECT Dog_Type
                  FROM Takes_Only as tk JOIN Species as s ON tk.type_id = s.type_id
                  WHERE tk.email = %s """
        cursor.execute(sql, (email,))
        data['takes_only'] = cursor.fetchall()
        #### STEP 4: GET THE DAYS THE DOGWALKER IS WILLING TO WORK IN ####
        sql = """ SELECT Day_Of_Work FROM Specific_Working_Days
                  WHERE email = %s """
        cursor.execute(sql, (email,))
        data['specific_working_days'] = cursor.fetchall()
        logging.info("Specific Working Days: " + str(data))
        #### STEP 5: GET DOG WALKER'S CUSTOMERS LIST DISTINCTLY, ORDERED BY THE PERSON'S NAME ####
        sql = """ SELECT re1.Dog_Owner_Name, re1.Dog_Owner_Email, re1.Dog_Owner_Phone_Number, re1.Dog_Owner_City
                  FROM Person as p RIGHT JOIN (SELECT td.Email as Email_Walker, p.P_Name as Dog_Owner_Name, dow.Email as Dog_Owner_Email,
                  dow.Registration_Date as Dog_Owner_Registration_Date, p.Phone_number as Dog_Owner_Phone_Number,
                  p.City as Dog_Owner_City, d.Dog_Name as DogName, td.Day_Of_Work as DayOfWork,
                  dw.Day_Price as Revenue
                  FROM Taking_Dogs as td
                  JOIN Has_Dogs as hd on hd.Dog_ID=td.Dog_ID
                  JOIN Dog_Owner as dow on dow.Email=hd.Email
                  JOIN Person as p on dow.Email=p.Email
                  JOIN Dogs as d ON hd.Dog_ID = d.Dog_ID
                  JOIN Dog_Walker as dw ON td.email = dw.email) AS re1 ON p.Email=re1.Email_walker
                  WHERE p.email=%s
                  GROUP BY re1.Dog_Owner_Name, re1.Dog_Owner_Email, re1.Dog_Owner_Phone_Number, re1.Dog_Owner_City
                  ORDER BY re1.Dog_Owner_Name"""
        # Row count of the query --> number of distinct customers
        # (MySQLdb-style execute() return value -- see note in owner_main_page).
        row_count = cursor.execute(sql, (email,))
        data['customers'] = cursor.fetchall()
        data['customers_num'] = row_count
        #### ADD THE LIST OF DAYS ####
        data['days'] = days
    except Exception as e:  # in case of an error, catch it and print it to the log
        logging.info("Error: " + str(e))
        return False
    finally:  #### IMPORTANT ####
        if db_connection.status():  # Make sure to disconnect from DB no matter what
            db_connection.disconnectDb()  ## DISCONNECT ##
    return data
###########################
## This global function receives: email
## The function will return all relevant data for
## the schedule presentation of a dog walker.
###########################
def walker_schedule(email):
    """Build the weekly schedule view for a dog walker.

    Parameters:
        email: the walker's e-mail address.

    Returns:
        dict with per-day dog lists (work_schedule, keyed 1..7), per-day
        counts (day_count), total revenue, the walker's daily dog limit and
        the day-name list -- or False if any database step raised.
    """
    data = {}
    try:
        #### DATABASE CONNECTION ####
        db_connection = db_handler.DbHandler()
        db_connection.connectDb()
        #### DEFINE CURSOR OBJECT ####
        cursor = db_connection.getCursor()
        ### STEP 1: GET DOG WALKER'S SCHEDULE ####
        # receive the list of dog owners that the dog walker scheduled to take for each day of the week.
        data = {'day_count': []}  # number of dogs per day, Sunday..Saturday
        total_dogs_to_take = 0
        work_schedule = {}
        for each_day in range(1, 8):  # from sunday to saturday (excluding num 8)
            sql = """ SELECT re1.dog_name, Person.City, Person.p_name, Person.phone_number, Price
                      FROM (SELECT Taking_Dogs.Email AS Dog_walker_email, Dogs.Dog_ID,Dogs.Dog_Name, Species.Dog_Type, Taking_Dogs.Day_of_Work, dw.Day_Price as Price
                      FROM Taking_Dogs JOIN Dogs ON Taking_Dogs.Dog_ID=Dogs.dog_ID
                      JOIN Species ON Dogs.type_id=Species.type_id
                      JOIN Dog_Walker as dw ON Taking_Dogs.email = dw.email) as re1
                      JOIN Has_Dogs ON re1.dog_id=Has_Dogs.Dog_ID
                      JOIN Person ON Has_Dogs.Email= Person.Email
                      WHERE re1.dog_walker_email=%s AND re1.day_of_work=%s """
            # execute() return value used as the row count (MySQLdb-style).
            row_count = cursor.execute(sql, (email, each_day,))
            list_of_dogs_to_take = cursor.fetchall()
            work_schedule[each_day] = list_of_dogs_to_take
            total_dogs_to_take += int(row_count)
            data['day_count'].append(row_count)
        #### STEP 2: FIND DOG WALKER'S TOTAL WEEK REVENUE ####
        sql = """ SELECT SUM(dw.day_price)
                  FROM Taking_Dogs as td LEFT JOIN Dog_Walker as dw ON dw.email = td.email
                  WHERE td.email = %s """
        cursor.execute(sql, (email,))
        data['total_revenue'] = cursor.fetchone()[0]
        #### STEP 3: FIND MAX DOGS PER DAY ####
        sql = """ SELECT Max_DogsPerDay FROM Dog_Walker WHERE email = %s """
        cursor.execute(sql, (email,))
        data['max_dogsperday'] = cursor.fetchone()[0]
        data['work_schedule'] = work_schedule  # assign work schedule to the data dict
        data['days'] = days  # assign the list of days to the data dict
        data['total_dogs_to_take'] = total_dogs_to_take
    except Exception as e:  # in case of an error, catch it and print it to the log
        logging.info("Error: " + str(e))
        return False
    finally:  #### IMPORTANT ####
        if db_connection.status():  # Make sure to disconnect from DB no matter what
            db_connection.disconnectDb()  ## DISCONNECT ##
    return data
###########################
## This global function returns the species from the database.
###########################
def show_species():
    """Return every row of the Species table, or False on a database error."""
    try:
        # Open a database session and grab a cursor from it.
        handler = db_handler.DbHandler()
        handler.connectDb()
        cur = handler.getCursor()
        # Full species catalogue, unfiltered.
        sql = """ SELECT * FROM Species """
        cur.execute(sql)
        species = cur.fetchall()
        logging.info(str(species))
    except Exception as err:
        # Log the failure and signal it to the caller with False.
        logging.info("Error: " + str(err))
        return False
    finally:
        # Release the connection whether the query succeeded or not.
        if handler.status():
            handler.disconnectDb()
    return species
###########################
## This class handles the new dog addition.
## Initialized by: a list of Dog Name, Dog Gender, Dog Age and Type ID
## along with the dog owner's email.
###########################
class NewDog:
    """Handles registering a new dog for a dog owner.

    Initialized with a list [Dog Name, Dog Gender, Dog Age, Type ID] and the
    owner's e-mail address.
    """

    def __init__(self, lst, email):
        self.dog_name = lst[0]
        self.dog_gender = lst[1]
        self.dog_age = lst[2]
        self.type_id = lst[3]
        self.email = email

    def add(self):
        """Insert the dog into Dogs and link it to its owner in Has_Dogs.

        Returns True on success, False if any database step raised.
        """
        try:
            #### DATABASE CONNECTION ####
            db_connection = db_handler.DbHandler()
            db_connection.connectDb()
            #### DEFINE CURSOR OBJECT ####
            cursor = db_connection.getCursor()
            #### SQL QUERY AND EXECUTE: ####
            logging.info("Dog Info: "+self.dog_name+","+self.dog_gender+","+self.dog_age+","+self.type_id)  # send to the log the new dog details
            ### STEP 1: first insert: dogs table ###
            sql = """ INSERT INTO Dogs(Dog_Name, Dog_Gender, Dog_Age, Type_ID) VALUES (%s,%s,%s,%s) """
            cursor.execute(sql, (self.dog_name, self.dog_gender, self.dog_age, self.type_id,))
            #### COMMIT ####
            db_connection.commit()  ## ADD THE DOG
            ### STEP 2: RETRIEVE THE NEW DOG ID ###
            # NOTE(review): MAX(Dog_ID) is race-prone under concurrent
            # inserts -- the driver's lastrowid (or an SQL RETURNING clause)
            # would be safer.  TODO confirm against the deployment.
            sql = """ SELECT Max(Dog_ID) FROM Dogs """
            cursor.execute(sql)
            # Bug fix: fetchone() returns a row *tuple*; take the scalar id,
            # consistent with every other fetchone()[0] in this module.
            # Previously the whole tuple was bound as the Has_Dogs parameter.
            dog_id = cursor.fetchone()[0]
            logging.info("new dog_id: "+str(dog_id))
            ### STEP 3: second insert: Has Dogs table ###
            sql = """ INSERT INTO Has_Dogs VALUES (%s,%s) """
            cursor.execute(sql, (self.email, dog_id,))
            ## COMMIT ##
            db_connection.commit()
        except Exception as e:  # in case of an error, catch it and print it to the log
            logging.info("Error: "+str(e))
            return False  # return False in case of an error
        finally:
            if db_connection.status():  # Make sure to disconnect from DB no matter what
                db_connection.disconnectDb()  ## DISCONNECT ##
        return True  # return True if registration was successful
###########################
## This global function receives: email
## The function will return all relevant data for
## the presentation of all unregistered dogs to a dog walker
## that the dog owner have.
###########################
def getUnregisteredDogs(email):
    """Return the owner's dogs that are not yet assigned to any dog walker.

    Parameters:
        email: the dog owner's e-mail address.

    Returns:
        rows of (Dog_Name, Dog_ID, Type_ID) for dogs with no Taking_Dogs
        entry, or False if the query raised.
    """
    try:
        #### DATABASE CONNECTION ####
        db_connection = db_handler.DbHandler()
        db_connection.connectDb()
        #### DEFINE CURSOR OBJECT ####
        cursor = db_connection.getCursor()
        #### GET LIST OF ALL UNREGISTERED DOGS ####
        # NOT EXISTS filters out any dog already present in Taking_Dogs.
        sql = """ SELECT Dogs.Dog_Name, hd.Dog_ID, Dogs.Type_ID
                  FROM Has_Dogs as hd LEFT JOIN Dogs ON hd.Dog_ID = Dogs.Dog_ID
                  LEFT JOIN Taking_Dogs as td ON Dogs.Dog_ID = td.Dog_ID
                  WHERE hd.email = %s AND NOT EXISTS (SELECT Dog_ID
                  FROM Taking_Dogs as td WHERE td.Dog_ID = hd.Dog_ID)"""
        cursor.execute(sql, (email,))
        data = cursor.fetchall()
        logging.info("Unregistered Dogs: " + str(data))
    except Exception as e:  # in case of an error, catch it and print it to the log
        logging.info("Error: " + str(e))
        return False
    finally:  #### IMPORTANT ####
        if db_connection.status():  # Make sure to disconnect from DB no matter what
            db_connection.disconnectDb()  ## DISCONNECT ##
    return data
def getCities():
    """Fetch the distinct set of cities known to the system.

    Returns the raw result rows (one city per row, no header), or False if
    the query raised.
    """
    try:
        # Database session + cursor.
        handler = db_handler.DbHandler()
        handler.connectDb()
        cur = handler.getCursor()
        # Every distinct city currently present in Person.
        sql = """ SELECT DISTINCT city FROM Person """
        cur.execute(sql)
        rows = cur.fetchall()
    except Exception as err:
        logging.info("Error: " + str(err))
        return False
    finally:
        # Disconnect no matter how we leave this function.
        if handler.status():
            handler.disconnectDb()
    return rows
###########################
## This global function receives: email
## The function will return all relevant data for
## the presentation of all assigned dog walkers
## that the dog owner have.
###########################
def getRelevantDogWalkers(email):  ## ASSIGNED DOGWALKERS FOR DOG OWNER
    """Return the dog walkers assigned to this owner's dogs.

    Parameters:
        email: the dog owner's e-mail address.

    Returns:
        [rows, total_cost, row_count] where rows are
        (dog_name, walker_name, phone, day_of_work, day_price) ordered by
        day, total_cost is the summed day prices (None when there are no
        assignments), and row_count is the number of rows -- or False if a
        query raised.
    """
    try:
        #### DATABASE CONNECTION ####
        db_connection = db_handler.DbHandler()
        db_connection.connectDb()
        #### DEFINE CURSOR OBJECT ####
        cursor = db_connection.getCursor()
        #### STEP 1: GET LIST OF ALL ASSIGNED DOGWALKERS ####
        sql = """ SELECT d.dog_name, p.p_name, p.phone_number, td.day_of_work, dw.day_price
                  FROM Has_Dogs as hd JOIN Taking_Dogs as td ON hd.dog_id = td.dog_id
                  JOIN Dog_Walker as dw ON dw.email = td.email
                  JOIN Person as p ON dw.email = p.email
                  JOIN Dogs as d ON d.dog_id = hd.dog_id
                  WHERE hd.email = %s
                  ORDER BY td.day_of_work """
        # execute() return value used as the row count (MySQLdb-style).
        row_count = cursor.execute(sql, (email,))
        data = cursor.fetchall()
        #### STEP 2: GET THE TOTAL COST THAT THE DOG OWNER NEEDS TO PAY ####
        sql = """ SELECT Sum(dw.Day_Price)
                  FROM Has_Dogs as hd JOIN Taking_Dogs as td ON hd.dog_id = td.dog_id
                  JOIN Dog_Walker as dw ON dw.email = td.email
                  JOIN Person as p ON dw.email = p.email
                  JOIN Dogs as d ON d.dog_id = hd.dog_id
                  WHERE hd.email = %s """
        cursor.execute(sql, (email,))
        total_cost = cursor.fetchone()[0]
        logging.info("Assigned Dog Walkers: " + str(data) + "\n Total Cost: " + str(total_cost) + "Num of Dog walkers:" + str(row_count))
    except Exception as e:  # in case of an error, catch it and print it to the log
        logging.info("Error: " + str(e))
        return False
    finally:  #### IMPORTANT ####
        if db_connection.status():  # Make sure to disconnect from DB no matter what
            db_connection.disconnectDb()  ## DISCONNECT ##
    return [data, total_cost, row_count]
### MAIN DAYS LIST ###
# Index 0 = Sunday ... index 6 = Saturday; the day_of_work values queried
# above are presumably 1-based (range(1, 8)) -- callers offset accordingly.
# Bug fix: "Wednseday" typo corrected to "Wednesday".
days = ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"]
994,196 | bfea72d6c6057ee692df0e0f68d170f8e7d8c1d2 | import random
from IPython import embed
import time
seed = round(time.time())
random.seed(seed)
print("Seed is: "+str(seed))
with open('HUGEthumbdrive.img', 'rb') as f:
HUGEthumbdriveData = bytearray(f.read())
print(len(HUGEthumbdriveData))
numToSplit = 50000
chunksOfData = [HUGEthumbdriveData[i:i+numToSplit] for i in range(0, len(HUGEthumbdriveData), numToSplit)]
random.shuffle(chunksOfData)
rereadChunks = bytearray("")
for i in range(0,len(chunksOfData)):
if len(chunksOfData[i]) < numToSplit:
print("Problem detected with size on chunk "+str(i))
with open('chunk'+str(i)+'.dat', 'wb') as f:
f.write(str(chunksOfData[i]))
with open('chunk'+str(i)+'.dat', 'rb') as f:
rereadChunks += bytearray(f.read())
chunksOfDataNew = [rereadChunks[i:i+numToSplit] for i in range(0, len(rereadChunks), numToSplit)]
print(chunksOfDataNew == chunksOfData)
def decrypt(message, key):
for i in range(0,len(message)):
message[i] = str(message[i])
random.seed(key)
l = list(range(len(message)))
random.shuffle(l)
return "".join(message[i] for i, x in sorted(enumerate(l), key=lambda x: x[1]))
output = decrypt(chunksOfDataNew,seed)
with open('Reassembled.img', 'wb') as f:
out = ''.join(map(str, output))
f.write(out)
print(out == HUGEthumbdriveData)
|
994,197 | bd3bce261447055f2873445b966882d92479bc83 | import numpy as np
import tensorflow as tf
from tensorflow import keras
class QSP(keras.layers.Layer):
"""Parameterized quantum signal processing layer.
The `QSP` layer implements the quantum signal processing circuit with trainable QSP angles.
The input of the layer is/are theta(s) where x = cos(theta), and w(x) is X rotation in the QSP sequence.
The output is the real part of the upper left element in the resulting unitary that describes the whole sequence.
This is Re[P(x)] in the representation of the QSP unitary from Gilyen et al.
Input is of the form:
[[theta1], [theta2], ... ]
Output is of the form:
[[P(x1)], [P(x2)], ...]
The layer requires the desired polynomial degree of P(x)
"""
def __init__(self, poly_deg=0, measurement="z"):
"""
Params
------
poly_deg: The desired degree of the polynomial in the QSP sequence.
the layer will be parameterized with poly_deg + 1 trainable phi.
measurement :
measurement basis using the Wx model, {"x", "z"}
"""
super(QSP, self).__init__()
self.poly_deg = poly_deg
phi_init = tf.random_uniform_initializer(minval=0, maxval=np.pi)
self.phis = tf.Variable(
initial_value=phi_init(shape=(poly_deg + 1, 1), dtype=tf.float32),
trainable=True,
)
self.measurement = measurement
def call(self, th):
batch_dim = tf.gather(tf.shape(th), 0)
# tiled up X rotations (input W(x))
px = tf.constant([[0.0, 1], [1, 0]], dtype=tf.complex64)
px = tf.expand_dims(px, axis=0)
px = tf.repeat(px, [batch_dim], axis=0)
rot_x_arg = tf.complex(real=0.0, imag=th)
rot_x_arg = tf.expand_dims(rot_x_arg, axis=1)
rot_x_arg = tf.tile(rot_x_arg, [1, 2, 2])
wx = tf.linalg.expm(tf.multiply(px, rot_x_arg))
# tiled up Z rotations
pz = tf.constant([[1.0, 0], [0, -1]], dtype=tf.complex64)
pz = tf.expand_dims(pz, axis=0)
pz = tf.repeat(pz, [batch_dim], axis=0)
z_rotations = []
for k in range(self.poly_deg + 1):
phi = self.phis[k]
rot_z_arg = tf.complex(real=0.0, imag=phi)
rot_z_arg = tf.expand_dims(rot_z_arg, axis=0)
rot_z_arg = tf.expand_dims(rot_z_arg, axis=0)
rot_z_arg = tf.tile(rot_z_arg, [batch_dim, 2, 2])
rz = tf.linalg.expm(tf.multiply(pz, rot_z_arg))
z_rotations.append(rz)
u = z_rotations[0]
for rz in z_rotations[1:]:
u = tf.matmul(u, wx)
u = tf.matmul(u, rz)
# assume we are interested in the real part of p(x) and the real part of q(x) in
# the resulting qsp unitary
if self.measurement == "z":
return tf.math.real(u[:, 0, 0]), tf.math.imag(u[:, 0, 0])
elif self.measurement == "x":
return tf.math.real(u[:, 0, 0]), tf.math.imag(u[:, 0, 1])
else:
raise ValueError(
"Invalid measurement basis: {}".format(self.measurement))
def construct_qsp_model(poly_deg, measurement="z"):
    """Build and compile a Keras model wrapping a single QSP layer.

    Params
    ------
    poly_deg : int
        the desired degree of the polynomial in the QSP sequence.
    measurement :
        measurement basis using the Wx model, {"x", "z"}

    Returns
    -------
    Keras model
        compiled with the Adam optimizer (learning rate 0.1) and
        mean-squared-error loss; its trainable weights are the QSP phases.
    """
    inputs = tf.keras.Input(shape=(1,), dtype=tf.float32, name="theta")
    outputs = QSP(poly_deg, measurement=measurement)(inputs)
    model = tf.keras.Model(inputs=inputs, outputs=outputs)
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.1),
        loss=tf.keras.losses.MeanSquaredError(),
    )
    return model
|
994,198 | 03dfafb622fefd7d2410132ecbecc766706fc4d0 | #!/urs/bin/env python
from argparse import ArgumentParser
import logging
from nltk import sent_tokenize, word_tokenize, pos_tag
from nltk.stem import WordNetLemmatizer
import psycopg2
# Root logger configured to echo INFO-and-above to stderr for this script.
logger = logging.getLogger()
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.INFO)
# This part-of-speech map does not include articles and particles
pos_map = {
'CD': 'n',
'EX': 'v',
'JJ': 'a',
'JJR': 'a',
'JJS': 'a',
'MD': 'v',
'NN': 'n',
'NNS': 'n',
'NNP': 'n',
'NNPS': 'n',
'PRP': 'n',
'PRP$': 'a',
'RB': 'r',
'RBR': 'r',
'RBS': 'r',
'UH': 'n',
'VB': 'v',
'VBD': 'v',
'VBG': 'v',
'VBN': 'v',
'VBP': 'v',
'VBZ': 'v',
'WDT': 'a',
'WP': 'n',
'WP': 'a',
'WRB': 'r',
}
class Word():
    """A counted word with a stable id and a co-occurrence bag."""

    # Class-level registry shared by ALL Word instances: a word's position in
    # this list serves as its stable numeric id across bags.
    _all = []

    def __init__(self, name, quantity=0):
        if name not in self._all:
            self._all.append(name)
        self.id = self._all.index(name)
        self.name = name
        self.quantity = quantity
        self.wiss = BagOfWords()  # Bag of words met in same sentences

    def __lt__(self, value):
        # Deliberately inverted: a HIGHER quantity compares as "less than",
        # so plain sorted() yields words by descending popularity; ties fall
        # back to ascending id (first-seen order).
        if self.quantity == value.quantity:
            return self.id < value.id
        return self.quantity > value.quantity

    def __str__(self):
        return '{:8s}: {}'.format(self.name, self.quantity)
class BagOfWords():
    """A multiset of Word objects keyed by word name."""

    def __init__(self):
        self._bow = {}
        # Expose the underlying dict's live views as bag methods.
        self.keys = self._bow.keys
        self.values = self._bow.values

    def add(self, word, quantity=1, wiss=None):
        """Add *quantity* occurrences of *word*; optionally fold in a
        co-occurrence bag *wiss* (the word itself excluded)."""
        if word in self._bow:
            self._bow[word].quantity += quantity
        else:
            self._bow[word] = Word(word, quantity)
        if wiss:
            self._bow[word].wiss.merge(wiss, exclude=word)

    def merge(self, bag, exclude=None):
        """Fold *bag* into this one, skipping the *exclude* word.

        Indirectly recursive: add() merges each word's own wiss bag too.
        """
        for word in bag.values():
            if word.name != exclude:
                self.add(word.name, word.quantity, word.wiss)

    def sorted(self):
        # Word.__lt__ is inverted, so this returns words by DESCENDING quantity.
        return sorted(self.values())

    def in_same_sentence(self, username):
        """Return a new bag limited to words co-occurring with *username*.

        Raises KeyError if *username* is not in this bag.
        """
        wiss = self._bow[username].wiss.keys()
        bag = BagOfWords()
        for word in self.values():
            if word.name in wiss:
                bag.add(word.name, word.quantity)
        return bag
def read_users(db):
    """Return a mapping of user name -> user id.

    If *db* is falsy, fall back to the bundled example file, hashing each raw
    line as a stand-in id.  Otherwise query the PostgreSQL database described
    by the *db* connection string.
    """
    if not db:
        # Fix: close the file handle (the original leaked it).
        with open('user.example.txt') as f:
            return {line.strip(): hash(line) for line in f}
    # Fix: the original never closed the connection or cursor.
    conn = psycopg2.connect(db)
    try:
        with conn.cursor() as cur:
            cur.execute('SELECT name, id from user')
            rows = cur.fetchall()
    finally:
        conn.close()
    return dict(rows)
def main():
    """Read a text file, count lemmatized words, and guess the file's owner.

    The owner is the most frequent word that matches a known user name; the
    "owner words" are those seen in the same sentences as that name.
    """
    parser = ArgumentParser()
    parser.add_argument('filename', help='path to textfile.txt (default: STDIN).', default='/dev/stdin')
    # NOTE(review): "datebase" typo in the help text below.
    parser.add_argument('--db', help='datebase connection params.', default=None)
    args = parser.parse_args()
    users = read_users(args.db)
    text = open(args.filename).read()
    sentences = sent_tokenize(text)
    bag = BagOfWords()
    lemmatizer = WordNetLemmatizer()
    for sentence in sentences:
        # Per-sentence bag so co-occurrence within a sentence can be recorded.
        bag_s = BagOfWords()
        words = word_tokenize(sentence)
        for word, tag in pos_tag(words):
            # Keep only alphanumeric tokens whose POS we know how to lemmatize.
            if word.isalnum() and tag in pos_map:
                bag_s.add(lemmatizer.lemmatize(word, pos=pos_map[tag]))
        # For each user name in this sentence, remember its sentence-mates.
        for word in bag_s.values():
            if word.name in users:
                word.wiss.merge(bag_s, exclude=word.name)
        bag.merge(bag_s)
    owner = None
    owner_words = []
    bag_sorted = bag.sorted()  # descending popularity (Word.__lt__ is inverted)
    for word in bag_sorted:
        if word.name in users:
            owner = {word.name: users[word.name]}
            owner_words = word.wiss.keys()
            break
    logger.info('File owner is {}'.format(owner))
    logger.info('\nTop 10 popular words:')
    for word in bag_sorted[:10]:
        logger.info(word)
    if not owner:
        return
    logger.info('\nTop 5 popular owner words:')
    i = 0
    for word in bag_sorted:
        if word.name in owner_words:
            i += 1
            logger.info(word)
            if i >= 5:
                break
# Script entry point.
if __name__ == '__main__':
    main()
|
994,199 | acf2f19b60551fb41b336692c118666604dfc749 | """
modified by mtc-20
"""
import RPi.GPIO as GPIO
import time
class AlphaBot(object):
    """Driver for the AlphaBot robot: two DC motors plus pan/tilt servos.

    All pin numbers use BCM numbering.  IN1/IN2 with ENA control one motor,
    IN3/IN4 with ENB the other; S1 and S2 carry the pan and tilt servo
    signals at 50 Hz.
    """

    def __init__(self, in1=12, in2=13, ena=6, in3=20, in4=21, enb=26, s1=27, s2=22):
        self.IN1 = in1
        self.IN2 = in2
        self.IN3 = in3
        self.IN4 = in4
        self.ENA = ena
        self.ENB = enb
        # Bug fix: the servo pins were hard-coded to 27/22, silently ignoring
        # the s1/s2 constructor arguments.  Defaults are unchanged, so
        # existing no-argument callers behave identically.
        self.S1 = s1
        self.S2 = s2
        GPIO.setmode(GPIO.BCM)
        GPIO.setwarnings(False)
        GPIO.setup(self.IN1, GPIO.OUT)
        GPIO.setup(self.IN2, GPIO.OUT)
        GPIO.setup(self.IN3, GPIO.OUT)
        GPIO.setup(self.IN4, GPIO.OUT)
        GPIO.setup(self.ENA, GPIO.OUT)
        GPIO.setup(self.ENB, GPIO.OUT)
        GPIO.setup(self.S1, GPIO.OUT)
        GPIO.setup(self.S2, GPIO.OUT)
        self.stop()
        # 500 Hz PWM for motor speed control, 50 Hz for the hobby servos.
        self.PWMA = GPIO.PWM(self.ENA, 500)
        self.PWMB = GPIO.PWM(self.ENB, 500)
        self.PWMP = GPIO.PWM(self.S1, 50)
        self.PWMT = GPIO.PWM(self.S2, 50)
        self.PWMA.start(50)
        self.PWMB.start(50)
        self.servo_switch(True)
        print('[Alpha_INFO]: Motors initialised')

    def forward(self):
        # H-bridge inputs for forward motion (both motors engaged).
        GPIO.output(self.IN1, GPIO.HIGH)
        GPIO.output(self.IN2, GPIO.LOW)
        GPIO.output(self.IN3, GPIO.LOW)
        GPIO.output(self.IN4, GPIO.HIGH)

    def stop(self):
        # All H-bridge inputs low: both motors coast to a stop.
        GPIO.output(self.IN1, GPIO.LOW)
        GPIO.output(self.IN2, GPIO.LOW)
        GPIO.output(self.IN3, GPIO.LOW)
        GPIO.output(self.IN4, GPIO.LOW)

    def backward(self):
        # Reverse of forward(): both motor directions inverted.
        GPIO.output(self.IN1, GPIO.LOW)
        GPIO.output(self.IN2, GPIO.HIGH)
        GPIO.output(self.IN3, GPIO.HIGH)
        GPIO.output(self.IN4, GPIO.LOW)

    def left(self):
        # Only the IN3/IN4 motor is driven; the other coasts, turning the bot.
        GPIO.output(self.IN1, GPIO.LOW)
        GPIO.output(self.IN2, GPIO.LOW)
        GPIO.output(self.IN3, GPIO.LOW)
        GPIO.output(self.IN4, GPIO.HIGH)

    def right(self):
        # Only the IN1/IN2 motor is driven; the other coasts.
        GPIO.output(self.IN1, GPIO.HIGH)
        GPIO.output(self.IN2, GPIO.LOW)
        GPIO.output(self.IN3, GPIO.LOW)
        GPIO.output(self.IN4, GPIO.LOW)

    def setPWMA(self, value):
        """Set motor A speed as a PWM duty cycle (0-100)."""
        self.PWMA.ChangeDutyCycle(value)

    def setPWMB(self, value):
        """Set motor B speed as a PWM duty cycle (0-100)."""
        self.PWMB.ChangeDutyCycle(value)

    def setPWMP(self, angle):
        """Point the pan servo at *angle* degrees (integer 0-180).

        NOTE: assert is stripped under ``python -O``; angle must be an int
        for the ``in range`` membership test to pass.
        """
        assert angle in range(0, 181)
        # Map 0-180 deg onto this servo's duty-cycle range (3.5% .. 16%);
        # the constants are per-servo calibration.
        value = (12.5 / 180.0) * angle + 3.5
        self.PWMP.ChangeDutyCycle(value)
        print('Set Pan to {} deg'.format(angle))
        # Give the servo time to travel before any further command.
        time.sleep(1)

    def setPWMT(self, angle):
        """Point the tilt servo at *angle* degrees (integer 0-180)."""
        assert angle in range(0, 181)
        # Tilt servo uses a narrower calibrated range (2.5% .. 10%).
        value = (7.5 / 180) * angle + 2.5
        self.PWMT.ChangeDutyCycle(value)
        print('Set Tilt to {} deg'.format(angle))
        time.sleep(1)

    def servo_switch(self, status):
        """Start (True) or stop (False) the pan/tilt servo PWM outputs."""
        if status:
            # Initial rest pose; sleep lets the servos reach it.
            self.PWMP.start(10)
            self.PWMT.start(7)
            time.sleep(2)
        else:
            print('[Alpha_INFO]: Switching off servos')
            self.PWMP.stop()
            self.PWMT.stop()

    def setMotor(self, left, right):
        """Drive both motors with signed speeds in [-100, 100].

        Positive values drive one direction, negative the other; values
        outside the range are silently ignored for that motor (unchanged
        original behavior).
        """
        if((right >= 0) and (right <= 100)):
            GPIO.output(self.IN1, GPIO.HIGH)
            GPIO.output(self.IN2, GPIO.LOW)
            self.PWMA.ChangeDutyCycle(right)
        elif((right < 0) and (right >= -100)):
            GPIO.output(self.IN1, GPIO.LOW)
            GPIO.output(self.IN2, GPIO.HIGH)
            self.PWMA.ChangeDutyCycle(0 - right)
        if((left >= 0) and (left <= 100)):
            GPIO.output(self.IN3, GPIO.HIGH)
            GPIO.output(self.IN4, GPIO.LOW)
            self.PWMB.ChangeDutyCycle(left)
        elif((left < 0) and (left >= -100)):
            GPIO.output(self.IN3, GPIO.LOW)
            GPIO.output(self.IN4, GPIO.HIGH)
            self.PWMB.ChangeDutyCycle(0 - left)
# Hardware smoke test: requires a Raspberry Pi with the AlphaBot HAT attached.
if __name__ == '__main__':
    Ab = AlphaBot()
    time.sleep(2)
    Ab.stop()
    # Sweep the pan/tilt head between two poses.
    Ab.setPWMP(170)
    Ab.setPWMT(160)
    ## Ab.servo_switch(False)
    ## time.sleep(5)
    ## print('Switching servos back on')
    ## Ab.servo_switch(True)
    time.sleep(2)
    print('New pose')
    Ab.setPWMP(10)
    # Release all GPIO pins before exiting.
    GPIO.cleanup()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.