index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
19,600 | 8970dd4214bd32efb2abd933a0d6a2fffc26e61e | # Generated by Django 3.2.3 on 2021-05-30 12:11
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the ``Advisor`` model as a multi-table-inheritance child of board.Person."""

    dependencies = [
        ('board', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Advisor',
            fields=[
                # parent_link OneToOneField: Advisor shares its primary key with board.Person
                ('person_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='board.person')),
                ('school', models.CharField(max_length=120)),
            ],
            bases=('board.person',),
        ),
    ]
|
19,601 | b8712f98e7ac7dba1261ac06b026922b2b516cf0 | from __future__ import print_function, division, absolute_import, unicode_literals
from builtins import * # noqa
from gmusicapi import Mobileclient
import urllib.request
email = "TblZmusicbot"
password = "TblZmusicbot123"
def ask_for_credentials():
    """Try to log in to Google Music with the module-level credentials.

    Retries up to 3 times. Returns the Mobileclient whether or not login
    succeeded — callers must check api.is_authenticated().
    """
    api = Mobileclient()
    logged_in = False
    attempts = 0
    # api.login returns truthy on success; give up after 3 failed attempts
    while not logged_in and attempts < 3:
        logged_in = api.login(email, password, Mobileclient.FROM_MAC_ADDRESS)
        attempts += 1
    return api
def GMauth():
    """Log in and report the outcome.

    Returns the authenticated Mobileclient, or None when the credentials
    were rejected.
    """
    api = ask_for_credentials()
    if not api.is_authenticated():
        print("Sorry, those credentials weren't accepted.")
        return
    print('Successfully logged in.')
    return api
def getsong(api, song_name, dir):
    """Download the top store search hit for *song_name* as an MP3.

    NOTE(review): *dir* shadows the builtin and is concatenated directly
    with the file name, so it must already end with a path separator —
    confirm with callers.
    """
    # Take the first track from the store search results.
    song = api.search(song_name, 1)['song_hits'][0]['track']
    url = api.get_stream_url(song['storeId'], device_id=None, quality=u'hi')
    urllib.request.urlretrieve(url, dir + song_name + '.mp3')
def logout(api):
    """Log the client out of Google Music and confirm on stdout."""
    api.logout()
    print('Successfully logged out.')
|
19,602 | 1aca500ecfedf56c0c7edfc05b6ff9e0c4da86aa | #!/usr/bin/python3
import threading
import random
import time
deadlock = True
'''
Class philosopher which define each philosopher.
'''
class Philosopher(threading.Thread):
    """A dining philosopher: thinks, then repeatedly tries to grab both forks.

    Forks are threading.Lock objects shared with the neighbours.
    """

    def __init__(self, name, LeftFork, RightFork):
        threading.Thread.__init__(self)
        self.name = name
        self.LeftFork = LeftFork
        self.RightFork = RightFork

    def run(self):
        # Think for a random while, then attempt dinner — forever.
        while 1:
            print(f'{self.name} is thinking about our destiny.')
            time.sleep(random.uniform(3, 13))
            print(f'{self.name} Want to eat dinner')
            self.tryGetForks()

    def tryGetForks(self):
        '''
        Method describing how a philosopher takes forks to eat dinner.
        '''
        Lfork, Rfork = self.LeftFork, self.RightFork
        while True:
            Lfork.acquire(True)            # block until the first fork is free
            locked = Rfork.acquire(False)  # non-blocking attempt on the second
            if locked:
                break
            if deadlock is False:
                # Back off: release the held fork and retry with forks swapped.
                Lfork.release()
                print(f'{self.name} changes destiny')
                Lfork, Rfork = Rfork, Lfork
            else:
                # Keep holding the first fork — this branch can deadlock.
                print(f'{self.name} waiting for fork ')
        self.eating()
        Rfork.release()
        Lfork.release()

    def eating(self):
        '''
        Method describing the eating process (random 1-10 s).
        '''
        print(f'{self.name} starts eating')
        time.sleep(random.uniform(1, 10))
        print(f'{self.name} Mmmm, delicious dinner, so what I was thinking about?')
def Test():
    """Spawn five philosophers sharing five forks arranged in a circle."""
    forks = [threading.Lock() for n in range(5)]
    names = ('Immanuel Kant', 'Carl Gustav Jung', 'Arystoteles', 'Schopenhauer', 'Benedykt XVI')
    # Philosopher i gets fork i and fork (i + 1) mod 5 — the classic layout.
    philosophers = [Philosopher(names[i], forks[i % 5], forks[(i + 1) % 5]) for i in range(5)]
    random.seed(1234321)
    for p in philosophers:
        p.start()


if __name__ == "__main__":
    Test()
19,603 | 8877f86cd1f9b978a454774ff36a9d8c53d3a06a | # Linh Truong
# A01081792
# 03/06/2019
import doctest
from character import get_row, get_column
def pokemon_map():
    """Interpret a square map with user location and available spots.
    >>> pokemon_map()
    ๐ฑ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ
    ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ
    ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ
    ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ
    ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ
    ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ
    ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ
    ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ ๐ฒ
    """
    # 8x8 grid filled with the "available spot" marker.
    map_positions = [["๐ฒ" for i in range(8)] for x in range(8)]
    pokemon_row = get_row()
    pokemon_column = get_column()
    # NOTE(review): the column value is used as the first (row) index —
    # confirm the intended orientation against get_row/get_column.
    map_positions[pokemon_column][pokemon_row] = "๐ฑ"
    for row in map_positions:
        print(" ".join(row))
def main():
    """Run the module doctests."""
    doctest.testmod()


if __name__ == '__main__':
    main()
|
19,604 | 339a27aeccc4f19c6fcbcc37ff8653f6905a5116 | from django.contrib import admin
from apps.inscripcion.models import Alumno, Curso, Matricula
# Register your models here.
admin.site.register(Alumno)
admin.site.register(Curso)
admin.site.register(Matricula) |
19,605 | 4990bf97a29d89a9d88e18b756e642ed5e1dfc60 | from PIL import Image
import tensorflow as tf
def mse(result, true):
    """Mean squared error between two RGB images on a 0-255 grayscale.

    NOTE(review): assumes float RGB inputs in [0, 1] (hence the * 255.0
    rescale) — confirm against the image loader upstream.
    """
    result = tf.image.rgb_to_grayscale(result) * 255.0
    true = tf.image.rgb_to_grayscale(true) * 255.0
    return tf.reduce_mean((result - true)**2)
import numpy as np  # bug fix: np is used below but was never imported

# Score each stylized test image against its high-quality reference,
# restricted to the foreground mask.
psnr_score, ssim_score = [], []
for i in range(1, 600):
    # NOTE(review): tf_load_image is not defined in this file — presumably a
    # project helper returning a float image in [0, 1]; confirm.
    mask = tf.expand_dims(tf_load_image('./output/masks/fg.png', 3), 0)
    stylized = tf.expand_dims(tf_load_image("/scratch/dmenini/nst-for-us-imaging/img/lq_test/" + str(i) + ".png", 3), 0)
    style = tf.expand_dims(tf_load_image("/scratch/dmenini/nst-for-us-imaging/img/hq_test/" + str(i) + ".png", 3), 0)
    h, w = stylized.shape[1], stylized.shape[2]
    #style = tf.image.resize(style, (h, w))
    #mask = tf.image.resize(mask, (h, w))
    #style = tf.image.crop_to_bounding_box(style, 0, 193, 1000, 1000)
    #mask = tf.image.crop_to_bounding_box(mask, 0, 193, 1000, 1000)
    # Zero out everything outside the foreground mask before scoring.
    stylized = tf.multiply(mask, stylized)
    style = tf.multiply(mask, style)
    mse_score = mse(stylized, style)  # NOTE: only the last iteration's MSE is printed below
    psnr_score.append(tf.image.psnr(stylized, style, max_val=1.0).numpy()[0])
    ssim_score.append(tf.image.ssim(stylized, style, max_val=1.0).numpy()[0])

psnr = np.mean(np.array(psnr_score))
ssim = np.mean(np.array(ssim_score))
print("MSE = {:.7f} \tPSNR = {:.7f} \tSSIM = {:.7f}".format(mse_score, psnr, ssim))
19,606 | aeb2d1a1bdc401e4fb0f4bf33ce6dd3e8dcc01c7 | Using the custom if/elif/else logic you have already created for
lab #18, it is your job to improve the code in the following ways:
- Copy your code into a new file - custom_func.py
- Create a csv file with at least 3 rows of test data in it
*CSV data can be only one column wide*
- In your custom_func.py script, read the information in from the csv file
- Place your existing if/elif/else code in a function
- Run your function against each row of information in your csv file
Rocket-Scientists Only:
- Place your logic inside of a try/except block and account for ValueErrors
- Add another csv file with more data. Combine the data from both files.
- Instead of printing items to the screen, sort them, and print them out in groups
|
19,607 | ae85de5188db76c48d304d48cff45500c96e81a4 | #!/usr/bin/env python
# -*- encoding: UTF-8 -*-
# Requisiรงรตes de JSON para atualizar estatisticas do Push-server
import simplejson
import urllib2
class PushStream(object):
    """Singleton client for a push-stream server's channel-statistics JSON."""

    _instance = None

    def __new__(cls, *args, **kwargs):
        # Classic singleton: reuse the one shared instance.
        if not cls._instance:
            # TODO: pass parameters and connect right at the first instantiation
            cls._instance = super(PushStream, cls).__new__(cls, *args, **kwargs)
        return cls._instance

    # True once the stats JSON has been fetched and parsed
    loaded = False

    class CACHE_TYPES():
        # Cache strategies (caching itself is not implemented yet — see _load)
        NONE = 0
        PER_VIEW = 1
        PER_TIME = 2

    class config():
        # Connection settings
        schema = "http"
        hostname = "127.0.0.1"
        port = 80
        publisher = "pub/"
        subscriber = "sub/"
        stats = "channels-stats/"
        connected = False
        # NOTE(review): nested class bodies cannot see the enclosing class
        # scope; confirm how CACHE_TYPES resolves in the original layout.
        cache_type = CACHE_TYPES.PER_VIEW

    class data():
        # Aggregated server statistics, populated by _load()
        hostname = None
        time = None
        channels = 0
        broadcast_channels = 0
        published_messages = 0
        subscribers = 0
        stored_messages = 0
        workers = []
        channels_list = {}

    class worker():
        # One push-stream worker process
        pid = 0
        subscribers = 0

    class channel(object):
        # Per-channel statistics entry
        name = ''
        published_messages = 0
        stored_messages = 0
        subscribers = 0

        def __str__(self):
            return self.name

    def connect(self, **kwargs):
        """
        Connect to the push-stream server's stats endpoint and populate
        the properties of the `data` class.

        The configuration kwargs correspond to attributes of the `config`
        class. If the JSON cannot be fetched, or returns some kind of
        error, urllib2 raises an exception; handle it with try-except.
        """
        for key in kwargs:
            # Only refetch if some configuration value actually changed
            if getattr(self.config, key) != kwargs[key]:
                self.loaded = False
            setattr(self.config, key, kwargs[key])
        self._load()

    def server_url(self):
        # e.g. "http://127.0.0.1:80/"
        return "%s://%s%s/" % (
            self.config.schema,
            self.config.hostname,
            ((":%d" % (self.config.port)) if self.config.port else '')
        )

    def _load(self):
        """Fetch and parse the ALL-channels stats JSON (no-op once loaded)."""
        if self.loaded:
            return self
        # TODO: implement caching
        request = urllib2.Request("%s%s?id=ALL" %
                                  (self.server_url(), self.config.stats))
        opener = urllib2.build_opener()
        file_stream = opener.open(request)
        json = simplejson.load(file_stream)
        self.data.hostname = json.get("hostname")
        self.data.time = json.get("time")
        self.data.channels = int(json.get("channels"))
        self.data.broadcast_channels = int(json.get("broadcast_channels"))
        self.data.channels_list = {}
        for channel in json.get("infos"):
            channel_obj = self.channel()
            channel_obj.name = channel.get("channel")
            channel_obj.published_messages = int(channel.get("published_messages"))
            channel_obj.stored_messages = int(channel.get("stored_messages"))
            channel_obj.subscribers = int(channel.get("subscribers"))
            self.data.channels_list[str(channel_obj)] = channel_obj
            # Running totals across all channels
            self.data.published_messages += channel_obj.published_messages
            self.data.stored_messages += channel_obj.stored_messages
            self.data.subscribers += channel_obj.subscribers
        self.loaded = True
        return self
|
19,608 | 7ddda9fed0965e7a8e73c1c4a00dc6030114a471 | #!/usr/bin/python
import sys
import urllib
import json
data = {
pred : sys.argv[1],
obj : sys.argv[2]
}
response = urllib.urlopen("http://www.rdfclip.com:8000/api/query",data=json.dumps(data))
print response
|
19,609 | 2aefba1b7353c82c2c9bc7b4c4051bd5e7c3515e | ##################################################################################################################
# EmailGeneration.py
##################################################################################################################
# Author: Christina Kim
# Date: 19 June 2017
# Description: Trigger an email notification with error log files as an attachment
##################################################################################################################
import smtplib
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.mime.application import MIMEApplication
import os, os.path
import shutil
def triggerEmails(errorFileList, errorLogPath, subj, sender, recipientList, content, server, port, senderUserName, senderPw):
    """Send one email with every file in errorFileList attached.

    Uses a STARTTLS SMTP session with the given credentials and returns
    errorLogPath unchanged (for chaining by the caller).
    """
    recipientsString = ','.join(recipientList)
    #errorFileList = os.listdir(errorLogPath)
    msg = MIMEMultipart()
    msg['Subject'] = subj
    msg['From'] = sender
    msg['To'] = recipientsString
    msg.attach(MIMEText(content))
    # Add error log files as attachments
    for errorFile in errorFileList or []:
        with open(os.path.join(errorLogPath, errorFile), "rb") as fil:
            part = MIMEApplication(
                fil.read(),
            )
        part['Content-Disposition'] = 'attachment; filename="%s"' % errorFile
        msg.attach(part)
    # STARTTLS handshake, login, send, and disconnect.
    mail = smtplib.SMTP(server, port)
    mail.ehlo()
    mail.starttls()
    mail.login(senderUserName, senderPw)
    mail.sendmail(sender, recipientList, msg.as_string())
    mail.quit()
    return errorLogPath
# Move the errorLog files to the Emailed folder after an email notification is sent out
def moveFiles(errorFileList, errorLogPath):
    """Move each emailed error-log file into the 'Emailed' subfolder.

    Bug fix: paths are joined with os.path.join instead of hard-coded
    backslashes, so the function also works on non-Windows hosts.
    Files that no longer exist are silently skipped, as before.
    """
    for errorFile in errorFileList:
        src = os.path.join(errorLogPath, errorFile)
        if os.path.isfile(src):
            shutil.move(src, os.path.join(errorLogPath, "Emailed", errorFile))
# This is the main function to call the other functions in this module
def generateEmailNotifications(errorLogPath, subj, sender, recipients, message, server, port, senderUserName, senderPw):
    """List error-log files in errorLogPath; if any exist, email and archive them."""
    fileList = os.listdir(errorLogPath)
    errorFileList = []
    #numberOfFiles = len([name for name in fileList if os.path.isfile(os.path.join(errorLogPath, name))])
    # Create a list with the error log files (plain files only, not directories)
    for name in fileList:
        if os.path.isfile(os.path.join(errorLogPath, name)):
            errorFileList.append(name)
    numberOfFiles = len(errorFileList)
    # Trigger an email only if there is any error log file generated.
    if (numberOfFiles > 0):
        errorLogPath = triggerEmails(errorFileList, errorLogPath, subj, sender, recipients, message, server, port, senderUserName, senderPw)
        moveFiles(errorFileList, errorLogPath)
|
19,610 | 81e710bd2dfa0018fd3a05cb3ae8cabd054d727b | import os
import csv
import argparse
from pathlib import Path
import numpy as np
from keras.models import load_model
from dataset_generator_segment import BreakfastActionTrainDataGenerator, BreakfastActionTestDataGenerator
from utils import read_dict
import time  # bug fix: time.strftime is used below but time was never imported

DIR_PATH = ''
PARTITION_PATH = os.path.join(DIR_PATH, 'data/segment_partition.csv')

parser = argparse.ArgumentParser()
parser.add_argument("file_path", type=Path)
p = parser.parse_args()

# Load model
if (p.file_path.exists()):
    model = load_model(p.file_path.as_posix())
    model.summary()
else:
    # Bug fix: exit() accepts a single argument; the original passed two,
    # which raises TypeError instead of exiting with the message.
    exit("The given file path does not exist: " + str(p.file_path))

# Data generator for test
input_dim = 400
partition = read_dict(PARTITION_PATH)
test_generator = BreakfastActionTestDataGenerator(partition['testing'],
                                                  batch_size=1,
                                                  input_dim=input_dim)

# Predict using model (returns probabilities)
print("Getting predictions...")
predictions = model.predict_generator(test_generator,
                                      use_multiprocessing=True,
                                      workers=4,
                                      verbose=2)

# Save raw predictions
model_name = p.file_path.as_posix().split("runs/", 1)[1]  # model name will have the .hdf5 extension
timestr = time.strftime("%Y%m%d_%H%M%S")
print("Writing predictions...")
prediction_file_path = os.path.join(DIR_PATH, 'results/predictions_' + model_name + timestr + '.npy')
np.save(prediction_file_path, predictions)
print("predictions saved at ", prediction_file_path)

# Get final predictions (labels): argmax over the class axis.
prediction_labels = np.argmax(predictions, axis=2)

# Create file according to submission format
print("Writing prediction labels...")
SUBMISSION_PATH = os.path.join(DIR_PATH, 'results/predictions_' + model_name + timestr + '.csv')
with open(SUBMISSION_PATH, 'w', newline='') as submission_file:
    writer = csv.writer(submission_file)
    writer.writerow(["Id", "Category"])
    for (i, label) in enumerate(prediction_labels):
        writer.writerow([i, label[0]])
    # (redundant close() inside the with-block removed)
print("Saved predictions to: ", SUBMISSION_PATH)
19,611 | acdbc7d236216ba37665142621dc469dcef71235 | print("Start")
num = input("Enter a Number: ") # str
num = int(num) # int
if num < 10: # num > 10
print("Given Number is smaller than 10")
print("Bye")
|
19,612 | 17242ebcbaf89030cef3a6af46eb9e81ce38e583 | a = int(input("Please enter an input: "))
def prime(a):
    """Print whether *a* is prime and return the verdict as a bool.

    Fixes the original, which printed "It is a prime number." once per
    non-divisor inside the trial-division loop, never stopped early after
    finding a divisor, and misclassified 1 as prime.
    """
    if a < 2:
        # 0, 1 and negative numbers are not prime by definition.
        print("It is not a prime number.")
        return False
    for x in range(2, a):
        if a % x == 0:
            print("It is not a prime number.")
            return False
    print("It is a prime number.")
    return True
prime(a) |
19,613 | 412284925ac177f6e97fc68dfab4415eba57f3c0 | # Generate a test suite, using python-dateutil as the reference Easter
# computing implementation.
from dateutil.easter import (EASTER_JULIAN, EASTER_ORTHODOX, EASTER_WESTERN,
easter)
# Month names exactly as Elm's Time.Month constructors render them.
elm_month_names = [
    "Jan", "Feb", "Mar",
    "Apr", "May", "Jun",
    "Jul", "Aug", "Sep",
    "Oct", "Nov", "Dec",
]


def elm_date(d):
    """Render a date-like object as an Elm record literal."""
    month = elm_month_names[d.month - 1]
    return "{ year = %d, month = %s, day = %d }" % (d.year, month, d.day)
# Test against each Easter computing method
methods = [
(EASTER_JULIAN, "julian"),
(EASTER_ORTHODOX, "orthodox"),
(EASTER_WESTERN, "western"),
]
# Test against some arbitrary year ranges
year_ranges = [(1200, 1300), (1700, 1850), (1920, 2100)]
print(
"""\
module Tests exposing (..)
import Expect exposing (Expectation)
import Fuzz exposing (Fuzzer, list, int, string)
import String
import Test exposing (..)
import Time exposing (Month(..))
import Easter exposing (Date, EasterMethod(..))
all : Test
all =
describe "Easter tests"
[ suite "Julian" Julian julianEasters
, suite "Orthodox" Orthodox orthodoxEasters
, suite "Western" Western westernEasters
]
suite : String -> EasterMethod -> List Date -> Test
suite methodName method easters =
describe (methodName ++ " easters") <|
List.map (genTest method) easters
genTest : EasterMethod -> Date -> Test
genTest method date =
test (String.fromInt date.year) <|
\\() -> (Expect.equal (Easter.easter method date.year) date)
"""
)
for method, method_name in methods:
first = True
print("%sEasters =" % method_name)
print(" [ ", end="")
for start_year, end_year in year_ranges:
for year in range(start_year, end_year + 1):
date = easter(year, method)
if not first:
print("\n , ", end="")
print(elm_date(date), end="")
first = False
print(" ]")
print("")
print("")
|
19,614 | e5757cedd5b37f2df0d830fdedc8906941b60452 | #!/usr/bin/python
import os
# Run the attenuation simulation ten times, renaming each run's output
# files so successive runs do not overwrite one another.
for run_index in range(10):
    os.system('./attenuation run.mac')
    os.system('mv particles.txt particle_' + str(run_index) + '.txt')
    os.system('mv event_info.txt event_' + str(run_index) + '.txt')

# Collect the per-run files into the results directories.
os.system('mv particle_*.txt results/particle')
os.system('mv event_*.txt results/event')
19,615 | 5857250feb5759587c333b30833dde6ff9b990a0 | import cv2
import numpy as np
# read data
datapath = "/Users/EleanorLeung/Documents/thesis"
a = cv2.imread(datapath+"/notebooks/6_ideal.png")
b = cv2.imread(datapath+"/week4/x451_0.png")
imgray_a = cv2.cvtColor(a,cv2.COLOR_BGR2GRAY)
ret_a,thresh_a = cv2.threshold(imgray_a,127,255,0)
imgray_b = cv2.cvtColor(b,cv2.COLOR_BGR2GRAY)
ret_b,thresh_b = cv2.threshold(imgray_b,127,255,0)
# find contours
_, ca, _ = cv2.findContours(thresh_a, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)
_, cb, _ = cv2.findContours(thresh_b, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)
print(np.shape(ca[0]), np.shape(cb[0]))
# generate distance --> Hausdorff OR ShapeContext
hd = cv2.createHausdorffDistanceExtractor()
sd = cv2.createShapeContextDistanceExtractor()
d1 = hd.computeDistance(ca[0],cb[0])
d2 = sd.computeDistance(ca[0],cb[0])
print(d1, " ", d2) |
19,616 | 3a0bec84f94583cacfc6fdd8af053452ab8ff5a2 | # Stubs for django.utils.termcolors (Python 3.6)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from typing import Any

# Palette lookup data (values populated in the real module at runtime).
color_names: Any
foreground: Any
background: Any
RESET: str
opt_dict: Any

def colorize(text: str = ..., opts: Any = ..., **kwargs): ...
def make_style(opts: Any = ..., **kwargs): ...

# Named palettes selectable via parse_color_setting.
NOCOLOR_PALETTE: str
DARK_PALETTE: str
LIGHT_PALETTE: str
PALETTES: Any
DEFAULT_PALETTE = DARK_PALETTE

def parse_color_setting(config_string): ...
|
19,617 | ee4ab725229008c606d4af664eb7ef396bf8d4f9 | import csv
import sys
import collections
import matplotlib.pyplot as plt
import math
def analyze_emotion(csv_file_path, emotion):
    """Summarise one emotion's evidence values and print 3-sigma outlier times.

    Prints the mean, maximum, and sample standard deviation of the
    per-timestamp evidence values, then each outlier timestamp formatted
    as minutes:seconds.
    """
    emotion_levels = get_emotion_levels(csv_file_path, emotion)
    values = emotion_levels.values()
    # Bug fix: the original seeded the running maximum with
    # sys.float_info.min, which is the smallest *positive* float, not the
    # most negative value — wrong for non-positive data. Use max() instead.
    max_emotion_value = max(values)
    average_emotion_value = float(sum(values) / len(emotion_levels))
    print('average emotion value: ', average_emotion_value)
    print('max emotion value: ', max_emotion_value)
    # Sample standard deviation (n - 1 denominator).
    dist_from_mean_sum = sum((v - average_emotion_value) ** 2 for v in values)
    std_deviation = math.sqrt(dist_from_mean_sum / (len(emotion_levels) - 1))
    print('standard deviation: ', std_deviation)
    # Timestamps (ms keys) whose value exceeds three standard deviations,
    # rounded down to whole seconds (matches the original's comparison to 0).
    areas_of_interest = set()
    for key, value in emotion_levels.items():
        if value - (3 * std_deviation) > 0:
            areas_of_interest.add(int(int(key) / 1000))
    for x in sorted(areas_of_interest):
        print(str(int(x / 60)) + ':' + str(x % 60))
def convert_delimited_txt_to_csv(txt_file_path, csv_file_path):
    """Re-write a tab-delimited text file as a comma-separated CSV.

    Bug fix: the original opened both files in binary mode ("rb"/"wb"),
    which the Python 3 csv module rejects (it requires text mode with
    newline=''), and never closed either handle.
    """
    with open(txt_file_path, newline='') as txt_file, \
            open(csv_file_path, 'w', newline='') as csv_file:
        in_txt = csv.reader(txt_file, delimiter='\t')
        out_csv = csv.writer(csv_file)
        out_csv.writerows(in_txt)
def get_emotion_levels(file_path, emotion):
    """Map Timestamp -> positive '<emotion> Evidence' float for single-face rows.

    A row is kept only when exactly one face was detected and the evidence
    value is strictly positive. (Removed the unused `vals` list and its
    commented-out append from the original.)
    """
    with open(file_path) as csvfile:
        data = csv.DictReader(csvfile)
        values = {}
        for row in data:
            if int(row['NoOfFaces']) == 1 and \
                    float(row[emotion + ' Evidence']) > 0:
                values[row['Timestamp']] = float(row[emotion + ' Evidence'])
        return values
if __name__ == '__main__':
    # Hard-coded demo invocation against a local iMotions export.
    analyze_emotion(
        '/Users/timkanake/Desktop/imotionsdata.csv', 'Confusion')
|
19,618 | 34f59d5b16eb5013eb86de7bc2d9ad52eb175b6f | import RPi.GPIO as GPIO # import RPi.GPIO module
import time
last_edge = time.perf_counter()
read_freq = 0
def handle(pin):
    """GPIO rising-edge callback: update the measured input frequency.

    Stores 1 / (time since the previous edge) in the module-level
    read_freq, then records this edge's timestamp in last_edge.
    """
    global last_edge
    global read_freq
    now = time.perf_counter()
    read_freq = 1/(now-last_edge)
    last_edge = now
GPIO.setmode(GPIO.BCM)  # choose BCM or BOARD
#GPIO.setup(port_or_pin, GPIO.IN)  # set a port/pin as an input
#GPIO.setup(22, GPIO.OUT)  # set a port/pin as an output
GPIO.setup(4, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
#GPIO.output(22, 1)

# Fire handle() on every rising edge of BCM pin 4.
GPIO.add_event_detect(4, GPIO.RISING, handle)

try:
    while True:  # this will carry on until you hit CTRL+C
        # if GPIO.input(4):  # if port 25 == 1
        #     print("Port 4 is 1/HIGH/True - LED ON")
        # else:
        #     print("Port 4 is 0/LOW/False - LED OFF")
        # Report the most recently measured edge frequency ten times a second.
        print('Frequency {}'.format(read_freq))
        time.sleep(0.1)  # wait 0.1 seconds
finally:  # this block will run no matter how the try block exits
    GPIO.cleanup()  # clean up after yourself
19,619 | 2a322e82fd19276cb8fd233217ec626de8b2b112 | import random
import main
|
19,620 | 5a1b51d507c9081546483cbde5383cef16ab80c4 | #Deep Models : ์ ๋ณด์ ์ผ๋ฐํ -> ์ฐธ์๋ ๋ ์ ์๋ค. -> ๋น๋๊ธฐ๋ ๋ ์ ์๋ค -> ๋ ๊ฐ๋ฅผ ๊ฐ์ง ๋๋ฌผ์ ๋ ์ ์๋ค.
#Wide Models : ์ ๋ณด์ ์๊ธฐ
#Wide & Deep Learning : ์ถ์ฒ ์์คํ
, ๊ฒ์ ๋ฐ ์์ ๋ฌธ์ ๊ฐ์ ๋ง์ ์์ ๋ฒ์ฃผํ ํน์ง์ด ์๋ ๋ฐ์ดํฐ๋ฅผ ์ฌ์ฉํ ๋ ์ฌ์ฉ๋๋ค.
#๋ณต์กํ ํจํด๊ณผ ๊ฐ๋จํ ๊ท์น ๋ชจ๋ ํ์ตํ ์ ์๋ค.
#๋จ wide Deep Learning์ keras ํจ์ํ api์ ์ด์ฉํด์ ๋ง๋ค์ด์ผ ๋๋ค.
from sklearn.datasets import fetch_california_housing
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import callbacks
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.layers import (Input, Dense, Concatenate)
Rawdict = fetch_california_housing()

# Load the raw data into a DataFrame (used only for the inspection below).
Cal_DF = pd.DataFrame(Rawdict.data, columns = Rawdict.feature_names)

"""
When loading a dataset, always inspect the data and its dtypes, and check
for missing values; if any are found, drop the affected rows.
print(Cal_DF)
print(Cal_DF.dtypes)
print(Cal_DF.isnull().sum()
"""

# train_test_split(array, test_size, shuffle): split the (dataset, target)
# pair into train/test, then carve a validation set out of the training part.
X_train_all, X_test, y_train_all, y_test = train_test_split(Rawdict.data, Rawdict.target, test_size = 0.3)
X_train, X_valid, y_train, y_valid = train_test_split(X_train_all, y_train_all, test_size = 0.2)

"""
Data shapes
print("Train shape", X_train.shape)
print("Validation", X_valid.shape)
print("Test set", X_test.shape)
"""

scaler = StandardScaler()
# StandardScaler(): standard-normal scaling; MinMaxScaler(): min/max scaling;
# MaxAbsScaler(): scales by the max absolute value (sensitive to outliers);
# RobustScaler(): like StandardScaler but more robust to outliers.
X_train = scaler.fit_transform(X_train)
# fit_transform: standardize the training set and remember its mean/stddev.
X_valid = scaler.transform(X_valid)
# transform(): standardize using the mean/stddev learned from the fitted set.
X_test = scaler.transform(X_test)

# Model definition: the "wide" path feeds the raw input straight to the
# output while the "deep" path goes through two hidden layers; both are
# concatenated before the final regression unit.
inputData = Input(shape=X_train.shape[1:])
hidden1 = Dense(30, activation="relu")(inputData)
hidden2 = Dense(30, activation="relu")(hidden1)
concat = Concatenate()([inputData, hidden2])
output = Dense(1)(concat)
model = keras.Model([inputData], outputs=[output])
# Concatenate()([array1], [array2]): the output shape depends on the chosen axis.

# Compile the model
model.compile(optimizer=Adam(learning_rate=0.005),
              loss = "msle",
              metrics=["accuracy"])

# Train with early stopping on validation loss.
early_stop = keras.callbacks.EarlyStopping(monitor='val_loss', patience=30, restore_best_weights=True)
history = model.fit(X_train, y_train,
                    epochs=100,
                    batch_size=32,
                    validation_data =(X_valid, y_valid),
                    callbacks=[early_stop])

print(model.evaluate(X_test, y_test))
19,621 | 080aa57455268806c7d031876eeb36b1c37c574f | #! /usr/bin/env python
"""
Radial distribution code
length - Angstroms
mass - AMU
volume - Angstroms^3
"""
# Dr. Travis Kemper
# NREL
# Initial Date 6/30/2014
# travis.kemper@nrel.gov
import numpy as np
import datetime, os
import binascii
import struct
import sys
import array
from itertools import izip
import json, math , sys
#MDanalysis
try:
from MDAnalysis import *
from MDAnalysis.core.distances import * ##distance_array
#import MDAnalysis.core.units # for bulk water density
except:
import sys
print "MDAnalysis module not build/configured correctly"
sys.exit(0)
# Streamm toolkit modules
from structureContainer import StructureContainer
from buildingblocks import Buildingblock
from particles import Particle, ParticleContainer
from bonds import Bond, BondContainer
from angles import Angle, AngleContainer
from dihedrals import Dihedral, DihedralContainer
from impropers import Improper, ImproperContainer
from parameters import ParameterContainer
from parameters import ljtype, LJtypesContainer
from parameters import bondtype, BondtypesContainer
from parameters import angletype,AngletypesContainer
from parameters import dihtype, DihtypesContainer
from parameters import imptype, ImptypesContainer
from periodictable import periodictable
import units
import mpiBase
def get_options():
    """
    Build and parse the command-line options for the radial-distribution tool.

    Returns:
        (options, args) from optparse.OptionParser.parse_args()
    """
    import os, os.path
    from optparse import OptionParser

    usage = "usage: %prog [options] \n"
    parser = OptionParser(usage=usage)

    parser.add_option("-v","--verbose", dest="verbose", default=False,action="store_true", help="Verbose output ")
    parser.add_option("-o","--output_id", dest="output_id", default="rdf",type="string",help=" prefix for output files ")
    #
    # Input files
    #
    parser.add_option("--in_cply", dest="in_cply", type="string", default="", help="Input cply file")
    parser.add_option("--in_gro", dest="in_gro", type="string", default="", help="Input gromacs structure file (.gro) ")
    parser.add_option("--in_data", dest="in_data", type="string", default="", help="Input lammps structure file (.data) ")
    parser.add_option("--in_dcd", dest="in_dcd", type="string", default="", help="Input trajectory file in compressed dcd format ")
    parser.add_option("--in_xtc", dest="in_xtc", type="string", default="", help="Input xtc file with atoms listed as atom type numbers")
    #
    # Groups
    #
    parser.add_option("--group_chains", dest="group_chains", default=True,action="store_true", help="Group by molecules ")
    parser.add_option("--group_residues", dest="group_residues", default=False,action="store_true", help="Group by molecules ")
    # parser.add_option("--group_ptma", dest="group_ptma", default=False,action="store_true", help="Group TEMPO molecules ")
    parser.add_option("--add_dr",dest="add_dr",type=float, default=0 , help="Add length to the || r_i - r_mol_cms || value ")
    #
    # Frames
    #
    parser.add_option("--frame_o", dest="frame_o", type=int, default=0, help=" Initial frame to read")
    parser.add_option("--frame_f", dest="frame_f", type=int, default=10000, help=" Final frame to read")
    parser.add_option("--frame_step", dest="frame_step", type=int, default=1, help=" Read every nth frame ")
    parser.add_option("--readall_f", dest="readall_f", default=False,action="store_true", help=" Read to end of trajectory file (negates frame_f value)")
    #
    # Filters (select atoms of a group by various attributes)
    #
    parser.add_option("--symbol", dest="symbol", type="string", default="", help=" select atoms of group by (atomic) symbol ")
    parser.add_option("--label", dest="label", type="string", default="", help="select atoms of group by label ")
    parser.add_option("--fftype", dest="fftype", type="string", default="", help="select atoms of group by force field type ")
    parser.add_option("--chain", dest="chain", type="string", default="", help="select atoms of group by chain/molecule number ")
    parser.add_option("--resname", dest="resname", type="string", default="", help="select atoms of group by residue name ")
    parser.add_option("--residue", dest="residue", type="string", default="", help="select atoms of group by resudue number ")
    parser.add_option("--ring", dest="ring", type="string", default="", help="select atoms of group by particlesn a ring ")
    parser.add_option("--xyz_cmas", dest="xyz_cmas", default=False,action="store_true", help="print xyz file for each group with the center of mass")
    parser.add_option("--grp_time", dest="grp_time", default=False,action="store_true", help="print time series data for each group")
    parser.add_option("--void", dest="void", default=False,action="store_true", help="calculate voids ")
    parser.add_option("--dl", dest="dl", default=False,action="store_true", help="Calculate the end to end length ")
    parser.add_option("--void_grid_n", dest="void_grid_n", type=int, default=10, help=" Cut off for void grid ")
    parser.add_option("--add_cov_radii", dest="add_cov_radii", default=False,action="store_true", help="Include grip points within cov_radii of each particle ")
    parser.add_option("--add_vdw_radii", dest="add_vdw_radii", default=False,action="store_true", help="Include grip points within vdw_radii of each particle ")
    parser.add_option("--add_fix_radii", dest="add_fix_radii", default=False,action="store_true", help="Include grip points within a fixed radius of each particle ")
    parser.add_option("--fix_radii", dest="fix_radii", type=float, default=10, help=" Fixed radius to add to each particle with add_fix_radii on ")
    parser.add_option("--add_gl", dest="add_gl", default=False,action="store_true", help="Add a box length to the extended particle size to force overlap between to types for surface analysis ")

    (options, args) = parser.parse_args()
    return options, args
def addtagDic(dic_i,tag,tag_str,setint=False):
    """
    Split a whitespace-delimited option string into tokens and store them
    under *tag* in the dictionary (as ints when setint is True).

    An empty string leaves the dictionary untouched.
    """
    if tag_str:
        tokens = tag_str.split()
        if setint:
            dic_i[tag] = [int(tok) for tok in tokens]
        else:
            dic_i[tag] = list(tokens)
    return dic_i
def create_search(search_dic,f_symb,f_label,f_fftype,f_residue,f_resname,f_chain,f_ring):
    """
    Build the particle-search dictionary from the filter option strings.

    Numeric filters (residue, chain, ring) are converted to ints; an empty
    option string leaves its key out of the dictionary (see addtagDic).
    """
    search_dic = addtagDic(search_dic,"symbol",f_symb)
    search_dic = addtagDic(search_dic,"label",f_label)
    search_dic = addtagDic(search_dic,"fftype",f_fftype)
    search_dic = addtagDic(search_dic,"residue",f_residue,setint=True)
    search_dic = addtagDic(search_dic,"resname",f_resname)
    search_dic = addtagDic(search_dic,"chain",f_chain,setint=True)
    search_dic = addtagDic(search_dic,"ring",f_ring,setint=True)
    return search_dic
def read_lmpdata( strucC , parmC , data_file, coordupdate=False):
    """
    Read Lammps data file

    Parses the header counts, the coefficient sections (Masses, Pair,
    Bond, Angle, Dihedral and Improper Coeffs) and the topology sections
    (Atoms, Bonds, Angles, Dihedrals, Impropers) of a LAMMPS data file
    into the passed containers. Dihedral coefficients are assumed to be
    OPLS style and impropers harmonic (see in-line notes).

    Arguments:
        strucC (StructureContainer)
        parmC (ParameterContainer)
        data_file (str) data file
        coordupdate (bool) when True only positions and lammps type ids
            are updated; tags previously read (e.g. from a cply file)
            are left untouched
    ReturnS:
        strucC (StructureContainer)
        parmC (ParameterContainer)
    """
    debug = False
    verbose = True
    set_chain_numbers = True
    if( not set_chain_numbers ):
        print " Warning not reading in chain numbers!!! "
    # Load periodic table
    pt = periodictable()
    F = open(data_file , 'r' )
    lines = F.readlines()
    F.close()
    #
    # Read in data header with number of parameters
    #
    for line in lines:
        col = line.split()
        if ( len(col) >=2 ):
            # Read in number of each topological component
            if( col[1] == "atoms" ):
                n_atoms = int( col[0] )
            elif( col[1] == "bonds" ):
                n_bonds = int( col[0] )
            elif( col[1] == "angles" ):
                n_angles = int( col[0] )
            elif( col[1] == "dihedrals" ):
                n_dihedrals = int( col[0] )
            elif( col[1] == "impropers" ):
                n_impropers = int( col[0] )
        if ( len(col) >= 3 ):
            # Read in number of each parameter type
            if( col[1] == "atom" and col[2] == "types" ):
                n_atypes = int( col[0] )
            elif( col[1] == "bond" and col[2] == "types" ):
                n_btypes = int( col[0] )
            elif( col[1] == "angle" and col[2] == "types" ):
                n_angtypes = int( col[0] )
            elif( col[1] == "dihedral" and col[2] == "types" ):
                n_dtypes = int( col[0] )
            elif( col[1] == "improper" and col[2] == "types" ):
                n_imptypes = int( col[0] )
            # Read in box size (orthorhombic cell: lo/hi bounds per axis)
        if ( len(col) >= 4 ):
            if( col[2] == "xlo" and col[3] == "xhi" ):
                strucC.latvec[0][0] = float( col[1] ) - float( col[0] )
            if( col[2] == "ylo" and col[3] == "yhi" ):
                strucC.latvec[1][1] = float( col[1] ) - float( col[0] )
            if( col[2] == "zlo" and col[3] == "zhi" ):
                strucC.latvec[2][2] = float( col[1] ) - float( col[0] )
    # Print debug counts
    if( verbose ):
        print " atoms ",n_atoms
        print " n_bonds ",n_bonds
        print " n_angles ",n_angles
        print " n_dihedrals ",n_dihedrals
        print " n_impropers ",n_impropers
        print ""
        print "n_atom_types",n_atypes
        print "n_bond_types",n_btypes
        print "n_angle_types",n_angtypes
        print "n_dihedral_types",n_dtypes
        print "n_imp_dihedral_types",n_imptypes
    # Check to see if a previous read has occurred
    pt_overwrite = False
    if( len(strucC.ptclC) > 0 ):
        pt_overwrite = True
    # Check for a consistent number of atoms
    if( pt_overwrite ):
        if( len(strucC.ptclC) != n_atoms):
            print " %d atoms in passed structure "%(len(strucC.ptclC))
            print " %d atoms in data file "%(n_atoms)
            sys.exit(" Inconsistent number of atoms " )
    else:
        #
        # Initialize particle container
        #
        for pid_i in range(n_atoms):
            pt_i = Particle( )
            strucC.ptclC.put(pt_i)
    bonds_overwrite = False
    if( len(strucC.bondC) > 0 ):
        bonds_overwrite = True
        if( len(strucC.bondC) != n_bonds):
            print " %d bonds in passed structure "%(len(strucC.bondC))
            print " %d bonds in data file "%(n_bonds)
            sys.exit(" Inconsistent number of bonds " )
    angles_overwrite = False
    if( len(strucC.angleC) > 0 ):
        angles_overwrite = True
        if( len(strucC.angleC) != n_angles):
            print " %d angles in passed structure "%(len(strucC.angleC))
            print " %d angles in data file "%(n_angles)
            sys.exit(" Inconsistent number of angles " )
    dih_overwrite = False
    if( len(strucC.dihC) > 0 ):
        dih_overwrite = True
        if( len(strucC.dihC) != n_dihedrals):
            print " %d dihedrals in passed structure "%(len(strucC.dihC))
            print " %d dihedrals in data file "%(n_dihedrals)
            sys.exit(" Inconsistent number of dihedrals " )
    imp_overwrite = False
    if( len(strucC.impC) > 0 ):
        imp_overwrite = True
        if( len(strucC.impC) != n_impropers):
            print " %d impropers in passed structure "%(len(strucC.impC))
            print " %d impropers in data file "%(n_impropers)
            sys.exit(" Inconsistent number of impropers " )
    #
    # Initialize
    # - section read flags are off until the matching header line is seen
    #
    read_Masses = 0
    read_Pair = 0
    read_Bond_coeff = 0
    read_Angle_coeff = 0
    read_Dihedral_coeff = 0
    read_Improper_coeff = 0
    read_Atoms = 0
    read_Bonds = 0
    read_Angles = 0
    read_Dihedrals = 0
    read_Impropers = 0
    # - pre-sized lists, since indices can appear out of order in the data file
    ATYPE_REF = n_atypes*[""]
    ATYPE_MASS = np.zeros(n_atypes)
    ATYPE_EP = np.zeros(n_atypes)
    ATYPE_SIG = np.zeros(n_atypes)
    # NOTE(review): n*[2*[""]] repeats a reference to ONE inner list, so
    # e.g. BTYPE_REF[i][j] = ... writes into every row; benign here because
    # all rows are only ever assigned the same "??" placeholder.
    BTYPE_REF = n_btypes*[2*[""]]
    BONDTYPE_R0 = np.zeros(n_btypes)
    BONDTYPE_K = np.zeros(n_btypes)
    ANGTYPE_REF = n_angtypes*[3*[""]]
    ANGLETYPE_R0 = np.zeros(n_angtypes)
    ANGLETYPE_K = np.zeros(n_angtypes)
    DTYPE_REF = n_dtypes*[4*[""]]
    DIHTYPE_C = np.zeros((n_dtypes,4))
    DIHTYPE_F = np.zeros(n_dtypes)
    DIHTYPE_K = np.zeros(n_dtypes)
    DIHTYPE_PN = np.zeros(n_dtypes)
    DIHTYPE_PHASE = np.zeros(n_dtypes)
    IMPTYPE_REF = n_imptypes*[4*[""]]
    IMPTYPE_F = np.zeros(n_imptypes)
    IMPTYPE_E0 = np.zeros(n_imptypes)
    IMPTYPE_K = np.zeros(n_imptypes)
    MOLNUMB = n_atoms*[0]
    ATYPE_IND = n_atoms*[0]
    CHARGES = np.zeros(n_atoms)
    R = n_atoms*[np.zeros(3)]
    ATYPE = n_atoms*[""]
    BONDS = n_bonds*[[0,0]]
    BTYPE_IND = n_bonds*[0]
    ANGLES = n_angles*[[0,0,0]]
    ANGTYPE_IND = n_angles*[0]
    DIH = n_dihedrals*[[0,0,0,0]]
    DTYPE_IND = n_dihedrals*[0]
    #
    # Check if values exist and need to be updated or don't and need to be created
    #
    ljtyp_update = False
    ljtyp_cnt = 0
    if( len(parmC.ljtypC) > 0 ):
        print " LJ types will be updated "
        ljtyp_update = True
    btyp_update = False
    btyp_cnt = 0
    if( len(parmC.btypC) > 0 ):
        print " Bond types will be updated "
        btyp_update = True
    atyp_update = False
    atyp_cnt = 0
    if( len(parmC.atypC) > 0 ):
        print " Angle types will be updated "
        atyp_update = True
    dtyp_update = False
    dtyp_cnt = 0
    if( len(parmC.dtypC) > 0 ):
        print " Dihedral types will be updated "
        dtyp_update = True
    imptyp_update = False
    imptyp_cnt = 0
    if( len(parmC.imptypC) > 0 ):
        print " Improper dihedrals types will be updated "
        imptyp_update = True
    #
    # Read in data parameters
    #
    # Section headers ("Masses", "Atoms", ...) are matched at the BOTTOM of
    # this loop; they switch on a read_* flag so the data lines that follow
    # the header are consumed by the blocks below on later iterations.
    for line in lines:
        col = line.split()
        if( read_Masses and len(col) >= 2 ):
            cnt_Masses += 1
            ind = int(col[0]) - 1
            ATYPE_MASS[ind] = float(col[1])
            # Optional 4th column (trailing comment) is taken as the force-field type
            if( len(col) >= 4 ):
                ATYPE_REF[ind] = col[3]
                ptype1 = col[3]
            else:
                ATYPE_REF[ind] = "??"
                ptype1 = "??"
            mass_i = float(col[1])
            if( ljtyp_update ):
                ljtyp_cnt = ind + 1
                if( ljtyp_cnt > len(parmC.ljtypC) ):
                    print "Mass index %d larger then length of previously read ljtypC %d"%(ind,len(parmC.ljtypC))
                ljtyp_i = parmC.ljtypC[ljtyp_cnt]
                ljtyp_i.setmass(mass_i)
            else:
                ljtyp_i = ljtype(ptype1)
                ljtyp_i.setmass(mass_i)
                parmC.ljtypC.put(ljtyp_i)
            # Turn off mass read once all atom types have been seen
            if(cnt_Masses == n_atypes ):
                read_Masses = 0
        if( read_Pair and len(col) >= 3 ):
            cnt_Pair += 1
            ind = int(col[0]) - 1
            ATYPE_EP[ind] = float(col[1])
            ATYPE_SIG[ind] = float(col[2])
            epsilon = float(col[1])
            sigma = float(col[2])
            ljtyp_ind = int(col[0])
            ljtyp_i = parmC.ljtypC[ljtyp_ind]
            ljtyp_i.setparam(epsilon,sigma)
            # Turn pair parameter read off
            if(cnt_Pair >= n_atypes ):
                read_Pair = 0
        if( read_Bond_coeff and len(col) >= 3 ):
            cnt_Bond_coeff += 1
            #AT_i = int(col[0])
            #AT_j = int(col[1])
            b_ind = int( col[0]) - 1
            if( b_ind > n_btypes ):
                error_line = " Error in data file index of bond parameter exceeds number of bond parameters specified with bond types "
                sys.exit(error_line)
            BTYPE_REF[b_ind][0] = "??"
            BTYPE_REF[b_ind][1] = "??"
            BONDTYPE_K[b_ind] = float(col[1])
            BONDTYPE_R0[b_ind] = float(col[2])
            ptype1 = "??"
            ptype2 = "??"
            lmpindx = int( col[0])
            # Harmonic bond: K, r0 in LAMMPS "bond_style harmonic" order
            kb = float(col[1])
            r0 = float(col[2])
            btype = "harmonic"
            g_type = 1
            if( btyp_update ):
                btyp_cnt = b_ind + 1
                btyp_i = parmC.btypC[btyp_cnt]
                btyp_i.setharmonic(r0,kb)
                btyp_i.set_g_indx(g_type)
                btyp_i.set_lmpindx(lmpindx)
            else:
                btyp_i = bondtype(ptype1,ptype2,btype)
                btyp_i.setharmonic(r0,kb)
                btyp_i.set_g_indx(g_type)
                btyp_i.set_lmpindx(lmpindx)
                parmC.btypC.put(btyp_i)
            if( cnt_Bond_coeff >= n_btypes ):
                read_Bond_coeff = 0
        if( read_Angle_coeff and len(col) >= 3 ):
            cnt_Angle_coeff += 1
            #AT_i = int(col[0])
            #AT_j = int(col[1])
            a_ind = int( col[0]) - 1
            if( a_ind > n_angtypes ):
                print sys.exit(" Error in data file index of angle parameter exceeds number of angle parameters specified with angle types ")
            ANGTYPE_REF[a_ind][0] = "??"
            ANGTYPE_REF[a_ind][1] = "??"
            ANGTYPE_REF[a_ind][2] = "??"
            ANGLETYPE_K[a_ind] = float(col[1])
            ANGLETYPE_R0[a_ind] = float(col[2])
            ptype1 = "??"
            ptype2 = "??"
            ptype3 = "??"
            lmpindx = int( col[0])
            theta0 = float( col[2] )        # degrees
            kb = float( col[1] )
            atype = "harmonic"
            gfunc_type = 1
            if( atyp_update ):
                atyp_cnt = a_ind + 1
                atyp_i = parmC.atypC[atyp_cnt]
                atyp_i.set_g_indx(gfunc_type)
                atyp_i.set_lmpindx(lmpindx)
                atyp_i.setharmonic(theta0,kb)
            else:
                atyp_i = angletype(ptype1,ptype2,ptype3,atype)
                atyp_i.set_g_indx(gfunc_type)
                atyp_i.set_lmpindx(lmpindx)
                atyp_i.setharmonic(theta0,kb)
                parmC.atypC.put(atyp_i)
            if( cnt_Angle_coeff >= n_angtypes ):
                read_Angle_coeff = 0
        if( read_Dihedral_coeff and len(col) >= 3 ):
            cnt_Dihedral_coeff += 1
            #AT_i = int(col[0])
            #AT_j = int(col[1])
            d_ind = int( col[0]) - 1
            if( debug): print " reading dih type ",d_ind," cnt ",cnt_Dihedral_coeff," of ",n_dtypes
            if( d_ind > n_dtypes ):
                error_line = " Error in data file index of dihedral parameter %d exceeds number of dihedral parameters %d "%(d_ind , n_dtypes)
                error_line += " specified with dihedral types "
                print sys.exit(error_line)
            DTYPE_REF[d_ind][0] = "??"
            DTYPE_REF[d_ind][1] = "??"
            DTYPE_REF[d_ind][2] = "??"
            DTYPE_REF[d_ind][3] = "??"
            # Assume OPLS dihedral type (4 Fourier coefficients k1..k4)
            DIHTYPE_F[d_ind] = 3
            DIHTYPE_C[d_ind][0] = float(col[1])
            DIHTYPE_C[d_ind][1] = float(col[2])
            DIHTYPE_C[d_ind][2] = float(col[3])
            DIHTYPE_C[d_ind][3] = float(col[4])
            ptype1 = "??"
            ptype2 = "??"
            ptype3 = "??"
            ptype4 = "??"
            # Set parameters according to type
            gfunc_type = 3
            dtype = "opls"
            lmpindx = int( col[0] )
            k1 = float( col[1] )
            k2 = float( col[2] )
            k3 = float( col[3] )
            k4 = float( col[4] )
            if( dtyp_update ):
                dtyp_cnt = d_ind + 1
                dtyp_i = parmC.dtypC[dtyp_cnt]
                dtyp_i.set_g_indx(gfunc_type)
                dtyp_i.set_lmpindx(lmpindx)
                dtyp_i.setopls(k1,k2,k3,k4)
            else:
                dtyp_i = dihtype(ptype1,ptype2,ptype3,ptype4,dtype)
                dtyp_i.set_g_indx(gfunc_type)
                dtyp_i.set_lmpindx(lmpindx)
                dtyp_i.setopls(k1,k2,k3,k4)
                parmC.dtypC.put(dtyp_i)
            if( cnt_Dihedral_coeff >= n_dtypes ):
                read_Dihedral_coeff = 0
        if( read_Improper_coeff and len(col) >= 3 ):
            cnt_Improper_coeff += 1
            #AT_i = int(col[0])
            #AT_j = int(col[1])
            imp_ind = int( col[0]) - 1
            if( debug): print " reading imp dih type ",imp_ind," cnt ",cnt_Improper_coeff," of ",n_imptypes
            if( imp_ind > n_imptypes ):
                error_line = " Error in data file index of improper parameter %d exceeds number of improper parameters %d "%(imp_ind , n_imptypes)
                error_line += " specified with dihedral types "
                print sys.exit(error_line)
            IMPTYPE_REF[imp_ind][0] = "??"
            IMPTYPE_REF[imp_ind][1] = "??"
            IMPTYPE_REF[imp_ind][2] = "??"
            IMPTYPE_REF[imp_ind][3] = "??"
            # Harmonic improper: K (energy), E0 (equilibrium angle)
            IMPTYPE_F[imp_ind] = 2
            KE = float(col[1])
            Eo = float(col[2])
            IMPTYPE_E0[imp_ind] = Eo
            IMPTYPE_K[imp_ind] = KE
            ptype1 = "??"
            ptype2 = "??"
            ptype3 = "??"
            ptype4 = "??"
            # Set parameters according to type
            g_indx = 2
            dtype = "improper"
            lmpindx = int( col[0] )
            if( imptyp_update ):
                imptyp_cnt = imp_ind + 1
                imptyp_i = parmC.imptypC[imptyp_cnt]
                imptyp_i.set_g_indx(g_indx)
                imptyp_i.setimp(Eo,KE)
                imptyp_i.set_lmpindx(lmpindx)
            else:
                imptyp_i = imptype(ptype1,ptype2,ptype3,ptype4,dtype)
                imptyp_i.set_g_indx(g_indx)
                imptyp_i.setimp(Eo,KE)
                imptyp_i.set_lmpindx(lmpindx)
                parmC.imptypC.put(imptyp_i)
            if( cnt_Improper_coeff >= n_imptypes ):
                read_Improper_coeff = 0
        if( read_Atoms and len(col) >= 7 ):
            # Atoms line (full style): id mol type q x y z
            cnt_Atoms += 1
            ind = int( col[0]) - 1
            if( ind > n_atoms ):
                print sys.exit(" Error in data file index of atoms exceeds number of atoms specified with atoms ")
            chain_i = int(col[1])
            lmptype_i = int(col[2]) #- 1
            indx = int(col[2]) - 1
            q_i = float(col[3])
            m_i = ATYPE_MASS[indx]
            fftype_i = ATYPE_REF[indx]
            # Identify the element from its mass
            el = pt.getelementWithMass(m_i)
            if( el.symbol == "VS" ):
                # NOTE(review): ATYPE_l and atom_indx are not defined anywhere
                # in this function -- reaching this virtual-site branch would
                # raise a NameError; confirm against the original source.
                el.symbol = ATYPE_l[atom_indx].strip()
                fftype_i = "VS"
                m_i = 0.0
            # HACK !! mass 9.0 is used as a sentinel for lone-pair pseudo-atoms
            if( ATYPE_MASS[indx] == 9.0 ):
                el.symbol = "LP"
                fftype_i = "LP"
                m_i = 0.0
            r_i = [ float(col[4]),float(col[5]),float(col[6])]
            type_i = str(lmptype_i)
            pt_i = strucC.ptclC[ind+1]
            pt_i.position = r_i
            add_dict = pt_i.tagsDict
            # Not in cply file
            add_dict["lmptype"] = lmptype_i
            if( not coordupdate ):
                # Do not over write data from cply file
                pt_i.charge = q_i
                pt_i.mass = m_i
                add_dict["chain"] = chain_i
                add_dict["symbol"] = el.symbol
                add_dict["number"] = el.number
                add_dict["mass"] = el.mass
                add_dict["cov_radii"] = el.cov_radii
                add_dict["vdw_radii"] = el.vdw_radii
                add_dict["fftype"] = fftype_i
                #
                add_dict["ffmass"] = ATYPE_MASS[indx]
                add_dict["qgroup"] = chain_i
                add_dict["residue"] = chain_i
                add_dict["resname"] = "MOLR"
                add_dict["label"] = add_dict["symbol"]
            pt_i.setTagsDict(add_dict)
            if( cnt_Atoms >= n_atoms ):
                read_Atoms = 0
        if(read_Bonds and len(col) >= 4 ):
            # Bonds line: id type i j (ids converted to 0-indexed for the lists)
            cnt_Bonds += 1
            ind = int( col[0]) - 1
            if( ind > n_bonds ):
                print sys.exit(" Error in data file index of bonds exceeds number of bonds specified with bonds ")
            BTYPE_IND[ind] = int(col[1] ) - 1
            BONDS[ind] = [int(col[2]) - 1 , int(col[3]) - 1 ]
            i_o = int(col[2])
            j_o = int(col[3])
            if( bonds_overwrite ):
                bondObj = strucC.bondC[cnt_Bonds]
                bondObj.pgid1 = i_o
                bondObj.pgid2 = j_o
                bondObj.set_lmpindx(int(col[1] ))
            else:
                bondObj = Bond( i_o, j_o )
                bondObj.set_lmpindx(int(col[1] ))
                strucC.bondC.put(bondObj)
            if( cnt_Bonds >= n_bonds ):
                read_Bonds = 0
        if(read_Angles and len(col) >= 5 ):
            cnt_Angles += 1
            ind = int( col[0]) - 1
            ANGTYPE_IND[ind] = int(col[1] ) - 1
            ANGLES[ind] = [int(col[2]) - 1, int(col[3]) - 1, int(col[4]) - 1 ]
            k_o = int(col[2])
            i_o = int(col[3])
            j_o = int(col[4])
            if( cnt_Angles >= n_angles ):
                read_Angles = 0
            if( angles_overwrite ):
                angleObj = strucC.angleC[cnt_Angles]
                angleObj.pgid1 = k_o
                angleObj.pgid2 = i_o
                angleObj.pgid3 = j_o
                angleObj.set_lmpindx(int(col[1] ))
            else:
                angleObj = Angle( k_o,i_o, j_o )
                angleObj.set_lmpindx(int(col[1] ))
                strucC.angleC.put(angleObj)
        if(read_Dihedrals and len(col) >= 6 ):
            cnt_Dihedrals += 1
            ind = int( col[0]) - 1
            DTYPE_IND[ind] = int(col[1] ) - 1
            DIH[ind] = [int(col[2]) - 1,int(col[3]) - 1, int(col[4]) - 1,int(col[5]) - 1]
            k_o = int(col[2])
            i_o = int(col[3])
            j_o = int(col[4])
            l_o = int(col[5])
            if( dih_overwrite ):
                dObj = strucC.dihC[cnt_Dihedrals]
                dObj.pgid1 = k_o
                dObj.pgid2 = i_o
                dObj.pgid3 = j_o
                dObj.pgid4 = l_o
                dObj.set_lmpindx(int(col[1] ))
            else:
                dObj = Dihedral( k_o,i_o, j_o,l_o )
                dObj.set_lmpindx(int(col[1] ))
                strucC.dihC.put(dObj)
            if( cnt_Dihedrals >= n_dihedrals ):
                read_Dihedrals = 0
        if(read_Impropers and len(col) >= 2 ):
            cnt_Impropers += 1
            ind = int( col[0]) - 1
            k_o = int(col[2])
            i_o = int(col[3])
            j_o = int(col[4])
            l_o = int(col[5])
            if( imp_overwrite ):
                impObj = strucC.impC[cnt_Impropers]
                impObj.pgid1 = k_o
                impObj.pgid2 = i_o
                impObj.pgid3 = j_o
                impObj.pgid4 = l_o
                impObj.set_lmpindx(int(col[1] ))
                impObj.set_type("improper")
            else:
                impObj = Improper( k_o,i_o, j_o,l_o )
                impObj.set_lmpindx(int(col[1] ))
                impObj.set_type("improper")
                strucC.impC.put(impObj)
            if( cnt_Impropers >= n_impropers ):
                read_Impropers = 0
        # Section-header matching: turn on the appropriate read flag and
        # reset its counter; the data lines are handled above on the
        # following iterations of the loop.
        if ( len(col) >= 1 ):
            if( col[0] == "Masses" ):
                read_Masses = 1
                cnt_Masses = 0
            if( col[0] == "Atoms" ):
                read_Atoms = 1
                cnt_Atoms = 0
            if( col[0] == "Bonds" ):
                read_Bonds = 1
                cnt_Bonds = 0
            if( col[0] == "Angles" ):
                read_Angles = 1
                cnt_Angles = 0
            if( col[0] == "Dihedrals" ):
                read_Dihedrals = 1
                cnt_Dihedrals = 0
            if( col[0] == "Impropers" ):
                read_Impropers = 1
                cnt_Impropers = 0
        if ( len(col) >= 2 ):
            if( col[0] == "Pair" and col[1] == "Coeffs" ):
                read_Pair = 1
                cnt_Pair = 0
            if( col[0] == "Bond" and col[1] == "Coeffs" ):
                read_Bond_coeff = 1
                cnt_Bond_coeff = 0
            if( col[0] == "Angle" and col[1] == "Coeffs" ):
                read_Angle_coeff = 1
                cnt_Angle_coeff = 0
            if( col[0] == "Dihedral" and col[1] == "Coeffs" ):
                read_Dihedral_coeff = 1
                cnt_Dihedral_coeff = 0
            if( col[0] == "Improper" and col[1] == "Coeffs" ):
                read_Improper_coeff = 1
                cnt_Improper_coeff = 0
    # Sanity check against a previously read parameter container
    if( ljtyp_update ):
        if( ljtyp_cnt != len(parmC.ljtypC) ):
            print " Number of LJ types read in %d does not match previously read %d "%(ljtyp_cnt,len(parmC.ljtypC))
    if( debug):
        for ind in range(len(ATYPE_MASS)):
            print ind+1,ATYPE_MASS[ind]
        for ind in range(len(ATYPE_EP)):
            print ind+1,ATYPE_EP[ind],ATYPE_SIG[ind]
        for ind in range(n_btypes):
            print ind+1,BONDTYPE_R0[ind],BONDTYPE_K[ind]
        for ind in range(n_angtypes):
            print ind+1,ANGLETYPE_R0[ind],ANGLETYPE_K[ind]
        for ind in range(n_dtypes):
            print ind+1,DIHTYPE_C[ind]
    debug =0
    if( debug):
        for ind in range(len(BONDS)):
            print ind+1,BONDS[ind]
    if(debug):
        sys.exit("debug 1 ")
    #
    #
    return (strucC,parmC)
def read_gro(strucC,in_gro,coordupdate=False,set_chaintoresidue=False,verbose=False,debug = False):
    """
    Read gromacs structure file

    Positions are parsed from the fixed columns of the .gro format and
    converted from nm to Angstroms; the final line sets the lattice vectors.

    Arguments:
        struc_o (StructureContainer)
        in_gro (str) GROMACS .gro file
        coordupdate (bool) when True only positions are updated and the
            particle tags are left untouched
        set_chaintoresidue (bool) copy the residue number into the chain tag
    Returns:
        struc_o (StructureContainer)
    """
    # atomicpy functions
    try:
        with open(in_gro,'r') as F:
            Lines = F.readlines()
        F.close()  # NOTE(review): redundant -- the with block already closed F
    except IOError:
        print " Specified .gro file ",in_gro," does not exisit "
        sys.exit("Invalid file ")
    # Check to see if a previous read has occurred
    pt_update = False
    # line 2 of a .gro file holds the atom count
    n_pt = int( Lines[1])
    if( len(strucC.ptclC) > 0 ):
        pt_update = True
        # Check for a consistent number of atoms
        pt_cnt = len(strucC.ptclC)
        if( pt_cnt != n_pt):
            print " Current structure has %d atoms and %s has %d"%(pt_cnt,in_gro,n_pt)
            sys.exit(" Inconsistent number of atoms " )
    #
    # Read in .gro file
    #
    line_cnt = 0
    ptcl_cnt = 0
    for line in Lines :
        line_cnt = line_cnt + 1
        if( line_cnt > 2 and len(line) >= 44 and ptcl_cnt < n_pt): # skip header
            # Set particle i
            ptcl_cnt += 1
            # Fixed-column .gro fields: resid, resname, atom name, atom id, x, y, z
            residue_i = int(line[0:5].strip())
            resname_i = line[5:10].strip()
            g = line[10:15].strip()
            particle_i = int(line[15:20].strip())
            x = units.convert_nm_angstroms( float( line[20:28] ))
            y = units.convert_nm_angstroms( float(line[28:36]))
            z = units.convert_nm_angstroms( float(line[36:44]))
            #r_i = numpy.array( [float(x)*10,float(y)*10,float(z)*10] )
            r_i = [x,y,z]
            if(debug):
                print " particle ",ptcl_cnt,g,r_i
            if( pt_update ):
                pt_i = strucC.ptclC[ptcl_cnt]
            else:
                pt_i = Particle( )
            pt_i.position = r_i
            if( not coordupdate ):
                add_dict = pt_i.tagsDict
                add_dict["residue"] = int(residue_i)
                add_dict["resname"] = resname_i
                add_dict["label"] = str(g)
                # set as defaults
                add_dict["fftype"] = "??"
                add_dict["qgroup"] = 1
                if( set_chaintoresidue ):
                    add_dict["chain"] = int(residue_i)
                else:
                    add_dict["chain"] = 1
                pt_i.setTagsDict(add_dict)
            if( not pt_update ):
                strucC.ptclC.put(pt_i)
    #
    # Get lattice vector from last line
    #
    line = Lines[-1]
    col = line.split()
    n_vec = int( len(col))
    if( n_vec == 3 ):
        # Orthorhombic box: only the diagonal is set
        strucC.latvec[0][0] = units.convert_nm_angstroms(float( col[0] ) )
        strucC.latvec[1][1] = units.convert_nm_angstroms(float( col[1] ) )
        strucC.latvec[2][2] = units.convert_nm_angstroms(float( col[2] ) )
    if( n_vec == 9 ):
        # Triclinic box; presumably follows the GROMACS ordering
        # v1(x) v2(y) v3(z) v1(y) v1(z) v2(x) v2(z) v3(x) v3(y) -- TODO confirm
        strucC.latvec[0][0] = units.convert_nm_angstroms(float( col[0] ) )
        strucC.latvec[1][1] = units.convert_nm_angstroms(float( col[1] ) )
        strucC.latvec[2][2] = units.convert_nm_angstroms(float( col[2] ) )
        strucC.latvec[0][1] = units.convert_nm_angstroms(float( col[3] ) )
        strucC.latvec[0][2] = units.convert_nm_angstroms(float( col[4] ) )
        strucC.latvec[1][0] = units.convert_nm_angstroms(float( col[5] ) )
        strucC.latvec[1][2] = units.convert_nm_angstroms(float( col[6] ) )
        strucC.latvec[2][0] = units.convert_nm_angstroms(float( col[7] ) )
        strucC.latvec[2][1] = units.convert_nm_angstroms(float( col[8] ) )
    if( debug ):
        print " Box size ",strucC.latvec[0][0],strucC.latvec[1][1],strucC.latvec[2][2]," angstorms "
def write_gro(strucC,data_file):
    """
    Write gromacs structure file

    Positions and lattice vectors are converted from Angstroms to nm and
    written in the fixed-column .gro format. The atom index is reused as
    the residue index and wrapped back to 1 above 10000 to keep it inside
    the fixed-width %5d fields.

    Arguments:
        strucC (StructureContainer)
        data_file (str) output .gro file name
    """
    #
    latvec = strucC.getLatVec()
    gro_lines = " com2gro \n"
    gro_lines += " %-2i \n" %( int(len(strucC.ptclC)) )
    atom_indx = 0
    for pid, pt_i in strucC.ptclC:
        atom_indx += 1
        if( atom_indx > 10000): atom_indx = 1
        r_i = pt_i.position
        r_i_nm = [units.convert_angstroms_nm(r_i[0]) ,units.convert_angstroms_nm(r_i[1]) ,units.convert_angstroms_nm(r_i[2]) ]
        gro_lines += "%5d%-5s%5s%5d%8.3f%8.3f%8.3f\n" % (atom_indx,pt_i.tagsDict["resname"][:5],pt_i.tagsDict["label"][:5],atom_indx,r_i_nm[0],r_i_nm[1],r_i_nm[2] )
        # NOTE(review): unreachable -- atom_indx was already wrapped at 10000 above
        if( atom_indx > 99999 ):
            atom_indx = 1
    # Box line: v1(x) v2(y) v3(z) then the off-diagonal components
    gro_lines += " %8.5f %8.5f %8.5f %8.5f %8.5f %8.5f %8.5f %8.5f %8.5f \n" % (units.convert_angstroms_nm(latvec[0][0]),units.convert_angstroms_nm(latvec[1][1]),units.convert_angstroms_nm(latvec[2][2]),units.convert_angstroms_nm(latvec[0][1]),units.convert_angstroms_nm(latvec[0][2]),units.convert_angstroms_nm(latvec[1][0]),units.convert_angstroms_nm(latvec[1][2]),units.convert_angstroms_nm(latvec[2][0]),units.convert_angstroms_nm(latvec[2][1]))
    F = open( data_file, 'w' )
    F.write(gro_lines)
    F.close()
def sigma_m(N,ave,ave_sq):
    """
    Calculate the standard deviation and the standard deviation of the
    mean for a confidence interval of 90%.

    Arguments:
        N      (int)   number of samples
        ave    (float) mean of the samples <x>
        ave_sq (float) mean of the squared samples <x^2>
    Returns:
        sigma      (float) standard deviation sqrt(<x^2> - <x>^2)
        sigma_mean (float) Student's-t scaled standard deviation of the mean
    Both values are zero for single-valued (N <= 1) data sets.
    """
    import numpy
    # Student's t critical values (one-sided, 90%):
    # http://mathworld.wolfram.com/Studentst-Distribution.html
    # http://www.itl.nist.gov/div898/handbook/eda/section3/eda3672.htm
    #
    v = N - 1 # Degrees of freedom
    # Set appropriate Students t prefactor
    if( v > 30 ):
        Coefint_pre = 1.66023
    elif( v > 10 ):
        Coefint_pre = 1.69726
    elif( v > 5 ):
        Coefint_pre = 1.81246
    elif( v == 5 ):
        Coefint_pre = 2.01505
    elif( v == 4 ):
        Coefint_pre = 2.13185
    elif( v == 3 ):
        Coefint_pre = 2.35336
    elif( v == 2 ):
        Coefint_pre = 2.91999
    elif( v == 1 ):
        Coefint_pre = 6.31375
    if( N > 1 ):
        v_sqrt = numpy.sqrt( N - 1 )
        # Clamp tiny negative variances caused by floating-point round-off;
        # the unclamped sqrt returned NaN whenever <x^2> - <x>^2 dipped below 0.
        variance = ave_sq - ave*ave
        sigma = numpy.sqrt( max( variance, 0.0 ) ) # Standard deviation
        sigma_mean = Coefint_pre*sigma/v_sqrt
    else:
        sigma = 0.0      # Set zero for unknown error
        sigma_mean = 0.0 # Set zero for unknown error
    return sigma,sigma_mean
def asphericity(Rnm_eg):
    """
    Asphericity of a radius-of-gyration tensor computed from its three
    eigenvalues; zero for a perfectly spherical mass distribution.
    """
    l0, l1, l2 = Rnm_eg[0], Rnm_eg[1], Rnm_eg[2]
    spread = (l0 - l2)**2 + (l1 - l2)**2 + (l0 - l1)**2
    norm = 2*(l0 + l1 + l2)**2
    return spread/norm
def splitNumber (num):
    """
    Split a non-negative integer into its big-endian list of bytes.

    Returns:
        list of ints in [0, 255], most significant byte first.
        splitNumber(0) returns [0]; the previous implementation returned
        an empty list, silently dropping zero bytes from binary output.
    """
    if num == 0:
        # A zero value still occupies one byte
        return [0]
    byte_list = []
    while num > 0:
        byte_list.append(num & 0xFF)
        num >>= 8
    return byte_list[::-1]
def main():
"""
Calculate voids
"""
prop_dim = 3
# Load periodic table
pt = periodictable()
prtCl_time = False
debug = False
debug2 = False
#
# Formated ouput varables
#
sperator_line = "\n---------------------------------------------------------------------"
# Initialize mpi
p = mpiBase.getMPIObject()
# MPI setup
rank = p.getRank()
size = p.getCommSize()
options, args = get_options()
if( options.group_residues ):
options.group_chains = False
if( rank == 0 ):
# record initial time
t_i = datetime.datetime.now()
# Open log files
log_file = options.output_id + ".log"
log_out = open(log_file,"w")
time_series = open(str("%s.time"%(options.output_id)),"w")
time_series.write("# Averages per frame of all considered groups ")
time_series.write("\n # frame ,r_sq_gy_t,Rgyeigval_sq_gy_t[0],Rgyeigval_sq_gy_t[1],Rgyeigval_sq_gy_t[2],asphere_t ")
p.barrier()
calc_void = True
if( calc_void ):
n_gird_poits = options.void_grid_n*options.void_grid_n*options.void_grid_n
log_line = " Grid has a length %d "%options.void_grid_n
log_line = " Grid has %d points "%n_gird_poits
if( rank == 0 and options.verbose ):
print log_line
try:
void_grid = np.zeros([options.void_grid_n,options.void_grid_n,options.void_grid_n],dtype=numpy.int8)
except MemoryError:
if( rank == 0 ):
mem_GB = n_gird_poits/1e9
error_line = " Size of array too large "
error_line += "\n Estimated memory usage %f GB "%(mem_GB)
print error_line
sys.exit("")
# void_grid_proc = np.zeros([options.void_grid_n,options.void_grid_n,options.void_grid_n],dtype=numpy.int)
p.barrier()
#
# Initialize blank system
#
struc_o = StructureContainer()
param_o = ParameterContainer()
if( rank == 0 ):
log_line = sperator_line
log_line += "\n Reading in input files "
log_line += sperator_line
log_out.write(log_line)
print log_line
cply_read = False
if( len(options.in_cply) > 0 ):
if( rank == 0 and options.verbose ):
print " Reading cply reference {} ".format(options.in_cply)
struc_o.read_cply(options.in_cply)
cply_read = True
if( len(options.in_data) ):
if( rank == 0 and options.verbose ): print " Reading LAMMPS data file {} ".format(options.in_data)
read_lmpdata( struc_o , param_o , options.in_data, coordupdate=cply_read)
if( len(options.in_gro) ):
if( rank == 0 and options.verbose ): print " Reading GROMACS gro file {} ".format(options.in_gro)
read_gro(struc_o,options.in_gro, coordupdate=cply_read)
else:
options.in_gro = "{}.gro".format(options.output_id)
if( rank == 0 ):
if( options.verbose ):
print " No .gro file read in will print a {} for md analysis read in ".format(options.in_gro)
write_gro(struc_o,options.in_gro)
if( len( struc_o.ptclC) == 0 ):
error_line = " No input "
sys.exit(error_line)
# Build nieghbor list
rerun_bonded = False
if( len(struc_o.bondC) <= 0 or rerun_bonded ):
struc_o.bondC.clear()
struc_o.ptclC.guess_radii()
struc_o.build_bonded_nblist(max_nn=12.0,radii_buffer=1.25)
struc_o.nblist_bonds()
else:
struc_o.bondC_nblist()
# Print system properties
if( rank == 0 ):
print struc_o
log_out.write(str(struc_o))
#
# Filter particles
#
ptclC_o = struc_o.ptclC
search_o = dict()
search_o = create_search(search_o,options.symbol,options.label,options.fftype,options.residue,options.resname,options.chain,options.ring)
if( rank == 0 ):
if( options.verbose ): print " Filter input by ",search_o
list_f = ptclC_o.getParticlesWithTags(search_o)
sum_f = len(list_f)
#
# If multi-core split the number of angles onto each core
#
split_list = True
if( split_list ):
myChunk_i = p.splitListOnProcs(list_f)
#
log_line = "Processor %d has %d particles "%(rank,len(list_f))
# log_out.write(log_line)
if( options.verbose ):
print log_line
else:
myChunk_i = list_f
if( rank == 0 and options.verbose ):
print " %d particles found in search "%(sum_f)
#
# Read in trajectory
#
traj_read = True
if( len(options.in_xtc ) and len(options.in_gro) ):
if( rank == 0 ):
if( options.verbose ): print " Reading %s and %s "%(options.in_xtc,options.in_gro)
universe = Universe(options.in_gro, options.in_xtc)
elif( len(options.in_dcd ) and len(options.in_gro) ):
if( rank == 0 ):
if( options.verbose ): print " Reading %s and %s "%(options.in_gro,options.in_dcd)
universe = Universe( options.in_gro , options.in_dcd)
elif(len(options.in_gro) ):
if( rank == 0 ):
if( options.verbose ): print " Reading %s "%(options.in_gro)
universe = Universe(options.in_gro)
else:
traj_read = False
error_line = "No Trajectory read in "
#print error_line
sys.exit(error_line)
p.barrier()
if( traj_read ):
n_frames = len(universe.trajectory)
if( options.verbose ):
print " Trajector read in with %d frames "%(n_frames)
# Set all resnum to -1 so they wont be selected by default
for pid_i, ptcCl in struc_o.ptclC:
universe.atoms[pid_i-1].resnum = -1
# Get paticle and bond structures
ptclC_o = struc_o.ptclC
bondC_o = struc_o.bondC
if( traj_read ):
resname_sel = str("resname * ")
un = universe.selectAtoms(resname_sel)
coord = un.coordinates()
# Save current directory as home
home_dir = os.getcwd()
calc_frames = 0
volume_frames = []
density_frames = []
# LV = struc_o.getLatVec()
# Loop over frames
# Allocate distance matrix
# dist = numpy.zeros((n_i,n_j), dtype=numpy.float64)
p.barrier()
if( traj_read ):
for ts in universe.trajectory:
if( options.frame_o <= ts.frame ):
if( ts.frame <= options.frame_f or options.readall_f ):
if( ts.frame%options.frame_step == 0 ):
calc_frames += 1
volume_frames.append( ts.volume )
box = ts.dimensions
LV = struc_o.LCtoLV( box )
density_frames.append(struc_o.getDensity() )
n_dim = 3
void_grid_l = np.zeros(n_dim)
void_grid_l[0] = box[0]/float(options.void_grid_n)
void_grid_l[1] = box[1]/float(options.void_grid_n)
void_grid_l[2] = box[2]/float(options.void_grid_n)
log_line = " Reading frame %d with grid size %f %f %f "%( ts.frame,void_grid_l[0],void_grid_l[1],void_grid_l[2])
log_line += "\n Box %f %f %f "%( box[0], box[1], box[2])
print log_line
coor_i = un.coordinates()
debugpos = False
if( debugpos ) :
print "box[0]",box[0]
print "box[1]",box[1]
print "box[2]",box[2]
#sys.exit("debugpos 1 ")
pid_cnt = 0
for pid_i in myChunk_i: #, ptcCl in struc_o.ptclC:
grid_p_found = False
pid_cnt += 1
r_i = coor_i[pid_i-1]
if( debugpos ) :
print " R ",r_i
# grid_point = get_grid_point(r_i,void_grid_l,box)
grid_point = np.zeros(n_dim)
for d in range(n_dim):
# reflect r_i into first quadrent
if( r_i[d] < 0.0 ): r_i[d] += box[d]
if( r_i[d] > box[d] ): r_i[d] += -1.0*box[d]
# grid_point[d] = int(round(r_i[d]/void_grid_l[d],0)) - 1
grid_point[d] = math.floor(r_i[d]/void_grid_l[d])
if( debugpos ) :
print " d ",d,r_i[d],void_grid_l[d],grid_point[d]
if( debugpos ) :
print " grid ",grid_point
#void_grid_proc[grid_point[0],grid_point[1],grid_point[2]] = 1
void_grid[grid_point[0],grid_point[1],grid_point[2]] = 1
log_line = " particle %d/%d percent %f "%(pid_cnt,len(myChunk_i),100.0*pid_cnt/len(myChunk_i))
# log_out.write(log_line)
if( rank == 0 and options.verbose ):
print log_line
if( options.add_cov_radii or options.add_vdw_radii or options.add_fix_radii or options.add_gl):
# add_radi = 0.0
if( options.add_cov_radii ):
el = pt.getelementWithSymbol(struc_o.ptclC[pid_i].tagsDict["symbol"])
add_radi = el.cov_radii
elif( options.add_vdw_radii ):
el = pt.getelementWithSymbol(struc_o.ptclC[pid_i].tagsDict["symbol"])
add_radi = el.vdw_radii
elif( options.add_fix_radii ):
add_radi = options.fix_radii
# Find number of grid points to extend to
# @ * * * * * |
# r_i (n) add_radi
grid_debug = False
add_n = np.zeros(n_dim,dtype=int)
add_radi_sq = 0.0
mult_list = [] #np.zeros(n_dim,dtype=int)
for d in range(n_dim):
add_l = add_radi
if( options.add_gl ): add_l += void_grid_l[d]
add_n[d] = int(math.floor(add_l/void_grid_l[d]))
mult_list.append( list( i for i in range(-add_n[d],add_n[d]+1 ) ) )
add_radi_sq += add_l*add_l
if( grid_debug ):
print " r_i ",r_i
print " void_grid_l ",void_grid_l
print " add_radi ",add_radi
print " add_n ",add_n
print " mult_list ",mult_list
print " grid_point ",grid_point
r_i_plus = np.zeros(n_dim)
for x_mult in mult_list[0]:
dr_iplus_x = x_mult*void_grid_l[0]
r_i_plus[0] = r_i[0] + dr_iplus_x
#if( abs(r_i_plus[0] - r_i[0] ) < add_radi ):
for y_mult in mult_list[1]:
dr_iplus_y = y_mult*void_grid_l[1]
r_i_plus[1] = r_i[1] + dr_iplus_y
#if( abs(r_i_plus[1] - r_i[1] ) < add_radi ):
for z_mult in mult_list[2]:
dr_iplus_z = z_mult*void_grid_l[2]
r_i_plus[2] = r_i[2] + dr_iplus_z
#if( abs(r_i_plus[2] - r_i[2] ) < add_radi ):
dr_iplus_sq = dr_iplus_x*dr_iplus_x + dr_iplus_y*dr_iplus_y + dr_iplus_z*dr_iplus_z
if( dr_iplus_sq <= add_radi_sq ):
grid_point = np.zeros(n_dim)
for d in range(n_dim):
# reflect r_i into first quadrent
if( r_i_plus[d] < 0.0 ): r_i_plus[d] += box[d]
if( r_i_plus[d] > box[d] ): r_i_plus[d] += -1.0*box[d]
# grid_point[d] = int(round(r_i[d]/void_grid_l[d],0)) - 1
grid_point[d] = math.floor(r_i_plus[d]/void_grid_l[d])
if( grid_point[d] >= options.void_grid_n ): grid_point[d] = grid_point[d] - options.void_grid_n
# void_grid_proc[grid_point[0],grid_point[1],grid_point[2]] = 1
void_grid[grid_point[0],grid_point[1],grid_point[2]] = 1
log_line = " extended point by %f A factors %f %f %f position %f %f %f "%(add_radi,x_mult,y_mult,x_mult,r_i_plus[0],r_i_plus[1],r_i_plus[2])
#log_out.write("\n"+log_line)
#if( grid_debug ):
# print log_line
if( grid_debug ):
print " r_i_plus ",r_i_plus, " ",x_mult,y_mult,z_mult
dr = np.zeros(n_dim)
for d in range(n_dim):
dr[d] = r_i_plus[d] - r_i[d]
print " dr ",dr," grid_point ",grid_point
print " dr2 ",math.sqrt(dr_iplus_sq)," grid_point ",grid_point
if( grid_debug ):
sys.exit("grid_debug")
"""
for x_indx in range(options.void_grid_n):
for y_indx in range(options.void_grid_n):
for z_indx in range(options.void_grid_n):
grid_point_x = float(x_indx)*void_grid_l_x
grid_point_y = float(y_indx)*void_grid_l_y
grid_point_z = float(z_indx)*void_grid_l_z
grid_point = np.array([grid_point_x,grid_point_y,grid_point_z])
#ptcCl.position = coor_i[pid_i-1]
dr_gridp_sq = pbcs.delta_r_c(coor_i[pid_i-1],grid_point,LV)
if( np.abs(dr_gridp_sq[0]) <= grid_l_x ):
if( np.abs(dr_gridp_sq[1]) <= grid_l_y ):
if( np.abs(dr_gridp_sq[2]) <= grid_l_z ):
if( debugpos):
print " grid_point ",grid_point
print " ptcCl.position ", coor_i[pid_i-1]
print " dr_gridp_sq ",dr_gridp_sq
void_grid_proc[x_indx,y_indx,z_indx] = 1
grid_p_found = True
if( grid_p_found ):
if( debugpos):
print " grid point set to true exit loop"
if( rank == 0 ):
log_line = " Grid point %d %d %d is occupied" % (grid_point_x,grid_point_y,grid_point_z)
log_out.write("\n"+log_line)
if( options.verbose ):
print log_line
break
"""
if( rank == 0 ):
log_line = "Frame %4d with volume %f " % (ts.frame, ts.volume)
log_out.write("\n"+log_line)
if( options.verbose ):
print log_line
p.barrier() # Barrier for MPI_COMM_WORLD
if( rank == 0 ):
time_series.close()
mulit_proc = False
if( mulit_proc ):
for x_indx in range(options.void_grid_n):
for y_indx in range(options.void_grid_n):
for z_indx in range(options.void_grid_n):
void_grid[x_indx,y_indx,z_indx] = p.allReduceSum(void_grid_proc[x_indx,y_indx,z_indx])
if( options.verbose and rank == 0 ):
print " Finding averages "
#
# Find averages
#
debug = 0
w_fancy = True
if( rank == 0 ):
for d in range(n_dim):
print " B ",d,box[d]
gxmol_out = open(str("%s.xyz"%(options.output_id)),"w")
grid_line = " %d "%(n_gird_poits)
grid_line += "\n void grid "
gxmol_out.write(grid_line)
graw_out = open(str("%s.raw"%(options.output_id)),"w")
#binary_list = array.array(dtype=float)
binary_list = [] #array.array()
hs="5B7F888489FEDA"
graw_out = open("%s.raw"%(options.output_id),"w+b" )
n_points = 0
gb_out = open("%s.b"%(options.output_id),"w" )
for x_indx in range(options.void_grid_n):
for y_indx in range(options.void_grid_n):
for z_indx in range(options.void_grid_n):
grid_number = int(void_grid[x_indx,y_indx,z_indx])
gb_out.write(" %d %d %d %d \n"%(grid_number,x_indx,y_indx,z_indx ))
# grid_number.astype(numpy.int8)
binary_list.append( grid_number)
#grid_line = " %f "%( void_grid[x_indx,y_indx,z_indx] )
#graw_out.write(struct.pack(str(void_grid[x_indx,y_indx,z_indx])))
graw_out.write(bytes(splitNumber(grid_number)))
if( w_fancy ):
grid_point_x = float(x_indx )*void_grid_l[0] + void_grid_l[0]/2.0
grid_point_y = float(y_indx )*void_grid_l[1] + void_grid_l[1]/2.0
grid_point_z = float(z_indx )*void_grid_l[2] + void_grid_l[2]/2.0
grid_point = np.array([grid_point_x,grid_point_y,grid_point_z])
for d in range(n_dim):
if( grid_point[d] < box[d]/-2.0 ): grid_point[d] += box[d]
if( grid_point[d] > box[d]/2.0 ): grid_point[d] += -1.0*box[d]
p_type = "H"
if( void_grid[x_indx,y_indx,z_indx] > 0 ):
# print " occupied ",x_indx,y_indx,z_indx
p_type = "Ar"
n_points += 1
grid_line = " %s %f %f %f "%(p_type,grid_point[0],grid_point[1],grid_point[2])
gxmol_out.write("\n"+grid_line)
#bytearray(b'{\x03\xff\x00d')
b_list = array.array('f',binary_list)
gbin_out = open(str("%s.bin"%(options.output_id)),"w")
b_list.tofile(gbin_out) # must pass an *actual* file
gbin_out.flush()
gb_out.close()
graw_out.close()
gxmol_out.close()
"""
with open("%sv2.raw"%(options.output_id),"w+b" ) as graw_out2:
graw_out2.write((''.join(chr(i) for i in binary_list)).encode('ascii'))
graw_out2.close()
with open("%sv3.raw"%(options.output_id),"w+b" ) as graw_out2:
graw_out2.write(bytes(binary_list))
graw_out2.close()
with open("%sv3.raw"%(options.output_id),"w+b" ) as graw_out2:
graw_out2.write(bytearray(binary_list))
graw_out2.close()
with open("%sv4.raw"%(options.output_id),"w+b" ) as graw_out2:
for number in binary_list:
graw_out2.write(bytes(splitNumber(number)))
graw_out2.close()
"""
log_line = " Number of points found %d out of %d "%(n_points,n_gird_poits)
log_out.write("\n"+log_line)
if( options.verbose ):
print log_line
log_out.close()
if __name__=="__main__":
main()
|
19,622 | 48a3a2d5ba353f9d438cd771752edaa21241fc55 | import torch.nn as nn
from torch.autograd import Variable
import torch.optim as optim
import numpy as np
import io, os
from torch.utils.data import Dataset, DataLoader
import pickle
from IPython import embed
from tensorboardX import SummaryWriter
import argparse
import random
import torch
from torch.autograd import Variable
import h5py
from torchvision import datasets, models, transforms
import math
import shutil
import matplotlib.pyplot as plt
import seaborn as sn
import pandas as pd
import librosa
import librosa.display
import cv2
import random
from scipy.io import wavfile
from sklearn.metrics import confusion_matrix
# from plot_confusion_matrix import make_confusion_matrix
######################## Helper functions ######################
class sample_data(Dataset):
    """Dataset over pairs of detected clicks (used in test mode only).

    ``data_in`` is indexed as a 2-D array; per row:
      * col 0 / col 4: path of the audio file containing click 1 / click 2
      * col 2 / col 6: whale label of click 1 / click 2
      * col 3 / col 7: time of click 1 / click 2 within its file
    ``data_ord`` is stored but never read by this class.
    """
    def __init__(self, data_in,data_ord):
        self.data_in = data_in
        self.data_ord = data_ord  # unused here; kept for interface compatibility
    def __len__(self):
        return len(self.data_in)
    def __getitem__(self, idx):
        ## only for test mode
        # Unpack file paths, labels and timestamps for the two clicks of this pair.
        audio_dir_1, label_1 = self.data_in[idx, 0], self.data_in[idx, 2]
        audio_dir_2, label_2 = self.data_in[idx, 4], self.data_in[idx, 6]
        time_1 = float(self.data_in[idx, 3])
        time_2 = float(self.data_in[idx, 7])
        audio1, sr = librosa.load(audio_dir_1, mono=False)
        # find time of click's peak?
        # Peak search is restricted to the hard-coded sample window [10925, 11035)
        # of channel 1 -- presumably where the upstream detector centres the
        # click; TODO confirm against the detection pipeline.
        start_1 = 10925 + np.argmax(abs(audio1[1 , 10925 : 11035])) # why dim 1 and not 0?
        audio2, sr = librosa.load(audio_dir_2, mono=False)
        start_2 = 10925 + np.argmax(abs(audio2[1 , 10925 : 11035]))
        # NOTE(review): the windows look swapped -- audio1 is sliced at start_2
        # (peak found in audio2) and audio2 at start_1. Confirm this is
        # intentional (e.g. matches how the separator was trained).
        audio = np.concatenate((audio1[:, start_2 : start_2 + 300], audio2[:, start_1 : start_1 +300]), axis=1)
        # Binary target: 1 when both clicks carry the same whale label.
        if int(label_1) == int(label_2):
            label = 1
        else:
            label = 0
        ## return audio, label, click_1_file_dir, click_1_time, click_2_file_dir, click_2_time
        return (audio, label, audio_dir_1, time_1, audio_dir_2, time_2)
###### Model #################################
class SoundNet(nn.Module):
    """Waveform CNN feature extractor (SoundNet-style stack of 2-D convs with
    (k, 1) kernels sliding over the time axis).

    NOTE: all eight conv stages plus the two output heads are constructed in
    __init__, but forward() only runs conv1..conv4 (with max-pooling after
    stages 1 and 2) and returns the flattened conv4 activations.
    conv5..conv8 and the heads are unused at runtime -- presumably kept so
    pretrained checkpoints load without key mismatches; confirm before removing.
    """
    def __init__(self):
        super(SoundNet, self).__init__()
        self.conv1 = nn.Conv2d(1, 16, kernel_size=(64, 1), stride=(2, 1),
                               padding=(32, 0))
        self.batchnorm1 = nn.BatchNorm2d(16, eps=1e-5, momentum=0.1)
        self.relu1 = nn.ReLU(True)
        self.maxpool1 = nn.MaxPool2d((8, 1), stride=(8, 1))
        self.conv2 = nn.Conv2d(16, 32, kernel_size=(32, 1), stride=(2, 1),
                               padding=(16, 0))
        self.batchnorm2 = nn.BatchNorm2d(32, eps=1e-5, momentum=0.1)
        self.relu2 = nn.ReLU(True)
        self.maxpool2 = nn.MaxPool2d((8, 1), stride=(8, 1))
        self.conv3 = nn.Conv2d(32, 64, kernel_size=(16, 1), stride=(2, 1),
                               padding=(8, 0))
        self.batchnorm3 = nn.BatchNorm2d(64, eps=1e-5, momentum=0.1)
        self.relu3 = nn.ReLU(True)
        self.conv4 = nn.Conv2d(64, 128, kernel_size=(8, 1), stride=(2, 1),
                               padding=(4, 0))
        self.batchnorm4 = nn.BatchNorm2d(128, eps=1e-5, momentum=0.1)
        self.relu4 = nn.ReLU(True)
        # Stages 5-8 and the two classification heads below are NOT called in
        # forward() -- see class docstring.
        self.conv5 = nn.Conv2d(128, 256, kernel_size=(4, 1), stride=(2, 1),
                               padding=(2, 0))
        self.batchnorm5 = nn.BatchNorm2d(256, eps=1e-5, momentum=0.1)
        self.relu5 = nn.ReLU(True)
        self.maxpool5 = nn.MaxPool2d((4, 1), stride=(4, 1))
        self.conv6 = nn.Conv2d(256, 512, kernel_size=(4, 1), stride=(2, 1),
                               padding=(2, 0))
        self.batchnorm6 = nn.BatchNorm2d(512, eps=1e-5, momentum=0.1)
        self.relu6 = nn.ReLU(True)
        self.conv7 = nn.Conv2d(512, 1024, kernel_size=(4, 1), stride=(2, 1),
                               padding=(2, 0))
        self.batchnorm7 = nn.BatchNorm2d(1024, eps=1e-5, momentum=0.1)
        self.relu7 = nn.ReLU(True)
        self.conv8_objs = nn.Conv2d(1024, 1000, kernel_size=(8, 1),
                                    stride=(2, 1))
        self.conv8_scns = nn.Conv2d(1024, 401, kernel_size=(8, 1),
                                    stride=(2, 1))
    def forward(self, waveform):
        # (batch, channels, samples) -> (batch, 1, samples, channels): the
        # sample axis becomes the conv "height" that the (k, 1) kernels scan.
        x = self.conv1(waveform.unsqueeze(1).permute(0,1,3,2))
        x = self.batchnorm1(x)
        x = self.relu1(x)
        x = self.maxpool1(x)
        x = self.conv2(x)
        x = self.batchnorm2(x)
        x = self.relu2(x)
        x = self.maxpool2(x)
        x = self.conv3(x)
        x = self.batchnorm3(x)
        x = self.relu3(x)
        x = self.conv4(x)
        x = self.batchnorm4(x)
        x = self.relu4(x)
        # Flatten to one feature vector per example.
        x = x.reshape(x.shape[0],-1)
        return x
class value_net(nn.Module):
    """Linear classification head: maps a 512-d feature vector to 2 logits.

    The ``symmetric`` constructor flag is accepted for interface
    compatibility but has no effect.
    """

    def __init__(self, symmetric=True):
        super(value_net, self).__init__()
        self.linear = nn.Linear(512, 2)

    def forward(self, input_audio):
        return self.linear(input_audio)
############################### Main method: click separator in test mode ######################
def run_click_separator_test_mode(audio_rootname, sep_model_version, sep_model_load_dir, exp_name, det_model_version,
                                  start, end):
    '''
    Run click separator model (in test mode) to get same/diff whale predictions for all pairs of clicks in specified window of audio file 'audio_rootname'
    - sep_model_version: click separator version name, to be used in naming directory to save predictions
    - sep_model_load_dir: directory from which to load trained click separator model version
    - exp_name: experiment name, not important.
    - det_model_version: click detector version used earlier in the pipeline
    - start (int): start time of window (in sec)
    - end (int): end time of window (in sec)

    Effect: saves all-pairs predictions in batches (usually only 1 batch) in pickle files under:
    '/data/vision/torralba/scratch/ioannis/clustering/detections_click_sep_preds/'
        + det_model_version + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '/'
    (the candidate pairs are READ from custom_test_pick_preds/...)
    '''
    ############ Admin work (directories) ###################################################
    if not os.path.exists('./ckpts'):
        os.makedirs('./ckpts')
    if not os.path.exists(os.path.join('./ckpts', exp_name)):
        os.makedirs(os.path.join('./ckpts',exp_name))

    ###### Dataset Loading and Splitting ##########
    data_directory = '/data/vision/torralba/scratch/ioannis/clustering/click_separator_training/correct_data_same_click_diff_click_correct_times.p'
    # Use context managers so the pickle file handles are not leaked.
    with open(data_directory, "rb") as f:
        total_data = pickle.load(f)
    data_ordered_dir = '/data/vision/torralba/scratch/ioannis/clustering/click_separator_training/file_ordered_correct_times.p'
    # BUG FIX: this previously re-loaded data_directory, so file_ordered was a
    # duplicate of total_data instead of the file-ordering index.
    with open(data_ordered_dir, "rb") as f:
        file_ordered = pickle.load(f)
    #######################################################################################################

    print('------Running click separator on detected clicks------\n')
    print('Clicks: ', start, '-', end-1, '\n')

    main_dir = '/data/vision/torralba/scratch/ioannis/clustering/'
    # All-pairs click windows produced earlier by the click detector.
    test_pick = main_dir + 'custom_test_pick_preds/' + det_model_version + '/' + audio_rootname + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '.p'
    with open(test_pick, "rb") as f:
        audio_recordings_test = pickle.load(f)

    # Per-batch prediction pickles are written here.
    preds_save_dir = main_dir + 'detections_click_sep_preds/' + det_model_version + '/' + audio_rootname + '_clicks_' + str(start) + '_' + str(end) + '/'
    if not os.path.exists(preds_save_dir):
        os.makedirs(preds_save_dir)
    ############ End of admin work (directories) ###################################################

    # Fixed seeds for reproducibility.
    np.random.seed(0)
    torch.manual_seed(0)

    # Feature extractor + linear head, restored from the trained checkpoint.
    seq = SoundNet()
    seq.cuda()
    valnet = value_net()
    valnet.cuda()

    test_dataset = sample_data(audio_recordings_test, file_ordered)
    print('test dataset length: ', len(test_dataset))
    # Single batch containing the whole dataset; the DataLoader is only used
    # for its worker processes.
    test_dataloader = DataLoader(test_dataset, batch_size = len(test_dataset),
                        shuffle = False, num_workers = 20)

    checkpoint = torch.load(sep_model_load_dir)
    seq.load_state_dict(checkpoint['state_dict'])
    valnet.load_state_dict(checkpoint['state_dict_valnet'])
    seq.eval()
    valnet.eval()

    for i_batch, sample_batched in enumerate(test_dataloader):
        print(i_batch)
        audio = sample_batched[0].type(torch.cuda.FloatTensor)
        label = sample_batched[1].type(torch.cuda.FloatTensor)
        click_1_file_dir, click_1_time, click_2_file_dir, click_2_time = sample_batched[2:]

        out = valnet(seq(audio))

        # Move predictions to host memory; argmax class 1 == "same whale".
        out = out.cpu().data.numpy()
        labels_out = np.argmax(out,axis = 1)
        label = label.cpu().data.numpy()

        # One row per click pair:
        # (file1, time1, file2, time2, predicted label, ground-truth label)
        preds = np.array([list(click_1_file_dir), list(click_1_time),
                          list(click_2_file_dir), list(click_2_time),
                          labels_out, label], dtype=object)
        preds = preds.T
        print('predictions np array shape: ', preds.shape)
        with open(preds_save_dir + 'batch_' + str(i_batch) + '.p', "wb") as f:
            pickle.dump(preds, f)

        # Per-batch accuracy / confusion statistics (diagnostics only).
        cf_matrix_test = confusion_matrix(label, labels_out)
        acc = 0
        tp, fp, fn, tn = 0, 0, 0, 0
        for i in range(labels_out.shape[0]):
            if labels_out[i] == label[i]:
                acc += 1
            if labels_out[i] == 1 and label[i] == 1:
                tp += 1
            if labels_out[i] == 0 and label[i] == 0:
                tn += 1
            if labels_out[i] == 1 and label[i] == 0:
                fp += 1
            if labels_out[i] == 0 and label[i] == 1:
                fn += 1
        print('accuracy: ', acc / labels_out.shape[0])
        print("Number of pairs same whale: ", np.sum(label))
        print("Percentage of same whale: ", np.sum(label) / len(label) * 100)
        print('TP: ', tp)
        print('TN: ', tn)
        print('FP: ', fp)
        print('FN: ', fn)
        print ('Confusion Matrix :')
        print(cf_matrix_test)
|
19,623 | b8450178b77d28517e88e7c568e62d2148ba34a8 | import json
import numpy as np
import fire
def evaluation_stats(results_path=r'./evaluation_results/final/test_results__group-basic_cartpole.json'):
    """Print the mean score per entry of an evaluation-results JSON file.

    Args:
        results_path: path to a JSON file mapping result names to lists of
            scores (one score per evaluated run).
    """
    with open(results_path, mode="r") as f:
        results = json.load(f)

    # @plelievre : is this the right way to compute the mean?
    # Use the value yielded by items() directly instead of re-indexing the dict.
    mean_scores = {key: np.mean(value) for key, value in results.items()}
    print(mean_scores)
# Expose evaluation_stats as a command-line interface via python-fire.
if __name__ == "__main__":
    fire.Fire(evaluation_stats)
|
19,624 | 3daa6c452a9c05c88e04b6e247c541137083d4df | #
# PySwarm, a swarming simulation tool for Autodesk Maya
#
# created 2013-2014
#
# @author: Joe Muers (joemuers@hotmail.com)
#
# All rights reserved.
#
# ------------------------------------------------------------
from abc import ABCMeta, abstractmethod
from ConfigParser import NoSectionError
import weakref
from pyswarm.pyswarmObject import PyswarmObject
import pyswarm.utils.general as util
import pyswarm.ui.uiBuilder as uib
import pyswarm.attributes.attributeTypes as at
########################################
class AttributeGroupListener(object):
    """Interface for objects that want to be notified when an attribute in an
    attribute group changes value (see AttributeGroupObject.addListener).
    """
    __metaclass__ = ABCMeta
    
    @abstractmethod
    def onAttributeChanged(self, sectionObject, attributeName):
        """Called with the owning attribute group and the label of the changed attribute."""
        # was "raise NotImplemented": NotImplemented is a sentinel value, not an
        # exception class, so raising it produced a confusing TypeError.
        raise NotImplementedError
#END OF CLASS - AttributeGroupListener
########################################
########################################
class _DataBlobBaseObject(object):
    """Base class for per-agent data blobs.

    Exposes the owning agent's ID as a read-only property and makes blobs
    compare, order and hash by that ID alone.
    """

    def __init__(self, agent):
        self._agentId = agent.agentId
        self.onUnassignedCallback = None

    def _getAgentId(self):
        return self._agentId
    agentId = property(_getAgentId)

    def onUnassigned(self):
        # Tell the owner (when one is registered) that this blob is free again.
        callback = self.onUnassignedCallback
        if callback is not None:
            callback(self._agentId)

    def __eq__(self, other):
        if other is None:
            return False
        return self._agentId == other._agentId

    def __ne__(self, other):
        if other is None:
            return True
        return self._agentId != other._agentId

    def __lt__(self, other):
        return self._agentId < other._agentId

    def __gt__(self, other):
        return self._agentId > other._agentId

    def __hash__(self):
        return hash(self._agentId)
#END OF CLASS - _DataBlobBaseObject
########################################
########################################
class _FollowOnBehaviourAttributeInterface(object):
    """Mixin adding a "follow-on behaviour" drop-down attribute.

    Lets the user pick which behaviour agents switch to after the current one
    finishes. Consuming classes must provide ``self.behaviourId`` (e.g.
    AttributeGroupObject does) -- this mixin reads it but does not define it.
    """
    
    def __init__(self, *args, **kwargs):
        super(_FollowOnBehaviourAttributeInterface, self).__init__(*args, **kwargs)
        
        self._defaultBehaviourID = "<None>"
        self._followOnBehaviourIDs = [self._defaultBehaviourID]
        # UI handles, populated by _makeFollowOnBehaviourOptionGroup.
        self._followOnBehaviourMenu = None
        self._followOnBehaviourMenuItems = None
        
        self._followOnBehaviour = at.StringAttribute("Follow-On Behaviour", self._defaultBehaviourID)
        self._followOnBehaviour.excludeFromDefaults = True
        
#####################        
    def _makeFollowOnBehaviourOptionGroup(self, annotation=None):
        # Build the drop-down and keep its handles so it can be rebuilt later.
        cmdTuple = uib.MakeStringOptionsField(self._followOnBehaviour, self._followOnBehaviourIDs, annotation=annotation)
        self._followOnBehaviourMenu, self._followOnBehaviourMenuItems = cmdTuple
 
#####################   
    def _updateFollowOnBehaviourOptions(self, behaviourIDsList, defaultBehaviourId):
        """Rebuild the drop-down after behaviours were added or deleted,
        keeping the current selection when it is still a valid candidate."""
        self._defaultBehaviourID = defaultBehaviourId
        # A behaviour cannot follow on from itself.
        self._followOnBehaviourIDs = filter(lambda nm: nm != self.behaviourId, behaviourIDsList)
        
        if(self._followOnBehaviourMenu is not None):
            # Clear the old menu entries before repopulating.
            while(self._followOnBehaviourMenuItems):
                uib.DeleteComponent(self._followOnBehaviourMenuItems.pop())
            uib.SetParentMenuLayout(self._followOnBehaviourMenu)
            
            if(self._followOnBehaviourIDs):
                for behaviourID in self._followOnBehaviourIDs:
                    self._followOnBehaviourMenuItems.append(uib.MakeMenuSubItem(behaviourID))
                    
                if(self._followOnBehaviour.value not in self._followOnBehaviourIDs):
                    # Previous selection vanished -- fall back to the only
                    # candidate, or to the default behaviour.
                    if(len(self._followOnBehaviourIDs) == 1 or self.behaviourId == defaultBehaviourId):
                        self._followOnBehaviour.value = self._followOnBehaviourIDs[0]
                    else:
                        self._followOnBehaviour.value = self._defaultBehaviourID
                else:
                    self._followOnBehaviour._updateInputUiComponents()
            else:
                # No candidates remain at all -- show a placeholder entry.
                self._followOnBehaviour._value = None
                self._followOnBehaviourMenuItems.append(uib.MakeMenuSubItem("<None>"))
                
                util.EvalDeferred(util.LogWarning, # deferred eval so that warning is the final log message in console output
                                  ("All follow-on behaviour candidates for \"%s\" deleted." % self.behaviourId))
        elif(self._followOnBehaviour.value not in self._followOnBehaviourIDs):
            # No menu built yet -- just fix up the stored value.
            if(defaultBehaviourId != self.behaviourId):
                self._followOnBehaviour.value = self._defaultBehaviourID
            else:
                self._followOnBehaviour.value = None
# END OF CLASS - FollowOnBehaviourAtrributeInterface
########################################
########################################
class AttributeGroupObject(PyswarmObject, at.SingleAttributeDelegate):
    """Base class grouping the attributes that drive one behaviour.

    Responsibilities:
      * owns one "data blob" per assigned agent and keeps each blob in sync
        with the current attribute values;
      * reads/writes default attribute values via a ConfigParser-style
        reader/writer;
      * notifies registered AttributeGroupListeners (held as weak references)
        whenever an attribute value changes.
    """
    
    __metaclass__ = ABCMeta
    
#####################
    @classmethod
    def BehaviourTypeName(cls):
        """Return the default title for the subclass (abstract)."""
        # was "raise NotImplemented(...)": NotImplemented is a sentinel, not an
        # exception class, so the old code raised TypeError instead (the
        # unreachable "return" that followed has been dropped).
        raise NotImplementedError("Method should return default title for each subclass.")
    
#####################
    def __init__(self, behaviourId):
        super(AttributeGroupObject, self).__init__()
        
        self._behaviourId = behaviourId
        self._dataBlobs = {}             # agentId -> currently assigned blob
        self._dataBlobRepository = {}    # agentId -> previously assigned blob, parked for reuse
        self._listeners = set()          # weakrefs to AttributeGroupListener instances
        self._inBulkUpdate = False       # True while reading defaults; suppresses notifications
 
#####################
    def __str__(self):
        return ("Attribute set:%s" % self.behaviourId)
    
########
    def _getDebugStr(self):
        agentsString = ', '.join([("%d" % blob.agentId) for blob in sorted(self._dataBlobs.itervalues())])
        attributesString = ', '.join([("%s=%s" % (attribute.attributeLabel, attribute.value)) 
                                      for attribute in self._allAttributes()])
        # was "% agentsString, attributesString": the format string takes two
        # arguments, so the old expression raised TypeError when evaluated.
        return ("<Assigned agents: %s\n\tAttributes: %s>" % (agentsString, attributesString))

#####################
    def __getstate__(self):
        state = super(AttributeGroupObject, self).__getstate__()
        # weakrefs cannot be pickled - substitute strong references for the dump.
        strongListenerRefs = [ref() for ref in self._listeners]
        state["_listeners"] = strongListenerRefs
        
        return state

########
    def __setstate__(self, state):
        super(AttributeGroupObject, self).__setstate__(state)
        # Re-wrap the unpickled strong references as weakrefs.
        self._listeners = set([weakref.ref(listener, self._removeDeadListenerReference) 
                               for listener in self._listeners])

#####################
    def _getBehaviourId(self):
        return self._behaviourId
    behaviourId = property(_getBehaviourId)

#####################
    @abstractmethod
    def populateUiLayout(self):
        """Build the UI controls for this attribute group (abstract)."""
        raise NotImplementedError

#####################
    @abstractmethod
    def _createDataBlobForAgent(self, agent):
        """Return a fresh data blob bound to the given agent (abstract)."""
        raise NotImplementedError

#########
    @abstractmethod
    def _updateDataBlobWithAttribute(self, dataBlob, attribute):
        """Push one attribute's current value into the given blob (abstract)."""
        raise NotImplementedError

#####################
    _ALLATTRIBUTES_RECURSIVE_CHECK_ = ["debugStr"] # list of attributes (i.e. property accessors) which also call 
    #                                              # "_allAttributes" - they must be skipped to avoid a recursive loop
    
    def _allAttributes(self):
        """Collect every _SingleAttributeBaseObject-valued property of self."""
        attributesList = []
        for attributeName in filter(lambda atNm: 
                                    atNm not in AttributeGroupObject._ALLATTRIBUTES_RECURSIVE_CHECK_, 
                                    dir(self)):
            try:
                attribute = getattr(self, attributeName)
                if(isinstance(attribute, at._SingleAttributeBaseObject)):
                    attributesList.append(attribute)
            except RuntimeError as e:
                # A property that recursed back into _allAttributes surfaces as
                # a RuntimeError - record it so the fix can be made explicit.
                if(attributeName not in AttributeGroupObject._ALLATTRIBUTES_RECURSIVE_CHECK_):
                    AttributeGroupObject._ALLATTRIBUTES_RECURSIVE_CHECK_.append(attributeName)
                    errorString = ("Found possible recursive loop for class property \"%s\" - properties \
which use the _allAttributes method should be added to the \"_ALLATTRIBUTES_RECURSIVE_CHECK_\" list. \
Recommend this is hard-coded rather than done at runtime." % attributeName)
                    raise RuntimeError(errorString)
                else:
                    raise e
                
        return attributesList
    
#####################
    def onFrameUpdated(self):
        """Called each time the Maya scene moves to a new frame.
        Implement in subclasses if any updates are needed.
        """
        pass

########
    def onCalculationsCompleted(self):
        """Called each time the swarm instance finishes calculating updates
        for the current frame.
        Override in subclasses if needed."""
        pass
 
#####################
    def onBehaviourListUpdated(self, behaviourIDsList, defaultBehaviourId):
        """Called whenever a behaviour is added or deleted.
        Implement in subclasses if required."""
        pass
 
#####################
    def getDataBlobForAgent(self, agent):
        """Return the data blob for the agent, recycling a previously parked
        blob for the same agent when one exists."""
        agentId = agent.agentId
        if(agentId in self._dataBlobs):
            raise RuntimeError("Re-requesting dataBlob which is already assigned")
        
        newBlob = self._dataBlobRepository.pop(agentId, None)
        if(newBlob is None):
            newBlob = self._createDataBlobForAgent(agent)
            newBlob.onUnassignedCallback = self._dataBlobUnassignedCallback
            # Bring the fresh blob up to date with the current attribute values.
            for attribute in self._allAttributes():
                self._updateDataBlobWithAttribute(newBlob, attribute)
            
        self._dataBlobs[agentId] = newBlob
        
        return newBlob

########
    def _dataBlobUnassignedCallback(self, agentId):
        # Park the blob in case the same agent is re-assigned later.
        self._dataBlobRepository[agentId] = self._dataBlobs.pop(agentId)

########
    def purgeDataBlobRepository(self):
        """Drop all parked (unassigned) data blobs."""
        # was "del self._dataBlobRepository[:]": slice deletion is a *list*
        # operation and raises TypeError on this dict - clear() is the intent.
        self._dataBlobRepository.clear()
 
#####################
    def getDefaultsFromConfigReader(self, configReader):
        """Read this group's default attribute values from configReader.

        Returns True when every non-excluded attribute was read successfully."""
        self._inBulkUpdate = True  # one notification at the end instead of per attribute
        
        sectionTitle = self.BehaviourTypeName()
        util.LogDebug("Reading default attribute values for section \"%s\"..." % sectionTitle)
        
        attributeLookup = {}
        for attribute in filter(lambda at: not at.excludeFromDefaults, self._allAttributes()):
            attributeLookup[attribute.attributeLabel] = attribute
            if(attribute.nestedAttribute is not None):
                attributeLookup[attribute.nestedAttribute.attributeLabel] = attribute.nestedAttribute
        
        attributeReadCount = 0
        try:
            for attributeLabel, attributeValueStr in configReader.items(sectionTitle):
                try:
                    attributeLookup[attributeLabel].value = attributeValueStr
                    attributeReadCount += 1
                except Exception as e:
                    util.LogWarning("Could not read attribute: \"%s\" (Error=%s), ignoring..." % (attributeLabel, e))
                else:
                    util.LogDebug("Parsed default attribute value: %s = %s" % (attributeLabel, attributeValueStr))
        except NoSectionError as e:
            util.LogWarning("Section \"%s\" not found." % sectionTitle)
        finally:
            # (the old code reset this flag a second time after the finally
            #  block - the duplicate was redundant and has been removed)
            self._inBulkUpdate = False
        
        self._notifyListeners(None)
        
        return (attributeReadCount == len(attributeLookup))

########
    def setDefaultsToConfigWriter(self, configWriter):
        """Write this group's current attribute values as the new defaults,
        replacing any previously saved section of the same name."""
        sectionTitle = self.BehaviourTypeName()
        util.LogDebug("Adding default values for section \"%s\"..." % sectionTitle)
        
        try:
            if(configWriter.has_section(sectionTitle)):
                configWriter.remove_section(sectionTitle)
                util.LogDebug("Overwriting previous values...")
                
            configWriter.add_section(sectionTitle)
        except Exception as e:
            util.LogWarning("ERROR - %s" % e)
        else:
            ####
            def _saveAttribute(configWriter, sectionTitle, attribute):
                # Hoisted out of the loop below - the definition is loop-invariant.
                try:
                    configWriter.set(sectionTitle, attribute.attributeLabel, attribute.value)
                    util.LogDebug("Added default attribute value: %s = %s" % (attribute.attributeLabel, attribute.value))
                except Exception as e:
                    util.LogWarning("Could not write attribute %s to file (%s)" % (attribute.attributeLabel, e))
            ####
            for attribute in filter(lambda at: not at.excludeFromDefaults, self._allAttributes()):
                _saveAttribute(configWriter, sectionTitle, attribute)
                if(attribute.nestedAttribute is not None):
                    _saveAttribute(configWriter, sectionTitle, attribute.nestedAttribute)
    
#####################
    def addListener(self, listener):
        """Register listener (held weakly) for attribute-change callbacks."""
        if(not isinstance(listener, AttributeGroupListener)):
            raise TypeError("Tried to add listener %s of type %s" % (listener, type(listener)))
        else:
            self._listeners.add(weakref.ref(listener, self._removeDeadListenerReference))

########
    def removeListener(self, listener):
        # was "if(listener in self._listeners): remove(listener)": the set
        # holds weakref objects, so membership of the raw listener object
        # never matched and listeners could not actually be removed.
        for listenerRef in list(self._listeners):
            if(listenerRef() is listener):
                self._listeners.remove(listenerRef)
                break

########
    def _removeDeadListenerReference(self, deadReference):
        self._listeners.remove(deadReference)
        
#########
    def _notifyListeners(self, changedAttributeName):
        # Suppressed while defaults are being bulk-loaded.
        if(not self._inBulkUpdate):
            for listenerRef in self._listeners:
                listenerRef().onAttributeChanged(self, changedAttributeName)

#####################
    def onValueChanged(self, changedAttribute): # overridden SingleAttributeDelegate method
        # Propagate the new value into every live and parked data blob...
        for dataBlob in self._dataBlobs.itervalues():
            self._updateDataBlobWithAttribute(dataBlob, changedAttribute)
        for dataBlob in self._dataBlobRepository.itervalues():
            self._updateDataBlobWithAttribute(dataBlob, changedAttribute)
        # ...then tell listeners which label changed.
        self._notifyListeners(changedAttribute.attributeLabel)
# END OF CLASS
############################# |
19,625 | 71b9e47b6c8046385b1f9c05271540f6556151f2 | from ggame import App, RectangleAsset, ImageAsset, Sprite, LineStyle, Color, Frame
class SpaceGame(App):
    """Top-level ggame application: paints a black backdrop, spawns three
    ships, and forwards each frame tick to every ship."""

    def __init__(self):
        super().__init__()
        fill = Color(0, 1)
        border = LineStyle(0, fill)
        backdrop = RectangleAsset(self.width, self.height, border, fill)
        Sprite(backdrop, (0, 0))
        # Three ships at fixed spawn points.
        for spawn in ((100, 100), (150, 150), (200, 50)):
            SpaceShip(spawn)

    def step(self):
        # Advance every ship once per frame.
        for ship in self.getSpritesbyClass(SpaceShip):
            ship.step()
class SpaceShip(Sprite):
    """Player-controllable ship with simple inertial motion.

    Key handlers are registered on SpaceGame for every instance, so all
    ships respond to the same keys: space toggles the thrust animation,
    the arrow keys change the rotation rate, and WASD nudges the velocity.
    """
    # 4-frame vertical sprite strip: frame 0 shown when idle,
    # frames 1-3 cycled while thrusting.
    asset = ImageAsset("images/four_spaceship_by_albertov_with_thrust.png", 
        Frame(227,0,65,125), 4, 'vertical')
    def __init__(self,position):
        super().__init__(SpaceShip.asset,position)
        # Linear velocity (pixels/step) and angular velocity (per step).
        self.vx=1
        self.vy=1
        self.vr=0.01
        self.thrust=0        # 1 while the space bar is held down
        self.thrustframe=1   # next thrust animation frame (cycles 1..3)
        SpaceGame.listenKeyEvent("keydown", "space", self.thrustOn)
        SpaceGame.listenKeyEvent("keyup", "space", self.thrustOff)
        # NOTE(review): the arrows adjust the rotation *rate*, not the heading;
        # sign convention (left = +vr) depends on ggame's rotation direction -- confirm.
        SpaceGame.listenKeyEvent("keydown","left arrow",self.leftRotate)
        SpaceGame.listenKeyEvent("keydown","right arrow",self.rightRotate)
        SpaceGame.listenKeyEvent("keydown","A",self.left)
        SpaceGame.listenKeyEvent("keydown","D",self.right)
        SpaceGame.listenKeyEvent("keydown","W",self.up)
        SpaceGame.listenKeyEvent("keydown","S",self.down)
        # Rotate about the sprite's centre.
        self.fxcenter = self.fycenter = 0.5
    def step(self):
        # Integrate position and rotation every frame.
        self.x += self.vx
        self.y += self.vy
        self.rotation += self.vr
        if self.thrust == 1:
            # Cycle through thrust frames 1..3 while thrusting.
            self.setImage(self.thrustframe)
            self.thrustframe += 1
            if self.thrustframe == 4:
                self.thrustframe = 1
        else:
            self.setImage(0)
    def thrustOn(self, event):
        self.thrust = 1
    def thrustOff(self, event):
        self.thrust = 0
    def rightRotate(self,event):
        self.vr -=0.01
    def leftRotate(self,event):
        self.vr +=0.01
    def left(self,event):
        self.vx -=0.5
    def right(self,event):
        self.vx +=0.5
    def up(self,event):
        self.vy -=0.5
    def down(self,event):
        self.vy +=0.5
# Instantiate the game and start ggame's main loop.
myapp = SpaceGame()
myapp.run()
|
19,626 | b476bca84ee04fc6b8251d116d9aceea6dc6eb00 | from flask import Blueprint
from api.demo_api import get_demo_api, post_demo_api, delete_api

# Blueprint holding the demo API endpoints; registered by the app factory.
api = Blueprint('api', __name__)

# Route /demo-api/ to one view function per HTTP method.
# BUG FIX: the original registered the rules on an undefined name `api_v1`
# (NameError on import) and never imported post_demo_api / delete_api; the
# blueprint defined above is `api`, so register the rules on it.
api.add_url_rule('/demo-api/', view_func=get_demo_api, methods=['GET'])
api.add_url_rule('/demo-api/', view_func=post_demo_api, methods=['POST'])
api.add_url_rule('/demo-api/', view_func=delete_api, methods=['DELETE'])
|
19,627 | 42c245b21b8460eb5ac29678fb6a03d1cecbf971 | from common_sets import *
import random
# Shuffles deck
def shuffle(deck):
    """Shuffle ``deck`` in place (uniformly, via random.shuffle); returns None."""
    random.shuffle(deck)
# Draws n cards
def draw(deck, n):
    """Shuffle a copy of ``deck`` and deal ``n`` cards from its top.

    Returns (hand, remaining_deck); the input deck is left untouched.
    """
    pool = deck.copy()
    random.shuffle(pool)  # inlined from the shuffle() helper
    dealt = pool[:n]
    remaining = pool[n:]
    return dealt, remaining
# Checks if we have a specific card in hand
def in_hand(hand, card):
    """Return True if ``card`` appears in ``hand``."""
    return card in hand
# Checks if we have a handtrap in hand
def hts(hand):
    """Return True when at least one card in ``hand`` is in ``handtraps``."""
    for card in hand:
        if card in handtraps:
            return True
    return False
def playable_hts_in_hand(hand):
    """Count the hand traps in ``hand`` that can actually be activated.

    Hand traps outside ``opt_handtraps`` count once per copy held; hand
    traps in ``opt_handtraps`` (presumably hard once-per-turn -- confirm
    against common_sets) count at most once regardless of copies in hand.
    """
    nb_hts = 0
    temp_hand_hts = []  # cards already counted (only checked for the OPT branch)
    for card in hand:
        if card in handtraps and card not in opt_handtraps:
            nb_hts += 1
            temp_hand_hts.append(card)
        elif card in opt_handtraps and card not in temp_hand_hts:
            nb_hts += 1
            temp_hand_hts.append(card)
    return nb_hts
def cards_of_set_in_hand(hand, card_set):
    """Return how many cards of ``hand`` (counting duplicate copies)
    belong to ``card_set``."""
    return sum(hand.count(card) for card in card_set)
def different_cards_of_set_in_hand(hand, card_set):
    """Return how many *distinct* cards of ``card_set`` appear in ``hand``.

    BUG FIX: the original computed this count in ``diff`` but then returned
    the duplicate-counting tally, making the function an exact duplicate of
    cards_of_set_in_hand. Return the distinct count, as the function name
    and the otherwise-unused ``diff`` variable indicate.
    """
    diff = 0
    for card in card_set:
        if in_hand(hand, card):
            diff += 1
    return diff
def triple_tactics_talent(deck, hand):
    """Resolve Triple Tactics Talent: remove it from hand and draw two cards.

    Returns (remaining_deck, hand); ``hand`` is mutated in place.
    """
    hand.remove("Triple Tactics Talent")
    drawn, remaining_deck = draw(deck, 2)
    hand.extend(drawn)
    return remaining_deck, hand
def pot_of_desires(deck, hand):
    """Resolve Pot of Desires: remove it from hand, banish the last 10 cards
    of the deck list, then draw two cards.

    Returns (remaining_deck, hand); both inputs are mutated.
    """
    hand.remove("Pot of Desires")
    # Banish 10 cards (pop() raises IndexError if fewer remain, as before).
    for _ in range(10):
        deck.pop()
    drawn, remaining_deck = draw(deck, 2)
    hand.extend(drawn)
    return remaining_deck, hand
def upstart_goblin(hand, deck):
    """Resolve Upstart Goblin: remove it from hand and draw one card.

    NOTE: returns (hand, deck) -- the reverse of the order used by
    triple_tactics_talent / pot_of_desires; kept for caller compatibility.
    """
    hand.remove("Upstart Goblin")
    drawn, deck = draw(deck, 1)
    hand.extend(drawn)
    return hand, deck
|
19,628 | 7a26f73d31ab02839f04a10a248c80e24f359c8e | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import getdate, nowdate
no_cache = 1
no_sitemap = 1
def get_context(context):
    """Website page context builder.

    Blocks Guest sessions and "Website User" accounts, then loads every row
    of the ``vineyards`` and ``Customer`` doctypes and returns the customers
    under the "SClient" key.
    """
    if (frappe.session.user == "Guest" or
        frappe.db.get_value("User", frappe.session.user, "user_type")=="Website User"):
        frappe.throw(_("You are not permitted to access this page."), frappe.PermissionError)
    vineyards = []
    clients = []
    # NOTE(review): the vineyards list is populated (and echoed via errprint)
    # but never returned -- confirm whether it should be part of the context.
    for acc in frappe.db.sql("select * from `tabvineyards` where true", as_dict=1):
        vineyards.append(acc)
        frappe.errprint(acc)
    for acc in frappe.db.sql("select * from `tabCustomer` where true", as_dict=1):
        clients.append(acc)
        frappe.errprint(acc)
    return {"SClient" : clients}
|
19,629 | 3860c5c74b8fdcdde36eac5db66d9f92ddf3bb3a | #!/usr/bin/python3.4
import requests
import json
import configparser
import redis
import time
import argparse
# Find out which config file we want to read
parser = argparse.ArgumentParser()
parser.add_argument('--config', help='Read the configuration from this file',
                    default='/etc/h_to_rocket_chat.conf')
args = parser.parse_args()
config_file = args.config

# Read the config file
try:
    config = configparser.ConfigParser()
    config.read(config_file)
    redis_host = config['redis']['host']
    # NOTE(review): port and database come back from configparser as strings;
    # redis-py appears to tolerate this, but consider casting to int -- confirm.
    redis_port = config['redis']['port']
    redis_db = config['redis']['database']
    hypothesis_url = config['hypothesis']['url']
    hypothesis_search_params = config['hypothesis']['search_params']
    hypothesis_api_token = config['hypothesis']['api_token']
    rocketchat_endpoint = config['rocketchat']['endpoint']
    rocketchat_path = config['rocketchat']['path']
    rocketchat_auth_token = config['rocketchat']['auth_token']
except Exception as e:
    print('Can not read config file: %s' % e)
    exit(3)

# Connect to the Redis database, which will be used to store the IDs and links
# of the annotations we have already sent to the Rocket chat channel
try:
    r = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_db)
except Exception as e:
    print('Can not connect to Redis: %s' % e)
    exit(3)
# This function will take the various pieces of information from an annotation
# and construct a message and post it to a Rocket chat channel
def post_to_rocket_chat(rc_endpoint,rc_path,rc_token,message,title,link,
                        text,source):
    """Post an annotation notification to a Rocket chat incoming webhook.

    message/title/link/text populate the chat message and its attachment;
    ``source`` is accepted for interface compatibility but is currently
    unused (it only appeared in a commented-out payload variant).
    """
    rc_headers = {'Authorization': 'Bearer ' + rc_token,
                  'Content-Type': 'application/x-www-form-urlencoded'}
    # BUG FIX: build the payload with json.dumps so quotes, backslashes and
    # newlines in the user-supplied fields are escaped properly -- the old
    # string template produced invalid JSON for such input.
    rc_body = {"text": message,
               "attachments": [{"title": title,
                                "title_link": link,
                                "text": text,
                                "color": "#764FA5"}]}
    rc_payload = 'payload=' + json.dumps(rc_body, ensure_ascii=False)
    rc_request = requests.post(rc_endpoint + rc_path, headers=rc_headers,
                               data=rc_payload.encode('utf-8'))
# Check if the annotation ID is in Redis a.k.a. has already been sent to chat
def check_id_in_redis(annotation_id):
    """Return True when the annotation ID is already stored in Redis
    (i.e. it has already been forwarded to chat)."""
    return r.get(annotation_id) is not None
# Call the Hypothesis API endpoint and get annotations based on the search
# term found in the config file
def get_annotations():
    """Query the Hypothesis search API and return the decoded JSON result."""
    cookies = dict(h_api_auth=hypothesis_api_token)
    response = requests.get(hypothesis_url + hypothesis_search_params,
                            cookies=cookies)
    return json.loads(response.text)
# Get the annotations, extract the info we need from them, store in Redis and
# post them to Rocket chat
def main():
    """Fetch new Hypothesis annotations and forward them to Rocket.Chat.

    Every annotation whose ID is not yet recorded in Redis is stored
    there (ID -> HTML link) and then posted to the chat channel.
    """
    try:
        annotations = get_annotations()
    except Exception as e:
        print('Can not get the annotations from Hypothesis: %s' % e)
        exit(3)
    for row in annotations['rows']:
        h_id = row['id']
        # Some documents carry no title metadata.
        if 'title' in row['document']: h_title = row['document']['title'][0]
        else: h_title = '<No title>'
        h_text = row['text']
        h_source = row['uri']
        h_link = row['links']['html']
        # 'acct:user@provider' -> 'user'
        h_user = row['user'].split(':')[1].split('@')[0]
        h_tags = " ".join([str(x) for x in row['tags']] )
        if check_id_in_redis(h_id) == False:
            # Record first, then post: a send failure exits before retrying,
            # so the same annotation is not posted twice.
            try:
                r.set(h_id, h_link)
            except Exception as e:
                print('Can not write the annotation ID to Redis: %s' % e)
                exit(3)
            message = "@%s created a new annotation. Tags: %s" % (h_user,h_tags)
            try:
                post_to_rocket_chat(rocketchat_endpoint,rocketchat_path,
                                    rocketchat_auth_token,message,h_title,
                                    h_link,h_text,h_source)
            except Exception as e:
                print('Can not post to Rocket chat: %s' % e)
                exit(3)
# We want to run this script under supervisor, so go run through the main loop
# endlessly, with a short break and let supervisor take care of the rest
if __name__ == '__main__':
    time.sleep(7)  # grace period so dependencies (Redis etc.) can come up
    while True:
        main()
        time.sleep(3)  # poll interval between Hypothesis queries
|
19,630 | 9e320f1778f5bbf23f51d3ac0483f70c3c1350f5 | n = int(input())
vecinput = str(input())
def numerize(arr, rang):
    """Convert the first `rang` whitespace-separated tokens of `arr` to ints.

    Uses str.split() with no explicit separator so repeated spaces and
    leading/trailing whitespace are tolerated; the old split(' ') produced
    '' tokens there and crashed int().
    """
    return [int(token) for token in arr.split()[:rang]]
# Build the integer list, then reverse it in place by swapping each
# element with its mirror from the other end.
newvec = numerize(vecinput, n)
for i in range(n // 2):
    el1 = newvec[i]
    el2 = newvec[n - 1 - i]
    newvec[i] = el2
    newvec[n - 1 - i] = el1
# Print the reversed values space-separated.
print(*newvec)
|
19,631 | 7cd6a73d0a20c32ffc2fb1f054d90088e10aade6 | from threading import Condition
from launch import LaunchDescription
import launch
from launch.substitution import Substitution
from launch_ros.actions import ComposableNodeContainer
from launch_ros.descriptions import ComposableNode
from launch_ros.actions import Node
from launch.launch_description_sources import PythonLaunchDescriptionSource
from launch.actions import IncludeLaunchDescription
from launch.actions import ExecuteProcess
from launch.actions import DeclareLaunchArgument
from launch.conditions import IfCondition
from launch.substitutions import LaunchConfiguration
from launch.substitutions import ThisLaunchFileDir
from launch.launch_context import LaunchContext
from ament_index_python.packages import get_package_share_directory
def include_launch_file(package, launchfile, launch_arguments=None):
    """Return an IncludeLaunchDescription for `launchfile` in `package`'s share dir.

    `launch_arguments` defaults to None instead of a mutable list literal:
    the shared default list would be aliased across every call if it were
    ever mutated. Passing an explicit list behaves exactly as before.
    """
    if launch_arguments is None:
        launch_arguments = []
    return IncludeLaunchDescription(
        PythonLaunchDescriptionSource(
            get_package_share_directory(package) + "/" + launchfile
        ),
        launch_arguments=launch_arguments
    )
def generate_launch_description():
    """Assemble the top-level launch description for the simulation stack.

    Declares the CLI arguments, includes the simulator / state estimator /
    controller launch files, conditionally starts one of the operation
    modes, and adds rviz, a static world->map transform, the vehicle
    spawner and an optional rosbag recording.
    """
    # run_gui = DeclareLaunchArgument("gui", default_value="false",
    #                                 description="Start gzclient")
    run_rviz = DeclareLaunchArgument("rviz", default_value="true",
                                     description="Start rviz")
    run_bag = DeclareLaunchArgument("bag", default_value="false",
                                    description="Do rosbag")
    # bagfile = DeclareLaunchArgument("bagfile", default_value="",
    #                                 description="output of rosbag")
    # Operation-mode switches; each one enables the matching
    # ExecuteProcess in `operations` below.
    wp_args = [
        DeclareLaunchArgument("use_manual", default_value="false",
                              description="Use manual mode"),
        DeclareLaunchArgument("use_wp_dev", default_value="false",
                              description="Use waypoints.launch.py"),
        DeclareLaunchArgument("use_survey", default_value="false",
                              description="Use survey"),
        DeclareLaunchArgument("use_single_turn", default_value="false",
                              description="Use single turn"),
        DeclareLaunchArgument("use_simple_lap", default_value="false",
                              description="Use simple lap")
    ]
    simulator = include_launch_file(
        "simulator", "launch/simulator.launch.py", [
            # ("gui", IfCondition(LaunchConfiguration("gui"))),
            # ("verbose", "true"),
        ]
    )
    # operation_manual = include_launch_file(
    #     "operation", "launch/manual.launch.py")
    # operation = include_launch_file(
    operations = [
        ExecuteProcess(
            cmd=["ros2", "launch", "operation", "manual.launch.py"],
            condition=IfCondition(LaunchConfiguration("use_manual"))),
        ExecuteProcess(
            cmd=["ros2", "launch", "operation",
                 "waypoints.launch.py"],
            condition=IfCondition(LaunchConfiguration("use_wp_dev"))),
        ExecuteProcess(
            cmd=["ros2", "launch", "operation",
                 "report_waypoints_survey.launch.py"],
            condition=IfCondition(LaunchConfiguration("use_survey"))),
        ExecuteProcess(
            cmd=["ros2", "launch", "operation",
                 "report_waypoints_single_turn.launch.py"],
            condition=IfCondition(LaunchConfiguration("use_single_turn"))),
        ExecuteProcess(
            cmd=["ros2", "launch", "operation",
                 "report_waypoints_simple_lap.launch.py"],
            condition=IfCondition(LaunchConfiguration("use_simple_lap"))),
    ]
    state_estimator = include_launch_file(
        "state_estimator", "launch/state_estimator.launch.py")
    controllers = include_launch_file(
        "control", "launch/controllers.launch.py")
    rviz = Node(
        package="rviz2",
        # namespace="/",
        executable="rviz2",
        name="rviz2",
        arguments=[
            # "--display-config " + get_package_share_directory("report_utils") + "/config.rviz"
            "--display-config", "launch/config.rviz"
        ],
        condition=IfCondition(LaunchConfiguration("rviz"))
    )
    # Identity transform so frames published under "map" resolve in "world".
    tf2static = Node(
        package="tf2_ros",
        executable="static_transform_publisher",
        arguments=["0", "0", "0", "0", "0", "0", "world", "map"]
    )
    # NOTE(review): hard-coded absolute path — breaks on any other machine;
    # consider resolving via get_package_share_directory.
    spawner_process = ExecuteProcess(
        cmd=["python3",
             "/home/cale/thesis-code/ws/src/simulator/simulator/spawn_vehicle.py"],
        output="screen"
    )
    rosbag_process = ExecuteProcess(
        cmd=["ros2", "bag", "record", "--all", "--output", "bagfolder"],
        condition=IfCondition(LaunchConfiguration("bag"))
    )
    ld = LaunchDescription([
        run_rviz, run_bag, *wp_args,
        simulator,
        *operations,
        state_estimator,
        controllers,
        rviz,
        tf2static,
        spawner_process,
        rosbag_process
    ])
    return ld
|
19,632 | 4a63f9ff98a5e41928cc3e32e0a1ca16118edf8a | #TODO: store constants in const, not store
# the Buffer type represents an array of chars
# For single chars, you don't want to generate a whole string
# Just use the char type
# Data in the constant pool and store are of type B for buffer
# The byte code variables are now specified like this s:s'1 which mean string type in store index 1
# get can be applied to store or const pool
# set can only be applied to ?
# We can declare arrays like this Let xs = [| 1, "a", "foo" |] where the bars help demarcate arrays
# Let xs = array(4)
# They will be heterogenous
# Arrays will always be stored in store
# They will take the a: type
# Temporary Pool
# t'1 | a:s'1
# Constant Pool
# c'1 | B: foo
# Storage Pool
# s'1 | A: 3
# s'2 | i: 1
# s'3 | c: a
# s'4 | s:c'1
# Imagine a nested array
# Let xs = [| [|1|], "a", "foo" |]
# s'1 | A:1
# s'2 | i:1
# s'3 | A:3
# s'4 | a:s'1
# ...
# We also have a record type
# Let xs = {| a: 1, b: 2 |}
# where the index is aliased, so xs.a == xs[1]
# s'1 | R:2; a,b
# s'2 | i:1
# s'3 | i:2
import pdb
class ProgramObject():
    """Container for a bytecode program: instruction list, constant pool
    and the label bookkeeping needed to patch branch targets in end()."""

    def __init__(self):
        self._code = []   # bytecode instructions, in emission order
        self._const = [0]  # constant pool; index 0 is a dummy so real constants start at 1
        self._label_index = {}  # label -> instruction index the label points at
        self._labeled_instruction_index = {}  # instruction text -> label attached to it
        self._ip = 0  # next instruction index while emitting

    def code(self, ins, label=0):
        # Append an instruction; optionally remember that it carries `label`.
        self._code.append(ins)
        self._ip += 1
        if label != 0:
            self._labeled_instruction_index[ins] = label

    def const(self, val):
        # Append a value to the constant pool.
        self._const.append(val)

    def deciLbl(self, label):
        # Declare that `label` refers to the current instruction position.
        self._label_index[label] = self._ip

    def ilbl(self, label):
        # Identity helper used to reference a label inside an instruction.
        return label

    def end(self):
        # Resolve labels: rewrite each labeled instruction so its operand
        # becomes a relative jump distance (target index minus own index).
        # NOTE(review): instructions are matched by their full text, so two
        # byte-identical instruction strings would share one label entry.
        i = 0
        labeled_lines = self._labeled_instruction_index.keys()
        for ins in self._code:
            if ins in labeled_lines:
                label = self._labeled_instruction_index[ins]
                jump_to = self._label_index[label] - i
                self._code[i] = ins + ' {0}'.format(str(jump_to))
            i += 1
class VM():
    """Tiny interpreter for ProgramObject bytecode (work in progress)."""

    # NOTE(review): defined without `self` and invoked as VM.run(po) —
    # effectively a static method; consider @staticmethod.
    def run(po):
        # use ip as an instruction pointer
        ip = 0
        # use tmp to store variables
        tmp = {}
        # code stores bytecode instructions
        code = []
        # store is for globals and constants
        store = {}
        # NOTE(review): this appends the bound method po.code, not the
        # instruction list; `code` is never read afterwards.
        code.append(po.code)
        constant_index = 1
        # load constants
        # NOTE(review): keys are 'c1', 'c2', ... (no apostrophe) and the
        # dummy _const[0] lands at 'c1', while the bytecode refers to
        # constants as "c'1" — confirm the intended key format before use.
        for c in po._const:
            store['c{0}'.format(constant_index)] = c
            constant_index += 1
        # eval: dispatch on the opcode (first token) until we run off the
        # end of the instruction list (IndexError breaks the loop).
        while(True):
            try:
                ins = po._code[ip].split(" ")
            except:
                break
            ip += 1
            op = ins[0]
            if op == 'set':
                tmp[ins[1]] = ins[3]
            if op == 'iadd':
                tmp[ins[1]] = int(tmp[ins[2]]) + int(tmp[ins[3]])
            if op == 'isub':
                tmp[ins[1]] = int(tmp[ins[2]]) - int(tmp[ins[3]])
            if op == 'bru':
                # branch-if-zero on a temp variable
                if tmp[ins[1]] == '0':
                    # move the ip by the distance to the jump point
                    # TODO: why are you off by 2?
                    ip = ip + int(ins[2]) - 2
            if op == 'br':
                # move the ip by the distance to the jump point
                ip = ip + int(ins[1])
            if op == 'cload':
                # load a constant-pool entry into a temp
                tmp[ins[1]] = store[ins[2]]
            if op == 'arr':
                # example array code: arr s'1 a:c'6
                # NOTE(review): `list` shadows the builtin here.
                list = []
                _, length = ins[2].split(':')
                for i in range(0, int(length)):
                    list.append(0)
                store[ins[1]] = list
            # put <destination register> <index> <value>
            # put t'1 i:n s:c'1
            if op == 'put':
                # this code is used to insert into an array
                array = store[ins[1]]
                _, index = ins[2].split(':')
                val_type, val = ins[3].split(':')
                # TODO: need some type handling based on val_type
                array[int(index)] = val
            if op == 'get':
                # this code gets from an array and sets to a temp
                # get t'1 s'1 i:1
                array = store[ins[2]]
                _, index = ins[3].split(':')
                tmp[ins[1]] = array[int(index)]
            if op == 'rec':
                # NOTE(review): looks unfinished — ins[2] ("R@c'8") contains
                # no "''" separator so the unpack raises ValueError, `store`
                # is a dict (no .append), and the index is a str.
                _, record_def_pointer = ins[2].split("''")
                store.append(po._const[record_def_pointer])
                tmp[ins[1]] = "R@c'8"
            if op == 'print':
                print(tmp[ins[1]])
# print Bob and 1981 from a record
po = ProgramObject()
# declare key and value for record
po.const("S:name") # c'1 upcase S means fieldname
po.const("S:dob") # c'2
po.const("s:Bob") # c'3
po.const("s:1981") # c'4
po.const("A:2") # c'5 upcase A means array in Record Definition
po.const("s:c'1") # c'6
po.const("s:c'2") # c'7
# declare a record definition pointing to array A
po.const("R:c'5") # c'8 big R is for record definitions, while little r is for instances
# create a record definition pointer
# the following command will insert the pointer into store
po.code("rec t'1 R@c'8") # at runtime, r@c'8 will be replaced with r:s'1
po.code("put t'1 i:1 s:c'3")
po.code("put t'1 i:2 s:c'4")
VM.run(po)
|
19,633 | 6b547ced8fcb6858a43708b06c3443bedcb9d380 | from django.shortcuts import render, redirect
from django.contrib.auth.decorators import login_required
from .models import *
from users import models
import random
def inicio(request):
    """Render the landing page with an empty context."""
    return render(request, "index.html", {})
@login_required
def compartir(request):
    """Render the share page (login required)."""
    return render(request, "share.html")
@login_required
def ranking(request):
    """Render the top-10 leaderboard of non-staff players, best score first."""
    top_players = (models.Perfil.objects
                   .filter(is_staff=False)
                   .order_by('-puntaje')
                   .values('username', 'puntaje')[:10])
    return render(request, "ranking.html", {'form': top_players})
@login_required
def resultado(request):
    """Show the results page; direct (non-POST) access bounces to the home page."""
    if request.method == "POST":
        return render(request, "resultado.html")
    return redirect('inicio')
@login_required
def jugar(request):
    """
    Run the quiz game: keeps a question counter and a score accumulator,
    picks questions at random while trying to avoid repeats, validates
    the selected option and accumulates the score. After the 5th
    question the user's best score is persisted and the results page is
    shown. All game state travels through hidden POST fields.
    """
    # Restore game state from the posted form, or start a fresh game.
    if request.POST.get("numeroPregunta"):
        numeroPregunta = int(request.POST.get("numeroPregunta"))
        score = int(request.POST.get("score"))
        correct = int(request.POST.get("correct"))
        wrong = int(request.POST.get("wrong"))
        # NOTE(review): list() over the posted string yields a list of
        # characters, not the original id list — verify that the
        # repeat-avoidance check below actually works as intended.
        ids = list(request.POST.get("ids"))
    else:
        numeroPregunta = 1
        score = 0
        wrong = 0
        correct = 0
        ids = []
    if request.method != "POST":
        # First question: draw random pks until one not used yet is found.
        # NOTE(review): range(count) starts at 0 while Django pks usually
        # start at 1 — confirm the pk scheme of QuesModel.
        electorDeCategoria = random.choice(range(QuesModel.objects.all().count()))
        while str(electorDeCategoria) in ids:
            electorDeCategoria = random.choice(range(QuesModel.objects.all().count()))
        ids.append(electorDeCategoria)
        form = QuesModel.objects.get(pk=electorDeCategoria)
        context = {
            'form':form,
            "numeroPregunta":numeroPregunta,
            'score':score,
            'correct':correct,
            'wrong':wrong,
            'ids': ids
        }
        return render(request, "play.html", context)
    elif request.method == 'POST':
        if numeroPregunta < 5:
            # Grade the previous answer.
            questions = QuesModel.objects.get(pk=int(request.POST.get("ID")))
            opcionSeleccionada=request.POST.get("opcionMarcada")
            if request.POST.get(opcionSeleccionada) == questions.ans:
                # Faster answers score more: remaining timer value * 10.
                score += int(request.POST.get("timer")) * 10
                correct += 1
                print('puntaje:', score, 'corectas:', correct, 'Nro Pregunta:', numeroPregunta)
            else:
                wrong += 1
                print('incorrectas:', wrong, 'Nro Pregunta:', numeroPregunta)
            numeroPregunta += 1  # next question
            electorDeCategoria = random.choice(range(QuesModel.objects.all().count()))
            while str(electorDeCategoria) in ids:
                electorDeCategoria = random.choice(range(QuesModel.objects.all().count()))
            ids.append(electorDeCategoria)
            questions = QuesModel.objects.get(pk=electorDeCategoria)
            context = {
                'score':score,
                'correct':correct,
                'wrong':wrong,
                'numeroPregunta':numeroPregunta,
                'form':questions,
                'ids': ids
            }
            return render(request,'play.html',context)
        else:
            # Last (5th) question: grade it, persist the best score, show results.
            questions = QuesModel.objects.get(pk=int(request.POST.get("ID")))
            opcionSeleccionada = request.POST.get("opcionMarcada")
            if request.POST.get(opcionSeleccionada) == questions.ans:
                # NOTE(review): multiplies the timer by the accumulated score
                # here but by 10 for earlier questions — confirm which
                # formula is intended.
                score += int(request.POST.get("timer")) * score
                correct += 1
                print('puntaje:', score, 'corectas:', correct, 'Nro Pregunta:', numeroPregunta)
            else:
                wrong += 1
                print('incorrectas:', wrong, 'Nro Pregunta:', numeroPregunta)
            user = request.user
            # Keep only the user's best score.
            if user.puntaje is None or user.puntaje < score:
                user.puntaje = score
                user.save()
            context = {
                'score': score,
                'correct': correct,
                'wrong': wrong,
                'numeroPregunta': numeroPregunta,
                'ids': ids
            }
            return render(request,'resultado.html',context)
|
19,634 | df48f880c9500f65e1d74b39a2eb1553b362c9ed | import os
import uuid
import boto3
# Bucket/key configuration comes from the environment, with test defaults;
# os.environ.get replaces the three verbose if/else lookups.
source_bucket = os.environ.get('SOURCE_BUCKET', 'test-poky-input')
source_key = os.environ.get('SOURCE_KEY', 'test_file.txt')
output_bucket = os.environ.get('OUTPUT_BUCKET', 'test-poky-output')
# rename with unique key: os.path.splitext keeps this safe for keys with
# no extension or with several dots — the old split('.')[0]/[1] indexing
# crashed on extensionless keys and dropped middle segments of dotted ones.
stem, ext = os.path.splitext(source_key)
output_key = f"{stem}_{uuid.uuid4()}{ext}"
s3 = boto3.resource('s3')
copy_source = {
    'Bucket': source_bucket,
    'Key': source_key
}
bucket = s3.Bucket(output_bucket)
print(f"Copying {source_key} from s3 bucket {source_bucket} to s3 bucket {output_bucket}. "
      f"New name: {output_key}")
bucket.copy(copy_source, output_key)
print("Completed")
19,635 | b8920408a29f4d56adea16c73916818d908c8289 | # Copyright ยฉ 2020 Tim Schwenke <tim.and.trallnag+code@gmail.com>
# Licensed under Apache License 2.0 <http://www.apache.org/licenses/LICENSE-2.0>
"""
Dummy tests that ensure that Pytest does not return an error code if no tests
are run. This would lead to failure of CI/CD pipelines, for example GitHub Actions.
"""
import pytest
@pytest.mark.slow
def test_slow():
    """Placeholder marked 'slow' so marker-filtered runs still collect a test."""
    pass
def test_not_slow():
    """Placeholder so default (non-slow) runs always have one passing test."""
    pass
|
19,636 | daab6b9a6facb700e0f4af4c27d4281fa4de76bb | import paramiko
from getpass import getpass
import time
# Connection settings; the password is prompted interactively via getpass.
# NOTE: this is a Python 2 script (print statements below).
ip_addr='50.76.53.27'
username='pyclass'
password=getpass()
port = 22
# Paramiko SSH client using the system known_hosts for host-key checks.
remote_conn_pre=paramiko.SSHClient()
remote_conn_pre.load_system_host_keys()
#remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())
#username = "pyclass"
#password = "88newclass"
#print dir(remote_conn_pre)
remote_conn_pre.connect(ip_addr,username=username,password=password,look_for_keys=False,allow_agent=False,port=port)
# Interactive shell channel; first recv drains the login banner/prompt.
remote_conn=remote_conn_pre.invoke_shell()
output=remote_conn.recv(5000)
print output
# Send a command and give the device a second to respond before reading.
remote_conn.send('show ip int brief\n')
time.sleep(1)
output=remote_conn.recv(5000)
print output
|
19,637 | 7f3f11637f76f620fbc9c337b9a02882c96dd811 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2018-04-02 06:56
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: adds the nullable integer column `nano_time_start` to `sound`."""

    dependencies = [
        ('server', '0003_auto_20180326_0908'),
    ]
    operations = [
        migrations.AddField(
            model_name='sound',
            name='nano_time_start',
            field=models.IntegerField(null=True),
        ),
    ]
|
19,638 | d037000596d3360eb5a355864fa29a7558b76a34 | from django.test import TestCase,Client
from django.urls import reverse
import requests
from external_books.constants import BASE_URL
class TestExternalBooks(TestCase):
    """
    Purpose: Test cases for external_books.views
    Author: Ashkar Ali

    NOTE(review): these tests hit the live Ice & Fire API over the
    network, so they fail offline.
    """

    def setUp(self):
        # Fresh test client and the resolved URL under test for each case.
        self.client = Client()
        self.external_book_url = reverse('external_book')

    def test_external_book_view_GET(self):
        # The upstream API responds 2xx for the configured BASE_URL.
        response = requests.get(BASE_URL)
        self.assertTrue(response.ok)

    def test_external_book_when_url_wrong(self):
        # A bad path on the same host yields 404.
        response = requests.get("https://www.anapioficeandfire.com/api/booksssss/")
        self.assertEquals(response.status_code,404)
|
19,639 | 7396d7af457100f1ece9fc0c92cded194c4d447d | # coding: utf-8
#
import pytest
from uiautomator2 import utils
def test_list2cmdline():
    """utils.list2cmdline must quote each argument the way a POSIX shell expects."""
    cases = [
        [("echo", "hello"), "echo hello"],
        [("echo", "hello&world"), "echo 'hello&world'"],
        [("What's", "your", "name?"), """'What'"'"'s' your 'name?'"""]
    ]
    for args, expect in cases:
        got = utils.list2cmdline(args)
        assert got == expect, "Args: %s, Expect: %s, Got: %s" % (args, expect, got)
def test_inject_call():
    """utils.inject_call forwards only matching kwargs and rejects bad positionals."""
    def target(a, b, c=2):
        return a * 100 + b * 10 + c

    assert utils.inject_call(target, a=2, b=4) == 242
    with pytest.raises(TypeError):
        utils.inject_call(target, 2)
19,640 | 1abfd337f0dded45d6976cb89aeef11147e1c73b | import os
import re
from collections import Counter
from .base import BaseAligner
from ..multiprocessing import (align, convert_ali_to_textgrids, compile_train_graphs,
calc_fmllr, generate_pronunciations)
from ..exceptions import KaldiProcessingError
from ..helper import log_kaldi_errors
def parse_transitions(path, phones_path):
    """Translate a Kaldi transition-model dump into a phone-symbol table.

    Reads the show-transitions style text at `path` and writes one
    "<phone>_<state> <transition-id>" line per transition line to
    `phones_path`, preceded by the conventional "<eps> 0" entry. The
    per-phone state counter restarts whenever a new phone is seen.
    """
    state_re = re.compile(r'Transition-state (\d+): phone = (\w+)')
    id_re = re.compile(r'Transition-id = (\d+)')
    phone = None
    state = 0
    with open(path, encoding='utf8') as src, \
            open(phones_path, 'w', encoding='utf8') as dst:
        dst.write('{} {}\n'.format('<eps>', 0))
        for raw in src:
            stripped = raw.strip()
            if stripped.startswith('Transition-state'):
                new_phone = state_re.match(stripped).groups()[1]
                if new_phone != phone:
                    phone = new_phone
                    state = 0
            else:
                trans_id = id_re.match(stripped).groups()[0]
                dst.write('{}_{} {}\n'.format(phone, state, trans_id))
                state += 1
class PretrainedAligner(BaseAligner):
    """
    Class for aligning a dataset using a pretrained acoustic model

    Parameters
    ----------
    corpus : :class:`~montreal_forced_aligner.corpus.AlignableCorpus`
        Corpus object for the dataset
    dictionary : :class:`~montreal_forced_aligner.dictionary.Dictionary`
        Dictionary object for the pronunciation dictionary
    acoustic_model : :class:`~montreal_forced_aligner.models.AcousticModel`
        Archive containing the acoustic model and pronunciation dictionary
    align_config : :class:`~montreal_forced_aligner.config.AlignConfig`
        Configuration for alignment
    temp_directory : str, optional
        Specifies the temporary directory root to save files need for Kaldi.
        If not specified, it will be set to ``~/Documents/MFA``
    call_back : callable, optional
        Specifies a call back function for alignment
    """
    def __init__(self, corpus, dictionary, acoustic_model, align_config,
                 temp_directory=None,
                 call_back=None, debug=False, verbose=False, logger=None):
        # Keep a handle on the pretrained model before BaseAligner setup runs
        # (setup() below reads its phone set).
        self.acoustic_model = acoustic_model
        super(PretrainedAligner, self).__init__(corpus, dictionary, align_config, temp_directory,
                                                call_back, debug, verbose, logger)
        # Alignment reads features from the split corpus directory.
        self.align_config.data_directory = corpus.split_directory()
        # Unpack the pretrained model files into the working align directory.
        self.acoustic_model.export_model(self.align_directory)
        log_dir = os.path.join(self.align_directory, 'log')
        os.makedirs(log_dir, exist_ok=True)
        self.logger.info('Done with setup!')

    @property
    def model_directory(self):
        # Root for model files inside the temporary directory.
        return os.path.join(self.temp_directory, 'model')

    @property
    def align_directory(self):
        # Working directory for alignment artifacts and logs.
        return os.path.join(self.temp_directory, 'align')

    def setup(self):
        # Use the phone set that the pretrained model was trained with.
        self.dictionary.nonsil_phones = self.acoustic_model.meta['phones']
        super(PretrainedAligner, self).setup()

    def align(self):
        """Run first-pass alignment, then (optionally) fMLLR speaker
        adaptation plus a second pass; 'done'/'dirty' marker files make
        the step resumable and failure-aware."""
        done_path = os.path.join(self.align_directory, 'done')
        dirty_path = os.path.join(self.align_directory, 'dirty')
        if os.path.exists(done_path):
            self.logger.info('Alignment already done, skipping.')
            return
        try:
            compile_train_graphs(self.align_directory, self.dictionary.output_directory,
                                 self.align_config.data_directory, self.corpus.num_jobs, self.align_config)
            self.acoustic_model.feature_config.generate_features(self.corpus)
            log_dir = os.path.join(self.align_directory, 'log')
            os.makedirs(log_dir, exist_ok=True)
            self.logger.info('Performing first-pass alignment...')
            align('final', self.align_directory, self.align_config.data_directory,
                  self.dictionary.optional_silence_csl,
                  self.corpus.num_jobs, self.align_config)
            # Speaker adaptation only if enabled and transforms not already computed.
            if not self.align_config.disable_sat and self.acoustic_model.feature_config.fmllr \
                    and not os.path.exists(os.path.join(self.align_directory, 'trans.0')):
                self.logger.info('Calculating fMLLR for speaker adaptation...')
                calc_fmllr(self.align_directory, self.align_config.data_directory,
                           self.dictionary.optional_silence_csl, self.corpus.num_jobs, self.align_config, initial=True, iteration='final')
                self.logger.info('Performing second-pass alignment...')
                align('final', self.align_directory, self.align_config.data_directory,
                      self.dictionary.optional_silence_csl,
                      self.corpus.num_jobs, self.align_config)
        except Exception as e:
            # Mark the directory dirty so a rerun does not trust partial output.
            with open(dirty_path, 'w'):
                pass
            if isinstance(e, KaldiProcessingError):
                log_kaldi_errors(e.error_logs, self.logger)
            raise
        with open(done_path, 'w'):
            pass

    def export_textgrids(self, output_directory):
        """
        Export a TextGrid file for every sound file in the dataset
        """
        ali_directory = self.align_directory
        convert_ali_to_textgrids(self.align_config, output_directory, ali_directory, self.dictionary,
                                 self.corpus, self.corpus.num_jobs, self)
        self.compile_information(ali_directory, output_directory)

    def generate_pronunciations(self, output_path, calculate_silence_probs=False, min_count=1):
        """Estimate pronunciation probabilities from alignment counts and
        export a probability lexicon to `output_path`.

        `min_count` acts as add-`min_count` smoothing for unseen variants;
        the most frequent variant of each word is pinned to probability 1.
        """
        pron_counts, utt_mapping = generate_pronunciations(self.align_config, self.align_directory, self.dictionary, self.corpus, self.corpus.num_jobs)
        if calculate_silence_probs:
            # NOTE(review): these counts are collected but never used below —
            # silence-probability estimation looks unfinished.
            sil_before_counts = Counter()
            nonsil_before_counts = Counter()
            sil_after_counts = Counter()
            nonsil_after_counts = Counter()
            sils = ['<s>', '</s>', '<eps>']
            for u, v in utt_mapping.items():
                for i, w in enumerate(v):
                    if w in sils:
                        continue
                    # NOTE(review): assumes utterances are bracketed by
                    # <s>/</s>; otherwise v[i + 1] raises IndexError at the
                    # last word — confirm against generate_pronunciations.
                    prev_w = v[i - 1]
                    next_w = v[i + 1]
                    if prev_w in sils:
                        sil_before_counts[w] += 1
                    else:
                        nonsil_before_counts[w] += 1
                    if next_w in sils:
                        sil_after_counts[w] += 1
                    else:
                        nonsil_after_counts[w] += 1
        self.dictionary.pronunciation_probabilities = True
        for word, prons in self.dictionary.words.items():
            if word not in pron_counts:
                # Word never aligned: keep every variant at probability 1.
                for p in prons:
                    p['probability'] = 1
            else:
                print(word)  # NOTE(review): leftover debug output
                print(pron_counts[word])
                total = 0
                best_pron = 0
                best_count = 0
                for p in prons:
                    # Smoothed count: min_count plus observed occurrences.
                    p['probability'] = min_count
                    if p['pronunciation'] in pron_counts[word]:
                        p['probability'] += pron_counts[word][p['pronunciation']]
                    total += p['probability']
                    if p['probability'] > best_count:
                        best_pron = p['pronunciation']
                        best_count = p['probability']
                print(total)
                print(prons)
                # Pin the most frequent variant to 1; normalize the rest by
                # the total smoothed count.
                for p in prons:
                    if p['pronunciation'] == best_pron:
                        p['probability'] = 1
                    else:
                        p['probability'] /= total
                self.dictionary.words[word] = prons
                print(self.dictionary.words[word])
        self.dictionary.export_lexicon(output_path, probability=True)
19,641 | 627c4e849f240d89e2ce35755df1cc713a3ed324 | from enum import Enum
class Language(Enum):
English = 0
Russian = 1
|
19,642 | b89236bdda532e4b6478f15857d1fc0ccc4f8ccb | import time
import math
def get_exec_time(func):
def wrapper(*args, **kwargs):
t1 = time.time()
func(*args, **kwargs)
t2 = time.time()
print("{}(Function) took {}".format(func.__name__, round(t2-t1, 4)))
return wrapper |
19,643 | 5587b757a1f9ccf89e972e91c55d4b38a83228f6 |
# Third-Party
from rest_framework import routers
# Local
from .views import GroupViewSet
from .views import PersonViewSet
# API router without trailing slashes (e.g. /group, not /group/).
router = routers.DefaultRouter(
    trailing_slash=False,
)
router.register(r'group', GroupViewSet)
router.register(r'person', PersonViewSet)
# Expose the router-generated URL patterns to Django's URL resolver.
urlpatterns = router.urls
|
19,644 | 8c9faa6fcaf98f3e61cf9f71b24b2c3fdd764c88 | import socket
import sys
import signal
import os
import subprocess
import liveStream
import picamera
import time
import datetime
import glob
from pathlib import Path
# This is the main server that will be used to get all the functions of the PiBell working.
def main():
# create the socket for the rs server
try:
ss = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print("[S]: Server socket created")
except socket.error as err:
print('[S]: socket open error: {}\n'.format(err))
exit()
# bind the socket to the port to listen for the client
server_binding = ('', 9000)
ss.bind(server_binding)
ss.listen(5)
host = socket.gethostname()
print("[S]: Server host name is {}".format(host))
localhost_ip = (socket.gethostbyname(host))
print("[S]: Server IP address is {}".format(localhost_ip))
startLive = ''
isLive = False
userAndTokens = {}
startDetection = ''
isArmed = False
takeDetectPic = "YES"
# get list of host names to check for
while True:
conn, addr = ss.accept()
print("[S]: Got connection from: ", addr)
length_of_message = int.from_bytes(conn.recv(2), byteorder='big')
msg = conn.recv(length_of_message).decode("UTF-8")
print("[S]: Message from client: " + msg)
# Note the corrected indentation below
if "StartLive" in msg:
print("[S]: Starting Live Stream ...")
if isArmed == True:
startDetection.terminate()
startDetection = ''
if isLive == False:
startLive = subprocess.Popen(["python3", "liveStream.py"], stdout=subprocess.PIPE)
isLive = True
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
elif "EndLive" in msg:
print("[S]: Ending Live Stream ...")
if isLive == True:
startLive.terminate()
startLive = ''
isLive = False
if isArmed == True:
# gets args ready
args = []
args.append("python3")
args.append("run.py")
args.append("example/model.h5")
args.append(takeDetectPic)
# append tokens
for key, value in userAndTokens.items():
args.append(value)
startDetection = subprocess.Popen(args, stdout=subprocess.PIPE)
isArmed = True
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
elif "Take Pic" in msg:
print("[S]: Taking pic ...")
if isLive == True:
startLive.terminate()
startLive = ''
time.sleep(1)
with picamera.PiCamera() as camera:
now = datetime.datetime.now()
dateAndTime = now.strftime("%Y-%m-%d %H:%M:%S")
camera.resolution = (1280, 720)
camera.capture(dateAndTime + ".jpg") # will be saved in the current directory.
camera.close()
startLive = subprocess.Popen(["python3", "liveStream.py"], stdout=subprocess.PIPE)
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
elif "Send Pics" in msg:
print("[S]: Sending pics ...")
allCurrentPics = glob.glob("*.jpg")
# print(len(allCurrentPics))
# send number of pics to client
message_to_send = str(len(allCurrentPics)).encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
# client: OK
length_of_message = int.from_bytes(conn.recv(2), byteorder='big')
msg = conn.recv(length_of_message).decode("UTF-8")
print("[S]: Message from client: " + msg)
# send all pics . . .
for pic in allCurrentPics:
print("[S]: Sending " + pic + " ...")
message_to_send = pic.encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
# OK from client
length_of_message = int.from_bytes(conn.recv(2), byteorder='big')
msg = conn.recv(length_of_message).decode("UTF-8")
print("[S]: Message from client: " + msg)
# send the size of the image to the client
a = Path(pic).stat().st_size
message_to_send = str(a).encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
# OK from client
length_of_message = int.from_bytes(conn.recv(2), byteorder='big')
msg = conn.recv(length_of_message).decode("UTF-8")
print("[S]: Message from client: " + msg)
picToSend = open(pic,'rb')
while (True):
picPtr = picToSend.read(1024)
if not picPtr:
break
conn.sendall(picPtr)
print("[S]: Removing " + pic)
os.remove(pic)
# OK from client
length_of_message = int.from_bytes(conn.recv(2), byteorder='big')
msg = conn.recv(length_of_message).decode("UTF-8")
print("[S]: Message from client: " + msg)
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
conn.close()
elif "Log In" in msg:
# need to get username and token
print("[S]: Getting username and token ...")
# send OK
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
# client: Username
length_of_message = int.from_bytes(conn.recv(2), byteorder='big')
msg = conn.recv(length_of_message).decode("UTF-8")
print("[S]: Message from client: " + msg)
user = msg
# send OK
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
# client: token
length_of_message = int.from_bytes(conn.recv(2), byteorder='big')
msg = conn.recv(length_of_message).decode("UTF-8")
print("[S]: Message from client: " + msg)
token = msg
# add this (user:token) pair to the dictionary
userAndTokens[user] = token
print("[S]: Current Users + Tokens: ")
print(userAndTokens)
if isArmed == True:
startDetection.terminate()
startDetection = ''
# gets args ready
args = []
args.append("python3")
args.append("run.py")
args.append("example/model.h5")
args.append(takeDetectPic)
# append tokens
for key, value in userAndTokens.items():
args.append(value)
# arm the camera
startDetection = subprocess.Popen(args, stdout=subprocess.PIPE)
isArmed = True
# send OK
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
conn.close()
elif "Arm Doorbell" in msg:
print("[S]: Arming Doorbell ...")
# gets args ready
args = []
args.append("python3")
args.append("run.py")
args.append("example/model.h5")
args.append(takeDetectPic)
# append tokens
for key, value in userAndTokens.items():
args.append(value)
# arm the camera
if isArmed == False:
startDetection = subprocess.Popen(args, stdout=subprocess.PIPE)
isArmed = True
# send OK
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
conn.close()
elif "Disarm Doorbell" in msg:
print("[S]: Disarming Doorbell ...")
if isArmed == True:
startDetection.terminate()
startDetection = ''
isArmed = False
# send OK
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
conn.close()
elif "Stop Notifications" in msg:
# send OK
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
# client: Username
length_of_message = int.from_bytes(conn.recv(2), byteorder='big')
msg = conn.recv(length_of_message).decode("UTF-8")
print("[S]: Message from client: " + msg)
user = msg
print("[S]: Getting rid of token for " + user + " ...")
if user in userAndTokens:
userAndTokens.pop(user)
if isArmed == True:
startDetection.terminate()
startDetection = ''
# gets args ready
args = []
args.append("python3")
args.append("run.py")
args.append("example/model.h5")
args.append(takeDetectPic)
# append tokens
for key, value in userAndTokens.items():
args.append(value)
# arm the camera
startDetection = subprocess.Popen(args, stdout=subprocess.PIPE)
isArmed = True
# send OK
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
conn.close()
elif "Send Notifs" in msg:
print("[S]: Sending notifications ...")
allCurrentPics = glob.glob("*.txt")
# print(len(allCurrentPics))
# send number of pics to client
message_to_send = str(len(allCurrentPics)).encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
# client: OK
length_of_message = int.from_bytes(conn.recv(2), byteorder='big')
msg = conn.recv(length_of_message).decode("UTF-8")
print("[S]: Message from client: " + msg)
# send all pics . . .
for pic in allCurrentPics:
print("[S]: Sending " + pic + " ...")
message_to_send = pic.encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
# OK from client
length_of_message = int.from_bytes(conn.recv(2), byteorder='big')
msg = conn.recv(length_of_message).decode("UTF-8")
print("[S]: Message from client: " + msg)
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
conn.close()
elif "Pic Capture ON" in msg:
print("[S]: Turning picture capture on...")
takeDetectPic = "YES"
# if the camera is armed, disarm it and then start it with pic capture on
if isArmed == True:
startDetection.terminate()
startDetection = ''
# gets args ready
args = []
args.append("python3")
args.append("run.py")
args.append("example/model.h5")
args.append(takeDetectPic)
# append tokens
for key, value in userAndTokens.items():
args.append(value)
# arm the camera
startDetection = subprocess.Popen(args, stdout=subprocess.PIPE)
isArmed = True
# send OK back
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
conn.close()
elif "Pic Capture OFF" in msg:
print("[S]: Turning picture capture off...")
takeDetectPic = "NO"
# if the camera is armed, disarm it and then start it with pic capture on
if isArmed == True:
startDetection.terminate()
startDetection = ''
# gets args ready
args = []
args.append("python3")
args.append("run.py")
args.append("example/model.h5")
args.append(takeDetectPic)
# append tokens
for key, value in userAndTokens.items():
args.append(value)
# arm the camera
startDetection = subprocess.Popen(args, stdout=subprocess.PIPE)
isArmed = True
# send OK back
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
conn.close()
elif "Delete Notifs" in msg:
print("[S]: Deleting notifications ...")
# send OK
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
# client: number of notifications to delete
length_of_message = int.from_bytes(conn.recv(2), byteorder='big')
msg = conn.recv(length_of_message).decode("UTF-8")
print("[S]: Message from client: " + msg)
# send OK
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
numNotifsToDelete = int(msg)
count = 1;
while count <= numNotifsToDelete:
# client: file to delete
length_of_message = int.from_bytes(conn.recv(2), byteorder='big')
msg = conn.recv(length_of_message).decode("UTF-8")
print("[S]: File to delete: " + msg)
if os.path.exists(msg):
os.remove(msg)
print("[S]: " + msg + " deleted")
# send OK
message_to_send = "OK".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
count = count + 1
conn.close()
elif "isLiveRunning" in msg:
print("[S]: Checking if live stream is running ...")
if isLive == True:
startLive.terminate()
startLive = ''
isLive = False
message_to_send = "DONE".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
else:
message_to_send = "FAIL".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
conn.close()
elif "Send Number of Pics" in msg:
print("[S]: Sending number of pics ...")
allCurrentPics = glob.glob("*.jpg")
print(len(allCurrentPics))
if isArmed == True:
startDetection.terminate()
startDetection = ''
isArmed = False
# send number of pics to client
message_to_send = str(len(allCurrentPics)).encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
conn.close()
elif "Send Number of Messages" in msg:
print("[S]: Sending number of message logs ...")
allCurrentLogs = glob.glob("*.txt")
print(len(allCurrentLogs))
# send number of pics to client
message_to_send = str(len(allCurrentPics)).encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
conn.close()
else:
print("[S]: Sending test...")
message_to_send = "TEST".encode("UTF-8")
conn.send(len(message_to_send).to_bytes(2, byteorder='big'))
conn.send(message_to_send)
conn.close()
# Close the server socket
ss.close()
exit()
if __name__ == "__main__":
main()
|
19,645 | 5ec5656e297f2be7f2dc954bb7ea78b56122ae18 |
# ********* Problem-1 ************
def print_depth(my_dict, initial=0):
    """Print every key of a (possibly nested) dict with its 1-based depth."""
    depth = initial + 1
    for key in my_dict:
        print(key, depth)
        nested = my_dict[key]
        if isinstance(nested, dict):
            print_depth(nested, initial=depth)


# Demo: keys at three nesting levels.
a = {
    "key1": 1,
    "key2": {
        "key3": 1,
        "key4": {
            "key5": 4
        }
    }
}
print_depth(a)
# ********* Problem-2 ************
class Person(object):
    """A person with a first/last name and an optional father (another Person)."""
    def __init__(self, first_name, last_name, father):
        self.first_name = first_name
        self.last_name = last_name
        self.father = father

person_a = Person("User", "1", None)
person_b = Person("User", "2", person_a)

def print_depth(my_dict, initial=0):
    """Print every key with its depth; Person values are expanded like dicts.

    Bug fix: the original did `return print_depth(...)` when it met a Person
    value, which terminated the loop early and skipped every remaining key at
    that level (and at all enclosing levels). The recursion is now a plain
    call, matching the dict branch.
    """
    for key, value in my_dict.items():
        print(key, initial + 1)
        if isinstance(value, dict):
            print_depth(value, initial=initial+1)
        elif isinstance(value, Person):
            print_depth(vars(value), initial=initial+1)
# Demo: nested dict containing a Person instance under "user"; print_depth
# expands the Person's attributes one level deeper.
a = {
    "key1": 1,
    "key2": {
        "key3": 1,
        "key4": {
            "key5": 4,
            "user": person_b,
        }
    },
}
print_depth(a)
# ********* Problem-3 ************
class Node:
    """A binary-tree node: a key plus left/right child links (initially None)."""
    def __init__(self, key):
        self.key = key
        self.left = self.right = None
def findPath(root, path, k):
    """Append the keys on the root-to-k path onto `path`.

    Returns True iff k occurs in the tree rooted at `root`; on failure the
    speculative append of this node's key is undone.
    """
    if root is None:
        return False
    path.append(root.key)
    if root.key == k:
        return True
    for child in (root.left, root.right):
        if child is not None and findPath(child, path, k):
            return True
    path.pop()  # k is not in this subtree
    return False

def findLCA(root, n1, n2):
    """Return the key of the lowest common ancestor of n1 and n2, or -1
    when either key is missing from the tree."""
    path1, path2 = [], []
    if not (findPath(root, path1, n1) and findPath(root, path2, n2)):
        return -1
    # Both paths start at the root; the LCA is the last shared prefix key.
    lca = -1
    for a, b in zip(path1, path2):
        if a != b:
            break
        lca = a
    return lca
# Build the sample tree from the classic LCA exercise and query it.
root = Node(1)
root.left = Node(2)
root.right = Node(3)
root.left.left = Node(4)
root.left.right = Node(5)
root.right.left = Node(6)
root.right.right = Node(7)
root.left.left.left = Node(8)
root.left.left.right = Node(9)
print("LCA(9, 4) = ", findLCA(root, 9, 4))
print("LCA(8, 5) = ", findLCA(root, 8, 5))
print("LCA(6, 7) = ", findLCA(root, 6, 7))
print("LCA(3, 7) = ", findLCA(root, 3, 7))
# TEST METHOD
def my_sum(a, b):
    """Return the sum of a and b."""
    return a + b
|
19,646 | 6d0c7320b49483debded89ec40b0bb6f1edaf447 | from __future__ import print_function
from aimaschroomer import AiMaschroomer
from imutils.video import VideoStream
from tensorflow.keras.models import load_model
import argparse
import time
import os
import json
# Main application prepared for a Raspberry Pi.
ap = argparse.ArgumentParser()
ap.add_argument("-p", "--picamera", type=int, default=-1,
                help="whether or not the Raspberry Pi camera should be used")
args = vars(ap.parse_args())

splittedSetPath = os.getcwd() + "/data_h5py"

# Load the serialized classifier model.
modelPath = os.path.join(splittedSetPath, "models", "mobilenetv2.model")
model = load_model(modelPath)

# Load the training-set mean (used for input normalisation).
meansPath = os.path.join(splittedSetPath, "mean", "train_mean.json")
means = json.loads(open(meansPath).read())

# Start the video stream and give the camera sensor time to warm up.
vs = VideoStream(usePiCamera=args["picamera"] > 0).start()
time.sleep(2.0)

# Hand control over to the GUI main loop.
pba = AiMaschroomer(vs, "output", model, ["Eatable", "Poisoned", "Uneatable"], means)
pba.root.mainloop()
19,647 | e5d0159f8700d8c0da57a64bf5f50a0e986bcf1c | from django.urls import path
from .views import (
ConfirmationView,
GetMapFeatures,
InfoPageView,
LocationView,
MeetingView,
PlanfaseView,
SpecifyProjectView,
ToetswijzeView,
UploadView,
)
# URL namespace used for reversing, e.g. reverse("aanmeldformulier:info").
app_name = "aanmeldformulier"
# Registration-form flow; the order mirrors the wizard's page order.
urlpatterns = [
    path("", InfoPageView.as_view(), name="info"),
    path("start/", SpecifyProjectView.as_view(), name="specify-project"),
    path("map/", LocationView.as_view(), name="map"),
    path("map/features/<lng>/<lat>/", GetMapFeatures.as_view(), name="map-features"),
    path("toetswijze/", ToetswijzeView.as_view(), name="toetswijze"),
    path("planfase/", PlanfaseView.as_view(), name="planfase"),
    path("upload/", UploadView.as_view(), name="upload"),
    path("vergadering/", MeetingView.as_view(), name="vergadering"),
    path("bevestiging/", ConfirmationView.as_view(), name="confirmation"),
]
|
19,648 | 05dbcc063202d84a66962ff337b512c1c55b3854 | import requests
import time
from bs4 import BeautifulSoup
import random
import urllib.parse
import argparse
from glob import glob
#soup = BeautifulSoup(content, 'lxml')
#result = soup.find_all('ul', class_='searchResultListUl')
#
#result_list = result[0].find_all('li')
#
#for one in result_list:
#
#    print('*'*100)
#
#    company_name = one.find('p', class_='searchResultCompanyname').string
#    print('company name: %s'%company_name)
#
#    company_address = one.find('em', class_='searchResultJobCityval').string
#    print('company address: %s'%company_address)
#
#    jobdescrption = one.find('p', class_='searchResultJobdescription').find('span').string
#    print('job requirements: %s'%jobdescrption, '\n')
# CLI: the job keyword to search for and how many result pages to crawl.
# (The default job_name is mojibake of a Chinese job title from the
# original file's encoding -- left byte-identical since it is runtime data.)
parser = argparse.ArgumentParser(description='')
parser.add_argument('--job_name', dest='job_name', default='็ฎๆณๅทฅ็จๅธ', help='job name')
parser.add_argument('--pages', dest='pages', type=int, default=10, help='job pages')
args = parser.parse_args()
def parseSingleUrl(url_name):
    """Fetch a single company page and return its detail <div> blocks.

    `url_name` is a protocol-relative URL (starts with //), so the https:
    scheme is prepended before fetching.

    Fix: the original (mojibake) comment here had been split across two
    lines by an encoding mangle, leaving a bare non-comment line that made
    the file a syntax error.
    """
    url_name = 'https:' + url_name
    content = requests.get(url_name).text
    soup = BeautifulSoup(content, 'lxml')
    result = soup.find_all('div', class_="cLeft l")
    return result
def parseSinglePage(url):
    """Parse one search-result page: visit each job's company page and print
    its details.

    Fix: several print() label strings had been split across two lines by an
    encoding mangle, breaking the file's syntax; they are rejoined on one
    line. The labels themselves are mojibake of the original Chinese text
    and are kept byte-for-byte, since the original encoding cannot be
    recovered from here.
    """
    html = requests.get(url)
    content = html.text
    soup = BeautifulSoup(content, 'lxml')
    result = soup.find_all('ul', class_='searchResultListUl')[0]
    result_list = result.find_all('div', class_="searchResultItemDetailed")
    for one in result_list:
        print('*'*20)
        # Randomized delay so we do not hammer the server.
        time.sleep(random.randint(2,8))
        url_name = one.a['href']
        res = parseSingleUrl(url_name=url_name)[0]
        company_name = res.find('li', class_='cJobDetailInforWd1 marb').a.string
        print('ๅฌๅธๅ็งฐ๏ผ re%s'%company_name)
        company_addr = res.find_all('li', class_="cJobDetailInforWd2 marb")[0]['title']
        print('ๅฌๅธๅฐๅ๏ผ %s'%company_addr)
        job_name = res.find_all('li', class_="cJobDetailInforWd2 marb")[1].string
        print('่ไฝๅ็งฐ๏ผ %s'%job_name)
        person = res.find_all('li', class_="cJobDetailInforWd2 marb")[2].string
        print('ๆ่ไบบๆฐ๏ผ %s'%person)
        date = res.find_all('li', class_="cJobDetailInforWd2 marb")[3].string
        print('ๅฌๅธๅฐๅ๏ผ %s'%date)
        job_duty = res.find('p', class_="mt20").text.replace('\xa0', ' ')
        print('ๅทฅไฝ่่ดฃ๏ผ %s'%job_duty.strip('ๅฒไฝ่่ดฃ'))
def start():
    """Crawl `args.pages` result pages for the configured job name."""
    url_template = 'https://xiaoyuan.zhaopin.com/full/0/0_0_0_0_0_-1_{}_{}_0'
    for page in range(args.pages):
        print('*'*100)
        parseSinglePage(url_template.format(urllib.parse.quote(args.job_name), page))
if __name__ == '__main__':
start() |
19,649 | d24c738bd77c11e875c9e1502e7ac86ba267bf93 | from mapobject import MapObject
########################################################################
class Landscape(MapObject):
    """
    Landscape element. Rock, tree, wall, floor.

    Base class for static terrain; sets `landscape = True` so game code can
    tell terrain apart from other MapObjects.
    """
    #----------------------------------------------------------------------
    def __init__(self, map, x, y):
        """Constructor"""
        # NOTE(review): update() and the passable/view_passable attributes
        # come from MapObject (not visible here) -- confirm their contract.
        super(Landscape, self).__init__(map, x, y)
        self.landscape = True
########################################################################
class Wall(Landscape):
    """
    Wall: drawn as "#"; blocks both movement and line of sight.
    """
    #----------------------------------------------------------------------
    def __init__(self, map, x, y):
        """Constructor"""
        super(Wall, self).__init__(map, x, y)
        self.symbol = "#"
        self.passable = False
        self.view_passable = False
        self.update()
########################################################################
class Door(Landscape):
    """
    Door: "+" when closed, "/" when open.
    """
    #----------------------------------------------------------------------
    def __init__(self, map, x, y, closed=False):
        """Constructor"""
        super(Door, self).__init__(map, x, y)
        self.closed = None
        self.set_closed(closed)
    #----------------------------------------------------------------------
    def open(self):
        """Open the door."""
        self.set_closed(False)
    #----------------------------------------------------------------------
    def set_closed(self, closed):
        """
        Set the door state; a closed door blocks movement and sight.
        """
        self.closed = closed
        self.passable = not closed
        self.view_passable = not closed
        if closed:
            self.symbol = "+"
        else:
            self.symbol = "/"
        self.update()
########################################################################
class SecretDoor(Door):
    """
    Secret Door: starts closed and is drawn as a wall ("#") to hide it.
    """
    #----------------------------------------------------------------------
    def __init__(self, map, x, y):
        """Constructor"""
        super(SecretDoor, self).__init__(map, x, y, True)
        # Overwrite the "+" symbol set by Door.set_closed with a wall glyph.
        self.symbol = "#"
        self.update()
########################################################################
class Stairs(Landscape):
    """
    Stairs: ">" leads down, "<" leads up; may be linked to a counterpart
    on another level via connect_to().
    """
    #----------------------------------------------------------------------
    def __init__(self, map, x, y, down=True):
        """Constructor"""
        super(Stairs, self).__init__(map, x, y)
        self.passable = True
        self.view_passable = True
        self.connected_stairs = None
        if down:
            self.symbol = ">"
        else:
            self.symbol = "<"
        self.update()
    #----------------------------------------------------------------------
    def connect_to(self, stairs):
        """Link these stairs to their counterpart on another level."""
        self.connected_stairs = stairs
########################################################################
class Floor(Landscape):
    """
    Floor: plain walkable tile (".").
    """
    #----------------------------------------------------------------------
    def __init__(self, map, x, y):
        """Constructor"""
        super(Floor, self).__init__(map, x, y)
        self.symbol = "."
        self.passable = True
        self.view_passable = True
        self.update()
|
19,650 | f617c226d45dc8709a1477d4451f41e4c7365fa9 | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Create a list at http://commons.wikimedia.org/wiki/User:Multichill/By_country_to_fix of categories which are not in their corresponding <subject>_by_country category.
'''
import sys
import wikipedia, MySQLdb, config
def connectDatabase():
    '''
    Connect to the mysql database, if it fails, go down in flames
    '''
    # NOTE(review): Python 2 script; credentials come from a local `config`
    # module and the MySQLdb driver is assumed.
    conn = MySQLdb.connect(config.db_hostname, db='commonswiki_p', user = config.db_username, passwd = config.db_password)
    cursor = conn.cursor()
    return (conn, cursor)
def getCountryList(cursor):
    '''
    Get the list of countries
    '''
    # Countries are the category (namespace 14) links on the wiki page
    # [[User:Multichill/Countries]].
    query = u"SELECT pl_title FROM page JOIN pagelinks ON page_id=pl_from WHERE page_namespace=2 AND page_is_redirect=0 AND page_title = 'Multichill/Countries' AND pl_namespace=14"
    cursor.execute(query)
    result = []
    # fetchone() returns None when exhausted; unpacking None raises
    # TypeError, which is (ab)used here as the loop-exit condition.
    while True:
        try:
            country, = cursor.fetchone()
            result.append(unicode(country, 'utf-8'))
        except TypeError:
            # Limit reached or no more results
            break
    return result
def getByCountryList(cursor, startSubject):
    '''
    Get a list of ..._by_country categories
    '''
    # NOTE(review): `startSubject` is accepted but never used.
    query = u"SELECT cat.page_title, bc.page_title FROM page AS bc JOIN categorylinks ON bc.page_id = cl_from JOIN page AS cat ON (cl_to=cat.page_title AND bc.page_title=CONCAT(cat.page_title, '_by_country')) WHERE bc.page_namespace=14 AND bc.page_is_redirect=0 AND bc.page_title LIKE '%by_country' AND cat.page_namespace=14 AND cat.page_is_redirect=0"
    cursor.execute(query)
    result = []
    while True:
        try:
            # Pairs of (<subject>, <subject>_by_country) category titles.
            subject, subjectByCountry = cursor.fetchone()
            #print subject + ' ' + subjectByCountry
            result.append((unicode(subject, 'utf-8'), unicode(subjectByCountry, 'utf-8')))
        except TypeError:
            # Limit reached or no more results
            break
    return result
def getMissingByCountry(cursor, subject, subjectByCountry, countries):
    '''
    For the <subject>_by_country category get the categories which are supposed to be subcategories, but are not.
    '''
    # Candidate categories share the subject prefix, are not already in the
    # by-country category, and are not category redirects.
    cursor.execute(u"SELECT page_title FROM page WHERE page_namespace=14 AND page_is_redirect=0 AND page_title LIKE %s AND NOT EXISTS(SELECT * FROM categorylinks WHERE cl_from=page_id AND cl_to = %s) AND NOT EXISTS(SELECT * FROM templatelinks WHERE page_id=tl_from AND tl_title ='Category_redirect')", (subject + '_%' , subjectByCountry))
    result = []
    while True:
        try:
            country = None
            cat, = cursor.fetchone()
            cat = unicode(cat, 'utf-8')
            #print "bla"
            # Keep only titles that actually follow a per-country pattern.
            country = isCountryCategory(cat, subject, countries)
            if country:
                result.append((cat, subjectByCountry, country))
            #print cat + ' should be in ' + subjectByCountry
            #print cat
        except TypeError:
            # Limit reached or no more results
            break
    #print "bla!" + result
    return result
def isCountryCategory(cat, subject, countries):
    '''
    If the category is a country category, return the name of the country.

    A match is `<subject>_<prep>_<country>` or `<subject>_<prep>_the_<country>`
    where <prep> is one of from/in/of; returns None when nothing matches.
    '''
    for country in countries:
        if not cat.endswith(country):
            continue
        for prep in (u'_from_', u'_in_', u'_of_'):
            if cat == subject + prep + country or cat == subject + prep + u'the_' + country:
                return country
    return None
def outputResult(missingCatsTotal):
    '''
    Output the results to Commons.
    Can also output a ready to run script.
    '''
    # Builds two artefacts: a wiki list page and a shell script of
    # add_text.py invocations. NOTE(review): Python 2 (`file()` builtin)
    # and a hard-coded local output path.
    resultwiki = u''
    resultscript = u'#!/usr/pkg/bin/bash\n'
    page = wikipedia.Page(wikipedia.getSite(u'commons', u'commons'), u'User:Multichill/By_country_to_fix')
    comment = u'A list of categories to fix'
    for (cat, subjectByCountry, country) in missingCatsTotal:
        resultwiki = resultwiki + u'*[[:Category:' + cat + u']] should be in [[:Category:' + subjectByCountry + u']]\n'
        resultscript = resultscript + u'python2.4 add_text.py -always -lang:commons -family:commons -page:"Category:' + cat + u'" -text:"[[Category:' + subjectByCountry.replace(u'_', u' ') + u'|' + country.replace(u'_', u' ') + u']]" -summary:"Adding [[Category:' + subjectByCountry.replace(u'_', u' ') + u']]"\n'
    # Underscores in titles become spaces for display.
    resultwiki = resultwiki.replace(u'_', u' ')
    resultscript = resultscript.replace(u'_', u' ')
    page.put(resultwiki, comment)
    f = file("/home/multichill/queries/bycountry.txt", 'w')
    f.write(resultscript.encode('utf-8'))
    f.close()
    #wikipedia.output(resultscript)
def main():
    '''
    The main loop
    '''
    # Gather all miscategorised per-country categories and publish the list.
    conn = None
    cursor = None
    missingCatsTotal = []
    (conn, cursor) = connectDatabase()
    countries = getCountryList(cursor)
    byCountryList = getByCountryList(cursor, u'')
    for (subject, subjectByCountry) in byCountryList:
        missingCats = getMissingByCountry(cursor, subject, subjectByCountry, countries)
        if missingCats:
            missingCatsTotal = missingCatsTotal + missingCats
    outputResult(missingCatsTotal)
if __name__ == "__main__":
    try:
        main()
    finally:
        # Always release the pywikipedia framework, even on errors.
        wikipedia.stopme()
19,651 | bb1613caf1e39f14e3c2f360eb8209d63fb26abb | import sys
from time import clock_gettime
from benchmarks import mnist, mnist_data
from benchmarks import simple_lstm, simple_lstm_data
from benchmarks import simple_mlp, simple_mlp_data
import pandas as pd
def round_end(results, out, start, test_name):
    """Prepend (test_name, start, end) onto a result row and return it.

    `results` is unused but kept for signature compatibility with callers.
    """
    end = clock_gettime(1)
    for index, value in enumerate((test_name, start, end)):
        out.insert(index, value)
    return out
def _run_batches(results, test_name, modes, batch_sizes, run_one):
    """Time run_one(batch_size, mode) for every mode/batch-size pair and
    append each annotated result row to `results`."""
    for mode in modes:
        for batch_size in batch_sizes:
            start = clock_gettime(1)
            out = run_one(batch_size, mode)
            results.append(round_end(results, out, start, test_name))


def run_tests(env=None, cpu_clock=None, gpu=None, modes=None):
    """Run every benchmark model over all modes and batch sizes.

    env/cpu_clock/gpu only label the rows; returns (results, test_name)
    where test_name is the last (cudnn LSTM) test's name.

    Refactor: the four copy-pasted mode/batch loops are collapsed into the
    _run_batches helper; behavior is unchanged.
    """
    results = []
    batch_sizes = [1024, 512, 256, 128, 64, 32, 16, 8]
    system_details = env + '_' + cpu_clock + '_' + gpu

    test_name = 'simple_mlp' + '_' + system_details
    _run_batches(results, test_name, modes, batch_sizes,
                 lambda bs, mode: simple_mlp(simple_mlp_data(), batch_size=bs, resource_mode=mode))

    test_name = 'mnist' + '_' + system_details
    _run_batches(results, test_name, modes, batch_sizes,
                 lambda bs, mode: mnist(mnist_data(), batch_size=bs, resource_mode=mode))

    test_name = 'simple_lstm' + '_' + system_details
    _run_batches(results, test_name, modes, batch_sizes,
                 lambda bs, mode: simple_lstm(simple_lstm_data(), batch_size=bs, resource_mode=mode, cudnn=False))

    test_name = 'simple_cudnnlstm' + '_' + system_details
    # The CuDNN variant cannot run on CPU; note this mutates the caller's
    # `modes` list, matching the original behavior.
    try:
        modes.remove('cpu')
    except ValueError:
        pass
    _run_batches(results, test_name, modes, batch_sizes,
                 lambda bs, mode: simple_lstm(simple_lstm_data(), batch_size=bs, resource_mode=mode, cudnn=True))

    return results, test_name
def save_results(results, test_name):
    """Write the collected benchmark rows to a CSV file named `test_name`."""
    frame = pd.DataFrame(results)
    frame.columns = ['test', 'start', 'end', 'seconds', 'mode', 'batch_size']
    frame.to_csv(test_name)
if __name__ == '__main__':
    print("hello")  # debug breadcrumb kept from the original script
    # argv: <env> <cpu_clock> <gpu> [comma-separated modes]
    env = sys.argv[1]
    cpu_clock = sys.argv[2]
    gpu = sys.argv[3]
    # Bug fix: a missing 4th argument raises IndexError, not ValueError, so
    # the default mode list below was never applied and the script crashed.
    try:
        modes = sys.argv[4]
    except IndexError:
        modes = 'gpu,cpu,multi_gpu,parallel_gpu'
    modes = modes.split(',')
    results, test_name = run_tests(env, cpu_clock, gpu, modes)
    # run_tests returns the cudnn test's name; strip the model prefix so
    # the CSV is named after the system under test.
    test_name = test_name.replace('simple_cudnnlstm_', '')
    save_results(results, test_name)
    print('Benchmarking Completed!')
|
19,652 | 2fe73d684a78d885f3127daf1daf62e725347ce4 | from rest_framework import serializers
from .models import Portfolio, Transaction
class PortfolioSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes Portfolio instances (id, name, net earnings, owner)."""
    class Meta:
        model = Portfolio
        fields = ('id', 'name', 'net_earnings', 'owner_id')
class TransactionSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes a single buy/sell Transaction within a Portfolio."""
    class Meta:
        model = Transaction
        fields = (
            'ticker',
            'instrument_name',
            'number_of_shares',
            'trade_date',
            'close_out_date',
            'buy_price',
            'sell_price',
            'net_earnings',
            'portfolio_id'
        )
|
19,653 | 7efb41976f4029dbdc9e90cf6446473a17e8847e | from grid import Grid
from ant import Ant, TurnDirection
from typing import List
class LangtonsAnt:
    """Drives a Langton's-ant simulation over a Grid with a single Ant."""

    def __init__(
        self, initial_state: List[List[bool]], start_position: List[int]
    ) -> None:
        self._grid = Grid(initial_state)
        self._ant = Ant(start_position[0], start_position[1])

    def next(self) -> bool:
        """Advance one step and report whether the ant is still on the grid.

        Refactor: both branches of the original did change_color/turn/move
        and differed only in the turn direction, so the duplication is
        collapsed: a truthy cell turns the ant RIGHT, otherwise LEFT.
        """
        x, y = self._ant.position
        direction = (
            TurnDirection.RIGHT if self._grid.get_color(x, y) else TurnDirection.LEFT
        )
        self._grid.change_color(x, y)
        self._ant.turn(direction)
        self._ant.move()
        return self.validate_move()

    @property
    def state(self):
        """Current grid state."""
        return self._grid.state

    @property
    def ant_position(self):
        """Current (x, y) position of the ant."""
        return self._ant.position

    def validate_move(self) -> bool:
        """True if the ant's current position is still inside the grid."""
        x, y = self._ant.position
        return self._grid.validate_point(x, y)

    def validate(self) -> bool:
        """Delegate whole-grid validation to the Grid."""
        return self._grid.validate()
|
19,654 | 1b19c079a7af063f14512ff74729fadb25a9b29a | import unittest
import HTMLTestRunnerNew
from common.testHttpRequest import TestHttpRequest
# Assemble and run the HTTP-request test suite, writing an HTML report.
# (Report filename/title strings are runtime data -- mojibake of the
# original Chinese -- and are left untouched.)
suit = unittest.TestSuite()  # create a test suite
loader = unittest.TestLoader()  # create a test loader
suit.addTest(loader.loadTestsFromTestCase(TestHttpRequest))
with open('datareport/็็ฆๅฐ็จๅบๆต่ฏๆฅๅ.html', 'wb') as file:
    runner = HTMLTestRunnerNew.HTMLTestRunner(stream=file, verbosity=2,title="็็ฆๅฐ็จๅบๆต่ฏๆฅๅ",
                                              description="ๆฅๅฃ่ชๅจๅๆต่ฏ",tester="็ง")
    runner.run(suit)
19,655 | 3930acc57ae8821eca7be6430707cfbe9636e1f9 | import unittest
import StringIO
from .. import bitdiff
class BitdiffTest(unittest.TestCase):
    """Tests bitdiff.compare() against fixture files on disk.

    NOTE(review): requires app/test/data/bitdiff/file1 and file2 to exist,
    and relies on the Python 2 `StringIO` import above.
    """
    def test_compare(self):
        # Identical files must compare as exactly 1.0.
        file1 = open("app/test/data/bitdiff/file1")
        file1_2 = open("app/test/data/bitdiff/file1")
        file2 = open("app/test/data/bitdiff/file2")
        self.assertEqual(1.0, bitdiff.compare(file1,file1_2))
        # Rewind before reuse: compare() consumed the file handle above.
        file1.seek(0)
        # file2 is assumed to differ from file1 in 1 of 96 units.
        self.assertEqual(95.0/96.0, bitdiff.compare(file1,file2))
19,656 | d3be70d60cd8aea5d167ec87c5c24fe71471f0ee | #Aidan Moran
#Accept a string and 4 integers. Print string from int 1 to 2, and 3 to 4.
# Prompt until the string is short enough, then print two slices of it.
# NOTE(review): each loop only rejects values ABOVE the bound -- negative
# integers slip through, and a 200-char string is accepted because the
# check is `length >= 201` despite the "less than 200" prompt.
length = 201
theString = ''
while length >= 201:
    theString = input("Please enter a string less than 200 characters long:")
    length = len(theString)
int1 = length+1
while int1 > length:
    int1 = int(input("Please enter integer 1:"))
int2 = length+1
while int2 > length:
    int2 = int(input("Please enter integer 2:"))
int3 = length+1
while int3 > length:
    int3 = int(input("Please enter integer 3:"))
int4 = length+1
while int4 > length:
    int4 = int(input("Please enter integer 4:"))
print(theString[int1:int2])
print(theString[int3:int4])
input("\n\nPress any key to exit")
|
19,657 | 42478157acb18b1cb31d34a76e19c2927abd5ae1 | import os, shutil, errno
# Build, sign, and zipalign a release APK for an Ionic/Cordova project.
# SECURITY(review): the keystore password is hard-coded and is piped to
# jarsigner via `echo`, exposing it in source control and process listings;
# move it to an environment variable or a credentials store.
keystore_password = "beleza.playkey"
build = "ionic cordova build android --release --prod"
unsigned_apk = '"C:/Projetos/beleza.com/beleza.com/platforms/android/app/build/outputs/apk/release/app-release-unsigned.apk"'
final_apk = '"C:/Projetos/beleza.com/beleza.com/platforms/android/app/build/outputs/apk/release/beleza.com.apk"'
keystore = "beleza.keystore"
create_keystore = "keytool -genkey -v -keystore "+keystore+" -alias beleza -keyalg RSA -keysize 2048 -validity 10000"
jarsigner = "jarsigner -verbose -sigalg SHA1withRSA -digestalg SHA1 -keystore "+keystore+" "+unsigned_apk+" beleza"
zipalign = "C:/Android/Data/build-tools/27.0.3/zipalign -v 4 "+unsigned_apk+" "+final_apk
# Remove stale build artefacts before rebuilding.
if os.path.exists(unsigned_apk):
    os.remove(unsigned_apk)
if os.path.exists(final_apk):
    os.remove(final_apk)
os.system(build)
# Create the signing keystore on first run only.
if not os.path.exists(keystore):
    os.system(create_keystore)
os.system('echo %s|%s' % (keystore_password, jarsigner))
os.system(zipalign)
19,658 | 71e3e4794b8121defc2801372f81a847cdc5f3a0 | def fib(n):
if n <= 1:
return n
f1 = fib.cache.get(n-1, fib(n - 1))
f2 = fib.cache.get(n-2, fib(n - 2))
ff = fib.cache[n] = f1 + f2
return ff
fib.cache = {}
# Populate the cache, then dump it in key order.
fib(40)
# NOTE(review): Python 2 print statement -- this file predates Python 3.
for i in sorted(fib.cache.keys()):
    print fib.cache[i]
|
19,659 | 03c1630d15afa5f2bc7820eb0971c1b56c134989 | /usr/share/pyshared/gluon/tests/test_routes.py |
19,660 | d74627268721153188c3b4848177f3628b8830f6 | import os
import json
from django.core.wsgi import get_wsgi_application
# Bootstrap Django so the ORM can be used from this standalone script.
os.environ['DJANGO_SETTINGS_MODULE'] = "footest.settings"
application = get_wsgi_application()
# Must be imported after Django is configured.
from earth.models import Country
# Import every country from the GeoJSON file, skipping ones already stored.
file = "static/countries.geo.json"
with open(file, "r") as caps:
    dump = json.load(caps)
for doc in dump["features"]:
    country = Country(country=doc["properties"]["name"],
                      country_id=doc["id"])
    if not Country.objects.filter(country_id=country.country_id).exists():
        country.save()
|
19,661 | 2214687e8435235da05a37e4d784454390d307a9 | from setuptools import setup
# Minimal packaging metadata for the Skytap API test project.
setup(
    name='Skytap API test',
    version='1.0',
    description='Skytap API test',
    author='Chang Lee',
    author_email='changpil@gmail.com',
    install_requires=[
        'pytest',
        'requests'
    ],
)
|
19,662 | a12482c0f7742e2a805aade984fcae7e82c43771 | import smtplib
Python_version = 3
if Python_version == 2:
from email.MIMEMultipart import MIMEMultipart
import email.mime.multipart
from email.MIMEText import MIMEText
from email.MIMEBase import MIMEBase
from email import encoders
from email.Utils import COMMASPACE, formatdate
from email import Encoders
else:
from email import encoders
from email.message import Message
from email.mime.audio import MIMEAudio
from email.mime.base import MIMEBase
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
COMMASPACE = ', '
# print(dir(email))
import os
import datetime
def send_Message():
    """Attempt to send an iMessage via AppleScript (macOS only).

    NOTE(review): `new_cmd` is built and printed but never executed; the
    command actually run is `osascript my_message.app ...`, which requires
    a my_message.app script next to this file -- confirm it exists. Earlier
    osascript drafts are kept commented out below.
    """
    # cmd = """osascript<<END
    # tell application "Messages"
    # send "test imessage from python" to buddy "4088967681 #" of (service 1 whose service type is iMessage)
    # end tell
    # END
    # """
    new_cmd = """ osascript -e
    'tell application "Messages"
    set targetBuddy to "+14088967681"
    set targetService to id of 1st service whose service type = iMessage
    set textMessage to 'test test again'
    set theBuddy to buddy targetBuddy of service id targetService
    send textMessage to theBuddy
    end tell'
    """
    cmd = "osascript my_message.app '{}'".format("test test test")
    print(new_cmd)
    os.system(cmd)
def send_email(user, pwd, recipient, subject, body):
    """Send a plain-text email through Gmail SMTP with STARTTLS.

    `recipient` may be a single address or a list. Returns True on success,
    False on any failure (deliberately best-effort: callers only need a
    boolean outcome).

    Fix: dropped the redundant function-local `import smtplib` -- the module
    is already imported at the top of this file.
    """
    FROM = user
    TO = recipient if isinstance(recipient, list) else [recipient]
    SUBJECT = subject
    TEXT = body
    # Prepare actual message
    message = """From: %s\nTo: %s\nSubject: %s\n\n%s
    """ % (FROM, ", ".join(TO), SUBJECT, TEXT)
    try:
        server = smtplib.SMTP("smtp.gmail.com", 587)
        server.ehlo()
        server.starttls()
        server.login(user, pwd)
        server.sendmail(FROM, TO, message)
        server.close()
        print('Successfully sent mail for {}'.format(subject))
        return True
    except Exception as e:
        print("failed to send mail for {}".format(subject))
        return False
def sendMail(to, subject, text, files=None):
    """Send an email with optional file attachments via Gmail SMTP.

    Fixes: `assert` input validation (stripped under python -O) replaced
    with real TypeError checks; the mutable default `files=[]` replaced
    with None; attachment files opened in a `with` block so handles are
    not leaked.
    """
    if files is None:
        files = []
    if not isinstance(to, list):
        raise TypeError("to must be a list of addresses")
    if not isinstance(files, list):
        raise TypeError("files must be a list of file paths")

    # SECURITY(review): hard-coded credentials -- move to env/config.
    smtpUser = 'haobin.zheng08'
    smtpPass = 'Shenghuo2014+'

    msg = MIMEMultipart()
    msg['From'] = smtpUser
    msg['To'] = COMMASPACE.join(to)
    #msg['Date'] = formatdate(localtime=True)
    msg['Date'] = datetime.date.today().strftime('%Y %b %d')
    msg['Subject'] = subject
    msg.attach(MIMEText(text))

    for file in files:
        part = MIMEBase('application', "octet-stream")
        with open(file, "rb") as attachment:
            part.set_payload(attachment.read())
        encoders.encode_base64(part)
        part.add_header('Content-Disposition', 'attachment; filename="%s"'
                        % os.path.basename(file))
        msg.attach(part)

    server = smtplib.SMTP('smtp.gmail.com:587')
    server.ehlo_or_helo_if_needed()
    server.starttls()
    server.ehlo_or_helo_if_needed()
    server.login(smtpUser, smtpPass)
    server.sendmail(smtpUser, to, msg.as_string())
    print('Email with file attachment is sent out!')
    server.quit()
if __name__ == "__main__":
    # send_Message()
    # exit()
    # SECURITY(review): real-looking Gmail credentials are hard-coded below
    # (twice); they belong in environment variables or a secrets store.
    user = 'haobin.zheng08@gmail.com'
    pwd = 'Shenghuo2014+'
    to = ['haobin.zheng08@gmail.com', 'hbz.zheng@gmail.com']
    body = "Hey, what's up?\n\n- You"
    subject = 'OMG Super Important Message'
    #send_email(user, pwd, to, subject, body)
    #print("first method works, now tring the 2nd one with file attachment")
    # a new approach
    smtpUser = 'haobin.zheng08'
    smtpPass = 'Shenghuo2014+'
    toAdd = 'haobin.zheng08@gmail.com'
    fromAdd = smtpUser
    today = datetime.date.today()
    subject = 'Data File 01 %s' % today.strftime('%Y %b %d')
    header = 'To :' + toAdd + '\n' + 'From : ' + fromAdd + '\n' + 'Subject : ' + subject + '\n'
    body = 'This is a data file on %s' % today.strftime('%Y %b %d')
    #attach = 'Data on %s.csv' % today.strftime('%Y-%m-%d')
    # NOTE(review): assumes stock_log.txt exists in the working directory.
    attach = 'stock_log.txt'
    # print header
    sendMail([toAdd], subject, body, [attach])
# user = 'haobin.zheng08'
# smtp_host = 'smtp.gmail.com'
# smtp_port = 587
# server = smtplib.SMTP()
# server.connect(smtp_host, smtp_port)
# server.ehlo()
# server.starttls()
# server.login(user, pwd)
# fromaddr = input('Send mail by the name of: ')
# tolist = input('To: ').split()
# sub = input('Subject: ')
# msg = email.MIMEMultipart.MIMEMultipart()
# msg['From'] = fromaddr
# msg['To'] = email.Utils.COMMASPACE.join(tolist)
# msg['Subject'] = sub
# msg.attach(MIMEText(input('Body: ')))
# msg.attach(MIMEText('\nsent via python', 'plain'))
# server.sendmail(user, tolist, msg.as_string())
|
19,663 | 12ab3aec4533c63c26add7b3cd7392403df8c42a | # Q1
li = [1,2,3,4,5,True,"Abc",[2,3,["hello"]]]
# 1. add a element
li.append("new_element")
print(li)
# 2. add a element at specific index
li.insert(2,"inserted at 2")
print(li)
# 3. extend with other lists
# (note: li2 is actually a tuple -- extend() accepts any iterable)
li2 = (1,34,3,4,{12,23},{1:"one",2:"two"})
li.extend(li2)
print(li)
# 4. count the frequency of elements in the list
# Fun Fact: count treats True as 1 and False as 0 and vice-versa
# (True == 1 in Python, so count(True) also counts the int 1s)
print(li.count(True))
# 5. reverse the list
li.reverse()
print(li)
#-------------------------------------------------------------------------------------------------------
# Q2
keys = {'a', 'e', 'i', 'o', 'u' }
# 1. Initialize dictionary with keyset
dic = dict.fromkeys(keys)
print(dic)
# 2. Initialize dictionary with keyset with a default value
dic = dict.fromkeys(keys,"Default")
print(dic)
# 3. Search for a key: dict.get returns its value, or the given default
#    when the key is missing. (Bug fix: the original discarded both return
#    values instead of showing them as the exercise intends.)
print(dic.get('a','Not Found'))
print(dic.get('s','Not Found'))
print(dic)
# 4. pop out the last-inserted key:value pair ('s' is added just above)
dic['s']='Hello'
print("popped ",dic.popitem())
# 5. Update the dictionary using another dictionary
dic2 = {'a':'vowel','b':'consonent'}
dic.update(dic2)
print(dic)
|
19,664 | 3d39efb0f1a3e8afd96b9ef3d0adb979dbb39175 | import translitru
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'name': 'translitru',
'description': 'Transliteration for Russian language by predefined rules (GOST 7.79-2000, ISO 9-1995, U.S. State Department)',
'packages': ['translitru'],
'version': translitru.__version__,
'author': 'Vitaly Zagorovskiy',
'url': 'https://github.com/vzagorovskiy/translit-ru',
'download_url': 'https://github.com/vzagorovskiy/translit-ru/archive/master.zip',
'author_email': 'vzagorovskiy@gmail.com',
'install_requires': [],
'scripts': [],
'license': 'MIT',
'keywords': ['transliteration', 'transliterate'],
'classifiers': ['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Natural Language :: Russian',
'Topic :: Text Processing',
]
}
setup(**config)
|
19,665 | 5518e29d3e447412eb0ccfb03226fd342ba67a99 | #----------------------------------
# n10.py: Network Connection
#
import os
import copy
import numpy as np
import matplotlib.pyplot as plt
from neuron import h
# class definition
class HHneuron():
def __init__(self):
self.soma = h.Section()
self.ap_dend = h.Section()
self.soma.L = 30.0
self.soma.diam = 30.0
self.soma.nseg = 1
self.soma.insert('hh')
self.ap_dend = h.Section()
self.ap_dend.L = 500.0
self.ap_dend.diam = 2
self.ap_dend.nseg = 23
self.ap_dend.insert('hh')
self.ap_dend.gnabar_hh = 0.012
self.ap_dend.gkbar_hh = 0.0036
self.ap_dend.gl_hh = 0.00003
self.ap_dend.connect(self.soma, 1.0, 0)
# synaptic input
self.esyn = h.Exp2Syn(0.5, sec=self.ap_dend) #syn points to ap_dend,0.5
self.esyn.tau1 = 0.5
self.esyn.tau2 = 1.0
self.esyn.e = 0
# end of class HHneuron
# cells
hh_neuron = [HHneuron() for i in range(2)]
# synapse
stim = h.NetStim(0.5)
stim.interval = 20.0
stim.number = 3
stim.start = 20.0
stim.noise = 0
# connections
nclist = []
nclist.append(h.NetCon(stim,hh_neuron[0].esyn, 0.0,0,0.2))
nclist.append(h.NetCon(hh_neuron[0].soma(0.5)._ref_v,hh_neuron[1].esyn,10,1,-0.02))
tstop = 100
dt = 0.01
v_init = -65
cvode = h.CVode()
cvode.active(1)
cvode.atol(1.0e-5)
a = []
b = np.zeros(3)
h.finitialize(v_init)
while h.t < tstop:
cvode.solve() #cvode.solve() -> calculate by defalt steps | cvode.solve(h.t+dt) -> calculate by dt steps
b[0] = h.t
b[1] = hh_neuron[0].soma(0.5).v
b[2] = hh_neuron[1].soma(0.5).v
print '%lf,%lf,%lf' %(b[0],b[1],b[2])
a.append(copy.deepcopy(b))
a=np.array(a)
print 'Calculation Complete.'
for i in range(1,3):
plt.plot(a[:,0],a[:,i])
plt.xlim([0,tstop])
plt.ylim([-100,100])
plt.show()
#exit()
|
19,666 | e183727907e6283de3bbf67c74673d02c98fd2c1 | """Test for fibinachi sequence."""
CORRECT_LIST = [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89]
# def test_fibonacci():
# """Test fibonacci list for fibonacci pattern."""
# from fib import fibonacci
# fib_list = fibonacci(0)
# print(fib_list)
# for i in range(len(fib_list)):
# print(fib_list[i], CORRECT_LIST[i])
# assert fib_list[i] is CORRECT_LIST[i]
def test_fib():
    """fibinacci(0) must return the first Fibonacci number."""
    from fib import fibinacci
    assert fibinacci(0) == CORRECT_LIST[0]
|
19,667 | ce5684a7ecc03bc3637786008cbf83afd553e8a1 | import time
from triangle_matrix import *
#import Levenshtein
# =============================================================================
# Calculate Jaccard Similarities
# =============================================================================
def calcJaccard(docsAsShingleSets):
    """Compute the Jaccard similarity of every document pair.

    Results go into a condensed upper-triangular matrix (see
    triangle_matrix helpers); entry (i, j) is |Si ∩ Sj| / |Si ∪ Sj|.
    """
    numDocs = len(docsAsShingleSets)
    JSim = createTriangleMatrixNumpy(numDocs)
    t0 = time.time()  # timing retained for parity with the original
    for i in range(numDocs):
        left = docsAsShingleSets[i]
        for j in range(i + 1, numDocs):
            right = docsAsShingleSets[j]
            overlap = len(left.intersection(right))
            union = len(left.union(right))
            JSim[getTriangleIndex(i, j, numDocs)] = overlap / float(union)
    elapsed = time.time() - t0
    return JSim
# =============================================================================
# Calculate Levenstien Distance
# =============================================================================
def calcLev(docs):
    """Compute normalized Levenshtein distance for every document pair.

    ``docs`` is a sequence whose items expose the text at index 1
    (docs[i][1]); results go into a condensed triangular matrix, each
    entry being distance / max(len(s1), len(s2)) in [0, 1].

    BUG FIX: the ``import Levenshtein`` at the top of the file was
    commented out, so this function always raised NameError.  We now try
    the C-accelerated module and fall back to the local pure-Python
    ``lev`` (much slower).  Also guards against ZeroDivisionError when
    both documents are empty.
    """
    print("\nCalculating Levenshtein Similarities...")
    numDocs = len(docs)
    LevSim = createTriangleMatrixNumpy(numDocs)
    t0 = time.time()  # Time the calculation.
    try:
        import Levenshtein
        distance = Levenshtein.distance
    except ImportError:
        distance = lev  # fallback defined below in this module
    for i in range(0, numDocs):
        # Print progress every 100 documents.
        if (i % 100) == 0:
            print("  (" + str(i) + " / " + str(numDocs) + ")")
        s1 = docs[i][1]
        for j in range(i + 1, numDocs):
            s2 = docs[j][1]
            denom = float(max(len(s1), len(s2)))
            # Two empty documents are identical -> distance 0.
            LevSim[getTriangleIndex(i, j, numDocs)] = (distance(s1, s2) / denom) if denom else 0.0
    elapsed = (time.time() - t0)
    print("\nCalculating all Levenshtein Similarities took %.2fsec" % elapsed)
    return LevSim
def lev(a, b):
    """Return the Levenshtein (edit) distance between strings *a* and *b*.

    PERFORMANCE FIX: the original naive recursion was exponential in the
    input length.  This iterative dynamic program returns identical
    results in O(len(a) * len(b)) time and O(len(b)) space.
    """
    if not a:
        return len(b)
    if not b:
        return len(a)
    previous = list(range(len(b) + 1))
    for i, ca in enumerate(a, start=1):
        current = [i]
        for j, cb in enumerate(b, start=1):
            current.append(min(
                previous[j] + 1,               # deletion
                current[j - 1] + 1,            # insertion
                previous[j - 1] + (ca != cb),  # substitution / match
            ))
        previous = current
    return previous[-1]
|
19,668 | 5c38aadad216df89f3a353d80b8cf8c86549354d | alist = [10, 20, 23, 26, 27, 35, 38, 41, 46, 49, 54, 56, 64, 70, 81, 87, 88, 90, 92, 96, 98]
temp = None
def binary_search(left: int, right: int, key: int, data=None) -> int:
    """Recursive binary search; returns the matching element, else 0.

    ``data`` defaults to the module-level sorted list ``alist`` (the
    original behavior); passing an explicit sorted list generalizes the
    function.  ``right`` may be len(data) — the original call site passes
    that — so it is clamped to the last valid index.

    BUG FIXES: the original relied on a mutable global ``temp`` to stop
    runaway recursion (caused by the out-of-range ``right`` and a missing
    left > right base case), special-cased length-1 lists incorrectly,
    and printed debug output on every call.
    """
    if data is None:
        data = alist
    right = min(right, len(data) - 1)
    if left > right:          # empty search window: key not present
        return 0
    mid = (left + right) // 2
    if data[mid] == key:
        return data[mid]
    if key < data[mid]:
        return binary_search(left, mid - 1, key, data)
    return binary_search(mid + 1, right, key, data)
"""
This segment for dynamic of the program
"""
# while True:
# a = input("Enter the element of the list = ")
# if a == "":
# break
# else:
# alist.append(int(a))
alist.sort()
print(alist)
print(len(alist))
search_key = int(input("Enter the number you want to search = "))
result = binary_search(0, len(alist), search_key)
print(result)
|
19,669 | 7affe223bc17936a3ee210f9b78bd3dcc3d4ec66 | from django.contrib import admin
from .models import History,Project,Users
# Register your models here.
class ProjectAdmin(admin.ModelAdmin):
    """Admin configuration for Project: date-driven listing and search."""
    list_display=("release_date",)
    # BUG FIX: the Django ModelAdmin option is ``search_fields`` (plural);
    # the original ``search_field`` was silently ignored, so the admin
    # search box never appeared.
    search_fields=("release_date",)
    date_hierarchy="release_date"
    fields=("project_name","person","release_date")
    #raw_id_fields=("project_name",)
class HistoryAdmin(admin.ModelAdmin):
    # Render the ``project_name`` FK as a raw-ID input rather than a
    # <select>, so the admin does not load every Project row into the widget.
    raw_id_fields=("project_name",)
admin.site.register(History,HistoryAdmin)
#admin.site.register(Project)
admin.site.register(Project,ProjectAdmin)
admin.site.register(Users)
|
19,670 | 4399717262ffbd79852e568d8a2dd5416d8cd54a | import time
import pygame
# ้ณไน่ทฏๅพ
filePath = r"D:\PythonCode\StudyCode\004_่ชๅจๅๅๅ
ฌ\Kalimba.mp3"
pygame.mixer.init()
track = pygame.mixer.music.load(filePath)
pygame.mixer.music.play()
time.sleep(5)
pygame.mixer.music.pause()
time.sleep(5)
pygame.mixer.music.unpause()
time.sleep(5)
pygame.mixer.music.stop() |
19,671 | becc45f420a30bd9dcc896334686ef13710a1ffd | from abc import ABC, abstractmethod
import numpy as np
from Plotter.Plotter import Plotter
import math
class ModulatorBase(ABC):
    """Abstract base for multi-order digital modulators.

    Subclasses implement :meth:`modulate` / :meth:`demodulate`; this base
    provides bit alignment, carrier mixing and per-symbol FFT demixing.
    """
    # Modulation orders, phase offsets and amplitudes of the sub-modulators.
    orders = (4, 12, )
    phaseOffsets = (0, 0, )
    amplitudes = (1, 2, )
    carrierFreq = 1000   # Hz
    sampleFreq = 4000    # Hz
    orderCountMethod = "sum"  # how bits-per-symbol combines orders: "sum" or "mul"

    @abstractmethod
    def modulate(self, bits):
        """Return an array of complex (exponential-form) symbols, one per baud."""
        pass

    @abstractmethod
    def demodulate(self, signal):
        """Return the array of bits recovered from *signal*."""
        pass

    def getSamplesPerSymbol(self):
        """Number of samples per symbol (one carrier period per symbol)."""
        # FIX: ``np.int`` was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin int() behaves identically here.
        return int(np.round(self.sampleFreq / self.carrierFreq))

    def getAlignedBits(self, bits):
        """Pad *bits* (cyclically, via np.resize) to a multiple of bits/symbol."""
        if self.orderCountMethod == "sum":
            m = math.log2(sum(self.orders))
        elif self.orderCountMethod == "mul":
            m = math.log2(np.prod(self.orders))
        else:
            # FIX: previously fell through and raised NameError on ``m``.
            raise ValueError("unknown orderCountMethod: %r" % (self.orderCountMethod,))
        if bits.size % m != 0:
            bits = np.resize(bits, int(bits.size + m - (bits.size % m)))
        return bits

    def getSignal(self, bits):
        """Modulate *bits* and mix the complex symbols onto the carrier."""
        bits = self.getAlignedBits(bits)
        modulated = self.modulate(bits)
        modulatedRepeated = np.repeat(modulated, self.getSamplesPerSymbol())
        carrierLinspace = np.linspace(
            0, 2 * np.pi * modulated.size, modulatedRepeated.size, endpoint=False)
        # I/Q mixing: real part rides on cos, imaginary part on sin.
        return (modulatedRepeated.real * np.cos(carrierLinspace)) + (modulatedRepeated.imag * np.sin(carrierLinspace))

    def getSymbolsFromSignal(self, signal):
        """Recover one complex symbol per carrier period via a per-symbol FFT."""
        signal = signal.reshape((-1, self.getSamplesPerSymbol()))
        signal = np.fft.fft(signal, axis=1)
        signal = np.divide(signal, self.getSamplesPerSymbol() / 2)
        # Sign flip matches modulate()'s convention (original author's note:
        # "not sure why, but it works") -- verify before changing.
        signal.imag *= -1
        return signal[:, 1]  # FFT bin 1 == one cycle per symbol (the carrier)
|
19,672 | 12bea740b097781785f3596883d243204220c7e3 | #encoding=utf8
import subprocess
def getDom(url):
    # Render *url* with a headless PhantomJS instance (constructionDom.js)
    # and return the DOM captured from the child process's stdout.
    # NOTE(review): Python 2 code (print statements).  The command line is
    # built by string interpolation and executed with shell=True, so a
    # crafted ``url`` can inject shell commands -- pass an argument list
    # with shell=False instead.  The PhantomJS path is machine-specific.
    cmd = r'C:\Users\t-jizh\Downloads\phantomjs-2.1.1-windows\bin\phantomjs.exe constructionDom.js "%s"'%url
    print "cmd:", cmd
    stdout,stderr = subprocess.Popen(cmd,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE).communicate()
    print stderr
    return stdout
targetUrl = 'www.yahoo.com'
getDom(targetUrl)
19,673 | 8198815fbe350cea634ae3d54ebcf531ba97ea86 |
from django.urls import path, re_path
from django.views.generic.base import TemplateView
from . import views
app_name = 'public'
urlpatterns = [
# templates
path('', views.mainPage, name='mainPage'),
path('index', views.index, name='index'),
path('charts', views.charts, name='charts'),
path('elements', views.elements, name='elements'),
path('icons', views.icons, name='icons'),
path('notifications', views.notifications, name='notifications'),
path('page-lockscreen', views.pageLockscreen, name='page-lockscreen'),
path('page-login', views.pageLogin, name='page-login'),
path('page-profile', views.pageProfile, name='page-profile'),
path('panels', views.panels, name='panels'),
path('tables', views.tables, name='tables'),
path('typography', views.typography, name='typography'),
# work on
path('mainPage', views.mainPage, name='mainPage'),
path('member1', views.member1, name='member1'),
path('member2', views.member2, name='member2'),
path('member3', views.member3, name='member3'),
path('equipManage', views.equipManage, name='equipManage'),
path('systemManage', views.systemManage, name='systemManage'),
] |
19,674 | 9aee6d6c2ac68fb7f506125bbd9dfda9a9efe5ab | # Module 5 Homework
# dynomite_dicts
# Prof. James Mertz
# Josue Tiguila Jr.
# Dictionary name Pokedex
# Build the pokedex as a single literal (same insertion order as the
# original incremental assignments), then demonstrate removing an entry.
pokedex = {
    'Venosaur': ('Grass', 'Poisen'),
    'Charizard': ('Fire', 'Flying'),
    'Blastoise': 'Water',
}
print("Dictionary with Blastoise")
print(pokedex)
print("Dictionary without Blastoise")
pokedex.pop('Blastoise')
print(pokedex)
19,675 | 5ae7ea44fca6db84b8becf3e71a66f04a82e0eec | file=open('in','r')
wfile=open('output01','w')
number_of_cases= int(file.readline())
for i in xrange(number_of_cases):
fulline=file.readline().split()
fulstr1=fulline[0]
n=int(fulline[1])
nvaleu=0
# print fulstr1
for j in xrange(len(fulstr1)):
substr=fulstr1[j:]
# print substr
count=0
if(len(substr)>=n):
for k in substr:
# print k
if(k!='a' and k!='e' and k!='i' and k!='o' and k!='u'):
count+=1
else:
count=0
if (count>=n):
# print substr
# print count
nvaleu+=1
break
for l in xrange(len(fulstr1)):
fulstr=fulstr1[0:-l]
# print fulstr
for j in xrange(len(fulstr)):
substr=fulstr[j:]
# print substr
count=0
if(len(substr)>=n):
for k in substr:
# print k
if(k!='a' and k!='e' and k!='i' and k!='o' and k!='u'):
count+=1
else:
count=0
if (count>=n):
# print substr
# print count
nvaleu+=1
break
wfile.write('Case #'+str(i+1)+':'+str(nvaleu)+'\n')
|
19,676 | dce2d866eddccf6e020a5d1db7c4c8c2322f8acf | import socket
import threading
import time
import os
import string
def objectify(str):
    """Return the characters of *str* as a list, e.g. "ab" -> ["a", "b"].

    IDIOM: list() over a string iterates its characters directly; clearer
    and faster than the original comprehension.  (The parameter name
    shadows the builtin ``str`` but is kept for interface compatibility.)
    """
    return list(str)
def deleteStrs(str, *args, **kwargs):
    """Trim characters off either end of *str*.

    Keyword args:
        start: if truthy, remove the first ``start + 1`` characters.
        end:   if truthy, remove the last ``end + 1`` characters.

    NOTE(review): the ``+ 1`` (and that 0 counts as "do nothing") looks
    like an off-by-one in the original design; behavior is preserved
    exactly, including the IndexError when the string is too short.
    """
    drop_front = kwargs.get('start', None)
    drop_back = kwargs.get('end', None)
    chars = list(str)
    if drop_front:
        for _ in range(drop_front + 1):
            chars.pop(0)
    if drop_back:
        for _ in range(drop_back + 1):
            chars.pop()
    return ''.join(chars)
def searchFor(searchObject, toSearch, *args, **kwargs):
    """Locate *searchObject* in *toSearch*.

    With ``index=True`` return the first matching index (None if absent);
    otherwise (default ``isfound=True``) return whether a match exists.
    With both flags false, the function returns None.
    """
    wantIndex = kwargs.get('index', False)
    wantFound = kwargs.get('isfound', True)
    if wantIndex:
        for position, candidate in enumerate(toSearch):
            if candidate == searchObject:
                return position
    elif wantFound:
        return any(candidate == searchObject for candidate in toSearch)
class server:
def __init__(self, ip, knownIPs, knownDomains, domain, *args, **kwargs):
self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server.bind((ip, 5050))
self.ip = ip
self.domain = domain
self.databaseIPs = knownIPs
self.databaseDomains = knownDomains
self.MessageDatatbase = kwargs.get('MessageDatatbase', None)
self.PORT = 5050
self.HEADER = 1024
self.BUFFER = 1024
self.DISCONNECT = "!disconnect!"
self.FORMAT = 'utf-8'
self.recognition = '!recognition!'
self.domainQueriesIps = [
]
self.domainQueriesPort = [
]
self.fileDir = []
self.files = []
def handle_client(self, conn, addr):
recvThread = threading.Thread(target = self.recv, args = (conn, addr))
recvThread.start()
def start(self):
print(f"[LISTENING] Server is listening on {self.ip}")
self.server.listen()
connected = True
reloadFilesThread = threading.Thread(target = self.reloadFiles)
reloadFilesThread.start()
while connected:
conn, addr = self.server.accept()
handleClientThread = threading.Thread(target = self.handle_client, args = (conn, addr))
handleClientThread.start()
def reloadFiles(self):
for (dirpath, filenames) in os.walk('./contents'):
for d in filenames:
dirname = os.path.join(dirpath, d)
if not(searchFor(dirname, self.files)) and not(dirname == ""):
self.files.append(d)
self.fileDir.append(dirname)
def recv(self, conn, addr):
connected = True
while connected:
header = self.server.recv(self.HEADER).decode(self.FORMAT)
if header:
header = int(header)
msg = self.server.recv(header).decode(self.FORMAT)
if msg.startswith('S.'):
pass
elif msg.startswith('DQ. S'):
self.domainQueriesIps.append(addr)
self.domainQueriesPort.append(conn)
elif msg.startswith('DQ. C'):
del self.domainQueriesIps[searchFor(addr, self.domainQueriesIps)]
del self.domainQueriesPort[searchFor(conn, self.domainQueriesPort)]
self.send(self.recognition, 'server')
def fileSearch(self, search, domain):
if self.domain == domain:
filetranslate = str.maketrans('.', '/')
filetranslate += '.txt'
try:
f = search.translate(filetranslate)
if f.read():
try:
res = []
attr =[]
keys = []
exec(f.read())
if len(res) > 0:
self.send('Res.', 'server')
for file in res:
self.sendOtherFile(file, 'server')
except:
pass
except:
pass
else:
self.send('FN', 'server')
def recvFile(self, serverObject):
f = ''
exec(f'f = self.{serverObject}.recv(self.HEADER).decode(self.FORMAT)')
if f == 'F.':
filename = ''
filesize = ''
exec(f'filename = self.{serverObject}.recv(self.HEADER).decode(self.FORMAT)')
exec(f"filesize = self.{serverObject}.recv(self.HEADER).decode(self.FORMAT)")
else:
return False
def sendFile(self, fileLoc, serverObject):
needed = True
while needed:
if fileLoc.endswith('.txt'):
c = open(fileLoc, 'r')
fileContents = c.read()
fileContents = objectify(fileContents)
returnString = ''
returnArray = []
for letter in fileContents:
if len(returnString.encode(self.FORMAT)) == self.HEADER:
returnArray.append(returnString.encode(self.FORMAT))
returnString = ''
else:
returnString += letter
self.sendBatch(returnArray, serverObject, searchProtocol = 'FT.')
needed = False
else:
self.sendOtherFile(fileLoc, 'server', searchProtocol = 'FO.')
def sendOtherFile(self, fileLoc, serverObject, *args, **kwargs):
searchProtocol = kwargs.get('searchProtocol', '')
filesize = os.path.getsize(fileLoc)
exec(f"self.send('F.', serverObject)")
exec('self.send(' + searchProtocol + fileLoc + ',' + serverObject + ')')
exec(f'self.send({searchProtocol}, {filesize}, {serverObject})')
with open(fileLoc, 'rb') as f:
while True:
bytesRead = f.read(self.BUFFER)
if not(bytesRead > 0):
break
exec(f"self.send({bytesRead}, {serverObject}, encoded = True)")
exec(f"self.send('F. C', {serverObject})")
def sendTxtFile(self, file):
str = objectify(file)
returnStrObject = []
strToAppend = ''
strTest = ''
strTestLen = 0
for letter in str:
strToAppend += letter
strTest = strToAppend.encode(self.FORMAT)
strTestLen = len(strTest)
if strTestLen == self.HEADER:
returnStrObject.append(strToAppend)
return returnStrObject
def sendBatch(self, object, socketObject, *args, **kwargs):
code = kwargs.get('searchProtocol', '')
for batch in object:
send = code.encode(self.FORMAT) + batch
exec(f"self.send({send}, {socketObject}, encoded = True)")
def rcv(self, *args, **kwargs):
header = ''
server = kwargs.get('serverObject', None)
conn = kwargs.get('conn', None)
if server:
exec('header = self.' + server + '.recv(self.HEADER).decode(self.FORMAT)')
elif conn:
exec(f"header = {conn}.recv(self.HEADER).decode(self.FORMAT)")
else:
return False
if header:
header = int(header)
msg = ''
if server:
exec('msg = self.' + server + '.recv(header).decode(self.FORMAT)')
elif conn:
exec(f"msg = {conn}.recv(header).decode(self.FORMAT)")
return msg
else:
pass
def send(self, msg, serverObject, *args, **kwargs):
encoded = kwargs.get('encoded', False)
code = kwargs.get('protocol', False)
if encoded:
if code:
msg = code + msg
message = msg.encode(self.FORMAT)
else:
message = msg.encode(self.FORMAT)
message_length = len(message)
send_length = message_length.encode(self.FORMAT)
send_length += b' ' * (self.HEADER - message_length)
exec('self.' + serverObject + '.send(' + send_length + ')')
exec('self.' + serverObject + '.send(' + message + ')')
|
19,677 | 96e3a636f83843fc71b1beb4beec1095c7767b2b | from kivymd.app import MDApp
from kivy import Config
from kivy.lang import Builder
Config.set('graphics', 'multisamples', '0')
Config.set('graphics', 'height', 500)
Config.set('graphics', 'width', 500)
Config.write()
class Login(MDApp):
def build(self):
self.screen = Builder.load_file('login.kv')
return self.screen
def __init__(self, **kwargs):
super(Login, self).__init__(**kwargs)
def main():
Login().run()
if __name__ == '__main__':
main() |
19,678 | f5232adaad5450eb1edaebf03dd649b2ec34abc8 | # def flight_report(estimated_time, scheduled_time):
# if estimated_time > scheduled_time:
# time_diff = estimated_time - scheduled_time
# time_diff = time_diff * 60
# print("Flight will arrive", int(time_diff), "min early!")
# # def flight_report(estimated_time, scheduled_time):
# elif estimated_time < scheduled_time:
# time_diff = scheduled_time - estimated_time
# time_diff = time_diff * 60
# print("Flight will be delayed", int(time_diff), "min")
# # def flight_report(estimated_time, scheduled_time):
# else:
# print("Flight will be there on time, No delayed!")
def flight_report(estimated_time, scheduled_time):
    """Print an arrival report comparing estimated vs scheduled time (hours).

    estimated > scheduled -> "Flight will arrive <m> min early!"
    estimated < scheduled -> "Flight will be delayed <m> min"
    equal                 -> on-time message

    BUG FIXES vs original: the delayed branch computed a *negative*
    difference (estimated - scheduled); the on-time branch printed
    "Hello World!"; and the early branch used ``return print(...)``.
    The intent follows the commented-out reference version above.
    """
    if estimated_time > scheduled_time:
        minutes = int((estimated_time - scheduled_time) * 60)
        print("Flight will arrive", minutes, "min early!")
    elif estimated_time < scheduled_time:
        minutes = int((scheduled_time - estimated_time) * 60)
        print("Flight will be delayed", minutes, "min")
    else:
        print("Flight will be there on time, No delayed!")
19,679 | 5da0ca8171826268e6afddab683e13db5958e378 | class Solution:
def subsetSum(self, arr, sum, n) -> bool:
if sum == 0:
return True
if n == 0 and sum != 0:
return False
if arr[n-1] > sum:
return self.subsetSum(arr, n-1, sum)
return self.subsetSum(arr, sum - arr[n-1], n-1) or self.subsetSum(arr, sum, n-1)
if __name__ == '__main__':
arr = [3, 34, 4, 12, 5, 2]
sum = 9
sol = Solution()
print(sol.subsetSum(arr, sum, len(arr)))
|
19,680 | 4259082b10d5b86bbe8c41ab4228571a4ce38874 | class MenuOption():
"""
Option for a menu.
.. attribute:: text
The text to display to the user.
.. attribute:: function
The function to execute.
"""
    def __init__(self, text, function):
        """
        Store the option's display text and its action.

        :param text: Text to display in the menu.
        :type text: str.
        :param function: Zero-argument callable executed on selection.
        :type function: func()
        """
        self.text = text
        self.function = function
class Menu():
    """Simple interactive text menu.

    .. attribute:: options
        A list of :class:`ui.MenuOption`-like objects (``.text`` and
        ``.function`` attributes).
    """

    def __init__(self, options):
        """
        :param options: Options for the menu.
        :type options: [:class:`ui.MenuOption`]
        """
        self.options = options

    def display(self):
        """Loop: render the menu and run the chosen option until Exit."""
        while True:
            self.print()
            choice = self.get_choice()
            if choice == len(self.options):   # the synthetic Exit entry
                break
            self.options[choice].function()

    def get_choice(self):
        """Prompt repeatedly until a number in [0, len(options)] is entered."""
        number = -1
        while not (0 <= number <= len(self.options)):
            number = int(input('Enter your menu choice: '))
        return number

    def print(self):
        """Render the numbered option list plus a trailing Exit entry."""
        for position, option in enumerate(self.options):
            print(position, '-', option.text)
        print(len(self.options), '- Exit')
|
19,681 | d48a8a52bf4ecb6efcb84bfe4b9eb288a15497f4 | from common.element_info_utils import ElementUtils
from common.basepage import BasePage
from common.browser import Browser
from common.login_base import LoginBase
from common.read_config_utils import Config
class MainPage(BasePage):
def __init__(self,driver):
super().__init__(driver)
element = ElementUtils('main_suite','main').get_elements_info()
self.bug_menu_click = element['bug_click']
self.user_info = element['user_info']
self.quit_login = element['quit_login']
def bug_menu(self):
self.click(self.bug_menu_click)
def user_info_click(self):
self.click(self.user_info)
def quit_login_click(self):
self.click(self.quit_login)
if __name__=='__main__':
driver = Browser().get_driver()
driver.get(Config.get_config_url)
main = LoginBase(driver).default_login()
m = MainPage(driver)
m.timeout(2)
m.user_info_click()
m.quit_login_click() |
19,682 | 279e87fa374070320c6a8f11e43b905745a7a96b | # -*- coding: utf-8 -*-
"""This module provides functions for transformations between px4/airframe and
gazebo/baselink coordinate systems.
Transformations are python adapations of the c++ sourcecode of the mavros
package found at https://github.com/mavlink/mavros
Example:
Either::
orientation_px4 = orientation_gazebo_to_px4(orientation_gazebo)
or::
orientation_gazebo = orientation_px4_to_gazebo(orientation_px4)
"""
import math
import tf.transformations
import pyquaternion
import numpy
__author__ = "Thies Lennart Alff"
NED_ENU_Q = tf.transformations.quaternion_from_euler(math.pi, 0.0,
math.pi / 2.0)
# numpy.roll is needed since tf's order is xyzw and pyquaternion's order is wxyz
NED_ENU_Q = pyquaternion.Quaternion(numpy.roll(NED_ENU_Q, 1))
AIRCRAFT_BASELINK_Q = tf.transformations.quaternion_from_euler(math.pi, 0, 0)
# numpy.roll is needed since tf's order is xyzw and pyquaternion's order is wxyz
AIRCRAFT_BASELINK_Q = pyquaternion.Quaternion(numpy.roll(
AIRCRAFT_BASELINK_Q, 1))
def quaternion_from_rpy(roll, pitch, yaw):
"""Get quaternion from roll, pitch, yaw.
Args:
roll (float): roll angle in radians
pitch (float): ptich angle in radians
yaw (float): yaw angle in radians
Returns:
[pyquaternion.Quaternion]: Quaternion object
"""
quaternion = tf.transformations.quaternion_from_euler(roll, pitch, yaw)
return pyquaternion.Quaternion(numpy.roll(quaternion, 1))
def rpy_from_quaternion(quaternion):
"""Get the roll-pitch-yaw representation of a twist given by quaternions.
Args:
quaternion (pyquaternion.Quaternion): Twist
Returns:
[tuple]: Tuple of roll, pitch and yaw angles.
"""
(yaw, pitch, roll) = quaternion.yaw_pitch_roll
return (roll, pitch, yaw)
def _transform_orientation(transform, orientation_q):
if transform == "body_frames":
return orientation_q * AIRCRAFT_BASELINK_Q
elif transform == "global_frames":
return NED_ENU_Q * orientation_q
else:
return None
def tf_baselink_to_aircraft(orientation_q):
"""Transforms from baselink to aircraft frame.
Args:
orientation_q (pyquaternion.Quaternion): Orientation in baselink frame.
Returns:
pyquaternion.Quaternion: Orientation in aircraft frame.
"""
return _transform_orientation("body_frames", orientation_q)
def tf_aircraft_to_baselink(orientation_q):
"""Transforms from aircraft to baselink frame.
Args:
orientation_q (pyquaternion.Quaternion): Orientation in aircraft frame.
Returns:
pyquaternion.Quaternion: Orientation in baselink frame.
"""
return _transform_orientation("body_frames", orientation_q)
def tf_ned_to_enu(orientation_q):
"""Transforms from NED to ENU as reference frame for the orientation.
Args:
orientation_q (pyquaternion.Quaternion): Orientation with NED as
reference frame.
Returns:
pyquaternion.Quaternion: Orientation with ENU as reference frame.
"""
return _transform_orientation("global_frames", orientation_q)
def tf_enu_to_ned(orientation_q):
"""Transform from ENU to NED as reference frame for the orientation.
Args:
orientation_q (pyquaternion.Quaternion): Orientation with ENU as
reference frame.
Returns:
pyquaternion.Quaternion: Orientation with NED as reference frame.
"""
return _transform_orientation("global_frames", orientation_q)
def orientation_gazebo_to_px4(orientation_q):
""" Transformation from gazebo frame to px4 frame.
Twist of the baselink to the static ENU frame <-> Twist of the airframe to
the static NED frame
Args:
orientation_q (pyquaternion.Quaternion): Orientation of
the baselink in respect to the ENU frame.
Returns:
pyquaternion.Quaternion: Orientation of the airframe in respect to the
NED frame.
"""
return tf_enu_to_ned(tf_baselink_to_aircraft(orientation_q))
def orientation_px4_to_gazebo(orientation_q):
""" Transformatio from px4 frame to gazebo frame.
Twist of the airframe to the static NED frame <-> Twist of the baselink to
the static ENU frame.
Args:
orientation_q (pyquaternion.Quaternion): Orientation of the
airframe in respect to the NED frame.
Returns:
pyquaternion.Quaternion: Orientation of the baselink in respect to the
ENU frame.
"""
return tf_aircraft_to_baselink(tf_ned_to_enu(orientation_q))
def position_gazebo_to_px4(position_x, position_y, position_z):
    """Transform a position from gazebo's ENU frame to px4's NED frame.

    Args:
        position_x (float): x-coordinate in ENU frame.
        position_y (float): y-coordinate in ENU frame.
        position_z (float): z-coordinate in ENU frame.

    Returns:
        tuple: (north, east, down) — x/y swapped, z negated.
    """
    north, east, down = position_y, position_x, -position_z
    return (north, east, down)
def position_px4_to_gazebo(position_x, position_y, position_z):
    """Transform a position from px4's NED frame to gazebo's ENU frame.

    Args:
        position_x (float): x-coordinate in NED frame.
        position_y (float): y-coordinate in NED frame.
        position_z (float): z-coordinate in NED frame.

    Returns:
        tuple: (east, north, up) — x/y swapped, z negated (the NED<->ENU
        swap is its own inverse).
    """
    east, north, up = position_y, position_x, -position_z
    return (east, north, up)
if __name__ == "__main__":
pass
|
19,683 | 921a91a1b92286b0381866d37b718a1a523ce571 | # Tencent is pleased to support the open source community by making Angel available.
#
# Copyright (C) 2017-2018 THL A29 Limited, a Tencent company. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
#
# https://opensource.org/licenses/Apache-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under
# the License.
#
import torch
# Pools raw embedding rows into one fixed-width feature row per sample for a
# TorchScript model.  "multi-hot": embeddings sharing a (batch, field) cell
# are averaged; "one-hot": one embedding per field, simply flattened;
# anything else: input passed through unchanged.
# (The "# type:" comment below is TorchScript's signature annotation and
# must stay immediately after the def line.)
@torch.jit.script
def parse_feat(x, batch_ids, field_ids, field_num, encode):
    # type: (Tensor, Tensor, Tensor, int, str) -> Tensor
    k = x.size(1)# embedding_dim
    if encode == "multi-hot":
        # NOTE(review): torch._unique is a private API; torch.unique(batch_ids).size(0)
        # should be equivalent here -- verify under TorchScript before changing.
        b = torch._unique(batch_ids, sorted=False)[0].size(0)# batchsize
        f = field_num
        # (batch, field) coordinates for every input embedding row.
        t_index = [batch_ids.view(-1).to(torch.long), field_ids.view(-1).to(torch.long)]
        e_transpose = x.view(-1, k).transpose(0, 1)
        count = torch.ones(x.size(0))
        hs = []
        for i in range(k):
            h = torch.zeros(b, f)
            c = torch.zeros(b, f)
            # accumulate=True: scatter-add component i into its (batch, field) cell.
            h.index_put_(t_index, e_transpose[i], True)
            c.index_put_(t_index, count, True) # sum
            h = h / c.clamp(min=1) # avg (clamp avoids 0/0 for empty cells)
            hs.append(h.view(-1, 1))
        emb_cat = torch.cat(hs, dim=1)
        output = emb_cat.view(b, -1)
    elif encode == "one-hot":
        output = x.view(-1, field_num * k)
    else:
        output = x
    return output
|
19,684 | 207afa23403c27ce2505a8bd3355669e29f50ad8 | # Databricks notebook source
# MAGIC %md ## 301 - Ingesting CIFAR Images into Spark DataFrames and Evaluating Pre-Trained CNTK Models
# COMMAND ----------
from mmlspark.cntk import CNTKModel
from mmlspark.downloader import ModelDownloader
from pyspark.sql.functions import udf
from pyspark.sql.types import IntegerType
from os.path import abspath
# COMMAND ----------
# MAGIC %md Set some paths.
# COMMAND ----------
cdnURL = "https://mmlspark.azureedge.net/datasets"
# Please note that this is a copy of the CIFAR10 dataset originally found here:
# http://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz
imagesWithLabels = spark.read.parquet("wasbs://publicwasb@mmlspark.blob.core.windows.net/CIFAR10_test.parquet")
# COMMAND ----------
modelName = "ConvNet"
modelDir = "dbfs:///models/"
# COMMAND ----------
# MAGIC %md Get the model
# COMMAND ----------
d = ModelDownloader(spark, modelDir)
model = d.downloadByName(modelName)
# COMMAND ----------
# MAGIC %md Evaluate CNTK model.
# COMMAND ----------
import time
start = time.time()
# Use CNTK model to get log probabilities
cntkModel = CNTKModel().setInputCol("images").setOutputCol("output") \
.setModelLocation(model.uri).setOutputNode("z")
scoredImages = cntkModel.transform(imagesWithLabels)
# Transform the log probabilities to predictions
def argmax(x):
    """Return the index of the largest element of *x* (first index on ties)."""
    best_index, _ = max(enumerate(x), key=lambda pair: pair[1])
    return best_index
argmaxUDF = udf(argmax, IntegerType())
imagePredictions = scoredImages.withColumn("predictions", argmaxUDF("output")) \
.select("predictions", "labels")
numRows = imagePredictions.count()
end = time.time()
print("classifying {} images took {} seconds".format(numRows,end-start))
# COMMAND ----------
# MAGIC %md Plot confusion matrix.
# COMMAND ----------
imagePredictions = imagePredictions.toPandas()
y, y_hat = imagePredictions["labels"], imagePredictions["predictions"]
# COMMAND ----------
import matplotlib.pyplot as plt
import numpy as np
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y, y_hat)
labels = ["airplane", "automobile", "bird", "cat", "deer", "dog", "frog",
"horse", "ship", "truck"]
plt.imshow(cm, interpolation="nearest", cmap=plt.cm.Blues)
plt.colorbar()
tick_marks = np.arange(len(labels))
plt.xticks(tick_marks, labels, rotation=90)
plt.yticks(tick_marks, labels)
plt.xlabel("Predicted label")
plt.ylabel("True Label")
display(plt.show()) |
19,685 | be5a9635c5667c04dde603e8d67c87b82f7b5394 | # ์ฐ์ ์์ ํ
# '์ฐ์ ์์๊ฐ ๊ฐ์ฅ ๋์ ๋ฐ์ดํฐ๋ฅผ ๊ฐ์ฅ ๋จผ์ ์ญ์ 'ํ๋ ์๋ฃ๊ตฌ์กฐ๋ค
# EX) ์ฌ๋ฌ ๊ฐ์ ๋ฌผ๊ฑด ๋ฐ์ดํฐ๋ฅผ ์๋ฃ๊ตฌ์กฐ์ ๋ฃ์๋ค๊ฐ ๊ฐ์น๊ฐ ๋์ ๋ฌผ๊ฑด ๋ฐ์ดํฐ๋ถํฐ ๊บผ๋ด์ ํ์ธํ๋ ๊ฒฝ์ฐ์ ์ฐ์ ์์ ํ
# ์๋ฃ๊ตฌ์กฐ -์ถ์ถ๋๋ ๋ฐ์ดํฐ-
# Stack -> ๊ฐ์ฅ ๋์ค์ ์ฝ์
๋ ๋ฐ์ดํฐ
# Queue -> ๊ฐ์ฅ ๋จผ์ ์ฝ์
๋ ๋ฐ์ดํฐ
# Priority Queue -> ๊ฐ์ฅ ์ด์ ์์๊ฐ ๋์ ๋ฐ์ดํฐ
"""
Heap(ํ)
---> ์ฐ์ ์์ ํ๋ฅผ ๊ตฌํํ๊ธฐ ์ํด ์ฌ์ฉํ๋ ์๋ฃ๊ตฌ์กฐ ์ค ํ๋
Min Heap( ๊ฐ์ด ๋ฎ์ ๋ฐ์ดํฐ ๋ถํฐ ๋จผ์ ) / Max Heap ( ๊ฐ์ด ๋์ ๋ฐ์ดํฐ ๋จผ์ ) ---> ์ถ์ถ
-> ๋ค ์ต์คํธ๋ผ ์ต๋จ ๊ฒฝ๋ก ์๊ณ ๋ฆฌ์ฆ์ ํฌํจํด ๋ค์ํ ์๊ณ ๋ฆฌ์ฆ์์ ์ฌ์ฉ
์ฐ์ ์์ ํ ๊ตฌํ ๋ฐฉ์ ์ฝ์
์๊ฐ ์ญ์ ์๊ฐ
๋ฆฌ์คํธ O(1) O(N)
ํ(Heap) O(logN) O(lonN)
"""
import heapq # minheap์ผ๋ก , ์ฐ์ ์์ ๋ฎ์ ๊ฒ ๋ถํฐ ์ฐจ๋ก๋๋ก --> ์ค๋ฆ ์ฐจ์!!
# heappush ( ํน์ ๋ฆฌ์คํธ์, ๋ฐ์ดํฐ ๋ฃ๊ธฐ)
# heappop (๋ฆฌ์คํธ ) ---> ๋ฐ์ดํฐ๋ฅผ ๊บผ๋ด์
def heapsort(iterable):
    """Return a new list with the elements of *iterable* in ascending order.

    Uses a binary min-heap: every push and pop is O(log N), so the whole
    sort is O(N log N).
    """
    h = []
    # Push every element onto the heap one at a time.
    for value in iterable:
        heapq.heappush(h, value)
    # Pop them back off; a min-heap yields the values in ascending order.
    # (The broken multi-line comment in the original left a bare non-comment
    # line here, which was a syntax error.)
    result = []
    for i in range(len(h)):
        result.append(heapq.heappop(h))
    return result
# Demo: the elements come back in ascending order (min-heap behaviour).
result = heapsort([1, 3, 5, 7, 9, 2, 4, 6, 8, 0])
print(result)
# For a max-heap: push -value and negate again after heappop.
19,686 | 8b0ed3db7a274ab9dad79c35562d2d090ed6c651 | import flask, os, sqlite3
from flask import render_template, request
from flask import send_from_directory
from werkzeug.utils import secure_filename
# Create the SQLite database on first run; it stores one row per uploaded
# photo (just the sanitised filename).
if not os.path.isfile('db.sqlite3'):
    db = sqlite3.connect('db.sqlite3')
    db.execute('CREATE TABLE photos(photo TEXT)')
    db.commit()
    db.close()
# The upload handler saves into 'uploads/'; make sure the directory exists
# up front instead of failing on the first upload.
os.makedirs('uploads', exist_ok=True)
app = flask.Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def home():
    """Render the upload form; on POST, store the uploaded photo.

    The file is written to the ``uploads`` directory and its sanitised
    filename is recorded in the ``photos`` table.
    """
    if request.method == 'POST' and \
            request.files and 'photo' in request.files:
        photo = request.files['photo']
        # secure_filename() strips path separators and other unsafe
        # characters; it can return '' for a wholly unsafe name, in which
        # case there is nothing meaningful to save.
        filename = secure_filename(photo.filename)
        if filename:
            path = os.path.join('uploads', filename)
            photo.save(path)
            # Parameterised INSERT avoids SQL injection; the connection is
            # closed even if the statement raises (the original leaked it).
            db = sqlite3.connect('db.sqlite3')
            try:
                db.execute('INSERT INTO photos(photo) VALUES (?)', (filename,))
                db.commit()
            finally:
                db.close()
    return render_template('form_with_file_upload.html')
@app.route('/view')
def view():
    """Render the gallery page listing every stored photo filename."""
    db = sqlite3.connect('db.sqlite3')
    rows = db.execute('SELECT photo FROM photos')
    photos = [filename for (filename,) in rows]
    db.close()
    return render_template('view_file_uploads.html', photos=photos)
@app.route('/photos/<filename>')
def get_file(filename):
    # Serve an uploaded photo; send_from_directory refuses paths that
    # escape the 'uploads' directory (path-traversal guard).
    return send_from_directory('uploads', filename)
# Start the Flask development server when executed directly.
if __name__ == '__main__':
    app.run()
19,687 | f9187aaf57c2b08fe1aac08268e21917d1c28048 | from django.shortcuts import render
from django.views.generic import TemplateView
from .models import SkyPost
class SkyIndexView(TemplateView):
    """Front page: renders the index template with every SkyPost."""
    template_name = 'sky_news/index.html'

    def get_context_data(self, **kwargs):
        # Forward **kwargs so context keys supplied by the parent/mixins
        # are preserved (the original silently dropped them).
        context = super(SkyIndexView, self).get_context_data(**kwargs)
        context['posts'] = SkyPost.objects.all()
        return context
class SkyPostView(TemplateView):
    """Detail page for one SkyPost, looked up by primary key."""
    template_name = 'sky_news/post.html'

    def get_context_data(self, post_pk, **kwargs):
        # Forward **kwargs to keep parent-supplied context keys.
        # NOTE(review): SkyPost.objects.get raises DoesNotExist (HTTP 500)
        # for an unknown pk -- consider get_object_or_404 upstream.
        context = super(SkyPostView, self).get_context_data(**kwargs)
        context['post'] = SkyPost.objects.get(pk=post_pk)
        return context
|
19,688 | 722c1d97d8f9d27e6feb329e1f53a580f6decaf6 | from fastapi.testclient import TestClient
from app.app import app
client = TestClient(app, raise_server_exceptions=False)
def test_legit_date():
    # A plain YYYYMMDD path parameter should be handled successfully.
    response = client.get("/orders/expired/20210303")
    assert response.status_code == 200
def test_sql_injection():
    # NOTE(review): asserting a 500 pins the CURRENT behaviour, which
    # suggests the raw path parameter reaches the SQL layer unescaped;
    # a hardened endpoint would return 4xx here -- confirm intent.
    response = client.get("/orders/expired/20210303 or (1=1)")
    assert response.status_code == 500
19,689 | 0e25baae6a79b059ffe4880f39b34b2d222c8eb0 | #!/usr/bin/python
# -*- coding: utf8 -*-
# cp936
#===============================================================================
# Author:  fasiondog
# History: 1) 20150314, Added by fasiondog
#===============================================================================
from ._trade_instance import *
|
19,690 | 063a7053c743f4ff2841d71b7d43e388b5e4f6e1 | import numpy as np
import cv2
import math
from matplotlib import pyplot as plt
orgimg= cv2.imread('pic.jpg',0)
img=cv2.copyMakeBorder(orgimg,1,1,1,1,cv2.BORDER_REPLICATE)
#### Step 1: Create an array H of length G initialzed with 0
H=np.zeros((1,256))
Hc=np.zeros((1,256))
F=np.zeros((1,256))
T=np.zeros((1,256))
#### Step 2: Form the image histogram
for x in range(0,256):
for y in range(0,256):
gp= img[x][y]
H[0][gp] = H[0][gp] + 1
cv2.imshow('Original Image',img)
#Htp=np.transpose(H)
plt.figure('H: Image Histogram')
plt.plot(np.transpose(H))
plt.show()
#### Step 3: Form the cummulative image histogram
Hc[0][0]= H[0][0]
for z in range(1,256):
Hc[0][z]=Hc[0][z-1] + H[0][z]
#Hctp=np.transpose(Hc)
plt.figure('Hc: Cummulative Image Histogram')
plt.plot(np.transpose(Hc))
plt.show()
#### Step 4: Set Transformation function
for a in range(1,256):
T[0][a]= round((255 * Hc[0][a])/(256*256))
#Ttp=np.transpose(T)
plt.figure('T: Transformation Function')
plt.plot(np.transpose(T))
plt.show()
####6 Step 5: Rescan image and write output image with gray levels
for i in range(1,256):
for j in range(1,256):
x= img[i][j]
img[i][j] = T[0][x]
cv2.imshow('Enhanced Image', img)
for i in range(1,256):
for j in range(1,256):
b= T[0][j]
F[0][b] = F[0][b] + 1
#Ftp=np.transpose(F)
plt.figure('Histogram of enhanced Image')
plt.plot(np.transpose(F))
plt.show() |
19,691 | ce713fd7a32b48a0c66455b002739a737a949b25 | from rest_framework import serializers
from measurements.models import Measurement
from users.models import User
class UserSerializer(serializers.HyperlinkedModelSerializer):
    """User resource exposing its URL, email and linked measurements."""

    # Hyperlinks to each of the user's measurement detail pages.
    measurements = serializers.HyperlinkedRelatedField(
        queryset=Measurement.objects.all(),
        view_name='measurement-detail',
        many=True)

    class Meta:
        model = User
        fields = ('url', 'email', 'measurements')
class MeasurementSerializer(serializers.HyperlinkedModelSerializer):
    """A single height/weight measurement, owned by the requesting user."""

    # The owner is taken from the request; clients cannot set another user.
    user = serializers.CharField(
        read_only=True,
        default=serializers.CurrentUserDefault())

    class Meta:
        model = Measurement
        fields = ('id', 'user', 'date', 'height', 'weight')
        # Enforce at most one measurement per user per calendar date.
        validators = [
            serializers.UniqueTogetherValidator(
                queryset=Measurement.objects.all(),
                fields=('user', 'date'),
                message=("Users may only log a single measurement "
                         "against any given date.")
            )
        ]
|
19,692 | 84bacba5e7ab6a095dc408fe77fa50ab2376c56c |
import re, os
class Address:
    """One /etc/hosts entry: an IPv4 address plus its hostnames.

    Attributes:
        address: the dotted-quad address string.
        hostnames: the hostname(s) mapped to it (string or list).
    """

    # Lazy pattern: every valid dotted quad matches, but some invalid
    # addresses (e.g. octets > 255) match too. Raw string avoids the
    # invalid-escape-sequence warning the original emitted.
    format = re.compile(r'(\d{1,3}\.){3}\d{1,3}')

    def __init__(self, a, h):
        """Validate *a* against the address pattern and store both fields.

        Raises:
            ValueError: when *a* does not look like an IPv4 dotted quad.
                (The original raised a bare string, which is illegal in
                any Python newer than 2.5.)
        """
        if not self.format.match(a):
            raise ValueError("Address format error : %s" % a)
        self.address = a
        self.hostnames = h

    def __repr__(self, col1width=3):
        """Render a common /etc/hosts line.

        The hostname column starts at the 24th character; the columns are
        separated by as many tabs as needed.
        """
        # Integer division so the tab repeat count is an int on Python 3
        # as well (the original '/' produced a float and raised there).
        sep = "\t" * ((col1width * 8 - 1 - len(self.address)) // 8 + 1)
        return repr("%s%s%s\n" % (self.address, sep, self.hostnames))
class Hosts(dict):
    '''This class features two dictionaries, addresses and hostnames,
    pointing to Address instances. The dict accessors are overridden
    for the object to appear as a dictionary which can be accessed
    both ways, depending on whether the parameter matches the address
    format.

    NOTE(review): this is Python 2 code (print statements, basestring,
    file()) and several spots look unfinished -- see the inline notes.
    '''
    #
    #
    # Private alias of the Address regexp, used to tell "address" keys
    # apart from "hostname" keys in the dict-style accessors.
    __address=Address.format
    # Matches an /etc/hosts line: either "address whitespace hostnames
    # [comment]" (named groups) or any other line (second alternative).
    line=re.compile(
        '''(^
        (?P<info>
        (?P<address> (\d{1,3}\.){3}\d{1,3})
        (?P<sep> \s+)
        (?P<hostnames> .+) )
        (?P<optcmt> \s+\#.*)?
        $)|(^
        .*
        $)''', re.VERBOSE)
    def __iter__(self):
        # NOTE(review): returning self plus a generator-style next() below
        # does not implement the iterator protocol correctly -- next()
        # returns a fresh generator on every call. Verify intent.
        return self
    def next(self):
        for a in self.addresses:
            yield a
    def __init__(self, defaults=None, **kwargs):
        # It would be nice to build the dict straight from a dict.
        self.addresses={}
        self.hostnames={}
        # NOTE(review): 'filename' is not a parameter of __init__ (the
        # parameter is named 'defaults'); as written this raises NameError.
        self.filename=filename
        if filename and type(filename)==str:
            self.read(filename)
        elif filename and type(filename)==dict:
            for k,v in defaults.items(): self[k]=v
        if kwargs:
            for k,v in kwargs.items(): self[k]=v
    def __repr__(self):
        '''Represents itself as a common /etc/hosts file'''
        # defaults ordering anyone? localhost at the top..
        # especially useful for the ifcfg files device at the top and ipaddr right then
        # Entries renamed to the '~del~' sentinel are filtered out.
        lines = map( repr, self.addresses.values())
        return "".join( [ l for l in lines if l.find('~del~')==-1 ] )
    def __getitem__(self,item):
        '''If the item is an address, returns the hostnames, else return
        the address.'''
        if self.__address.match(item):
            return self.addresses[item].hostnames
        else:
            return self.hostnames[item].address
    def __delitem__(self,item):
        '''Removes a whole line 'address - hostnames'. Can be called
        by address or hostname'''
        if self.__address.match(item):
            a, h = item, self.addresses[item].hostnames
        else:
            a, h = self.hostnames[item].address, self.hostnames[item].hostnames
        # The references in both indexes are del. The Address
        # instance is deleted by the python garbage collector
        del self.addresses[a]
        for i in h.split(): del self.hostnames[i]
    def __setitem__(self, item, value):
        '''Various case : the address or host already exists. The host can be
        a list of strings. Say 10 mc'''
        if self.__address.match(item): a, h = item, value
        else: h, a = item, value
        if not a in self.addresses and ( not h in self.hostnames or h=='~del~' ):
            # New address and new hostname
            # Create an instance, and 2 references
            new=Address(a,h)
            self.addresses[a] = new
            self.hostnames[h] = new
        elif h in self.hostnames and not a in self.addresses:
            # Modifying the address of an existing hostname:
            # deletion of the old ref in self.addresses
            # new entry in self.addresses
            # modification of the address attribute in the instance
            del self.addresses[self[h]]
            self.addresses[a] = self.hostnames[h]
            self.hostnames[h].address = a
        elif ( h=='~del~' or not h in self.hostnames ) and a in self.addresses:
            # Renaming an address
            # deletion of the old entries in hostnames
            # new entry in self.hostnames
            # reset of the hostnames attribute in the instance
            print self[a],h
            for i in self[a].split(' '): del self.hostnames[i]
            self.hostnames[h] = self.addresses[a]
            self.addresses[a].hostnames = h
        elif h in self.hostnames and a in self.addresses and self[h]!=a:
            # Do we want to keep old references and alias: no
            del self[a]
            new=Address(a,h)
            self.addresses[a] = new
            self.hostnames[h] = new
    def reprline(self,item):
        '''Return the repr of the line holding *item* (address or hostname).'''
        if self.__address.match(item):
            return repr(self.addresses[item])
        else:
            return repr(self.hostnames[item])
    def append(self,item,value):
        '''Attach an extra hostname to an existing address entry.'''
        if self.__address.match(item): a, h = item, value
        else: h, a = item, value
        self.hostnames[h]=self.addresses[a]
        # NOTE(review): Address.hostnames is usually a string here, in
        # which case .append() raises AttributeError -- confirm the type.
        self.hostnames[h].hostnames.append(h)
    def read(self,filenames):
        """Read and parse a filename or a list of filenames.
        Files that cannot be opened are silently ignored; this is
        designed so that you can specify a list of potential
        configuration file locations (e.g. current directory, user's
        home directory, systemwide directory), and all existing
        configuration files in the list will be read. A single
        filename may also be given.
        Return list of successfully read files.
        """
        if isinstance(filenames, basestring):
            filenames = [filenames]
        read_ok = []
        for filename in filenames:
            try:
                fp = open(filename)
            except IOError:
                continue
            self._read(fp)
            fp.close()
            read_ok.append(filename)
        return read_ok
    def _read(self, fp):
        '''The file parameter can be a filename or a file
        descriptor. The file should conform to the common hosts format'''
        for l in fp:
            a, h = self.line.match( l).group('address', 'hostnames')
            if a:
                # NOTE(review): iterates over i but inserts h (the whole
                # hostname string) every time -- probably meant self[i]=a.
                for i in h.split(): self[h]=a
    def write(self,filename=None):
        # Rewrite *filename* in place, preserving unrelated lines: edit via
        # a temporary 'filename~' file then atomically rename over the
        # original. Entries marked '~del~' are dropped.
        filename = filename or self.filename
        c=Hosts() # Because we pop the written lines, we pop on a copy
        c.addresses, c.hostnames = self.addresses.copy(), self.hostnames.copy()
        f=file(filename+'~','w')
        if os.path.isfile(filename):
            for l in file(filename):
                a, h = c.line.match(l).group('address', 'hostnames')
                if a:
                    for i in h.split()+[a]:
                        try: print c[i].find('~del~')
                        except: pass
                        if i in c.hostnames.keys() + c.addresses.keys() and c[i].find('~del~')!=-1:
                            f.write( self.line.sub( c.reprline(i)[:-1], l ))
                            del c[i]
                            break
                    else:
                        f.write(l)
                else:
                    f.write(l)
        #        else:
        #            if a in c.addresses:
        #                if c[a][0]!='~del~':
        #                    f.write( self.__address.sub( repr(c.addresses[a])[:-1], l ))
        #                del c[a]
        #            else:
        #                f.write(l)
        #         if a and h:
        #             i=[ i for i in h.split(), a if i in c and c[i]!='~del~'].pop():
        #             if i:
        #                 f.write( self.line.sub( repr(c.hostnames[i])[:-1], l ))
        #                 del c[i]
        #             else: f.write(l)
        #         else: f.write(l)
        # Any entries not matched against existing lines are appended.
        f.write(repr(c))
        f.close()
        os.rename(filename+'~',filename)
if __name__=="__main__":
# The idea is to get the iterator to work and make write uses dict
from os import getenv
# Parse /etc/hosts and its different constructors (input file, kwargs and dict)
h=Hosts()
h['1.0.0.1']='mc'
print h['1.0.0.1']
print h['mc']
del h['mc']
print h['mc']
h['mc']='1.0.0.1'
print h['1.0.0.1']
print h['mc']
del h['1.0.0.1']
print h['mc']
h=Hosts('hosts.txt')
h=Hosts(mc='1.0.0.1','1.0.0.2'='mc02')
h=Hosts({'mc':'1.0.0.1','mc02':'1.0.0.2'})
h=Hosts('/etc/hosts', {'mc':'1.0.0.1', '1.0.0.2':'mc02'})
h=Hosts('/etc/hosts', mc03='1.0.0.3', '1.0.0.4'='mc04')
# Add a name to an address, change the address of a hostname
h.append('127.0.0.1', 'localhost.yemen')
h.append('broadcast', '255.255.255.255')
h['127.0.0.1']='~del~'
h['192.0.0.1']='~del~'
h.read()
repr(h)
h.write(myhost)
print file(myhost).read()
# Scenarios d'update de fichier hosts:
# 1.
h=Hosts()
h['192.168.0.1']='mc01'
h['mc02']='192.168.0.2'
h['255.255.255.255']='~del~'
h['mc02']='~del~'
h['localhost']='10.0.0.1'
h.append('localhost.yemen','10.0.0.1')
h.write('hosts.txt')
reset_testfile()
# 2.
Hosts('hosts.txt',
localhost='1.0.0.1',
'1.0.0.2'='mc02'
).write()
|
19,693 | 284a8445daf16f0848eea0a93ebed494b94f6fec | from argparse import ArgumentParser
def get(argv=None):
    """Build the CLI parser for the ping monitor and parse *argv*.

    Args:
        argv: optional list of argument strings; ``None`` (the default)
            means parse ``sys.argv[1:]``, so ``get()`` behaves exactly as
            before. Accepting an explicit list makes the parser reusable
            and unit-testable.

    Returns:
        argparse.Namespace with duration, command, result_pattern, hosts,
        sleep_time, max_acceptable_latency and no_chart attributes.
    """
    parser = ArgumentParser(description='๐ Py Ping')
    parser.add_argument(
        '--duration',
        dest='duration',
        default=60 * 60 * 24,
        type=int,
        help='Execution duration in seconds. Default 24h.'
    )
    parser.add_argument(
        '--cmd',
        dest='command',
        default='ping -t 1 <HOST>',
        help='Ping command. Instead of specifying the IP set it as <HOST>.'
    )
    parser.add_argument(
        '--pattern',
        dest='result_pattern',
        default=r'.*time=(\d+\.?\d*)\sms',
        help='RegExp pattern to capture latency.'
    )
    parser.add_argument(
        '--hosts',
        dest='hosts',
        nargs='+',
        default=['8.8.8.8', '8.8.4.4'],
        help='Hosts.'
    )
    parser.add_argument(
        '--sleep',
        dest='sleep_time',
        default=1,
        type=int,
        help='Sleep time between requests in seconds.'
    )
    parser.add_argument(
        '--max-ping',
        dest='max_acceptable_latency',
        default=70,
        type=int,
        help='Max acceptable latency in ms.'
    )
    parser.add_argument(
        '--no-chart',
        dest='no_chart',
        nargs='?',
        type=bool,
        const=True,
        default=False,
        help='Disable chart.'
    )
    return parser.parse_args(argv)
|
19,694 | 26fc891cee6536e6cf16def588f4c379ca720a36 | import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import xarray as xr
import metpy.calc as mpcalc
from glob import glob
import numpy as np
import pandas as pd
from utils import get_run, get_dset, get_colormap, subset_arrays, get_coordinates, \
get_projection_cartopy, plot_maxmin_points, annotation_forecast,\
annotation, annotation_run, options_savefig, figsize_x, figsize_y
from matplotlib.colors import BoundaryNorm
from multiprocessing import Pool, cpu_count
from functools import partial
import os
def plot_var(f_step, projection):
    """Plot GPH@500hPa and T@850hPa for one forecast step; save a PNG.

    Returns the path of the written image.
    """
    # NOTE!
    # If we are inside this function it means that the picture does not exist
    # The one employed for the figure name when exported
    variable_name = 'gph_t_850'
    # Build the name of the output image
    run_string, _ = get_run()
    filename = '/tmp/' + projection + '_' + \
        variable_name + '_%s_%03d.png' % (run_string, f_step)
    """In the main function we basically read the files and prepare the variables to be plotted.
    This is not included in utils.py as it can change from case to case."""
    dset = get_dset(vars_3d=['t@850', 'fi@500'], f_times=f_step).squeeze()
    dset = subset_arrays(dset, projection)
    time = pd.to_datetime(dset.valid_time.values)
    cum_hour = dset.step.values.astype(int)
    # Kelvin -> Celsius
    temp_850 = dset['t'] - 273.15
    z_500 = dset['z']
    # Convert geopotential to geopotential height, keeping xarray coords.
    gph_500 = mpcalc.geopotential_to_height(z_500)
    gph_500 = xr.DataArray(gph_500.magnitude, coords=z_500.coords,
                           attrs={'standard_name': 'geopotential height',
                                  'units': gph_500.units})
    levels_temp = np.arange(-30., 30., 1.)
    levels_gph = np.arange(4700., 6000., 70.)
    cmap = get_colormap('temp')
    fig = plt.figure(figsize=(figsize_x, figsize_y))
    ax = plt.gca()
    lon, lat = get_coordinates(temp_850)
    lon2d, lat2d = np.meshgrid(lon, lat)
    ax = get_projection_cartopy(plt, projection, compute_projection=True)
    if projection == 'euratl':
        # pcolormesh is cheaper for the large pan-European grid.
        norm = BoundaryNorm(levels_temp, ncolors=cmap.N)
        cs = ax.pcolormesh(lon2d, lat2d, temp_850, cmap=cmap, norm=norm)
    else:
        cs = ax.contourf(lon2d, lat2d, temp_850, extend='both',
                         cmap=cmap, levels=levels_temp)
    # White geopotential-height contours with H/L extrema markers.
    c = ax.contour(lon2d, lat2d, gph_500, levels=levels_gph,
                   colors='white', linewidths=1.)
    labels = ax.clabel(c, c.levels, inline=True, fmt='%4.0f', fontsize=6)
    maxlabels = plot_maxmin_points(ax, lon, lat, gph_500,
                                   'max', 80, symbol='H', color='royalblue', random=True)
    minlabels = plot_maxmin_points(ax, lon, lat, gph_500,
                                   'min', 80, symbol='L', color='coral', random=True)
    an_fc = annotation_forecast(ax, time)
    an_var = annotation(
        ax, 'Geopotential height @500hPa [m] and temperature @850hPa [C]', loc='lower left', fontsize=6)
    an_run = annotation_run(ax, time)
    plt.colorbar(cs, orientation='horizontal',
                 label='Temperature', pad=0.03, fraction=0.04)
    plt.savefig(filename, **options_savefig)
    plt.clf()
    return filename
def plot_vars(f_step, projection, load_all=False):
    """Plot one forecast step, or every step in parallel when load_all.

    Existing images are reused rather than re-rendered. Returns the image
    path(s) (a list when load_all, a single path otherwise).
    """
    # The one employed for the figure name when exported
    variable_name = 'gph_t_850'
    # Build the name of the output image
    run_string, _ = get_run()
    if load_all:
        f_steps = list(range(0, 79)) + list(range(81, 121, 3))
    else:
        f_steps = [f_step]
    filenames = ['/tmp/' + projection + '_' + variable_name +
                 '_%s_%03d.png' % (run_string, f_step) for f_step in f_steps]
    test_filenames = [os.path.exists(f) for f in filenames]
    if all(test_filenames):  # means the files already exist
        return filenames
    # otherwise do the plots
    dset = get_dset(vars_3d=['t@850', 'fi@500'], f_times=f_steps).squeeze()
    # Add a fictictious 1-D time dimension just to avoid problems
    if 'step' not in dset.dims.keys():
        dset = dset.expand_dims('step')
    #
    dset = subset_arrays(dset, projection)
    time = pd.to_datetime(dset.valid_time.values)
    cum_hour = dset.step.values.astype(int)
    # Kelvin -> Celsius
    temp_850 = dset['t'] - 273.15
    z_500 = dset['z']
    gph_500 = mpcalc.geopotential_to_height(z_500)
    gph_500 = xr.DataArray(gph_500.magnitude, coords=z_500.coords,
                           attrs={'standard_name': 'geopotential height',
                                  'units': gph_500.units})
    levels_temp = np.arange(-30., 30., 1.)
    levels_gph = np.arange(4700., 6000., 70.)
    lon, lat = get_coordinates(temp_850)
    lon2d, lat2d = np.meshgrid(lon, lat)
    cmap = get_colormap('temp')
    # Everything single_plot needs, passed as plain arrays (.values) so
    # the payload can be pickled to the worker processes.
    args = dict(filenames=filenames, projection=projection, levels_temp=levels_temp,
                cmap=cmap, lon2d=lon2d, lat2d=lat2d, lon=lon, lat=lat, temp_850=temp_850.values,
                gph_500=gph_500.values, levels_gph=levels_gph, time=time, run_string=run_string)
    if load_all:
        # One frame per task, fanned out over all cores.
        single_plot_param = partial(single_plot, **args)
        iterator = range(0, len(f_steps))
        pool = Pool(cpu_count())
        results = pool.map(single_plot_param, iterator)
        pool.close()
        pool.join()
    else:
        results = single_plot(0, **args)
    return results
def single_plot(it, **args):
    """Render frame *it* from the pre-computed arrays in *args*.

    Skips the work when the target PNG already exists; returns its path.
    """
    filename = args['filenames'][it]
    if os.path.exists(filename):
        return filename
    fig = plt.figure(figsize=(figsize_x, figsize_y))
    ax = plt.gca()
    ax = get_projection_cartopy(
        plt, args['projection'], compute_projection=True)
    if args['projection'] == 'euratl':
        # pcolormesh is cheaper for the large pan-European grid.
        norm = BoundaryNorm(args['levels_temp'], ncolors=args['cmap'].N)
        cs = ax.pcolormesh(args['lon2d'], args['lat2d'], args['temp_850'][it],
                           cmap=args['cmap'], norm=norm)
    else:
        cs = ax.contourf(args['lon2d'], args['lat2d'], args['temp_850'][it], extend='both',
                         cmap=args['cmap'], levels=args['levels_temp'])
    c = ax.contour(args['lon2d'], args['lat2d'], args['gph_500'][it],
                   levels=args['levels_gph'], colors='white', linewidths=1.)
    labels = ax.clabel(c, c.levels, inline=True, fmt='%4.0f', fontsize=6)
    maxlabels = plot_maxmin_points(ax, args['lon'], args['lat'], args['gph_500'][it],
                                   'max', 80, symbol='H', color='royalblue', random=True)
    minlabels = plot_maxmin_points(ax, args['lon'], args['lat'], args['gph_500'][it],
                                   'min', 80, symbol='L', color='coral', random=True)
    try:
        an_fc = annotation_forecast(ax, args['time'][it])
    except TypeError:
        # 'time' is a scalar when only a single step was loaded.
        an_fc = annotation_forecast(ax, args['time'])
    an_var = annotation(
        ax, 'Geopotential height @500hPa [m] and temperature @850hPa [C]', loc='lower left', fontsize=6)
    an_run = annotation(
        ax, 'Run: ' + args['run_string'] + ' UTC', loc='upper right')
    plt.colorbar(cs, orientation='horizontal',
                 label='Temperature', pad=0.03, fraction=0.04)
    plt.savefig(filename, **options_savefig)
    plt.clf()
    return(filename)
|
19,695 | 43143be13ef73d021c310cba6dd21ff8118ec110 | #!/usr/bin/env python
"""entities.py
Defines the Portfolio, Investor, and Broker classes
"""
import logging
import random
logging.basicConfig(level=logging.DEBUG,
# filename='entities.log',
# mode='w',
format=' %(asctime)s - %(levelname)s - %(message)s')
# logging.disable(logging.CRITICAL)
# List of all Investors for broker to access
INVESTORS = list()
class Portfolio:
    """Portfolio class for stocks.

    Attributes:
        portfolios: list of (symbol, quantity, unit_price) tuples.
        value: total monetary value of the held stocks.
    """

    def __init__(self):
        """Make a portfolio to keep track of stocks."""
        self.portfolios = list()
        self.value = 0

    def add_stock(self, symbol, quantity, unit_price):
        """Add *quantity* stocks of *symbol* at *unit_price* each."""
        # TODO write SQL statement to grab unit_price
        stock_price_total = quantity * unit_price
        # TODO deduct stock quantity from market ??
        self.portfolios.append((symbol, quantity, unit_price))
        self.value += stock_price_total

    def remove_stock(self, symbol, quantity):
        """Remove *quantity* stocks of *symbol* from the portfolio.

        If fewer than the held quantity are removed, the remainder is
        kept as a new holding at the same unit price. Only the first
        matching holding is touched.
        """
        # Iterate over a snapshot: the original code removed/appended
        # entries while iterating the live list, which can skip holdings
        # or re-process the freshly appended remainder.
        for entry in list(self.portfolios):
            p_symbol, p_quantity, p_unit_price = entry
            if p_symbol != symbol:
                continue
            logging.debug("Found %s, %s, %s" %
                          (p_symbol, p_quantity, p_unit_price))
            # First delete completely.
            self.portfolios.remove(entry)
            # Check if some quantity of stocks should remain.
            if quantity < p_quantity:
                # Keep remainder.
                self.portfolios.append((p_symbol,
                                        p_quantity - quantity,
                                        p_unit_price))
            # Reduce value of portfolio by value of stocks removed.
            self.value -= quantity * p_unit_price
            break

    def __repr__(self):
        """Show details about the portfolio."""
        return "<Portfolio: %s | valued at %s>" % (self.portfolios, self.value)
class Investor:
    """Anyone wishing to invest some cash through a Broker.

    Fields: name, cash, risk_money, credit_line, portfolios,
    investment_seed.
    """

    def __init__(self, name, cash):
        """Make an Investor object for stock exchange."""
        self.name = name
        self.cash = cash
        self.risk_money = 0.5 * self.cash
        self.credit_line = 100000
        self.portfolios = []
        self.investment_seed = 0.5 * self.risk_money
        # Add to global list of INVESTORS so brokers can find us.
        INVESTORS.append(self)

    def add_portfolio(self, portfolio):
        """Add portfolio to investor's financial records."""
        self.portfolios.append(portfolio)

    def get_loan_amount(self):
        """Return the margin loan amount the investor qualifies for."""
        loan_amount = min(self.credit_line, (0.5 * self.risk_money))
        return loan_amount

    def get_stock(self, broker):
        """Buy a randomly chosen stock (on margin) from *broker*."""
        logging.info("RUNNING get_stock() for investor...")
        # Determine loan amount for investor.
        loan = self.get_loan_amount()
        # Give investor loan money & update broker's cash.
        self.cash = self.cash + loan
        broker.cash = broker.cash - loan
        # Pay broker fees.
        fee = broker.fee
        self.cash -= fee
        broker.cash += fee
        # Randomly select a stock from broker.
        logging.info("What's here: %s" % broker.portfolios)
        sym, qty, price = random.choice(broker.portfolios[0].portfolios)
        # Buy as many shares as current cash allows, capped by supply.
        qty_tobuy = int( (self.cash / float(price)) )
        # NOTE(review): when qty_tobuy < qty the investor still receives
        # the full qty below while paying only for qty_tobuy -- confirm.
        if (qty_tobuy >= qty):
            first_purchase = qty * float(price)
            self.cash -= first_purchase
        else:
            self.cash -= qty_tobuy * float(price)
        p = Portfolio()
        p.add_stock(sym, qty, price)
        self.add_portfolio(p)
        # Remove stock from broker's portfolio.
        broker.portfolios[0].remove_stock(sym, qty)

    def __repr__(self):
        """Show details about the Investor."""
        name = "Investor: %s" % self.name
        cash = "Cash: %s" % self.cash
        risk_money = "Risk Money: %s" % self.risk_money
        # Fixed: the original read self.portfolio (no such attribute,
        # AttributeError) and concatenated fields without separators.
        portfolio = "Portfolio: %s" % self.portfolios
        return " | ".join((name, cash, risk_money, portfolio))

    def __str__(self):
        """Show string representation of the Investor."""
        return "<Investor: %s | Portfolio: %s>" % (self.name, self.portfolios)
class Broker:
    """Broker class for facilitator of finances.

    Fields: name, cash, term, stock_money, margin_money, fee, portfolios,
    maintenance_threshold.
    """

    def __init__(self, name, cash=1000000):
        """Make a Broker object."""
        self.name = name
        self.cash = cash
        self.term = 7
        self.stock_money = 0.40 * cash
        self.margin_money = 0.40 * cash
        self.fee = 30
        self.portfolios = list()
        self.maintenance_threshold = 0.25

    def add_portfolio(self, portfolio):
        """Add portfolio to broker's financial records."""
        self.portfolios.append(portfolio)

    def get_stock(self, investor):
        """Take the investor's first holding back onto the broker's books."""
        # Find out the stock details.
        sym, qty, price = investor.portfolios[0].portfolios[0]
        # Fixed: 'if self.portfolios[0]:' raised IndexError whenever the
        # broker had no portfolio yet; test the list itself instead.
        if self.portfolios:
            self.portfolios[0].add_stock(sym, qty, price)
        else:
            # Broker doesn't have a portfolio yet.
            p = Portfolio()
            p.add_stock(sym, qty, price)
            self.add_portfolio(p)
        logging.info("Broker's portfolios AFTER addition: %s" % self)
        logging.info("Investor portfolio BEFORE removal: %s" % investor.portfolios[0].portfolios)
        investor.portfolios[0].remove_stock(sym, qty)
        logging.info("Investor portfolio AFTER removal: %s" % investor.portfolios[0])
        # NOTE(review): remove_stock already reduced the portfolio's value;
        # the explicit subtraction below double-counts it -- confirm.
        total_price = qty * price
        investor.portfolios[0].value -= total_price
        investor.cash += qty * float(price)

    def __repr__(self):
        """Show details about the broker."""
        name = "Broker: %s" % self.name
        cash = "Cash: %s" % self.cash
        # Fixed: the original read self.portfolio (no such attribute).
        portfolio = "Portfolio: %s" % self.portfolios
        return " | ".join((name, cash, portfolio))

    def __str__(self):
        """Show string representation of the Broker."""
        return "<Broker: %s | Portfolio: %s>" % (self.name, self.portfolios)
if __name__ == "__main__":
    # Manual smoke test of the Portfolio/Investor/Broker interactions.
    # NOTE(review): broker1 and investor1 share the SAME Portfolio object
    # (p1), so mutations through one are visible through the other --
    # confirm that is intended.
    logging.info("Creating portfolio1... ")
    p1 = Portfolio()
    p1.add_stock("BMD", 50, 15)
    p1.add_stock("AZA", 10, 15)
    p1.add_stock("ATI", 50, 15)
    p1.add_stock("AYZ", 40, 15)
    logging.info("Creating broker1")
    broker1 = Broker("GordonGekko")
    logging.info("Creating investor1")
    investor1 = Investor("JohnDoe", 200000)
    logging.info("Adding portfolio1 to broker1")
    broker1.add_portfolio(p1)
    logging.info("Adding portfolio1 to investor1")
    investor1.add_portfolio(p1)
    logging.info("Displaying broker1 info AFTER ADDING portfolio")
    logging.info(broker1)
    logging.info("Displaying investor1 info AFTER ADDING portfolio")
    logging.info(investor1)
    logging.info("Removing stock 'BMD' from portfolio")
    p1.remove_stock("BMD", 50)
    logging.info("Displaying broker1 info AFTER REMOVING stock")
    logging.info(broker1)
    logging.info("Displaying investor1 info AFTER REMOVING stock")
    logging.info(investor1)
    logging.info("Moving stock from investor1 TO broker1")
    broker1.get_stock(investor1)
    logging.info("Displaying investor1 AFTER stock given to broker")
    logging.info(investor1)
    logging.info("Displaying broker1 AFTER stock given to broker")
    logging.info(broker1)
|
19,696 | a8aa8bb8c7f0c5f584f013c171c26dca0bdeb0c0 | # Copyright 2023 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Dummy benchmarks using WPR to test page load time.
The number produced isn't meant to represent any actual performance
data of the browser.
"""
from benchmarks import loading_metrics_category
from core import perf_benchmark
from page_sets import dummy_wpr_story_set
from telemetry import benchmark
from telemetry.web_perf import timeline_based_measurement
@benchmark.Info(emails=['maxqli@google.com'], component='Test>Telemetry')
class DummyWprLoadBenchmark(perf_benchmark.PerfBenchmark):
  """Dummy page-load benchmark replayed through WPR.

  The produced numbers are not meaningful performance data; the benchmark
  exists to exercise the WPR + loading-metrics pipeline.
  """
  options = {'pageset_repeat': 2}
  page_set = dummy_wpr_story_set.DummyWprStorySet

  def CreateCoreTimelineBasedMeasurementOptions(self):
    # Collect the standard loading metrics plus the UMA histograms that
    # back FCP, LCP and cumulative layout shift.
    tbm_options = timeline_based_measurement.Options()
    loading_metrics_category.AugmentOptionsForLoadingMetrics(tbm_options)
    tbm_options.config.chrome_trace_config.EnableUMAHistograms(
        'PageLoad.PaintTiming.NavigationToLargestContentfulPaint',
        'PageLoad.PaintTiming.NavigationToFirstContentfulPaint',
        'PageLoad.LayoutInstability.CumulativeShiftScore')
    return tbm_options

  def CreateStorySet(self, options):
    return dummy_wpr_story_set.DummyWprStorySet()

  @classmethod
  def Name(cls):
    return 'UNSCHEDULED_dummy_wpr_benchmark.loading_using_wpr'
|
19,697 | f49c507c59911fbbd9d5a460d9e4b05ca9516fde | from django import forms
from django.http import HttpResponse
from django.shortcuts import redirect, render
from django.template import loader
from django.urls import reverse
from .models import Task
from .submit import complete_task, edit_submission, save_submission
def index(request):
    """Single view driving the whole todo-list UI.

    Every button in the template posts back here; the pressed button is
    identified by the prefix of its input name (e.g. complete_task_<id>).
    """
    # type of request: https://docs.djangoproject.com/en/1.11/ref/request-response/#httprequest-objects
    # type of request.POST: https://docs.python.org/3.5/library/stdtypes.html#dict
    # values in request.POST are of type str
    print(request.POST)
    print(type(request.POST))
    todo_list = Task.objects.all()
    waiting_todos = todo_list.filter(completed=False)
    tasks_done = todo_list.filter(completed=True)
    past_search_term = ""
    task_to_be_edited_text = ""
    for item_name in request.POST:
        item_value = request.POST[item_name]
        complete_prefix = 'complete_task_'
        delete_prefix = "delete_submission_"
        undo_prefix = "undo_complete_"
        edit_prefix = "edit_task_"
        save_prefix = "save_task_"
        cancel_prefix = "cancel_task_"
        if (item_name.startswith(complete_prefix)):
            # The button name encodes the task id after the prefix.
            task_id_str = item_name[len(complete_prefix):]
            id_to_complete = int(task_id_str)
            print (id_to_complete)
            Task.objects.filter(id=id_to_complete).update(completed = True)
        if (item_name.startswith(delete_prefix)):
            task_id_str = item_name[len(delete_prefix):]
            id_to_delete = int(task_id_str)
            print (id_to_delete)
            Task.objects.filter(id=id_to_delete).delete()
        if (item_name.startswith(undo_prefix)):
            task_id_str = item_name[len(undo_prefix):]
            id_to_undo = int(task_id_str)
            print (id_to_undo)
            Task.objects.filter(id=id_to_undo).update(completed = False)
        if (item_name.startswith(save_prefix)):
            task_id_str = item_name[len(save_prefix):]
            id_to_save = int(task_id_str)
            print (id_to_save)
            content = (request.POST['text_field_edit'])
            if content != "":
                Task.objects.filter(id=id_to_save).update(task_text = content)
        if (item_name.startswith("submit_task")):
            content = (request.POST['text_field_entry'])
            if content != "":
                # Avoid creating duplicate tasks with identical text.
                if not Task.objects.filter(task_text=content).exists():
                    save_submission(content)
        if (item_name.startswith("search_tasks")):
            # Narrow both lists to tasks containing the search term.
            content = (request.POST['text_field_search'])
            past_search_term = content
            if content != "":
                print(content)
                print(Task.objects.filter(task_text__icontains=content))
                waiting_todos = todo_list.filter(
                    task_text__icontains=content, completed=False)
                tasks_done = todo_list.filter(
                    task_text__icontains=content, completed=True)
        if (item_name.startswith(edit_prefix)):
            # NOTE(review): the variable is named id_to_undo although it
            # holds an edit id; purely a naming slip, behaviour is fine.
            task_id_str = item_name[len(edit_prefix):]
            id_to_undo = int(task_id_str)
            task_to_edit_part1 = Task.objects.filter(id=id_to_undo)[0]
            task_to_be_edited_text = task_to_edit_part1.task_text
    context = {
        'waiting_todos': waiting_todos.order_by("id"),
        'tasks_done': tasks_done.order_by("id"),
        'past_search_term': past_search_term,
        'task_to_be_edited_text': task_to_be_edited_text,
    }
    # print (context)
    return render(request, 'todolist/index.html', context)
|
19,698 | d8095c9a9167e7d65e096156dcd9b66409fdadba | class Desciple:
def __init__(self, entID, config, color=None):
self.packet = pc.Packet(entID, config, color)
self.client = pc.Client(self.packet)
self.server = pc.Server(config)
self.sync(self.packet, self.server)
self.sync(self.packet, self.client)
def sync(self, src, dst):
for attr in self.packet.paramNames():
setattr(dst, attr,
getattr(src, attr))
|
19,699 | 84ffc7caa49845ab596c268d626332a666f47a14 | n, k = map(int, input().split())
n = min(n, k-1)
if n <= k // 2:
m = 0
else:
m = n - k // 2
print(m)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.