blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5814667ab2dc78a084fcc28e328da9c0ea52a717 | 1af743e49691ea957f9dde256bf12454b7d8bf1b | /ta/swagger_server/models/battery_params.py | 11161083bbb0980a637a88041eb23f1222331b94 | [] | no_license | cmu-mars/RoboTest | bfbba0e90e4d6e967a4872c0bf8f243b0bcc34cd | 8654cf78f8ec382ac10c7b9b2dd6fee6a69d5d9b | refs/heads/master | 2021-06-28T16:32:01.796954 | 2020-01-18T03:39:49 | 2020-01-18T03:39:49 | 233,116,142 | 0 | 0 | null | 2021-06-02T00:52:01 | 2020-01-10T19:26:50 | Python | UTF-8 | Python | false | false | 2,212 | py | # coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from swagger_server.models.base_model_ import Model
from swagger_server import util
class BatteryParams(Model):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    def __init__(self, charge: float=None):  # noqa: E501
        """BatteryParams - a model defined in Swagger

        :param charge: The charge of this BatteryParams.  # noqa: E501
        :type charge: float
        """
        self.swagger_types = {
            'charge': float
        }

        self.attribute_map = {
            'charge': 'charge'
        }

        # Assign the backing field directly: the default (None) must be
        # accepted at construction time, while the `charge` setter rejects it.
        self._charge = charge

    @classmethod
    def from_dict(cls, dikt) -> 'BatteryParams':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The BatteryParams of this BatteryParams.  # noqa: E501
        :rtype: BatteryParams
        """
        return util.deserialize_model(dikt, cls)

    @property
    def charge(self) -> float:
        """Gets the charge of this BatteryParams.

        the level to which the battery should be set, in mWh. cannot be more than the maximum charge for the power model specified in the THs response to `/ready`.  # noqa: E501

        :return: The charge of this BatteryParams.
        :rtype: float
        """
        return self._charge

    @charge.setter
    def charge(self, charge: float):
        """Sets the charge of this BatteryParams.

        the level to which the battery should be set, in mWh. cannot be more than the maximum charge for the power model specified in the THs response to `/ready`.  # noqa: E501

        :param charge: The charge of this BatteryParams.
        :type charge: float
        :raises ValueError: if charge is None or negative.
        """
        if charge is None:
            raise ValueError("Invalid value for `charge`, must not be `None`")  # noqa: E501
        # charge is known to be non-None here, so the generated code's
        # redundant `charge is not None and` guard has been dropped.
        if charge < 0:  # noqa: E501
            raise ValueError("Invalid value for `charge`, must be a value greater than or equal to `0`")  # noqa: E501

        self._charge = charge
| [
"suj@email.sc.edu"
] | suj@email.sc.edu |
# NOTE: a `from __future__ import ...` statement must be the very first
# statement in a module; in the original file it appeared after other
# imports, which is a SyntaxError at import time.  It is now first.
from __future__ import print_function, division

# Standard library
import glob
import time

# Third-party
import cv2
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
import torch.nn.functional as F
import torchvision.models as models
from torch import optim
from torchvision import transforms

# Run tensor computation on the GPU when one is available, otherwise the CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
def train_and_test(batch_details, model1, model2 ,transforms,video_path, probs_c = 0.3, criterion = None, optimizer1 = None, optimizer2 = None, sample_rate = 8, phase = 'train'):
    """
    This function carries out the training process of the network for each step. It validates on test set for each epoch on True.
    model1 : convolution network module
    model2 : Prediction network module
    transforms : A transform object that contains transforms for applying on input image
    prob_c : weight of the attention term that we use in loss computation.
    criterion : Loss object.
    optimizer1, optimizer2 : Objects for doing optimisation on convolution and prediction module.
    sample_rate : No of frames for each video which if of fixed length
    phase : Tells whether it is a training or testing phase
    """
    batch_size = len(batch_details)
    # NOTE(review): assumes each video folder holds exactly sample_rate frames;
    # more frames than batch_size*sample_rate would overflow `image` — confirm
    # the upstream frame-extraction guarantees this.
    image = torch.zeros(batch_size*sample_rate, 3, 224, 224) #computes the image input for convolution network
    files = []
    a,b,c = [],[],[]
    for folder in batch_details:
        """
        The frames for each video are put in separate folder.
        So for every instance of the batch, collecting the respective
        file names of the frames, for image construction.
        """
        if (phase == 'train'):
            order = glob.glob(video_path + "/train" + folder[0] + "/*.jpg")
            order.sort()
            files += order
        else:
            order = glob.glob(video_path + "/test" + folder[0] + "/*.jpg")
            order.sort()
            files += order
        # Labels arrive 1-based in folder[1..3]; shift to 0-based class ids.
        a.append(folder[1]-1)
        b.append(folder[2]-1)
        c.append(folder[3]-1) #target tensors for each output
    for no, loc in enumerate(files):
        img = cv2.imread(loc)
        img = transforms(img)
        image[no] = img #applying transforms to images
    image = image.to(device)
    a = torch.LongTensor(a) #criterion expects ground truth vectors in LongTensor format
    b = torch.LongTensor(b)
    c = torch.LongTensor(c)
    a = a.to(device)
    b = b.to(device)
    c = c.to(device)
    if (phase == 'train'):
        model1.train()
        model2.train()
        compute_map = model2(image) #computing the feature map.
        predict, probs = model1(compute_map)
        loss1 = criterion(predict[0],a)
        loss2 = criterion(predict[1],b )
        loss3 = criterion(predict[2],c)
        probs = probs.permute(1,0,2)
        loss = loss3 + loss1 + loss2 + probs_c * (((1-probs.sum(dim=1))**2).sum(dim=1).mean()) #adding the stochastic loss of attention weights as mentioned in the paper.
        optimizer1.zero_grad()
        optimizer2.zero_grad()
        loss.backward() #backward propagation of the loss
        optimizer1.step()
        optimizer2.step()
        # Re-run the models in eval mode to measure accuracy on this batch.
        model1.eval()
        model2.eval()
        compute_map = model2(image)
        predict2, _ = model1(compute_map)
    elif(phase == 'test'):
        model1.eval()
        model2.eval()
        compute_map = model2(image)
        predict2, _ = model1(compute_map) #computing the prediction in test phase.
    """
    Finding the best class from the predicted outputs
    """
    # NOTE(review): predict2 (and, in train phase, loss) are unbound if phase
    # is neither 'train' nor 'test' — a NameError would follow here.
    _, pre_1 = torch.max(predict2[0], dim = 1)
    _, pre_2 = torch.max(predict2[1], dim = 1)
    _, pre_3 = torch.max(predict2[2], dim = 1)
    """
    Comparing the predicted and ground truth vectors.
    """
    a1 = (sum(pre_1 == a).item())/batch_size
    a2 = (sum(pre_2 == b).item())/batch_size
    a3 = (sum(pre_3 == c).item())/batch_size
    if (phase == 'train'):
        return loss.item(), (a1,a2,a3)
    else:
        return (a1,a2,a3)
| [
"gouthamkumarvgk@gmail.com"
] | gouthamkumarvgk@gmail.com |
2f4272b7fb57d5ae830a86778c740c33d3acbb78 | 59b3acec6c6a021528682573e478ced2794c6b03 | /Py4eSpec/02-PDS/myCode/fileOpen.py | 3cab1975d253f82ecbc7468904dcf24f02da2a21 | [] | no_license | charlesluch/Code | f9ed52e5cfb45917baf35277cdf8da68e54332ea | a9d772d73ab40bd01266d7d833dbeab95797bf44 | refs/heads/master | 2021-10-09T01:41:48.815186 | 2018-12-20T02:59:15 | 2018-12-20T02:59:15 | 131,547,087 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 446 | py | # 7.1 Write a program that prompts for a file name, then opens that file and reads through the file, and print the contents of the file in upper case. Use the file words.txt to produce the output below.
# You can download the sample data at http://www.py4e.com/code3/words.txt
# Use words.txt as the file name
fname = input("Enter file name: ")
fh = open(fname)
for line in fh:
line = line.rstrip()
line = line.upper()
print(line)
| [
"c_luchetti@hotmail.com"
] | c_luchetti@hotmail.com |
727fc97005633da5105c31d875de048d679cb327 | 17268419060d62dabb6e9b9ca70742f0a5ba1494 | /pp/samples/191_mirror_h.py | 5d5f8caa93016a9121b917401e02a52f9b2ade76 | [
"MIT"
] | permissive | TrendingTechnology/gdsfactory | a19124423b12cbbb4f35b61f33303e9a012f82e5 | c968558dba1bae7a0421bdf49dc192068147b776 | refs/heads/master | 2023-02-22T03:05:16.412440 | 2021-01-24T03:38:00 | 2021-01-24T03:38:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 188 | py | if __name__ == "__main__":
import pp
c = pp.Component()
m1 = c << pp.c.mmi1x2()
m2 = c << pp.c.mmi1x2()
m2.reflect_h(port_name="E1")
m2.movex(10)
pp.show(c)
| [
"noreply@github.com"
] | noreply@github.com |
import os

# Map each file extension to the directory the user chose for it.  The
# original kept a list that was never appended to, so the user was asked the
# same question again for every file with a previously-seen extension.
extension_to_category = {}
os.chdir("FilesToSort")
path = "."
files = os.listdir(path)
for filename in files:
    extension = filename.split('.')[-1]
    print("{} file".format(extension))
    if extension not in extension_to_category:
        new_dir = input("What category would you like to sort {} files into? ".format(extension))
        extension_to_category[extension] = new_dir
        try:
            os.mkdir(new_dir)
        except FileExistsError:
            pass  # directory already exists from a previous run
    # Actually sort: move the file into its chosen category directory.
    os.rename(filename, os.path.join(extension_to_category[extension], filename))
| [
"yiwei.fan@my.jcu.edu.au"
] | yiwei.fan@my.jcu.edu.au |
02b92d92f732d2e44906cebf9d57c84e87516241 | a18ff11fca8810be96bdb23e76e6c0e97d2579ee | /contrib/bitrpc/bitrpc.py | 41f3d82888b779542fa54287723aa8858624e7ea | [
"MIT"
] | permissive | cryptocurinfo/sovereigncoin | 66f33cce584540b2d095801a8902224d08638c82 | 118b6705f176051403735116499691d35ce1f103 | refs/heads/master | 2020-03-30T10:35:48.196810 | 2014-10-26T03:51:47 | 2014-10-26T03:51:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,837 | py | from jsonrpc import ServiceProxy
import sys
import string
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======
if rpcpass == "":
access = ServiceProxy("http://127.0.0.1:27004")
else:
access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:27004")
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Bitcoin address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Bitcoin address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.gettransaction(data)
except:
print access.gettransaction()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported" | [
"roger.k.h.3@gmail.com"
] | roger.k.h.3@gmail.com |
8813f6544d2ccea3832683f456c77c7e969252cd | 11d697345808e3630985d70600fd6f2bed1ac7e5 | /slacktheme/models.py | 519400c4f6b64e846e1d9bf5d6e8f82435b917a8 | [] | no_license | openhealthcare/opal-slacktheme | ce97ddac3c490ed19a3ab96dd85a17eec010cff5 | c819e02f9e4a45a554ae5b49d28b95a812a86bca | refs/heads/master | 2021-01-19T22:52:41.572813 | 2017-06-08T13:21:42 | 2017-06-08T13:21:42 | 88,879,256 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 251 | py | """
Models for slacktheme
"""
#
# Warning - even if you don't have any models, please don't delete this file.
# Some parts of Django require you to have something it can import called
# slacktheme.models in order for us to let you be a Django app.
# | [
"david@deadpansincerity.com"
] | david@deadpansincerity.com |
fe1155c8ff3f89a2bfb40b7f2ccfda48fcecb345 | 2b4c966449521584972ad2916a0142964b088728 | /Alteryx_TQL/Code/tsTQL/tsTQL.py | 6783639ec4cac22b602a90e50995ba8302c71a5b | [
"MIT"
] | permissive | sameersatyam/community-tools | 8ae512c87910b541802de1379cf9ede7a439ec89 | 4b8a257371604204b221822715718c5b29cfff18 | refs/heads/master | 2020-06-01T05:22:26.920339 | 2019-06-06T14:44:50 | 2019-06-06T14:44:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,492 | py | import AlteryxPythonSDK as Sdk
import xml.etree.ElementTree as Et
import select
import gzip
import sys
from classes.sshClient import sshClient
class AyxPlugin:
    """
    Implements the plugin interface methods, to be utilized by the Alteryx engine to communicate with a plugin.
    Prefixed with "pi", the Alteryx engine will expect the below five interface methods to be defined.
    """
    def __init__(self, n_tool_id: int, alteryx_engine: object, output_anchor_mgr: object):
        """
        Constructor is called whenever the Alteryx engine wants to instantiate an instance of this plugin.
        :param n_tool_id: The assigned unique identification for a tool instance.
        :param alteryx_engine: Provides an interface into the Alteryx engine.
        :param output_anchor_mgr: A helper that wraps the outgoing connections for a plugin.
        """
        # Default properties
        self.n_tool_id = n_tool_id
        self.alteryx_engine = alteryx_engine
        self.output_anchor_mgr = output_anchor_mgr
        # Custom properties (populated from GUI XML in pi_init)
        self.destinationServer = None
        self.userName = None
        self.password = None
        self.tqlFilePath = None
        self.tqlStatements = None
        self.is_valid = True
        self.tables = []
        self.batchSize = 100
        self.is_initialized = True
        self.single_input = None
        self.output_anchor = None
        self.information_anchor = None
        self.tsmessage = None
        self.tsmessage_type = Sdk.FieldType.string
        self.tsmessage_size = 2000
        # Custom properties (SSH session state, populated in pi_push_all_records)
        self.sshConnection = None
        self.channel = None
        self.stdin = None
        self.stdout = None
        self.stderr = None
        self.writer = None
        self.record_creator = None
        self.record_info_out = None
    def write_lists_to_TS(self,lines):
        """
        A non-interface, helper function that handles writing a compressed CSV in Memory and clears the list elements.
        """
        try:
            # gzip the payload before sending; the remote command pipes it
            # through `gzip -dc` (see pi_push_all_records).
            compressed=gzip.compress(lines.encode())
            if self.channel.send_ready():
                self.channel.sendall(compressed)
            return True
        except:
            # NOTE(review): bare except, and sys.exit() below tears down the
            # whole host process on any send failure — confirm this hard
            # shutdown path is intended.
            self.xmsg("Error", "Error sending Data")
            self.stdin.close()
            self.channel.shutdown_write()
            self.xmsg('Info', 'Completed Sending Commands')
            self.writeChunks(600)
            self.sshConnection.close()
            self.xmsg('Info', 'Connection with Destination Closed')
            self.output_anchor.assert_close()
            self.information_anchor.assert_close()
            sys.exit()
        return False
    def writeChunks(self,intimeout=30):
        """
        A non-interface, helper function that reads the SSH buffers and stores the chunks for reporting.

        Drains stdout/stderr from the remote command until the channel closes,
        then pushes stderr lines to the Information anchor and stdout lines to
        the Output anchor.
        :param intimeout: select() timeout in seconds while draining.
        """
        timeout = intimeout
        stdout_chunks = []
        stderr_chunks = []
        stdout_chunks.append(self.stdout.channel.recv(len(self.stdout.channel.in_buffer)).decode('utf-8'))
        while not self.channel.closed or self.channel.recv_ready() or self.channel.recv_stderr_ready():
            got_chunk = False
            readq, _, _ = select.select([self.stdout.channel], [], [], timeout)
            for c in readq:
                if c.recv_ready():
                    stdout_chunks.append(self.stdout.channel.recv(len(c.in_buffer)).decode('utf-8'))
                    got_chunk = True
                if c.recv_stderr_ready():
                    stderr_chunks.append(self.stderr.channel.recv_stderr(len(c.in_stderr_buffer)).decode('utf-8'))
                    got_chunk = True
            # Exit once the remote command has finished and both buffers are drained.
            if not got_chunk \
                and self.stdout.channel.exit_status_ready() \
                and not self.stderr.channel.recv_stderr_ready() \
                and not self.stdout.channel.recv_ready():
                self.stdout.channel.shutdown_read()
                self.stdout.channel.close()
                break
        self.stdout.close()
        self.stderr.close()
        columnnames = None
        prevtsrow = None
        # stderr rows go to the Information anchor; a row starting with '-'
        # causes the previous row to be remembered as the column-name header.
        tsmessage = ''.join((stderr_chunks))
        for tsrow in tsmessage.splitlines():
            if str(tsrow).strip() != '':
                if str(tsrow).strip()[0] == '-':
                    columnnames = prevtsrow
                self.record_info_out[0].set_from_string(self.record_creator, str(tsrow).strip())
                out_record = self.record_creator.finalize_record()
                self.information_anchor.push_record(out_record, False)
                self.record_creator.reset()
            prevtsrow = str(tsrow).strip()
        # stdout rows (query results) go to the Output anchor, preceded by the
        # detected header row when one was found.
        tsmessage = ''.join((stdout_chunks))
        if columnnames is not None:
            self.record_info_out[0].set_from_string(self.record_creator, columnnames)
            out_record = self.record_creator.finalize_record()
            self.output_anchor.push_record(out_record, False)
            self.record_creator.reset()
        for tsrow in tsmessage.splitlines():
            self.record_info_out[0].set_from_string(self.record_creator, str(tsrow).strip())
            out_record = self.record_creator.finalize_record()
            self.output_anchor.push_record(out_record, False)
            self.record_creator.reset()
        return True
    def pi_init(self, str_xml: str):
        """
        Handles configuration based on the GUI.
        Called when the Alteryx engine is ready to provide the tool configuration from the GUI.
        :param str_xml: The raw XML from the GUI.
        """
        # Testing code change
        # Getting the dataName data property from the Gui.html
        # NOTE(review): when a tag is missing, the conditional expression below
        # assigns xmsg()'s return value (True) to the setting instead of None —
        # confirm downstream code tolerates that.
        self.destinationServer = Et.fromstring(str_xml).find('DestinationServer').text if 'DestinationServer' in str_xml else self.xmsg("Error", "Please Enter a Destination Server")
        self.userName = Et.fromstring(str_xml).find('UserName').text if 'UserName' in str_xml else self.xmsg("Error", "Please Enter a User Name")
        self.tqlFilePath = Et.fromstring(str_xml).find('dataSourceFilePath').text if 'dataSourceFilePath' in str_xml else None
        self.tqlStatements = Et.fromstring(str_xml).find('TQLText').text if 'TQLText' in str_xml else None
        if self.tqlStatements is None and self.tqlFilePath is None:
            self.xmsg("Error", "Please Enter a TQL File or TQL Statement")
        password = Et.fromstring(str_xml).find('Password').text if 'Password' in str_xml else None
        if password is None:
            self.xmsg('Error', "A Password Must be Entered")
        else:
            self.password = self.alteryx_engine.decrypt_password(Et.fromstring(str_xml).find('Password').text, 0)
        self.output_anchor = self.output_anchor_mgr.get_output_anchor('Output')
        self.information_anchor = self.output_anchor_mgr.get_output_anchor('Information')
    def pi_add_incoming_connection(self, str_type: str, str_name: str) -> object:
        """
        The IncomingInterface objects are instantiated here, one object per incoming connection.
        Called when the Alteryx engine is attempting to add an incoming data connection.
        :param str_type: The name of the input connection anchor, defined in the Config.xml file.
        :param str_name: The name of the wire, defined by the workflow author.
        :return: The IncomingInterface object(s).
        """
        # NOTE(review): returns the plugin itself rather than an
        # IncomingInterface instance — confirm this is intentional.
        return self
    def pi_add_outgoing_connection(self, str_name: str) -> bool:
        """
        Called when the Alteryx engine is attempting to add an outgoing data connection.
        :param str_name: The name of the output connection anchor, defined in the Config.xml file.
        :return: True signifies that the connection is accepted.
        """
        return True
    def pi_push_all_records(self, n_record_limit: int) -> bool:
        """
        Handles generating a new field for no incoming connections.
        Called when a tool has no incoming data connection.
        :param n_record_limit: Set it to <0 for no limit, 0 for no records, and >0 to specify the number of records.
        :return: False if there's an error with the field name, otherwise True.
        """
        #self.xmsg('Error','Missing Incoming Connection')
        #return False
        # Skip all real work when the engine only wants metadata updates.
        if self.alteryx_engine.get_init_var(self.n_tool_id, 'UpdateOnly') == 'False':
            self.record_info_out = Sdk.RecordInfo(self.alteryx_engine)
            self.record_info_out.add_field(self.tsmessage, self.tsmessage_type,
                                           self.tsmessage_size)
            self.output_anchor.init(self.record_info_out)
            self.information_anchor.init(self.record_info_out)
            self.record_creator = self.record_info_out.construct_record_creator()
            self.sshConnection = sshClient(self.destinationServer, self.userName, self.password,
                                           22, True, True)
            if self.sshConnection is None:
                self.xmsg('Info', 'Error with SSH Connection')
                return False
            else:
                self.xmsg('Info', 'Connection with Destination Established')
            # Remote side decompresses our gzipped payload and pipes it to tql.
            cmd = 'gzip -dc | tql --query_results_apply_top_row_count 0 --null_string ""'
            # cmd = 'tql --query_results_apply_top_row_count 0 --pagination_size 1000000 --null_string ""'
            self.xmsg('Info', cmd)
            self.stdin, self.stdout, self.stderr = self.sshConnection.ssh.exec_command(cmd)
            self.xmsg('Info', 'Executing Command')
            self.channel = self.stdout.channel
            self.channel.settimeout(None)
            # Inline TQL text takes precedence over a TQL file path.
            if self.tqlStatements is not None:
                lines = self.tqlStatements.splitlines()
                for line in lines:
                    self.xmsg('Info',line)
                self.write_lists_to_TS(self.tqlStatements)
            elif self.tqlFilePath is not None:
                with open(self.tqlFilePath,'r') as myFile:
                    lines = "".join(line for line in myFile)
                    myFile.close()
                for line in lines.splitlines():
                    self.xmsg('Info',line)
                self.write_lists_to_TS(lines)
        return True
    def pi_close(self, b_has_errors: bool):
        """
        Called after all records have been processed.

        Flushes the SSH session, drains remote output via writeChunks, and
        closes both output anchors.
        :param b_has_errors: Set to true to not do the final processing.
        """
        if self.alteryx_engine.get_init_var(self.n_tool_id, 'UpdateOnly') == 'False':
            self.stdin.close()
            self.channel.shutdown_write()
            self.xmsg('Info', 'Completed Sending Commands')
            self.writeChunks(600)
            self.sshConnection.close()
            self.xmsg('Info', 'Connection with Destination Closed')
            self.output_anchor.assert_close()
            self.information_anchor.assert_close()
    def xmsg(self, msg_type: str, msg_string: str):
        """
        A non-interface, non-operational placeholder for the eventual localization of predefined user-facing strings.
        :param msg_type: "Info" or "Error"; selects the engine message severity.
        :param msg_string: The user-facing string.
        :return: msg_string
        """
        if msg_type == "Info":
            self.alteryx_engine.output_message(self.n_tool_id, Sdk.EngineMessageType.info,msg_string)
        elif msg_type == "Error":
            self.alteryx_engine.output_message(self.n_tool_id, Sdk.EngineMessageType.error,msg_string)
        return True
class IncomingInterface:
    """
    Incoming-interface object handed back by pi_add_incoming_connection.

    The Alteryx engine drives the "ii"-prefixed callbacks below while records
    arrive on an incoming wire.  This tool does no per-record processing, so
    every callback is deliberately a no-op.
    """
    def __init__(self, parent: object):
        """
        Constructor for IncomingInterface.

        :param parent: The owning AyxPlugin instance (ignored by this tool).
        """
        # Nothing to initialize: incoming records are not consumed.
        pass
    def ii_init(self, record_info_in: object) -> bool:
        """
        Called once with the incoming connection's record metadata.

        :param record_info_in: A RecordInfo object for the incoming connection's fields.
        :return: None; no field setup is performed.
        """
        pass
    def ii_push_record(self, in_record: object) -> bool:
        """
        Called for every incoming record.

        :param in_record: The data for the incoming record.
        :return: None; records are ignored by this tool.
        """
        pass
    def ii_update_progress(self, d_percent: float):
        """
        Called by the upstream tool to report its record-push progress.

        :param d_percent: Value between 0.0 and 1.0.
        """
        # Progress is not forwarded anywhere.
        pass
    def ii_close(self):
        """
        Called once the incoming connection has delivered all of its records.
        """
        pass
"kendrickcheath@gmail.com"
] | kendrickcheath@gmail.com |
ba76c73c721d2e7dce88a5c8b406da79875e6b43 | a995c3ba49df323ae95d1348231a4155f43ae83d | /BuildingManagerCore/models.py | cae8b6ee94f28c387a6ec998411174d773ee921e | [] | no_license | sdosis/BuildingManager | 44e53888e9165d0c1dddc0077703229abaa77af7 | 212c88b812a41fcfbaf5182a1b494807c70aa5ac | refs/heads/master | 2021-04-26T23:18:15.827692 | 2018-03-16T07:50:28 | 2018-03-16T07:50:28 | 123,966,790 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,702 | py | from django.db import models
from django_neomodel import DjangoNode
from neomodel import StructuredNode, StringProperty,RelationshipTo,RelationshipFrom,UniqueIdProperty
# Create your models here.
class Building(models.Model):
    # Street address; also serves as the human-readable representation.
    address = models.CharField(max_length=100)
    def __str__(self):
        return self.address
class Apartment(models.Model):
    # Apartment number within the building, e.g. "12A".
    number = models.CharField(max_length=10)
    floor = models.IntegerField()
    # NOTE(review): default='0' is used as the FK's default primary key —
    # confirm a Building with pk 0 exists, otherwise saving without an
    # explicit building will fail.
    building = models.ForeignKey(Building,on_delete=models.DO_NOTHING,default='0')
    def __str__(self):
        # "<building id> <apartment number>".
        return str(self.building.id) + " " + self.number
class Person(models.Model):
    name = models.CharField(max_length=30)
    mobile = models.CharField(max_length=15)
    # A person can be linked to several apartments, and an apartment to
    # several people.
    apartment = models.ManyToManyField(Apartment)
    def __str__(self):
        return self.name
class DistrictNode(StructuredNode):
    # Graph (neo4j) node for a district; neighborhoods link here via PART_OF.
    name = StringProperty()
    neighborhoods = RelationshipFrom('NeighborhoodNode','PART_OF')
class NeighborhoodNode(StructuredNode):
    name = StringProperty()
    # NOTE(review): the attribute is named `community` but targets
    # DistrictNode — confirm the naming is intentional.
    community = RelationshipTo('DistrictNode','PART_OF')
    buildings = RelationshipFrom('BuildingNode','PART_OF')
class BuildingNode(StructuredNode):
    # Address doubles as the unique graph key for a building.
    address = StringProperty(unique_index=True)
    neighborhood = RelationshipTo('NeighborhoodNode','PART_OF')
    apartments = RelationshipFrom('ApartmentNode','PART_OF')
class ApartmentNode(StructuredNode):
    number = StringProperty()
    # NOTE(review): `floor` is an IntegerField on the relational Apartment
    # model but a StringProperty here — confirm the type mismatch is intended.
    floor = StringProperty()
    building = RelationshipTo(BuildingNode,'PART_OF')
    persons = RelationshipFrom('PersonNode','LIVES_IN')
class PersonNode(StructuredNode):
    # Graph counterpart of Person; LIVES_IN mirrors the relational
    # many-to-many between Person and Apartment.
    name = StringProperty()
    mobile = StringProperty()
    apartment = RelationshipTo(ApartmentNode,'LIVES_IN')
| [
"spyridon.dosis@gmail.com"
] | spyridon.dosis@gmail.com |
e3e96c55bff7e7233f26119f874a33cbfcf4140e | 8f7302ed3fc44722405461169b6f406fbde1ca4d | /statlas/config/base.py | 3f102a9d57033500ef958f53b6433af1a8dea8c8 | [] | no_license | MonsterSwell/Statlas | c7312d5921c6ec88d11390c8b54412074acf9ebb | a77c9c556ca839386173420de97ec988a0226a6c | refs/heads/master | 2021-01-01T15:35:39.495507 | 2011-06-28T18:39:06 | 2011-06-28T18:39:06 | 1,222,459 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,016 | py | import socket
import os.path, functools
from fluxdeps.helpers import get_secret_key
DEBUG = True
"""
Some helper functions for making life easy
"""
# Helper for translations
# http://docs.djangoproject.com/en/dev/ref/settings/#languages
gettext = lambda s: s
# The actual site root and a helper function to point to a folder in siteroot
SITE_ROOT = os.path.realpath(os.path.dirname(__file__)+'/../')
IN_SITE_ROOT = functools.partial(os.path.join, SITE_ROOT)
"""
Settings
"""
SITE_ID=1
# List of people to notify on server error
# http://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
)
# List of people to notify on broken link error
# http://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
SEND_BROKEN_LINK_EMAILS=True
# Local time zone
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# http://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'Europe/Amsterdam'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
# http://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en'
# Use of I18N (custom language) and L18N (dates)
# http://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
# http://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_I18N = True
USE_L10N = True
USE_THOUSAND_SEPARATOR = True
# Link to default and admin media.
# ADMIN_MEDIA_PREFIX must not be equal to MEDIA_ROOT
# http://docs.djangoproject.com/en/dev/ref/settings/#admin-media-prefix
MEDIA_ROOT = IN_SITE_ROOT('media')
MEDIA_URL = '/media/'
ADMIN_MEDIA_PREFIX = '/admin/media/'
PRIVATE_DATA_ROOT = IN_SITE_ROOT('data', 'private')
IN_PRIVATE_DATA_ROOT = functools.partial(os.path.join, PRIVATE_DATA_ROOT)
SECRET_KEY = get_secret_key()
# Debug Toolbar
# https://github.com/robhudson/django-debug-toolbar
#INTERNAL_IPS = ('127.0.0.1',)
# http://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
# FIX: "django.core.context_processors.request" was listed twice; the
# duplicate entry has been removed (it would be run twice per request).
TEMPLATE_CONTEXT_PROCESSORS = (
    "django.contrib.auth.context_processors.auth",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.core.context_processors.request",
    "django.contrib.messages.context_processors.messages",
)
# FIX: removed the deprecated function-based
# 'django.template.loaders.app_directories.load_template_source' entry,
# which duplicated the class-based app_directories.Loader above and made
# Django search the same template directories twice.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
    'fluxdeps.middleware.StaticServe',
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'debug_toolbar.middleware.DebugToolbarMiddleware',
    'fluxdeps.middleware.OmitWWW',
)
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = (
    IN_SITE_ROOT('templates'),
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.markup',
    'django.contrib.admin',
    'django.contrib.humanize',
    'fluxdeps.default',
    'debug_toolbar',
    'socialregistration',
    'statmap',
    'accounts',
)
# No WWW
PREPEND_WWW = False
"""
OAUTH
"""
TWITTER_CONSUMER_KEY = ''
TWITTER_CONSUMER_SECRET_KEY = ''
TWITTER_REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
TWITTER_ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
TWITTER_AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
AUTHENTICATION_BACKENDS = (
    'django.contrib.auth.backends.ModelBackend',
    'socialregistration.auth.TwitterAuth',
)
SOCIALREGISTRATION_GENERATE_USERNAME = True
"""
URL2PNG
"""
URL2PNG_APIKEY = ''
URL2PNG_SECRET = ''
URL2PNG_BOUNDS = "500x500"
LOGIN_URL = '/login/'
LOGIN_REDIRECT_URL = '/'
LOGOUT_URL = '/logout/'
| [
"kilian@kilianvalkhof.com"
] | kilian@kilianvalkhof.com |
0e72a7a1739168e4bdf4ea279ad566cb2856b350 | 7b2d5f751abad4d0747017931cdcf49b9929d9e5 | /api/functions.py | ee539fbf60fcef316519ca45744cfe5e19dd4e64 | [] | no_license | r-i-c-h-a/Recipe_Generator | 412a8eb6b9a4de6c8e58bb979832f04a86c526f9 | 1953f7f1d04fc1909fd6e0180df953170cd9fc19 | refs/heads/master | 2022-09-22T02:42:47.375890 | 2020-05-31T08:52:13 | 2020-05-31T08:52:13 | 268,240,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,079 | py | import pandas as pd
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import linear_kernel
from rake_nltk import Rake
import numpy as np
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.feature_extraction.text import CountVectorizer
def return_csv():
    """Load allrecipes.csv, drop duplicate titles (keeping the first
    occurrence) and return only the title/calories/ingredients columns."""
    recipes = pd.read_csv('allrecipes.csv')
    recipes = recipes.drop_duplicates(subset='title', keep="first")
    return recipes[['title','calories','ingredients']]
def gen_bag_of_words(df):
    """Turn each recipe row into a single whitespace-joined "bag of words".

    Mutates ``df`` in place (and also returns it):
      * extracts RAKE key words from ingredients + title into 'Key_words',
      * drops 'ingredients', sets 'title' as the index,
      * concatenates all remaining column values into 'bag_of_words',
      * drops every column except 'bag_of_words'.
    """
    key_words = []
    for index, row in df.iterrows():
        ing = row['ingredients']
        titl = row['title']
        r = Rake()
        # extracting the words by passing the text
        r.extract_keywords_from_text(ing+titl)
        # dictionary with key words as keys and their scores as values
        key_words_dict_scores = r.get_word_degrees()
        # assign the key words to the new column for the corresponding recipe
        key_words.append(list(key_words_dict_scores.keys()))
    df['Key_words'] = key_words
    df.drop(columns = ['ingredients'], inplace = True)
    df.set_index('title', inplace = True)
    bags = []
    columns = df.columns
    for index, row in df.iterrows():
        words = ''
        for col in columns:
            if col == 'Key_words':
                words = words + ' '.join(row[col])+ ' '
            else:
                # NOTE(review): the literal string 'col' is appended after each
                # non-keyword value (e.g. "350col "); this looks unintentional
                # but is preserved because the trained vectorizer depends on
                # the exact token stream -- TODO confirm.
                words = words + str(row[col])+ 'col' +' '
        bags.append(words)
    df['bag_of_words'] = bags
    df.drop(columns = [col for col in df.columns if col!= 'bag_of_words'], inplace = True)
    return df
def return_cos_sim(df, df1):
    """Vectorize the bag-of-words of both frames and return
    (titles-as-Series, cosine-similarity matrix of df rows vs df1 rows)."""
    vectorizer = CountVectorizer()
    # fit the vocabulary on the full corpus, then project the query frame
    corpus_counts = vectorizer.fit_transform(df['bag_of_words'])
    query_counts = vectorizer.transform(df1['bag_of_words'])
    similarity = cosine_similarity(corpus_counts, query_counts)
    # Series of recipe titles, ordered like the rows of `similarity`
    titles = pd.Series(df.index)
    return titles, similarity
def recommendation(indices, cosine_sim):
    """Return up to 10 recipe titles ranked by mean cosine similarity.

    Parameters
    ----------
    indices : sequence of recipe titles, ordered like the rows of cosine_sim.
    cosine_sim : 2-D array-like; row i holds the similarities of recipe i
        against every query recipe.

    The single best-scoring row is skipped (assumed to be the query itself)
    and the titles of the next ten rows are returned.
    (Fix: removed an unused ``k = lambda a: a[0]`` local.)
    """
    # mean similarity per recipe, paired with its row position so the
    # title can be recovered after sorting
    score_series = []
    for i, line in enumerate(cosine_sim):
        score_series.append((np.mean(line), i))
    # descending by mean score; ties fall back on the higher row index
    sort_mean = sorted(score_series, reverse=True)
    titles = list(indices)
    recommended_recipes = []
    # skip position 0 (best match == the query), keep the next ten
    for score, row in sort_mean[1:11]:
        recommended_recipes.append(titles[row])
    return recommended_recipes
#df = return_csv()
#df1 = df.sample(5)
#df = gen_bag_of_words(df)
#df1 = gen_bag_of_words(df1)
#ind, cosine_sim = return_cos_sim(df,df1)
#print(list(df1.index),recommendation(ind,cosine_sim)) | [
"kirthikagurumurthy@gmail.com"
] | kirthikagurumurthy@gmail.com |
07ef2d67568565f349ca85383443d41d67fd187a | 72c4177c189c339a43e0dada7d008b14ba485d87 | /mlclass-ex4/main.py | 169564de1c76f778f19ef925ec569b6d28e61188 | [] | no_license | jeffin143/neuralnetworkdigits | 9383d816f73d22ebd84f94a70ae24c362c01ee79 | 8feadd989760324fb1b1e5209ed757da72ea9471 | refs/heads/master | 2021-09-05T16:00:21.653869 | 2018-01-29T14:10:30 | 2018-01-29T14:10:30 | 119,390,432 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,237 | py | #!/usr/bin/env python
#
# Neural network learning
#
# depends on
#
# displayData.py
# sigmoidGradient.py
# randInitializeWeights.py
# nnCostFunction.py
#
import scipy.io
import random
import time
import numpy as np
import displayData as dd
import nnCostFunction as nncf
import sigmoidGradient as sg
import randInitializeWeights as riw
import checkNNGradients as cnng
from scipy.optimize import minimize
import predict as pr
# NOTE(review): this is a Python 2 script ('print m', 'raw_input'); it drives
# neural-network training for handwritten-digit recognition and depends on the
# sibling modules displayData/nnCostFunction/randInitializeWeights/predict.
## Setup the parameters you will use for this exercise
input_layer_size = 400 # 20x20 Input Images of Digits
hidden_layer_size = 25 # 25 hidden units
num_labels = 10 # 10 labels, from 1 to 10
# (note that we have mapped "0" to label 10)
## =========== Part 1: Loading and Visualizing Data =============
# We start the exercise by first loading and visualizing the dataset.
# You will be working with a dataset that contains handwritten digits.
#
# Load Training Data
print('Loading and Visualizing Data ...')
mat = scipy.io.loadmat('dataset.mat')
X = mat["X"]
y = mat["y"]
m = X.shape[0]
print m
# crucial step in getting good performance!
# changes the dimension from (m,1) to (m,)
# otherwise the minimization isn't very effective...
y=y.flatten()
# Randomly select 100 data points to display
rand_indices = np.random.permutation(m)
sel = X[rand_indices[:100],:]
dd.displayData(sel)
print('Initializing Neural Network Parameters...')
initial_Theta1 = riw.randInitializeWeights(input_layer_size, hidden_layer_size)
initial_Theta2 = riw.randInitializeWeights(hidden_layer_size, num_labels)
# Unroll parameters
# Fortran (column-major) order is used consistently for flattening/reshaping.
nn_params = np.concatenate((initial_Theta1.reshape(initial_Theta1.size, order='F'), initial_Theta2.reshape(initial_Theta2.size, order='F')))
print('Training Neural Network...')
maxiter=20
lambda_reg = 0.1
myargs = (input_layer_size, hidden_layer_size, num_labels, X, y, lambda_reg)
# nnCostFunction returns (cost, gradient), hence jac=True
results = minimize(nncf.nnCostFunction, x0=nn_params, args=myargs, options={'disp': True, 'maxiter':maxiter}, method="L-BFGS-B", jac=True)
nn_params = results["x"]
# Obtain Theta1 and Theta2 back from nn_params
Theta1 = np.reshape(nn_params[:hidden_layer_size * (input_layer_size + 1)], \
                 (hidden_layer_size, input_layer_size + 1), order='F')
Theta2 = np.reshape(nn_params[hidden_layer_size * (input_layer_size + 1):], \
                 (num_labels, hidden_layer_size + 1), order='F')
print('\nVisualizing Neural Network... \n')
dd.displayData(Theta1[:, 1:])
raw_input('Program paused. Press enter to continue.\n')
pred = pr.predict(Theta1, Theta2, X)
#print(y)
# code below to see the predictions that don't match
"""
fmt = '{} {}'
print(fmt.format('y', 'pred'))
for y_elem, pred_elem in zip(y, pred):
    if y_elem == pred_elem:
        #print(fmt.format(y_elem%10, pred_elem%10))
        print(y[y_elem])
        print(pred[pred_elem])
#raw_input('Program paused. Press enter to continue.\n')
"""
# Show 10 random samples with their labels and predictions (label 10 maps to digit 0).
fmt = '{} {}'
print(fmt.format('y', 'pred'))
for x in range(10):
    p=np.random.randint(1,m)
    pred = pr.predict(Theta1, Theta2, X[p])
    print(fmt.format(y[p]%10, pred%10))
    dd.displayData(X[p])
    time.sleep(.500)
pred = pr.predict(Theta1, Theta2, X)
print('Training Set Accuracy: {:f}'.format( ( np.mean(pred == y)*100 ) ) )
| [
"noreply@github.com"
] | noreply@github.com |
271916c172d033c6c05adf04acde7f202951f0ef | 480733d1e6f79f906dec32990327a408289a15e4 | /Magisterka/venv/Animation/Animation_test2.py | 34c4e76b02f282792815d5ebfee49ca29bc34438 | [] | no_license | ksia4/Guided_Waves | 6f3b73892ce7f83c3f69a71aeb53f0414433ffca | 7396707ee4838c92583ad9e5fbd1d04e020d365a | refs/heads/master | 2020-03-09T13:54:27.911906 | 2018-09-15T18:12:10 | 2018-09-15T18:12:10 | 128,822,364 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,365 | py | import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from multiprocessing import Process
import time # optional for testing only
import matplotlib.animation as animation
# A. First we define some useful tools:
def wait_fig():
    """Block execution until the current figure is closed.

    Works even under multiprocessing. If the interactive mode was on, it is
    turned off for the blocking show and restored afterwards.
    """
    was_interactive = matplotlib.pyplot.isinteractive()
    if was_interactive:
        matplotlib.pyplot.ioff()  # required under multiprocessing
        matplotlib.pyplot.show(block=True)
        matplotlib.pyplot.ion()   # restore the interactive state
    else:
        matplotlib.pyplot.show(block=True)
    return
def wait_anim(anim_flag, refresh_rate = 0.1):
    """Spin until ``anim_flag[0]`` becomes False, i.e. until the animation
    callback signals completion, letting the figure close automatically.

    Note: this relies on Matplotlib's (controversial) GUI event loop; there
    seems to be no other way to obtain the desired result.
    """
    while anim_flag[0]:
        # body lifted from plt.pause(...)
        current_backend = plt.rcParams['backend']
        if current_backend not in plt._interactive_bk:
            continue
        manager = plt._pylab_helpers.Gcf.get_active()
        if manager is not None:
            manager.canvas.start_event_loop(refresh_rate)
def draw_fig(fig = None):
    """Draw the artists of a figure immediately.

    If ``fig`` is None the current figure is redrawn. When interactive mode
    is off it is toggled on just for the draw, then restored, and the figure
    is shown non-blocking. (If calling this inside a loop, switching to
    interactive mode once before the loop is cheaper.)
    """
    def _redraw():
        if fig is None:
            matplotlib.pyplot.draw()
        else:
            fig.canvas.draw()
    if matplotlib.pyplot.isinteractive():
        _redraw()
    else:
        matplotlib.pyplot.ion()
        _redraw()
        matplotlib.pyplot.ioff()  # restore the interactive state
        matplotlib.pyplot.show(block=False)
    return
def pause_anim(t):
    """Pause ~t seconds while keeping the GUI event loop alive.

    Lifted from plt.pause(...) without the extra redraw work (uses the
    controversial Matplotlib event loop). Falls back to time.sleep on
    non-interactive backends.
    """
    backend_name = matplotlib.pyplot.rcParams['backend']
    if backend_name not in matplotlib.pyplot._interactive_bk:
        time.sleep(t)
        return
    manager = matplotlib.pyplot._pylab_helpers.Gcf.get_active()
    if manager is not None:
        manager.canvas.start_event_loop(t)
    return
def f(x, y):
    # Scalar field used as the animated image: a sine along x plus a cosine along y.
    return np.cos(y) + np.sin(x)
def plot_graph():
    """Build a sin/cos image and animate it for 50 frames, then block until
    the window is closed. Runs inside a child process (see main below)."""
    fig = plt.figure()
    x = np.linspace(0, 2 * np.pi, 120)
    y = np.linspace(0, 2 * np.pi, 100).reshape(-1, 1)
    im = fig.gca().imshow(f(x, y))
    draw_fig(fig)
    n_frames = 50
    # ==============================================
    # First method - direct animation: This use the start_event_loop, so is
    # somewhat controversial according to the Matplotlib doc.
    # Uncomment and put the "Second method" below into comments to test.
    for i in range(n_frames): # n_frames iterations
        x += np.pi / 15.
        y += np.pi / 20.
        im.set_array(f(x, y))
        draw_fig(fig)
        pause_anim(0.015) # plt.pause(0.015) can also be used, but is slower
    wait_fig() # simply suppress this command if you want the figure to close
               # automatically just after the animation has ended
    #================================================
    #Second method: this uses the Matplotlib prefered animation class.
    #Put the "first method" above in comments to test it.
    '''def updatefig(i, fig, im, x, y, anim_flag, n_frames):
        x = x + i * np.pi / 15.
        y = y + i * np.pi / 20.
        im.set_array(f(x, y))
        if i == n_frames - 1:
            anim_flag[0] = False
    anim_flag = [True]
    animation.FuncAnimation(fig, updatefig, repeat=False, frames=n_frames,
            interval=50, fargs=(fig, im, x, y, anim_flag, n_frames), blit=False)
    # Unfortunately, blit=True seems to causes problems
    wait_fig()'''
    # wait_anim(anim_flag) #replace the previous command by this one if you want the
                           # figure to close automatically just after the animation
                           # has ended
    # ================================================
    return
# C. Using multiprocessing to obtain the desired effects. I believe this
# method also works with the "threading" module, but I haven't test that.
def main(): # it is important that ALL the code be typed inside
            # this function, otherwise the program will do weird
            # things with the Ipython or even the Python console.
            # Outside of this condition, type nothing but import
            # clauses and function/class definitions.
    # Guard inside the function: on a re-imported child process __name__ is
    # not '__main__', so the body is skipped there.
    if __name__ != '__main__': return
    # Run the animation in a separate process so this process stays free.
    p = Process(target=plot_graph)
    p.start()
    print('hello', flush = True) #just to have something printed here
    # p.join() # suppress this command if you want the animation be executed in
    # parallel with the subsequent code
    for i in range(3): # This allows to see if execution takes place after the
        #process above, as should be the case because of p.join().
        print('world', flush = True)
        time.sleep(1)
main()
"ksia4@onet.eu"
] | ksia4@onet.eu |
c7061c5f8e05414df5cf2cb7e57ffafa7e7e342a | 9cfea31898f5db9937ffeb34b9b33d2320daae5a | /code/Andre/py/fit_unet_d8g_222_swrap_09.py | 81526826f7ff216c39a745e2e8099bdd1de4e895 | [
"MIT"
] | permissive | tjvananne/kaggle_dsb2017 | 8720953ebba2b29ae3406652352df4b691ce26e4 | 21b9afa60d8c675314f81f04d6d65c3b9f6b7d7e | refs/heads/master | 2021-01-19T13:56:38.720868 | 2017-08-28T12:22:02 | 2017-08-28T12:22:02 | 100,864,262 | 0 | 0 | null | 2017-08-20T13:44:03 | 2017-08-20T13:44:03 | null | UTF-8 | Python | false | false | 19,086 | py | """
Created on Thu Jan 26 17:04:11 2017
@author: Andre Stochniol, andre@stochniol.com
Fit unet style nodule identifier on Luna databaset using 8-grid scheme
Physical resolution 2x2x2mm
Data aggregated, shuffled; wrap augmentation used (swrap)
"""
import numpy as np
from keras.models import load_model,Model
from keras.layers import MaxPooling3D
from keras.layers import Convolution3D
from keras.layers import Input, merge, UpSampling3D
from keras.optimizers import Adam
from keras import backend as K
#from keras.preprocessing.image import ImageDataGenerator # Keras original
from image_as_mod3d_2dmask import ImageDataGenerator # our modified version
K.set_image_dim_ordering('th')
smooth = 1.
def dice_coef(y_true, y_pred):
    """Soft Dice coefficient on Keras tensors, smoothed by the module-level
    ``smooth`` constant so the ratio is defined for empty masks."""
    true_flat = K.flatten(y_true)
    pred_flat = K.flatten(y_pred)
    overlap = K.sum(true_flat * pred_flat)
    return (2. * overlap + smooth) / (K.sum(true_flat) + K.sum(pred_flat) + smooth)
DICE_LOW_LIMIT = 0 ## was 0.001
def dice_coef_np(y_true, y_pred):
    """NumPy Dice coefficient; near-certain predictions are clamped to hard
    0/1 (a no-op for values in [0, 1] while DICE_LOW_LIMIT == 0)."""
    truth = y_true.flatten()
    pred = y_pred.flatten()   # flatten() copies, so the caller's array is untouched
    pred[pred < DICE_LOW_LIMIT] = 0.
    pred[pred > 1 - DICE_LOW_LIMIT] = 1.
    overlap = np.sum(truth * pred)
    return (2. * overlap + smooth) / (np.sum(truth) + np.sum(pred) + smooth)
def dice_coef_pos_np(y_true, y_pred, pos = 0):
    """NumPy Dice coefficient restricted to channel ``pos`` (second axis)."""
    truth = y_true[:, pos].flatten()
    pred = y_pred[:, pos].flatten()
    overlap = np.sum(truth * pred)
    return (2. * overlap + smooth) / (np.sum(truth) + np.sum(pred) + smooth)
def dice_coef_loss(y_true, y_pred):
    # Negated Dice coefficient so that maximizing overlap minimizes the loss.
    return -dice_coef(y_true, y_pred)
def unet_model_xd3_2_6l_grid(nb_filter=48, dim=5, clen=3 , img_rows=224, img_cols=224 ): # NOTE that this procedure is/should be used with img_rows & img_cols as None
    """Build and compile a 3D U-Net-style nodule segmenter.

    Input is (1, dim, img_rows, img_cols); output is a 1-channel sigmoid map
    whose depth has been collapsed to 1 by the trailing (2,1,1) poolings, so
    the prediction is effectively a 2D mask. Compiled with Adam(1e-4) and the
    Dice loss/metric defined above.
    """
    # aiming for architecture similar to the http://cs231n.stanford.edu/reports2016/317_Report.pdf
    # Our model is six layers deep, consisting of a series of three CONV-RELU-POOL layyers (with 32, 32, and 64 3x3 filters), a CONV-RELU layer (with 128 3x3 filters), three UPSCALE-CONV-RELU lay- ers (with 64, 32, and 32 3x3 filters), and a final 1x1 CONV- SIGMOID layer to output pixel-level predictions. Its struc- ture resembles Figure 2, though with the number of pixels, filters, and levels as described here
    ## 3D CNN version of a previously developed unet_model_xd_6j
    zconv = clen
    inputs = Input((1, dim, img_rows, img_cols))
    # --- contracting path ---
    conv1 = Convolution3D(nb_filter, zconv, clen, clen, activation='relu', border_mode='same')(inputs)
    conv1 = Convolution3D(nb_filter, zconv, clen, clen, activation='relu', border_mode='same')(conv1)
    pool1 = MaxPooling3D(pool_size=(2, 2, 2))(conv1)
    conv2 = Convolution3D(2*nb_filter, zconv, clen, clen, activation='relu', border_mode='same')(pool1)
    conv2 = Convolution3D(2*nb_filter, zconv, clen, clen, activation='relu', border_mode='same')(conv2)
    pool2 = MaxPooling3D(pool_size=(2, 2, 2))(conv2)
    conv4 = Convolution3D(4*nb_filter, zconv, clen, clen, activation='relu', border_mode='same')(pool2)
    conv4 = Convolution3D(4*nb_filter, zconv, clen, clen, activation='relu', border_mode='same')(conv4)
    # --- expanding path with skip connections (channel axis = 1) ---
    up6 = merge([UpSampling3D(size=(2, 2, 2))(conv4), conv2], mode='concat', concat_axis=1)
    conv6 = Convolution3D(2*nb_filter, zconv, clen, clen, activation='relu', border_mode='same')(up6)
    conv6 = Convolution3D(2*nb_filter, zconv, clen, clen, activation='relu', border_mode='same')(conv6)
    up7 = merge([UpSampling3D(size=(2, 2, 2))(conv6), conv1], mode='concat', concat_axis=1)  # original - only works for even dim
    conv7 = Convolution3D(nb_filter, zconv, clen, clen, activation='relu', border_mode='same')(up7)
    conv7 = Convolution3D(nb_filter, zconv, clen, clen, activation='relu', border_mode='same')(conv7)
    # --- collapse the depth axis (pool only along z) down to a single slice ---
    pool11 = MaxPooling3D(pool_size=(2, 1, 1))(conv7)
    conv12 = Convolution3D(2*nb_filter, zconv, 1, 1, activation='relu', border_mode='same')(pool11)
    conv12 = Convolution3D(2*nb_filter, zconv, 1, 1, activation='relu', border_mode='same')(conv12)
    pool12 = MaxPooling3D(pool_size=(2, 1, 1))(conv12)
    conv13 = Convolution3D(2*nb_filter, zconv, 1, 1, activation='relu', border_mode='same')(pool12)
    conv13 = Convolution3D(2*nb_filter, zconv, 1, 1, activation='relu', border_mode='same')(conv13)
    pool13 = MaxPooling3D(pool_size=(2, 1, 1))(conv13)
    if (dim < 16):
        conv8 = Convolution3D(1, 1, 1, 1, activation='sigmoid')(pool13)
    else:   # need one extra layer to get to 1D x 2D mask ...
        conv14 = Convolution3D(2*nb_filter, zconv, 1, 1, activation='relu', border_mode='same')(pool13)
        conv14 = Convolution3D(2*nb_filter, zconv, 1, 1, activation='relu', border_mode='same')(conv14)
        pool14 = MaxPooling3D(pool_size=(2, 1, 1))(conv14)
        conv8 = Convolution3D(1, 1, 1, 1, activation='sigmoid')(pool14)
    model = Model(input=inputs, output=conv8)
    model.compile(optimizer=Adam(lr=1e-4), loss=dice_coef_loss, metrics=[dice_coef])
    #model.compile(optimizer=Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0), loss=dice_coef_loss, metrics=[dice_coef])
    return model
def grid_data(source, grid=32, crop=16, expand=12):
    """Cut a 5D stack (n, ch, depth, height, width) into grid cells.

    After dropping ``crop`` pixels from each border, the remaining area is
    tiled into grid x grid cells, each padded by ``expand`` pixels of
    surrounding context. Cells are stacked along axis 0, row-major.
    Returns (cells, gridwidth, gridheight).
    """
    height = source.shape[3]   # e.g. 224 in the fixed-size mode
    width = source.shape[4]
    gridheight = (height - 2 * crop) // grid   # e.g. 6 for our data
    gridwidth = (width - 2 * crop) // grid
    tiles = []
    for row in range(gridheight):
        y0 = crop + row * grid - expand
        y1 = crop + (row + 1) * grid + expand
        for col in range(gridwidth):
            x0 = crop + col * grid - expand
            x1 = crop + (col + 1) * grid + expand
            tiles.append(source[:, :, :, y0:y1, x0:x1])
    return np.vstack(tiles), gridwidth, gridheight
def data_from_grid (cells, gridwidth, gridheight, grid=32):
    """Inverse of grid_data: reassemble grid cells into full-size images.

    Crops each cell back to grid x grid (undoing the ``expand`` margin), then
    reshapes/transposes so that the gridheight x gridwidth tiles are stitched
    into one (grid*gridheight) x (grid*gridwidth) plane per sample.
    Assumes cells are ordered block-major, i.e. all samples of tile 0, then
    all samples of tile 1, ... (see data_from_grid_by_proximity for the
    sample-major ordering).
    """
    width = cells.shape[4]
    crop = (width - grid ) // 2    ## for simplicity we are assuming the same crop (and grid) in x & y directions
    if crop > 0:    # do NOT crop with 0 as we get empty cells ...
        cells = cells[:,:,:,crop:-crop,crop:-crop]
    shape = cells.shape
    # split axis 0 into (tile, sample)
    new_shape_1_dim = shape[0]// (gridwidth * gridheight)  # ws // 36 -- Improved on 20170306
    new_shape = (gridwidth * gridheight, new_shape_1_dim, ) +  tuple([x for x in shape][1:])   # was 36, Improved on 20170306
    cells = np.reshape(cells, new_shape)
    # move the tile axis next to the spatial axes ...
    cells = np.moveaxis(cells, 0, -3)
    shape = cells.shape
    # ... split it into (row, col) of the grid ...
    new_shape2 = tuple([x for x in shape[0:3]]) + (gridheight, gridwidth,) + tuple([x for x in shape[4:]])
    cells = np.reshape(cells, new_shape2)
    # ... interleave grid rows with cell rows so tiles line up spatially ...
    cells = cells.swapaxes(-2, -3)
    shape = cells.shape
    # ... and merge (row, cell-height) and (col, cell-width) into full planes
    combine_shape =tuple([x for x in shape[0:3]]) + (shape[-4]*shape[-3],  shape[-2]*shape[-1],)
    cells = np.reshape(cells, combine_shape)
    return cells
def data_from_grid_by_proximity (cells, gridwidth, gridheight, grid=32):
    """Like data_from_grid, but for cells stored sample-major (all tiles of
    sample 0 first, then sample 1, ...): first reorder axis 0 from
    sample-major to the tile-major layout data_from_grid expects, then
    delegate to it."""
    # disperse the sequential dats into layers and then use data_from_grid
    shape = cells.shape
    new_shape_1_dim = shape[0]// (gridwidth * gridheight)  # ws // 36 -- Improved on 20170306
    ### NOTE tha we invert the order of shapes below to get the required proximity type ordering
    new_shape = (new_shape_1_dim, gridwidth * gridheight, ) +  tuple([x for x in shape][1:])   # was 36, Improved on 20170306
    #new_shape = (gridwidth * gridheight, new_shape_1_dim, ) +  tuple([x for x in shape][1:])   # was 36, Improved on 20170306
    # swap ordering of axes
    cells = np.reshape(cells, new_shape)
    cells = cells.swapaxes(0, 1)
    cells = np.reshape(cells, shape)
    cells = data_from_grid (cells, gridwidth, gridheight, grid)
    return cells
def load_aggregate_masks_scans (masks_mnames, grids, upgrid_multis):
    """Load, shuffle and progressively up-grid mask/scan .npz pairs.

    masks_mnames is a list (one entry per grid level) of lists of mask file
    stems; the matching scan file is derived by replacing "masks_" with
    "scans_". At each level the previous level's output is stacked in,
    everything is shuffled (uses the global NumPy RNG state -- seed outside
    for reproducibility), trimmed to a multiple of upgrid_multi**2, and
    stitched into larger tiles via data_from_grid_by_proximity.
    Returns (masks, scans).
    """
    scans = []
    masks = []
    igrid = 0
    for masks_names in masks_mnames:
        if (len(masks_names) > 0):
            grid = grids[igrid]
            upgrid_multi = upgrid_multis[igrid]
            upgcount = upgrid_multi * upgrid_multi
            scans1 = []
            masks1 = []
            for masks_name in masks_names:
                print ("Loading: ", masks_name)
                masks0 = np.load(''.join((masks_name, ".npz")))['arr_0']
                # scan file shares the stem, with "masks_" -> "scans_"
                scans0 = np.load(''.join((masks_name.replace("masks_", "scans_", 1), ".npz")))['arr_0']
                masks1.append(masks0)
                scans1.append(scans0)
            scans1 = np.vstack(scans1)
            masks1 = np.vstack(masks1)
            # carry the previous (already up-gridded) level along
            if len(masks) > 0:
                scans1 = np.vstack([scans1, scans])
                masks1 = np.vstack([masks1, masks])
            lm = len(masks1) // upgcount * upgcount
            scans1 = scans1[0:lm]   # cut to multiples of upgcount
            masks1 = masks1[0:lm]
            # joint shuffle so masks stay aligned with their scans
            index_shuf = np.arange(lm)
            np.random.shuffle(index_shuf)
            scans1 = scans1[index_shuf]
            masks1 = masks1[index_shuf]
            scans = data_from_grid_by_proximity(scans1, upgrid_multi, upgrid_multi, grid=grid)
            masks = data_from_grid_by_proximity(masks1, upgrid_multi, upgrid_multi, grid=grid)
            igrid += 1
    return masks, scans
def load_aggregate_masks_scans_downsample2 (masks_mnames, grids, upgrid_multis, down_base):
    """Variant of load_aggregate_masks_scans that randomly down-samples large
    files before aggregation.

    Files longer than 50000 entries are shuffled and thinned by a factor of
    down_base * 4**igrid (stronger thinning at higher grid levels); the rest
    of the pipeline (stacking, joint shuffling, trimming to a multiple of
    upgrid_multi**2, up-gridding by proximity) matches
    load_aggregate_masks_scans. Returns (masks, scans).
    """
    scans = []
    masks = []
    down_size = 50000
    igrid = 0
    for masks_names in masks_mnames:
        if (len(masks_names) > 0):
            grid = grids[igrid]
            upgrid_multi = upgrid_multis[igrid]
            upgcount = upgrid_multi * upgrid_multi
            scans1 = []
            masks1 = []
            for masks_name in masks_names:
                print ("Loading: ", masks_name)
                masks0 = np.load(''.join((masks_name, ".npz")))['arr_0']
                scans0 = np.load(''.join((masks_name.replace("masks_", "scans_", 1), ".npz")))['arr_0']
                if igrid >= 0:   # NOTE(review): always true (igrid starts at 0); likely a leftover switch
                    down = down_base * (4 ** igrid)    # dynamic
                    if len(masks0) > down_size and down > 1:
                        print("Down-sampling masks0/scans0 by: ", masks_name, down)
                        # shuffle first so the every-`down`-th selection is random
                        lm = len(masks0)
                        index_shuf = np.arange(lm)
                        np.random.shuffle(index_shuf)
                        scans0 = scans0[index_shuf]
                        masks0 = masks0[index_shuf]
                        masks0 = masks0[0:len(masks0):down]
                        scans0 = scans0[0:len(scans0):down]
                masks1.append(masks0)
                scans1.append(scans0)
            scans1 = np.vstack(scans1)
            masks1 = np.vstack(masks1)
            # carry the previous (already up-gridded) level along
            if len(masks) > 0:
                scans1 = np.vstack([scans1, scans])
                masks1 = np.vstack([masks1, masks])
            lm = len(masks1) // upgcount * upgcount
            scans1 = scans1[0:lm]   # cut to multiples of upgcount
            masks1 = masks1[0:lm]
            # joint shuffle so masks stay aligned with their scans
            index_shuf = np.arange(lm)
            np.random.shuffle(index_shuf)
            scans1 = scans1[index_shuf]
            masks1 = masks1[index_shuf]
            scans = data_from_grid_by_proximity(scans1, upgrid_multi, upgrid_multi, grid=grid)
            masks = data_from_grid_by_proximity(masks1, upgrid_multi, upgrid_multi, grid=grid)
            igrid += 1
    return masks, scans
if __name__ == '__main__':
    # Training driver: loads Luna grid data, builds or resumes the 3D U-Net,
    # trains one stage with wrap-augmentation and reports Dice on validation.
    # Key initial parameters
    dim = 8
    start_from_scratch = False
    load_initial_weights = False
    if start_from_scratch and load_initial_weights:
        model_weights_name_to_start_from = "../luna/models/d8_2x2x2_best_weights.h5"  # only used when start_from_scratch is True and load_initial_weights is True
    ### KEY running parameteres
    nb_epoch = 1
    model_load_name = '../luna/models/d8g4a_model_71.h5'
    model_save_name = '../luna/models/d8g4a_model_72.h5'  ### MUST include "_model" string as we use this for a substituion for weights file
    seed = 71000  # should be varied by steps/stages
    downsample = 1
    set_lr_value = False
    new_lr_value = 1e-5  # only used when set_lr_value is True
    use_large_validation = True
    grids = [20, 40]
    upgrid_multis = [2, 2]  # we modify only the last one if/as needed
    batch_size = 7 * int((8 // upgrid_multis[1])**2)  # calculated for a 12GB graphics card (such as Tesla K80/AWS P2 system)
    # training data: per grid level, nodule and blank (negative) cell files
    masks_mnames = [
    [
    "../luna/models/masks_d8g1x20ba4a_2x2x2_nodules_0_3_6860",
    "../luna/models/masks_d8g1x20ba4a_2x2x2_nodules_4_8_8178",
    "../luna/models/masks_d8g1x20ba4a_2x2x2_blanks_0_3_68442",
    "../luna/models/masks_d8g1x20ba4a_2x2x2_blanks_4_8_97406"
    ],
    [
    "../luna/models/masks_d8g1x40ba4a_2x2x2_nodules_0_3_5940",
    "../luna/models/masks_d8g1x40ba4a_2x2x2_nodules_4_8_6925",
    "../luna/models/masks_d8g1x40ba4a_2x2x2_blanks_0_3_52367",  ## unblock this one
    "../luna/models/masks_d8g1x40ba4a_2x2x2_blanks_4_8_74880"
    ]]
    # small validation set: nodules only (subset 9)
    masks_val_mnames = [
    [
    "../luna/models/masks_d8g1x20ba4a_2x2x2_nodules_9_9_1442"
    ],
    [
    "../luna/models/masks_d8g1x40ba4a_2x2x2_nodules_9_9_1101"
    ]]
    # large validation set: nodules plus blanks
    masks_val_large_mnames = [
    [
    "../luna/models/masks_d8g1x20ba4a_2x2x2_nodules_9_9_1442",
    "../luna/models/masks_d8g1x20ba4a_2x2x2_blanks_9_9_19861"
    ],
    [
    "../luna/models/masks_d8g1x40ba4a_2x2x2_nodules_9_9_1101",
    #"../luna/models/masks_d8g1x40ba4a_2x2x2_blanks_9_9_15122"
    ]]
    np.random.seed(seed)
    masks, scans = load_aggregate_masks_scans_downsample2 (masks_mnames, grids, upgrid_multis, downsample)
    print ("Masks and Scans shapes: ", masks.shape, scans.shape)
    masks[masks < 0] = 0  # just in case (eliminate the blanks's marking)
    if masks.shape[2] > 1:
        masks = masks[:,:,masks.shape[2] // 2]  ## select the central value as this one contains still all data
    masks = masks[:, np.newaxis]
    print ("Masks shape after 2D mapping: ", masks.shape)
    masks_val, scans_val = load_aggregate_masks_scans (masks_val_mnames, grids, upgrid_multis)
    print ("Val Masks and Scans shapes: ", masks_val.shape, scans_val.shape)
    masks_val[masks_val < 0] = 0
    if masks_val.shape[2] > 1:
        masks_val = masks_val[:,:,masks_val.shape[2] // 2]  ## select the central value as this one contains still all data
    masks_val = masks_val[:, np.newaxis]
    print ("Masks_val shape after 2D mapping: ", masks_val.shape)
    masks_val_large, scans_val_large = load_aggregate_masks_scans (masks_val_large_mnames, grids, upgrid_multis)
    print ("Large Val Masks and Scans shapes: ", masks_val_large.shape, scans_val_large.shape)
    masks_val_large[masks_val_large < 0] = 0
    if masks_val_large.shape[2] > 1:
        masks_val_large = masks_val_large[:,:,masks_val_large.shape[2] // 2]  ## select the central value as this one contains still all data
    masks_val_large = masks_val_large[:, np.newaxis]
    print ("Large Val Masks shape after 2D mapping: ", masks_val_large.shape)
    # build a fresh model or resume from a saved one
    if start_from_scratch:
        model = unet_model_xd3_2_6l_grid(nb_filter=20, dim=dim, clen=3, img_rows=None , img_cols=None )
        print(model.summary())
        if load_initial_weights:
            model_weights_name = model_weights_name_to_start_from  ### could potentially load best weights
            model.load_weights(model_weights_name)
            print("Weights and output models: ", model_weights_name, model_save_name)
        else:
            print("Start from scratch (no weights),output models: ", model_save_name)
    else:
        ## load_previous_model
        print ("Loading model: ", model_load_name)
        # custom objects are required so Keras can deserialize the Dice loss/metric
        model = load_model(model_load_name,  #3
                custom_objects={'dice_coef_loss': dice_coef_loss,
                                'dice_coef': dice_coef
                                }
                )
        #print(model.summary())
        print("Load and output models: ", model_load_name, model_save_name)
    ## set the data ...
    masks = masks.astype(np.int16)
    final_couple_of_iterations = False
    if final_couple_of_iterations:
        # fold the validation nodules into training for the last stage(s)
        masks = np.concatenate((masks, masks_val))
        scans = np.concatenate((scans, scans_val))
    # light shift augmentation with "wrap" fill (the swrap scheme)
    data_gen_args = dict(featurewise_center=False,
                     samplewise_center=False,
                     featurewise_std_normalization=False,
                     samplewise_std_normalization=False,
                     zca_whitening=False,
                     rotation_range=0,
                     width_shift_range=0.02,   # was 0.5
                     height_shift_range=0.02,  # was 0.5
                     horizontal_flip=False,    # was True
                     vertical_flip=False,
                     fill_mode= "wrap",
                     zoom_range=0
                     )
    image_datagen = ImageDataGenerator(**data_gen_args)
    mask_datagen = ImageDataGenerator(**data_gen_args)
    # Provide the same seed and keyword arguments to the fit and flow methods
    shuffle = True  # default
    image_datagen.fit(scans, augment=True, seed=seed)
    mask_datagen.fit(masks, augment=True, seed=seed)
    image_generator = image_datagen.flow(scans,
        batch_size = batch_size,
        #shuffle = shuffle,
        seed=seed)
    mask_generator = mask_datagen.flow(masks,
        batch_size = batch_size,
        #shuffle = shuffle,
        seed=seed)
    # combine generators into one which yields image and masks
    train_generator = zip(image_generator, mask_generator)
    if set_lr_value:
        print("Model learning rate (old): ", model.optimizer.lr.get_value())  # was 1e-4
        model.optimizer.lr.set_value(new_lr_value)
        print("Model learning rate(new): ", model.optimizer.lr.get_value())
    samples_per_epoch = masks.shape[0]
    model.fit_generator(
        train_generator,
        samples_per_epoch= samples_per_epoch,
        nb_epoch = nb_epoch,
        validation_data = ( scans_val, masks_val),
        verbose=1)
    # persist both the full model and a parallel weights-only file
    model.save(model_save_name)
    model.save_weights(model_save_name.replace("_model", "_weights", 1))
    masks_pred = model.predict(scans_val, verbose=1)
    dice_check = dice_coef_np(masks_val, masks_pred)
    print ("dice_check: ", dice_check)
    if use_large_validation:
        masks_pred_large = model.predict(scans_val_large, batch_size =1, verbose=1)
        dice_check = dice_coef_np(masks_val_large, masks_pred_large)
        print ("Full dice_check: ", dice_check)
    print("Model learning rate: ", model.optimizer.lr.get_value())
| [
"andre@stochniol.com"
] | andre@stochniol.com |
477013e5b80fbe6433a29b90140bcb738762033d | 0a53386c822a0da605cba397935620d2f919032d | /dfs/number-of-islands.py | 7659c48f20b2b5280443bb6f42eca0437fc2158c | [] | no_license | casprice/leetcode | 6eb893302db891b90efbeece21d64b9906ebe414 | 6cf53450dac3d7dfa70dd0adf001826f70f0d93d | refs/heads/master | 2021-05-20T11:53:44.014003 | 2021-03-02T06:19:01 | 2021-03-02T06:19:01 | 252,284,532 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,494 | py | """
Given an m x n 2d grid map of '1's (land) and '0's (water), return the number of islands.
An island is surrounded by water and is formed by connecting adjacent lands horizontally or vertically.
You may assume all four edges of the grid are all surrounded by water.
Example 1:
Input: grid = [
["1","1","1","1","0"],
["1","1","0","1","0"],
["1","1","0","0","0"],
["0","0","0","0","0"]
]
Output: 1
Example 2:
Input: grid = [
["1","1","0","0","0"],
["1","1","0","0","0"],
["0","0","1","0","0"],
["0","0","0","1","1"]
]
Output: 3
Constraints:
m == grid.length
n == grid[i].length
1 <= m, n <= 300
grid[i][j] is '0' or '1'.
"""
class Solution(object):
def numIslands(self, grid):
"""
:type grid: List[List[str]]
:rtype: int
"""
if grid == None or len(grid) == 0:
return 0
numIslands = 0
def sinkIsland(i, j, grid):
if (i < 0 or i >= len(grid) or j < 0 or j >= len(grid[0]) or grid[i][j] == "0"):
return
grid[i][j] = "0"
sinkIsland(i+1, j, grid)
sinkIsland(i-1, j, grid)
sinkIsland(i, j+1, grid)
sinkIsland(i, j-1, grid)
for i in range(len(grid)):
for j in range(len(grid[0])):
if grid[i][j] == "1":
numIslands += 1
sinkIsland(i, j, grid)
return numIslands | [
"caseyprice217@gmail.com"
] | caseyprice217@gmail.com |
6b06d564d2d0c4a0ab94bf7e6bb3e0a1bdeba2d5 | 25cb12b478069983bc1a95759e8532e477aa060a | /pythonclub/clubapp/views.py | 01245376092594b26ee893b35f5a77564f9123a8 | [] | no_license | loowa103118/itc172 | c5ef06caafef729c7459252700ac5aebab322443 | d1eecf4a8602e6edabb568393249d3370a28ca45 | refs/heads/master | 2021-02-16T17:34:28.275125 | 2020-03-05T20:56:18 | 2020-03-05T20:56:18 | 245,029,503 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 329 | py | from django.shortcuts import render
from .models import Meeting
# Create your views here.
def index (request):
return render(request, 'clubapp/index.html')
def getmeetings(request):
type_list=Meeting.objects.all()
return render(request, 'clubapp/meeting.html', {'type_list': type_list})
| [
"noreply@github.com"
] | noreply@github.com |
3d6cb95fc47c06f4f11195e9bf3a907133255e25 | 9d2ad070773c6d594143180a1414fce2fb58f724 | /InvertedIndex2/mapper.py | 87465af510d498438a6ca484cfadb8352df42a73 | [] | no_license | ndhuanhuan/Map-Reduce-Recipe | 6387c26a3ac32147358490e0728f817dddb305cb | c100019a0f2f4a3916fd4c4526ae96b27ba49384 | refs/heads/master | 2021-01-10T19:47:59.165228 | 2015-12-03T05:29:42 | 2015-12-03T05:29:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 575 | py | #! /usr/bin/python
import sys
def read_mapper_input(stdin):
for line in stdin:
yield line.rstrip()
def mapper():
for line in read_mapper_input(sys.stdin):
# one line is one document
data = line.strip().split(' ')
line_id = data[0]
tf_dict = dict()
for word in data[1:]:
tf_dict[word] = tf_dict.get(word, 0) + 1
for item in tf_dict.items():
print "{0}\t{1}\t{2}".format(item[0], line_id, item[1])
#word, docid, term frequency
if __name__ == "__main__":
mapper() | [
"linkin816@gmail.com"
] | linkin816@gmail.com |
91a7fe6d28615378758879c6f36075f5adedbe01 | b3ba44e47a9632007601a87df9ac5d39adb2bc4a | /PhaseTwo/classify.py | 0e8627690b4ac1384df0955c69d9829b7f447266 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | FredrikBakken/TDT4305_Big-Data-Project | ece04fb81b3a664bfa24b6ce3bea421baa873f7e | 982ead4d4c76da8217b05b8352e7999204a8eb85 | refs/heads/master | 2021-01-25T12:19:25.123121 | 2018-04-24T11:11:55 | 2018-04-24T11:11:55 | 123,464,318 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,646 | py | '''
' Classify.py
'''
import argparse
from pyspark import SparkConf, SparkContext
# Used headings
PLACE_NAME = 4
TWEET_TEXT = 10
# Generate a mapped training set sample (place and tweet)
def generate_training_set(data):
training_sample = data.sample(False, 0.1, 5) # According to specifications in presentation
return training_sample.map(lambda pt : (pt[PLACE_NAME], pt[TWEET_TEXT].lower().split(' ')))
# Read input tweet text(s) and return as list of lists
def read_input_tweet(input_file):
tweets = []
with open(input_file) as f:
for line in f:
tweet = line.lower().strip().split(' ')
tweets.append(tweet)
return tweets
# Return total number of tweets
def get_total_number_of_tweets(training_set):
return training_set.count()
# Count number of tweets with specific word
# occurrences | Counts occurrence of each word in input tweet
# tweet | Tweet text
# input_tweet | Text from input file
def occurrence_counter(occurrences, tweet, input_tweet):
for i in range(len(input_tweet)):
if input_tweet[i] in tweet:
occurrences[i] += 1
return occurrences
# Handling of probability functionality
def handle_probability(training_set, total_number_of_tweets, input_tweet):
word_counter_list = [0] * len(input_tweet)
return training_set.aggregateByKey( (word_counter_list, 0),
lambda x, tweet : (occurrence_counter(x[0], tweet, input_tweet), x[1] + 1),
lambda rdd1, rdd2: ([rdd1[0][i] + rdd2[0][i] for i in range(len(rdd1[0]))], rdd1[1] + rdd2[1])) \
.filter(lambda x: all(i > 0 for i in x[1][0])) \
.map(lambda x : (x[0], get_location_probability(x[1], total_number_of_tweets)))
# Get probability for each location
def get_location_probability(incidents, total_number_of_tweets):
probability = (float(incidents[1]) / float(total_number_of_tweets))
for word_count in incidents[0]:
probability *= (float(word_count) / float(incidents[1]))
return probability
# Find the location(s) with the highest probable similarity
def find_highest_probability_location(probability_data):
if probability_data.count() <= 0:
return None
highest_probability = probability_data.max(key=lambda x : x[1])
return probability_data.filter(lambda x : x[1] == highest_probability[1]).collect()
# Store probability results to file
def store_probabilities(probability_data, output_file):
with open(output_file, 'a') as of:
if probability_data == None:
of.write('\n')
else:
for location in probability_data:
of.write(location[0] + '\t')
of.write(str(location[1]) + '\n')
def classifier(training_file, input_file, output_file):
conf = SparkConf().setMaster('local[*]').setAppName('TDT4305: Big Data Architecture - Project Phase 2')
sc = SparkContext(conf = conf)
raw_data = sc.textFile(training_file, use_unicode=False) # Set "use_unicode=True" if there is a TypeError
data = raw_data.map(lambda x: x.split('\n')[0].split('\t'))
# Training set file
training_set = generate_training_set(data)
# Input file
input_data = read_input_tweet(input_file)
# Get total number of tweets
total_number_of_tweets = get_total_number_of_tweets(training_set)
for input_tweet in input_data:
# Handle and calculate probabilities
probability_data = handle_probability(training_set, total_number_of_tweets, input_tweet)
# Find location(s) with highest probable similarity
highest_probability = find_highest_probability_location(probability_data)
# Store results to file
store_probabilities(highest_probability, output_file)
if __name__ == '__main__':
# Argparse documentation: https://docs.python.org/3/library/argparse.html
parser = argparse.ArgumentParser(description='Add paths for training, input, and output files.')
parser.add_argument('-training', metavar='-t', type=str, help='Full path of the training file.')
parser.add_argument('-input', metavar='-i', type=str, help='Full path of the input file.')
parser.add_argument('-output', metavar='-o', type=str, help='Full path of the output file.')
args = parser.parse_args()
if args.training != None and args.input != None and args.output != None:
classifier(args.training, args.input, args.output)
else:
print('Missing parameters, make sure to add necessary parameters when executing.')
| [
"fredda10x@gmail.com"
] | fredda10x@gmail.com |
ba0c3046a3a8852a216474f3af4020d288e50a42 | 84eb5adc8b4278e41e33e13c452c02a7e431474e | /getWifiPassword.py | 1fbe92836f4f823b0b829d4a49713ae054b0ad05 | [] | no_license | zhuixunfreedom/GetWifiPassword | 680b14e188630b3a65b1e34dea057b194f0b284b | 3e31d19c4d9facb465112993e6d2c44765e591e0 | refs/heads/master | 2020-07-26T07:30:48.568488 | 2019-09-15T10:50:25 | 2019-09-15T10:50:25 | 208,577,565 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,834 | py | import pywifi
import time
from pywifi import const
class PoJie:
def __init__(self, name):
self.name = name
wifi = pywifi.PyWiFi() # 抓取网卡接口
self.iface = wifi.interfaces()[0] # 获取网卡
self.iface.disconnect() # 断开所有连接
time.sleep(1)
if self.iface.status() in [const.IFACE_DISCONNECTED,
const.IFACE_INACTIVE]: # 测试是否已经断开网卡连接
print("已经成功断开网卡连接")
else:
print("网卡连接断开失败")
def solve(self):
x = 0
f = open('D:\password_8.txt', 'r')
while True:
line = f.readline().replace("\n", "")
x += 1
print('正在尝试第%d次' % x)
profile = pywifi.Profile() # 创建wifi配置对象
profile.ssid = self.name # wifi名称
profile.key = line # WiFi密码
profile.auth = const.AUTH_ALG_OPEN # 网卡的开放
profile.akm.append(const.AKM_TYPE_WPA2PSK) # wifi加密算法,一般是 WPA2PSK
profile.cipher = const.CIPHER_TYPE_CCMP # 加密单元
# self.iface.remove_all_network_profiles() # 删除所有的wifi文件
tem_profile = self.iface.add_network_profile(profile) # 添加新的WiFi文件
self.iface.connect(tem_profile) # 连接
time.sleep(3) # 连接需要时间
if self.iface.status() == const.IFACE_CONNECTED: # 判断是否连接成功
print("成功连接,密码是%s" % line)
break
else:
print("连接失败,密码是%s" % line)
if __name__ == "__main__":
name = 'yltwifi' # 需要破解的wifi名称
obj = PoJie(name=name)
obj.solve()
| [
"noreply@github.com"
] | noreply@github.com |
53cf62af1ec166ceb31aee038518a14e5a1f77e8 | d0fa2ee7a3e7dda964a7b7a9f6b4e54a0d8f394b | /service.py | a1dfb03bca5c81918027c699374f46fe6c344638 | [] | no_license | swachalit/instance_manager | 891b70748a500f4cf2f44feba1b3f91220fe069d | 22430b68e167f3c503eefbdbd95217826d838daa | refs/heads/master | 2020-03-07T22:16:28.551104 | 2016-11-06T13:01:04 | 2016-11-06T13:01:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,429 | py | # -*- coding: utf-8 -*-
import boto3
import sys
from botocore.exceptions import ClientError
def start_instances(instance_ids):
ec2_conn = boto3.client('ec2')
try:
starting = ec2_conn.start_instances(InstanceIds=instance_ids)
waiter = ec2_conn.get_waiter('instance_running')
waiter.wait(InstanceIds=instance_ids)
print("Started instances: %s" % ", ".join(instance_ids))
except ClientError as e:
print("Error while starting instances: %s" % e)
except:
print("Unknown error occured while starting instances: %s " % sys.exc_info()[0])
raise
def stop_instances(instance_ids):
ec2_conn = boto3.client('ec2')
try:
stopping = ec2_conn.stop_instances(InstanceIds=instance_ids)
waiter = ec2_conn.get_waiter('instance_stopped')
waiter.wait(InstanceIds=instance_ids)
print("Stopped instances: %s" % ", ".join(instance_ids))
except ClientError as e:
print("Error while stopping instances: %s" % e)
except:
print("Unknown error occured while stopping instances: %s " % sys.exc_info()[0])
raise
def get_instances(args):
"""
event:
{
"tags": {
"Purpose": "lambda-testing",
"Powersave": "true"
},
"state": "stop"
}
"""
ec2_conn = boto3.client('ec2')
if 'instance_id' in args.keys():
instances = ec2_conn.describe_instances(InstanceIds=[args['instance_id']])
if 'tags' in args.keys():
filters = []
for key, value in args['tags'].items():
filters.append(
{
'Name': 'tag:' + key,
'Values': [
value
]})
instances = ec2_conn.describe_instances(Filters=filters)
return instances
def get_instance_ids(instances):
instanceids_list = []
for reservation in instances['Reservations']:
for inst in reservation['Instances']:
instanceids_list.append(inst['InstanceId'])
return instanceids_list
def handler(event, context):
instances = get_instances(event)
instanceids = []
if len(instances) > 0:
instanceids = get_instance_ids(instances)
if event.get('state') == 'start' and len(instanceids) > 0:
print("Starting instances: %s" % ", ".join(instanceids))
start_instances(instanceids)
return "Ok!"
elif event.get('state') == 'stop' and len(instanceids) > 0:
print("Stopping instances: %s" % ", ".join(instanceids))
stop_instances(instanceids)
return "Ok!"
else:
return "No instances found"
| [
"lauriku@gmail.com"
] | lauriku@gmail.com |
896b169f6eb92d85606ba89c666380d61e7ab184 | d78ba4c4a3de3e0fc78ee7ac8ac615a2d2f40fd7 | /purepython/stepikcourse/01/06/task012.py | 4544eb20963bdc0911dafb5c4a41a0b8d83c873c | [] | no_license | vngilev/python_study | dd069461de383e50959920cb5201e491e7510f54 | 44a0d3ae60f8148e3b3e93599e3b62748528deaa | refs/heads/master | 2020-04-02T17:36:08.690187 | 2018-11-16T08:06:07 | 2018-11-16T08:06:07 | 154,664,003 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 223 | py | import pdb
class A:
def foo(self):
print("A")
class B(A):
pass
class C(A):
def foo(self):
print("C")
class D:
def foo(self):
print("D")
class E(B, C, D):
pass
pdb.set_trace()
E().foo() | [
"vngilev@yandex.ru"
] | vngilev@yandex.ru |
b66b553c25e7ecba6e29529142ca10f6964daa28 | c710de1d5d9af53140f4ad6ff3ddac509878c00e | /zqxt_tmpl/zqxt_tmpl/urls.py | ce908befded2f8a4c72d07225bac627594add005 | [] | no_license | zspo/Django | 417b5ce9a3b039354adf7a4365bbf1d84edeafe9 | e6b0a0acaaca268ff347975100f18daa0fcb6757 | refs/heads/master | 2020-04-12T18:09:30.647378 | 2018-12-23T10:39:00 | 2018-12-23T10:39:00 | 162,671,037 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 990 | py | """zqxt_tmpl URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
# from django.contrib import admin
# from django.urls import path
# urlpatterns = [
# path('', )
# path('admin/', admin.site.urls),
# ]
from django.conf.urls import include, url
from django.contrib import admin
from learn import views as learn_views
urlpatterns = [
url('', learn_views.home, name='home'),
url('admin/', admin.site.urls)
] | [
"songpo.zhang@foxmail.com"
] | songpo.zhang@foxmail.com |
2f899fa8e139719fb4ba62ee2665ec2bf83f1719 | f3212e12fc9a8783663a3f9123caa65b891045ae | /c_dcgan.py | 0dbbc95690e1f42466b9f67f14e7a6fccc153203 | [] | no_license | nenoNaninu/DeepPractice | 5995effcf74ef3468c6a575299b209b1ee1f82fc | f82442ca41ae5e3a8e5a4d1068844bd7a4c0100d | refs/heads/master | 2020-05-21T11:34:55.339672 | 2019-05-27T17:26:48 | 2019-05-27T17:26:48 | 186,030,410 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,370 | py | # -*- using:utf-8 -*-
from torchvision.datasets import ImageFolder
from torchvision import transforms
from torch.utils.data import DataLoader
from torch import nn, optim
import torch
from statistics import mean
from tqdm import tqdm
from torchvision.utils import save_image
import os
class ConditionalGenerator(nn.Module):
def __init__(self, class_num):
super(ConditionalGenerator, self).__init__()
self.class_num = class_num
self.main = nn.Sequential(
nn.ConvTranspose2d(100 + class_num, 32 * 8, 4, 1, 0, bias=False),
nn.BatchNorm2d(32 * 8),
nn.ReLU(inplace=True),
nn.ConvTranspose2d(32 * 8, 32 * 4, 4, 2, 1, bias=False),
nn.BatchNorm2d(32 * 4),
nn.ReLU(inplace=True),
nn.ConvTranspose2d(32 * 4, 32 * 2, 4, 2, 1, bias=False),
nn.BatchNorm2d(32 * 2),
nn.ConvTranspose2d(32 * 2, 32, 4, 2, 1, bias=False),
nn.BatchNorm2d(32),
nn.ReLU(inplace=True),
nn.ConvTranspose2d(32, 3, 4, 2, 1, bias=False),
nn.Tanh()
)
def forward(self, x, labels):
label_tensor = torch.zeros(x.shape[0], self.class_num, 1, 1)
for idx, label in enumerate(labels):
label_tensor[idx, label, :, :] = 1
label_tensor = label_tensor.to("cuda:0")
# print("generate")
# print(x.shape)
# print(labels.shape)
x = torch.cat((x, label_tensor), dim=1)
# print(x.shape)
x = self.main(x)
return x
class ConditionalDiscriminator(nn.Module):
def __init__(self, class_num):
super(ConditionalDiscriminator, self).__init__()
self.class_num = class_num
self.main = nn.Sequential(
nn.Conv2d(3 + class_num, 32, 4, 2, 1, bias=False),
nn.LeakyReLU(0.2, inplace=True),
nn.BatchNorm2d(32),
nn.Conv2d(32, 64, 4, 2, 1, bias=False),
nn.LeakyReLU(0.2, inplace=True),
nn.BatchNorm2d(64),
nn.Conv2d(64, 32 * 4, 4, 2, 1, bias=False),
nn.LeakyReLU(0.2, inplace=True),
nn.BatchNorm2d(32 * 4),
nn.Conv2d(32 * 4, 32 * 8, 4, 2, 1, bias=False),
nn.LeakyReLU(0.2, inplace=True),
nn.BatchNorm2d(32 * 8),
nn.Conv2d(32 * 8, 1, 4, 1, 0, bias=False)
)
def forward(self, x, labels):
label_tensor = torch.zeros(x.shape[0], self.class_num, 64, 64)
for idx, label in enumerate(labels):
label_tensor[idx, label, :, :] = 1
label_tensor = label_tensor.to("cuda:0")
# print("discrimiante")
# print(x.shape)
# print(label.shape)
# print(label_tensor)
x = torch.cat((x, label_tensor), dim=1)
x = self.main(x)
return x.squeeze()
def train_dcgan(g, d, opt_g, opt_d, loader, batch_size):
log_loss_g = []
log_loss_d = []
ones = torch.ones(batch_size).to("cuda:0")
zeros = torch.zeros(batch_size).to("cuda:0")
loss_function = nn.BCEWithLogitsLoss()
print("start training")
for real_img, labels in tqdm(loader):
# print(real_img.shape)
# print(labels.shape)
bach_len = len(real_img)
real_img = real_img.to("cuda:0")
z = torch.randn(bach_len, 100, 1, 1).to("cuda:0")
fake_image = g(z, labels)
fake_image_tensor = fake_image.detach()
out = d(fake_image, labels)
loss_g = loss_function(out, ones[:bach_len])
log_loss_g.append(loss_g.item())
d.zero_grad()
g.zero_grad()
loss_g.backward()
opt_g.step()
real_out = d(real_img, labels)
loss_d_real = loss_function(real_out, ones[:bach_len])
fake_image = fake_image_tensor
fake_out = d(fake_image, labels)
loss_d_fake = loss_function(fake_out, zeros[:bach_len])
loss_d = loss_d_fake + loss_d_real
log_loss_d.append(loss_d.item())
d.zero_grad()
g.zero_grad()
loss_d.backward()
opt_d.step()
return mean(log_loss_g), mean(log_loss_d)
if __name__ == "__main__":
batch_size = 320
image_dataset = ImageFolder(
# "../../dataset/",
"../../../UECFOOD10/",
transforms.Compose([
transforms.Resize(64),
transforms.CenterCrop(64),
transforms.ToTensor(),
]))
train_loader = DataLoader(image_dataset, batch_size=batch_size, shuffle=True)
generator = ConditionalGenerator(10)
discriminator = ConditionalDiscriminator(10)
monitor_noize = torch.randn(batch_size, 100, 1, 1)
# concat用のラベルを作成
dummpy_label = torch.zeros(batch_size, dtype=torch.int32)
for idx in range(100):
dummpy_label[idx] = idx % 10
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
# if torch.cuda.device_count() > 1:
# print("Let's use", torch.cuda.device_count(), "GPUs!")
# generator = nn.DataParallel(generator)
# discriminator = nn.DataParallel(discriminator)
generator.to(device)
discriminator.to(device)
monitor_noize = monitor_noize.to(device)
# dummpy_label = dummpy_label.to(device)
opt_discriminator = optim.Adam(discriminator.parameters(), lr=0.0002, betas=(0.5, 0.999))
opt_generator = optim.Adam(generator.parameters(), lr=0.0002, betas=(0.5, 0.999))
if not os.path.exists('./space/kadai/kadai5/cgan_save'):
os.makedirs('./space/kadai/kadai5/cgan_save')
if not os.path.exists('./space/kadai/kadai5/cgan_output_image'):
os.makedirs('./space/kadai/kadai5/cgan_output_image')
save_path = os.path.abspath('./space/kadai/kadai5/cgan_save')
output_image_path = os.path.abspath('./space/kadai/kadai5/cgan_output_image')
for epoch in range(300):
train_dcgan(generator, discriminator, opt_generator, opt_discriminator, train_loader, batch_size)
if epoch % 10 == 0:
torch.save(generator.state_dict(), "{0}/g_{1:03}.prm".format(save_path, epoch), pickle_protocol=4)
torch.save(discriminator.state_dict(), "{0}/d_{1:03}.prm".format(save_path, epoch), pickle_protocol=4)
generated_img = generator(monitor_noize, dummpy_label)
save_image(generated_img, "{0}/{1:03d}.jpg".format(output_image_path, epoch))
| [
"neno@neno.dev"
] | neno@neno.dev |
88c59e38f530f63fc4239938144ad7fa4298982c | 3f28a3af1595a540e00e656ff88259b3409ae3e4 | /data/scripts/generate_from_json.py | 8973b900d427240213ea2648dcf9979ca1978469 | [] | no_license | nmarsha3/homecarehelper | 7b84ef5bb45f1f2b1413d3845e4e61b33d83efec | b95fefdbaaf3a108554e3004eb24cc3994791726 | refs/heads/main | 2023-05-11T21:14:07.476495 | 2021-05-16T20:52:44 | 2021-05-16T20:52:44 | 349,526,117 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 199 | py | import sys
import json
names = {}
with open(sys.argv[1]) as f:
names = json.load(f)
outfile = open(sys.argv[2], "w+")
for key in names:
outfile.write(key)
outfile.write("\n")
outfile.close()
| [
"mrauch2@nd.edu"
] | mrauch2@nd.edu |
27d25a48451ddf4fd37788f53f17ab7d7bbbb843 | b71f656374293c5f1238fcb449aa4dde78632861 | /eudplib/eudlib/memiof/byterw.py | c6a45de2f0bcb03d62c384d553512caacbd340cb | [
"MIT"
] | permissive | tobeinged/eudplib | ce1cdc15f7ec6af857b4b64b5c826b3dd95d3e48 | 066c0faa200dc19e70cdb6979daf8f008b8ae957 | refs/heads/master | 2023-05-04T08:49:01.180147 | 2019-03-18T14:30:29 | 2019-03-18T14:30:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,708 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Copyright (c) 2014 trgk
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
from ... import core as c
from ... import ctrlstru as cs
from . import dwepdio as dwm
_epd, _suboffset = c.EUDCreateVariables(2)
class EUDByteReader:
"""Read byte by byte."""
def __init__(self):
self._dw = c.EUDVariable()
self._b = c.EUDCreateVariables(4)
self._suboffset = c.EUDVariable()
self._offset = c.EUDVariable()
# -------
@c.EUDMethod
def seekepd(self, epdoffset):
"""Seek EUDByteReader to specific epd player address"""
c.SeqCompute([
(self._offset, c.SetTo, epdoffset),
(self._suboffset, c.SetTo, 0)
])
c.SetVariables(self._dw, dwm.f_dwread_epd(epdoffset))
c.SetVariables([
self._b[0],
self._b[1],
self._b[2],
self._b[3],
], dwm.f_dwbreak(self._dw)[2:6])
@c.EUDMethod
def seekoffset(self, offset):
"""Seek EUDByteReader to specific address"""
global _epd, _suboffset
# convert offset to epd offset & suboffset
c.SetVariables([_epd, _suboffset], c.f_div(offset, 4))
c.SeqCompute([(_epd, c.Add, -0x58A364 // 4)])
# seek to epd & set suboffset
self.seekepd(_epd)
c.SeqCompute([
(self._suboffset, c.SetTo, _suboffset)
])
# -------
@c.EUDMethod
def readbyte(self):
"""Read byte from current address. Reader will advance by 1 bytes.
:returns: Read byte
"""
case0, case1, case2, case3, swend = [c.Forward() for _ in range(5)]
ret = c.EUDVariable()
# suboffset == 0
case0 << c.NextTrigger()
cs.EUDJumpIfNot(self._suboffset.Exactly(0), case1)
c.SeqCompute([
(ret, c.SetTo, self._b[0]),
(self._suboffset, c.Add, 1)
])
cs.EUDJump(swend)
# suboffset == 1
case1 << c.NextTrigger()
cs.EUDJumpIfNot(self._suboffset.Exactly(1), case2)
c.SeqCompute([
(ret, c.SetTo, self._b[1]),
(self._suboffset, c.Add, 1)
])
cs.EUDJump(swend)
# suboffset == 2
case2 << c.NextTrigger()
cs.EUDJumpIfNot(self._suboffset.Exactly(2), case3)
c.SeqCompute([
(ret, c.SetTo, self._b[2]),
(self._suboffset, c.Add, 1)
])
cs.EUDJump(swend)
# suboffset == 3
# read more dword
case3 << c.NextTrigger()
c.SeqCompute([
(ret, c.SetTo, self._b[3]),
(self._offset, c.Add, 1),
(self._suboffset, c.SetTo, 0)
])
c.SetVariables(self._dw, dwm.f_dwread_epd(self._offset))
c.SetVariables([
self._b[0],
self._b[1],
self._b[2],
self._b[3],
], dwm.f_dwbreak(self._dw)[2:6])
swend << c.NextTrigger()
return ret
class EUDByteWriter:
"""Write byte by byte"""
def __init__(self):
self._dw = c.EUDVariable()
self._suboffset = c.EUDVariable()
self._offset = c.EUDVariable()
self._b = [c.EUDLightVariable() for _ in range(4)]
@c.EUDMethod
def seekepd(self, epdoffset):
"""Seek EUDByteWriter to specific epd player addresss"""
c.SeqCompute([
(self._offset, c.SetTo, epdoffset),
(self._suboffset, c.SetTo, 0)
])
c.SetVariables(self._dw, dwm.f_dwread_epd(epdoffset))
c.SetVariables(self._b, dwm.f_dwbreak(self._dw)[2:6])
@c.EUDMethod
def seekoffset(self, offset):
"""Seek EUDByteWriter to specific address"""
global _epd, _suboffset
# convert offset to epd offset & suboffset
c.SetVariables([_epd, _suboffset], c.f_div(offset, 4))
c.SeqCompute([(_epd, c.Add, (0x100000000 - 0x58A364) // 4)])
self.seekepd(_epd)
c.SeqCompute([
(self._suboffset, c.SetTo, _suboffset)
])
@c.EUDMethod
def writebyte(self, byte):
"""Write byte to current position.
Write a byte to current position of EUDByteWriter. Writer will advance
by 1 byte.
.. note::
Bytes could be buffered before written to memory. After you
finished using writebytes, you must call `flushdword` to flush the
buffer.
"""
cs.EUDSwitch(self._suboffset)
for i in range(3):
if cs.EUDSwitchCase()(i):
cs.DoActions([
self._b[i].SetNumber(byte),
self._suboffset.AddNumber(1)
])
cs.EUDBreak()
if cs.EUDSwitchCase()(3):
cs.DoActions(self._b[3].SetNumber(byte))
self.flushdword()
cs.DoActions([
self._offset.AddNumber(1),
self._suboffset.SetNumber(0),
])
c.SetVariables(self._dw, dwm.f_dwread_epd(self._offset))
c.SetVariables(self._b, dwm.f_dwbreak(self._dw)[2:6])
cs.EUDEndSwitch()
@c.EUDMethod
def flushdword(self):
"""Flush buffer."""
# mux bytes
c.RawTrigger(actions=self._dw.SetNumber(0))
for i in range(7, -1, -1):
for j in range(4):
c.RawTrigger(
conditions=[
self._b[j].AtLeast(2 ** i)
],
actions=[
self._b[j].SubtractNumber(2 ** i),
self._dw.AddNumber(2 ** (i + j * 8))
]
)
dwm.f_dwwrite_epd(self._offset, self._dw)
| [
"phu54321@naver.com"
] | phu54321@naver.com |
3f9d44ac09fcb46e8ee4ff49748713005be0c764 | d9d371febea18529e881cc15e49c3716401318f7 | /Artificial Intelligence/Othello/mymod.py | 1aafaf852bec4dca91e31570dbd885ed52dc3b16 | [] | no_license | ryanjhelmlinger/Code-Examples | 691eb0cdc077195d8a8b4a16dc52404c92c92ff9 | a15e1a4414ded21c5a1d143d6a13c430b7323729 | refs/heads/master | 2022-01-24T03:25:44.378473 | 2019-08-26T13:08:03 | 2019-08-26T13:08:03 | 113,651,714 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,955 | py | ##################################################
#
# Torbert, 18 December 2015
#
##################################################
#
from subprocess import Popen
from subprocess import PIPE
from subprocess import TimeoutExpired
#
from time import time
#
##################################################
#
TIMEOUT = 1.5 # seconds allowed per move
#
theE = ' '
theX = '+'
theO = 'O'
#
fname = 'myprog.py'
#
##################################################
#
def st( alist ) :
#
return '' . join( alist )
#
#
##################################################
#
def getMove( fname , theboard , thepiece ) :
#
# TODO - check if no possible move
#
#------------------------ RUN THE PLAYER'S CODE ---#
#
strboard = st( theboard )
#
myargs = [ 'python3' , fname , strboard , thepiece ]
#
po = Popen( myargs , stdout = PIPE , stderr = PIPE )
#
# import io
# print( 'io' , io.DEFAULT_BUFFER_SIZE ) # 8192
#
try :
#
x , y = po . communicate( timeout = TIMEOUT )
#
except TimeoutExpired :
#
po . kill()
#
x , y = po . communicate()
#
print( '*** timeout' )
#
#
z = x . split()
#
if len( z ) > 0 :
#
themove = z[-1] . decode( 'utf-8' ) # last only
#
print( '*** themove' , themove )
#
#
# TO DO - error check... themove
#
#------------------------ END ---------------------#
#
# TO DO - default to random play
#
#
##################################################
#
theboard = [ theE ] * 64
#
theboard[27] = theX
theboard[36] = theX
theboard[28] = theO
theboard[35] = theO
#
# TODO - display the board
# TODO - play the entire game
#
thepiece = theX # first move
#
tic = time()
num = getMove( fname , theboard , thepiece )
toc = time()
#
print( num )
#
##################################################
#
# end of file
#
##################################################
| [
"noreply@github.com"
] | noreply@github.com |
800409a84e64c224bea3aaf3a75bc133d465f92c | f52f8372b8352a4d85d2611044728d38d2cb8d82 | /train.py | 9ac442d2583ae15e658fc6312a4b65ee2c77cbd2 | [] | no_license | tsmotlp/deepID | 5f21cde275af158fb01526fc67bfb310b1ce2ee8 | a8c53de50d75de7c8cd2f482a7b9b026097422ff | refs/heads/master | 2020-04-13T05:31:50.873682 | 2018-12-25T01:21:15 | 2018-12-25T01:21:15 | 162,994,788 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,113 | py | import argparse
import torch
import torch.utils.data as Data
from torch.autograd import Variable
import os
import cv2
import numpy as np
import random
from torchvision import transforms
from skimage import io as sio
from deepID_1 import deepID_1
from dataset import get_dataset
from vis_tools import Visualizer
import torch.nn.functional as F
parser = argparse.ArgumentParser (description='pytorch SRCNN')
parser.add_argument ('--batch_size', type=int, default=9000, help='training batch size')
parser.add_argument ('--num_epochs', type=int, default=1000, help='number of training epochs')
parser.add_argument ('--lr', type=float, default=1e-2, help='learning rate')
parser.add_argument ('--cuda', type=bool, default=True, help='use cuda?')
parser.add_argument ('--resume', type=str, default='model_para/param_epoch200.pkl', help='path to network checkpoint')
parser.add_argument ('--start_epoch', type=int, default=200, help='restart epoch number for training')
parser.add_argument ('--threads', type=int, default=0, help='number of threads')
parser.add_argument ('--momentum', type=float, default=0.9, help='momentum')
parser.add_argument ('--weight_decay', type=float, default=1e-4, help='weight decay')
parser.add_argument ('--step', type=int, default=100, help='Sets the learning rate to the initial LR decayed by momentum every n epochs, Default: n=100')
parser.add_argument ('--pretrained', type=str, default='', help='path to network parameters')
parser.add_argument ('--num_channels', type=int, default=3)
parser.add_argument ('--train_dir', type=str, default='./pig_datasets', help='LR image path to training data directory')
parser.add_argument ('--test_dir', type=str, default='./test_data', help='image path to testing data directory')
parser.add_argument ('--train_interval', type=int, default=50, help='interval for training to save image')
parser.add_argument ('--test_interval', type=int, default=10, help='interval for testing to save image')
opt = parser.parse_args ()
# 打印定义的变量
# print(opt)
# ...
seed = random.randint (1, 10000)
print ("Random Seed: ", seed)
torch.manual_seed (seed)
if opt.cuda:
torch.cuda.manual_seed (seed)
# 构建网络
print ('==>building network...')
network = deepID_1 (num_channels=opt.num_channels)
# loss函数
loss_func = torch.nn.CrossEntropyLoss ()
# 设置GPU
if opt.cuda and not torch.cuda.is_available (): # 检查是否有GPU
raise Exception ('No GPU found, please run without --cuda')
print ("===> Setting GPU")
if opt.cuda:
print ('cuda_mode:', opt.cuda)
network = network.cuda ()
loss_func = loss_func.cuda ()
# 设置优化器函数
print ("===> Setting Optimizer")
optimizer = torch.optim.Adam (network.parameters (), lr=opt.lr)
# 可视化
train_vis = Visualizer (env='training')
# 训练
def train(train_dataloader, network, optimizer, loss_func):
    """Train from opt.start_epoch through opt.num_epochs, checkpointing after
    every epoch (save_checkpoint takes the epoch number, so both calls belong
    inside the loop)."""
    print ('==>Training...')
    for epoch in range (opt.start_epoch, opt.num_epochs + 1):
        # scheduler.step(epoch)
        train_process (train_dataloader, network, optimizer, loss_func, epoch, epochs=opt.num_epochs)
        save_checkpoint (network, epoch)
# 测试
def test(test_dataloader, network):
    """Evaluate the (already loaded) network on the test dataloader."""
    print ('==>Testing...')
    test_process (test_dataloader, network)
# 每个epoch的训练程序
def train_process(dataloader, network, optimizer, loss_func, epoch=1, epochs=1):
    """Run one training epoch: apply the decayed LR, then optimise batch by batch."""
    lr = adjust_learning_rate (epoch - 1)
    for param_group in optimizer.param_groups:
        param_group["lr"] = lr
    print ("epoch =", epoch, "lr =", optimizer.param_groups[0]["lr"])
    for iteration, (inputs, labels) in enumerate (dataloader):
        inputs = Variable (inputs)   # Variable() is a no-op on modern PyTorch
        labels = Variable (labels)
        if opt.cuda:
            inputs = inputs.cuda ()
            labels = labels.cuda ()
        # ------------------------ training step ------------------------
        optimizer.zero_grad ()
        preds = network (inputs)
        # loss = loss_func (preds, labels)
        # labels are one-hot, so argmax recovers class indices for NLL loss
        loss = F.nll_loss(preds, torch.argmax(labels, dim=1))
        train_acc = calcu_acc (preds, labels)
        loss.backward ()
        optimizer.step ()
        train_vis.plot_many ({'loss': loss.item (), 'acc': train_acc})
        # BUG FIX: `loss.data[0]` raises on 0-dim tensors in PyTorch >= 0.5;
        # use loss.item(), as already done in the plot call above.
        print ('epoch:[{}/{}] batch:[{}/{}] loss:{:.10f} acc:{:.10f}'.format (epoch, epochs, iteration, len (dataloader), loss.item (), train_acc))
# 测试程序
def test_process(test_dataloader, network):
    """Evaluate `network` on the loader.

    Prints and returns the mean of the per-batch accuracies (batches are
    weighted equally, matching the original behaviour).
    """
    train_correct = 0
    with torch.no_grad():  # inference only -- no autograd bookkeeping
        for idx, (inputs, labels) in enumerate (test_dataloader):
            inputs = Variable (inputs)
            labels = Variable (labels)
            if opt.cuda:
                inputs = inputs.cuda ()
                labels = labels.cuda ()
            preds = network (inputs)
            train_correct += calcu_acc (preds, labels)
    acc = train_correct / len(test_dataloader)
    print(acc)
    return acc  # also return it so callers need not parse stdout
def calcu_acc(preds, labels):
    """Fraction of samples whose predicted class (argmax over `preds` rows)
    matches the class encoded by the one-hot `labels` rows.

    Both arguments are (N, C) tensors; returns a Python float in [0, 1].
    """
    # Vectorised replacement for the original per-sample numpy loop; the
    # stray per-call debug print of the raw count was removed.
    pred_cls = preds.argmax(dim=1)
    true_cls = labels.argmax(dim=1)
    return (pred_cls == true_cls).float().mean().item()
# 设计自适应的学习率
def adjust_learning_rate(epoch):
    """Step decay: divide the base LR (opt.lr) by 10 every opt.step epochs."""
    return opt.lr * (0.1 ** (epoch // opt.step))
def save_checkpoint(network, epoch):
    """Serialise {epoch, model} to model_para/param_epoch<epoch>.pkl."""
    model_folder = "model_para/"
    param_path = model_folder + "param_epoch{}.pkl".format (epoch)
    # NOTE(review): this pickles the whole nn.Module object, not just its
    # state_dict -- loading then requires the original class definitions.
    state = {"epoch": epoch, "model": network}
    if not os.path.exists (model_folder):
        os.makedirs (model_folder)
    torch.save (state, param_path)
    print ("Checkpoint saved to {}".format (param_path))
# Decide whether the network is fully trained (run tests) or still needs training.
if opt.pretrained:  # training finished -- evaluate
    # Load the test data.
    print ('==>loading test data...')
    test_dataset = get_dataset (opt.test_dir)
    test_dataloader = Data.DataLoader (dataset=test_dataset, batch_size=opt.batch_size, shuffle=True,
                                       num_workers=opt.threads)
    if os.path.isfile (opt.pretrained):
        print ('==> loading model {}'.format (opt.pretrained))
        weights = torch.load (opt.pretrained)
        network.load_state_dict (weights['model'].state_dict ())
        # Run evaluation.
        test (test_dataloader, network)
    else:
        print ('==> no network model found at {}'.format (opt.pretrained))
else:  # not finished -- (re)start training
    # Load the training data.
    print ('==>loading training data...')
    train_dataset = get_dataset (opt.train_dir)
    train_dataloader = Data.DataLoader (dataset=train_dataset, batch_size=opt.batch_size, shuffle=True,
                                        num_workers=opt.threads)
    if opt.resume:  # partially trained -- resume from the checkpoint
        if os.path.isfile (opt.resume):
            checkpoint = torch.load (opt.resume)
            opt.start_epoch = checkpoint['epoch'] + 1
            print ('==>start training at epoch {}'.format (opt.start_epoch))
            network.load_state_dict (checkpoint['model'].state_dict ())
            print ("===> resume Training...")
            train (train_dataloader, network, optimizer, loss_func)
        else:
            print ('==> cannot start training at epoch {}'.format (opt.start_epoch))
    else:
        train (train_dataloader, network, optimizer, loss_func)
| [
"noreply@github.com"
] | noreply@github.com |
f8b189694e1c922379443954f32ab60619f86fd9 | 717e17c69a562e842cbd29b6143a5a87f5e43482 | /review4/manage.py | 61f99e6ff1ff07610293ab33c21b076ac477bb7b | [
"MIT"
] | permissive | JeaDong/WEB | 906f486619e999157395920c94e9ab2dcfc6a555 | 5402c1852bdd11385987864a5e292bf3c8fe17c0 | refs/heads/master | 2022-12-09T15:06:03.476140 | 2018-04-03T06:26:23 | 2018-04-03T06:26:23 | 124,837,509 | 0 | 0 | null | 2022-12-08T00:00:26 | 2018-03-12T05:23:43 | Python | UTF-8 | Python | false | false | 715 | py | #!/usr/bin/env python
import os
from app import create_app, db
from app.models import User, Role, Post
from flask_script import Manager, Shell
from flask_migrate import Migrate,MigrateCommand
''', Mig'''
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
def make_shell_context():
    """Objects auto-imported into `manage.py shell` sessions.

    FIX: `Post` is imported at module level specifically for this purpose but
    was never exposed; it is now included alongside the other models.
    """
    return dict(app=app, db=db, User=User, Role=Role, Post=Post)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def test():
    """Run the unit tests."""
    # Imported lazily so normal server commands don't pay for unittest.
    import unittest
    tests = unittest.TestLoader().discover('tests')
    unittest.TextTestRunner(verbosity=2).run(tests)
if __name__ == '__main__':
    manager.run()
"704404536@qq.com"
] | 704404536@qq.com |
c7db0c3ed1a90f40d7b254bb89c94c3c62eee791 | 70d3b3be21127984799c79e38bd25f93fe41f59e | /Shipment/migrations/0001_initial.py | d07b8d3c9652dd2bce13d59601f5bb0ceeaa9d77 | [] | no_license | LongCohol/HerokuKMOUWebsite | 693cd8a5e50aa896b943bce8cd1e381908e9e519 | 0e82347615f1b7154ee84e22b33d396cc6c19b73 | refs/heads/master | 2023-06-05T06:33:26.456358 | 2021-06-27T12:14:32 | 2021-06-27T12:14:32 | 380,637,903 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,958 | py | # Generated by Django 3.2 on 2021-06-27 21:12
import Shipment.models
from django.db import migrations, models
import django.utils.timezone
import override_existing
class Migration(migrations.Migration):
    # Auto-generated by Django's makemigrations; indentation restored so the
    # module is importable again.  Do not hand-edit field definitions --
    # regenerate via `manage.py makemigrations` instead.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Shipment',
            fields=[
                ('number', models.BigAutoField(db_column='no', primary_key=True, serialize=False)),
                ('barcode', models.ImageField(blank=True, db_column='barcode', storage=override_existing.OverrideExisting(), upload_to=Shipment.models.barcode_path, verbose_name='Barcode Shipment')),
                ('colorpick', models.CharField(blank=True, db_column='color_status', default='', max_length=10)),
                ('kantor_id', models.CharField(blank=True, db_column='kantor_id', max_length=40)),
                ('insert_org', models.CharField(blank=True, db_column='insert_org', max_length=100)),
                ('correct_org', models.CharField(blank=True, db_column='correct_org', max_length=100)),
                ('reg_date', models.DateTimeField(auto_now=True, db_column='regdate', max_length=20)),
                ('company', models.CharField(blank=True, choices=[('SEOYANG', 'SEOYANG'), ('SUNAMI', 'SUNAMI'), ('MAN', 'MAN'), ('CENTRA', 'CENTRA'), ('CMSHIP', 'CMSHIP'), ('보성상사', '보성상사'), ('POSSM', 'POSSM'), ('DORVAL', 'DORVAL'), ('GLOVIS', 'GLOVIS'), ('MARUBISHI', 'MARUBISHI'), ('이강공사', '이강공사'), ('DAN MO', 'DAN MO'), ('EUCO', 'EUCO'), ('KNK', 'KNK'), ('KSS', 'KSS'), ('SUNRIO', 'SUNRIO'), ('FORTUNE WILL', 'FORTUNE WILL'), ('GOLTENS', 'GOLTENS'), ('SHI OCEAN', 'SHI OCEAN'), ('SAEHAN', 'SAEHAN'), ('JW', 'JW'), ('INTERGIS', 'INTERGIS'), ('KLCSM', 'KLCSM'), ('오션마린', '오션마린'), ('STX', 'STX'), ('GOWIN', 'GOWIN')], db_column='company', max_length=100, verbose_name='COMPANY')),
                ('vessel', models.CharField(blank=True, db_column='vessel', max_length=100, verbose_name='VESSEL')),
                ('by', models.CharField(blank=True, db_column='by1', max_length=50, verbose_name='BY')),
                ('BLno', models.CharField(blank=True, db_column='blno', max_length=50, verbose_name='BLNO')),
                ('docs', models.TextField(blank=True, db_column='doc', max_length=500, verbose_name='DOC')),
                ('odr', models.TextField(blank=True, db_column='odr', max_length=100, verbose_name='ODR')),
                ('supplier', models.TextField(blank=True, db_column='supplier', max_length=100, verbose_name='SUPPLIER')),
                ('quanty', models.CharField(blank=True, db_column='qty', max_length=10, verbose_name='QTY')),
                ('unit', models.CharField(blank=True, db_column='unit', max_length=10, verbose_name='UNIT')),
                ('size', models.TextField(blank=True, db_column='size', max_length=100, verbose_name='SIZE')),
                ('weight', models.CharField(blank=True, db_column='weight', max_length=10, verbose_name='WEIGHT')),
                ('in_date', models.DateField(blank=True, db_column='in1', default=django.utils.timezone.now, max_length=10, null=True, verbose_name='IN')),
                ('warehouse', models.CharField(blank=True, db_column='whouse', max_length=100, verbose_name='W/H1')),
                ('warehouse_lastupdate', models.CharField(blank=True, db_column='whouse_lastupdate', default='', max_length=100, verbose_name='W/H')),
                ('warehouse2', models.CharField(blank=True, db_column='whouse2', max_length=100, verbose_name='W/H2')),
                ('wh_timestamp', models.DateTimeField(blank=True, db_column='wh_timestamp1', max_length=20, null=True)),
                ('wh_timestamp2', models.DateTimeField(blank=True, db_column='wh_timestamp2', max_length=20, null=True)),
                ('port', models.CharField(blank=True, db_column='port', max_length=100, verbose_name='PORT')),
                ('out_date', models.DateField(blank=True, db_column='out1', max_length=10, null=True, verbose_name='OUT')),
                ('remark', models.TextField(blank=True, db_column='remark', max_length=500, verbose_name='REMARK')),
                ('memo', models.TextField(blank=True, db_column='memo', max_length=1000, verbose_name='MEMO')),
                ('image', models.ImageField(blank=True, db_column='img', default='', max_length=500, null=True, storage=override_existing.OverrideExisting(), upload_to=Shipment.models.image_path, verbose_name='IMG')),
                ('image1', models.ImageField(blank=True, db_column='img1', default='', max_length=500, null=True, storage=override_existing.OverrideExisting(), upload_to=Shipment.models.image_path)),
                ('image2', models.ImageField(blank=True, db_column='img2', default='', max_length=500, null=True, storage=override_existing.OverrideExisting(), upload_to=Shipment.models.image_path)),
                ('pdf_file', models.FileField(blank=True, db_column='pdf', default='', max_length=500, null=True, storage=override_existing.OverrideExisting(), upload_to=Shipment.models.pdf_path, verbose_name='PDF')),
                ('division', models.CharField(blank=True, choices=[('B', 'B'), ('L', 'L'), ('D', 'D')], db_column='division', max_length=10, verbose_name='DIVISION')),
                ('flag_status', models.CharField(blank=True, choices=[('STAY2', 'STAY2'), ('COMPLETED', 'COMPLETED'), ('BLANK', 'BLANK'), ('STAY1', 'STAY1'), ('START', 'START')], db_column='flg', max_length=10, verbose_name='STATE')),
                ('job_number', models.CharField(blank=True, db_column='jobno', max_length=50, verbose_name='JOB.NO')),
                ('work', models.CharField(blank=True, db_column='work', max_length=10)),
                ('work_regdate', models.DateTimeField(blank=True, db_column='work_regdate', max_length=20, null=True)),
            ],
            options={
                'db_table': 'pla_databoard',
            },
        ),
    ]
| [
"controllabbusan@gmail.com"
] | controllabbusan@gmail.com |
cc997c66aa7c0603bbc734ce62e689cd06b97a65 | 1b5d39f9dd5126b6f21e83efe58b7e86ef8d94f2 | /CodeChef/LTIME80B/CARR.py | 22f1cdd88dba474004c0ee9865be462ca2cd7494 | [] | no_license | jai-dewani/Competitive-Programming | dfad61106a648b80cc97c85cc5c8bc5d1cd335d9 | a2006e53b671ba56d4b0a20dd81fd0e21d0b0806 | refs/heads/master | 2021-07-03T16:08:02.466423 | 2020-09-24T16:22:28 | 2020-09-24T16:22:28 | 178,812,685 | 1 | 2 | null | 2019-10-18T14:43:19 | 2019-04-01T07:51:47 | Python | UTF-8 | Python | false | false | 489 | py | from random import randint
mod = 10**9+7
for _ in range(int(input())):
n,m = map(int,input().strip().split())
# n = randint(1,10**10)
# m = randint(1,10**10)
answer = 0
fact = m*pow(m-1,n-1,mod)
# for i in range(n-1):
# fact *= (m-1)
answer += fact
if(n>2):
fact = m*pow(m-1,n-2,mod)
elif n==2:
fact = m
# for i in range(n-2):
# fact *= (m-1)
fact*= (n-1)
fact %= mod
answer += fact
print(answer%mod) | [
"jai.dewani.99@gmail.com"
] | jai.dewani.99@gmail.com |
1b5f9af5eea3b44bf9b4bcb7f3e8d9ed2a5ae4dd | 82f91128db634cb9dac106773975ef0440c46a5a | /week-10/dijkstra.py | 6fb56fa489c6af802c44988bde71f07fa2e0ff8c | [] | no_license | kedar-shenoy9/ADA-1BM17CS041 | c7184a50f370fd816b79f80b618d8a7384ed1893 | 5e4c9b488827683bdadcdc37933f8800aa4e3021 | refs/heads/master | 2020-07-01T15:57:45.365483 | 2019-11-27T16:26:28 | 2019-11-27T16:26:28 | 201,218,363 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,119 | py | def dijkstra(g):
n = len(g)
#initialize everythin
visited = [0]
dist = [g[0][i] for i in range(n)]
prev = [0 for i in range(n)]
count = 0
#repeat for all nodes
while count < n-1:
minimum = 999
#find the node with minimum cost to reach it
for i in range(n):
if dist[i] < minimum and i not in visited:
minimum = dist[i]
min_index = i
visited.append(min_index)
#consider all the edges starting at min_index to the nodes which are not visited
for i in range(n):
if dist[min_index]+graph[min_index][i] < dist[i] and i not in visited:
dist[i] = dist[min_index]+graph[min_index][i]
prev[i] = min_index
count += 1
#print the paths and weights
for i in range(1, n):
print("Distance from 0 to "+str(i)+" is "+str(dist[i]))
print("Path is :")
end = i
path = []
while end != 0:
path.append(end)
end = prev[end]
path.append(0)
print("->".join(map(str, path[::-1])))
if __name__ == "__main__":
    # Read an n x n adjacency matrix from stdin and run Dijkstra from node 0.
    n = int(input("Enter the number of nodes "))
    graph = []
    for _ in range(n):
        l = list(map(int, input().split()))
        graph.append(l)
    dijkstra(graph)
| [
"mahesh.sadashiv@gmail.com"
] | mahesh.sadashiv@gmail.com |
522e14b8ff56c3c33652d345558e05d1fe24076f | 1d6d048c0697ee1cdd0dc2c063b0db09028a1bee | /KupiProdai/settings.py | f3290aa2402755828cf5cdab3e4902c4814d7689 | [] | no_license | adil2604/Kupi-Prodai_learn | 9db69dd422de19a70c0ef8003961cecf997b3cd7 | 4d587dfe32662a80c057af620f5bba77e83f2045 | refs/heads/master | 2022-04-12T05:07:09.190514 | 2020-02-11T15:36:49 | 2020-02-11T15:36:49 | 209,983,077 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,414 | py | """
Django settings for KupiProdai project.
Generated by 'django-admin startproject' using Django 2.2.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'x9(cut2a0^e0&9jx=w0@jy=vwvuf#)i^lgpyg4f_aj8*^!3qpz'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'Main',
'cart',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'KupiProdai.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')]
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.media'
],
},
},
]
WSGI_APPLICATION = 'KupiProdai.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'ru-RU'
CART_SESSION_ID = 'cart'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
MEDIA_ROOT = os.path.join(BASE_DIR, 'templates', 'media')
# NOTE(review): '/' is unusual -- the Django convention is '/media/'; confirm
# before changing, since templates may rely on the current value.
MEDIA_URL = '/'
# FIX: removed the import-time debug `print(MEDIA_ROOT)` -- settings modules
# should have no side effects.
STATIC_URL = '/templates/'
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, "templates"),
    '/',
]
"adil0@list.ru"
] | adil0@list.ru |
2bc4d6b64e071a8e2289f3883c9da318dc8cc57b | 2835a9ea0bc462d21c81f44655bf2bd18c5e85e1 | /DbHelper.py | 7c1b283ebdcddf9567604ff26f33d3d69f766588 | [] | no_license | Mashka4711/course_work | 0d4734dfd622915fd61a120e7bc2880bd4ef06a4 | a087254b242a3c4530462cd988559fd4236f7725 | refs/heads/master | 2020-05-31T18:17:20.842925 | 2019-06-05T16:32:34 | 2019-06-05T16:32:34 | 190,430,553 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,608 | py | import pymysql
class DbHelper:
    """Data-access helper for the clinic DB (patient_card / doctor tables).

    SECURITY FIX: every query was previously built with %-string interpolation
    (SQL injection); all statements now use parameterised execution, letting
    PyMySQL escape values server-side.  Returned rows are dicts
    (DictCursor), matching the original behaviour.
    """

    def __init__(self):
        self.conn = self.get_connection()

    def get_connection(self):
        # NOTE(review): hard-coded root credentials; move to config/env vars.
        return pymysql.connect(host="localhost", user="root",
                               passwd="root", db="db_clinic", charset="utf8")

    # ---------- internal helpers ----------

    def _write(self, query, params):
        """Execute an INSERT/UPDATE; report integrity errors, always commit
        (preserves the original error-handling behaviour)."""
        try:
            cursor = self.conn.cursor(pymysql.cursors.DictCursor)
            cursor.execute(query, params)
        except pymysql.IntegrityError as err:
            print("Error: {}".format(err))
        self.conn.commit()

    def _fetch_all(self, query, params=()):
        """Execute a SELECT and return all rows as dicts."""
        cursor = self.conn.cursor(pymysql.cursors.DictCursor)
        cursor.execute(query, params)
        return cursor.fetchall()

    # ---------- patient cards ----------

    def insert_patient_card(self, id, name, surname):
        self._write(
            "INSERT INTO patient_card (id_card, name, surname) VALUES (%s, %s, %s)",
            (id, name, surname))

    def update_patient_card(self, id, name, surname):
        self._write(
            "UPDATE patient_card SET name = %s, surname = %s WHERE id_card = %s",
            (name, surname, id))

    def get_patient_card(self, id_cp):
        return self._fetch_all(
            "SELECT * FROM patient_card WHERE id_card = %s", (id_cp,))

    def delete_patient_card(self, id):
        cursor = self.conn.cursor(pymysql.cursors.DictCursor)
        cursor.execute("DELETE FROM patient_card WHERE id_card = %s", (id,))
        self.conn.commit()

    # ---------- doctors ----------

    def insert_doctor(self, id, name, surname, prof):
        self._write(
            "INSERT INTO doctor (id_doctor, name, surname, specialization) "
            "VALUES (%s, %s, %s, %s)",
            (id, name, surname, prof))

    def update_doctor(self, id, name, surname, prof):
        self._write(
            "UPDATE doctor SET name = %s, surname = %s, specialization = %s "
            "WHERE id_doctor = %s",
            (name, surname, prof, id))

    def get_doctor(self, id):
        return self._fetch_all(
            "SELECT * FROM doctor WHERE id_doctor = %s", (id,))

    def delete_doctor(self, id):
        cursor = self.conn.cursor(pymysql.cursors.DictCursor)
        cursor.execute("DELETE FROM doctor WHERE id_doctor = %s", (id,))
        self.conn.commit()

    # ---------- id listings ----------

    def get_doctors_ids(self):
        """Return every doctor id as an int."""
        rows = self._fetch_all("SELECT id_doctor FROM doctor")
        return [int(row['id_doctor']) for row in rows]

    def get_patients_ids(self):
        """Return every patient-card id as an int."""
        rows = self._fetch_all("SELECT id_card FROM patient_card")
        return [int(row['id_card']) for row in rows]
"noreply@github.com"
] | noreply@github.com |
b822023d7a01737c76d15074d9a6e3c13364d22e | fb282bd27c52b60f7ef9afd923d2568ccfe4cf5c | /remoteServerPanel/Application/fileTransfer.py | eff7d2dba84c0c17c35c2bbbaae9439a24b71a97 | [] | no_license | serdaltopkaya/serverPanel | 6f70cc30bc69572b2f0c351de48154d7a52b122a | bc3222c8d68284c4cd6d8cde53a1a1ddc89f7d65 | refs/heads/master | 2021-01-19T19:35:26.200663 | 2013-06-16T05:35:09 | 2013-06-16T05:35:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,254 | py | import paramiko
import os
import sys
import edit1
class Server(object):
    """Minimal SFTP client wrapping a paramiko Transport; usable as a
    context manager so the connection is always closed."""

    def __init__(self, username, password, host, port=22):
        self.transport = paramiko.Transport((host, port))
        self.transport.connect(username=username, password=password)
        self.sftp = paramiko.SFTPClient.from_transport(self.transport)

    def upload(self, local, remote):
        """Copy a local file to the remote path; IOError if it is missing."""
        if os.path.isfile(local):
            self.sftp.put(local, remote)
        else:
            raise IOError('Could not find localFile %s !!' % local)

    def download(self, remote, local):
        """Copy the remote file to the local path."""
        self.sftp.get(remote, local)

    def openFile(self, remote):
        """Open the remote file in the edit1.Notepad editor."""
        # NOTE(review): the editor instance is discarded -- presumably
        # Notepad displays itself on construction; confirm against edit1.
        notepad = edit1.Notepad(self.sftp.open(remote))

    def close(self):
        """Close the connection if it's active."""
        if self.transport.is_active():
            self.sftp.close()
            self.transport.close()

    # with-statement support
    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        self.close()
if __name__ == '__main__':
    # SECURITY NOTE(review): hard-coded username/password/host for manual
    # testing only -- never commit real credentials to source control.
    server1 = Server('srdl', 'ser21', '192.168.1.45')
    local = '/home/srdl/transfer.py'
    remote = '/home/srdl/transfer.py'
    server1.openFile(remote)
| [
"serdal.topkaya@gmail.com"
] | serdal.topkaya@gmail.com |
dcf9d83ba4bfa75b310253049edaadb0ac26101c | 5c056604ecbfdd6e3d20c6d3b891855767c431b8 | /CIFAR-10/DRE-F-SP+RS/models/ResNet_extract.py | c0f759d4aeba34d997dc7326df08db4232fb134d | [] | no_license | pkulwj1994/cDR-RS | 135d1fc9504304ba0303fe5acc3594ea27531557 | 661d694d6a8dfb44885271bdfd92d6dc150a40f8 | refs/heads/main | 2023-08-30T05:37:07.449304 | 2021-11-16T03:17:42 | 2021-11-16T03:17:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,346 | py | '''
ResNet-based model to map an image from pixel space to a features space.
Need to be pretrained on the dataset.
codes are based on
@article{
zhang2018mixup,
title={mixup: Beyond Empirical Risk Minimization},
author={Hongyi Zhang, Moustapha Cisse, Yann N. Dauphin, David Lopez-Paz},
journal={International Conference on Learning Representations},
year={2018},
url={https://openreview.net/forum?id=r1Ddp1-Rb},
}
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
IMG_SIZE=32
NC=3
resize=(32,32)
class BasicBlock(nn.Module):
    """Two-conv residual block (ResNet-18/34); output has
    planes * expansion channels."""
    expansion = 1

    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)

        # Projection shortcut only when the spatial size or channel count changes.
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion*planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion*planes)
            )

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        out += self.shortcut(x)
        out = F.relu(out)
        return out
class Bottleneck(nn.Module):
    """1x1 -> 3x3 -> 1x1 bottleneck residual block (ResNet-50/101/152);
    output has planes * expansion channels."""
    expansion = 4

    def __init__(self, in_planes, planes, stride=1):
        super(Bottleneck, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, self.expansion*planes, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(self.expansion*planes)

        # Projection shortcut only when the spatial size or channel count changes.
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion*planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion*planes, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion*planes)
            )

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = F.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        out += self.shortcut(x)
        out = F.relu(out)
        return out
class ResNet_extract(nn.Module):
    """ResNet backbone that exposes its penultimate feature vector
    (img_height*img_width*nc wide) next to the class logits;
    forward returns (logits, features)."""

    def __init__(self, block, num_blocks, num_classes=100, nc=NC, img_height=IMG_SIZE, img_width=IMG_SIZE):
        super(ResNet_extract, self).__init__()
        self.in_planes = 64

        self.main = nn.Sequential(
            nn.Conv2d(nc, 64, kernel_size=3, stride=1, padding=1, bias=False),  # h=h
            nn.BatchNorm2d(64),
            nn.ReLU(),
            self._make_layer(block, 64, num_blocks[0], stride=1),  # h=h
            self._make_layer(block, 128, num_blocks[1], stride=2),
            self._make_layer(block, 256, num_blocks[2], stride=2),
            self._make_layer(block, 512, num_blocks[3], stride=2),
            nn.AvgPool2d(kernel_size=4)
        )
        self.classifier_1 = nn.Sequential(
            nn.Linear(512*block.expansion, img_height*img_width*nc),
            # nn.BatchNorm1d(img_height*img_width*nc),
            # nn.ReLU(),
        )
        self.classifier_2 = nn.Sequential(
            nn.Linear(img_height*img_width*nc, num_classes)
        )

    def _make_layer(self, block, planes, num_blocks, stride):
        # NOTE: mutates self.in_planes, so the _make_layer calls in __init__
        # must stay in their current order.
        strides = [stride] + [1]*(num_blocks-1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        # x = nn.functional.interpolate(x,size=resize,mode='bilinear',align_corners=True)
        features = self.main(x)
        features = features.view(features.size(0), -1)
        features = self.classifier_1(features)
        out = self.classifier_2(features)
        return out, features
def ResNet18_extract(num_classes=10):
    """ResNet-18 variant: BasicBlock x [2,2,2,2]."""
    return ResNet_extract(BasicBlock, [2,2,2,2], num_classes=num_classes)

def ResNet34_extract(num_classes=10):
    """ResNet-34 variant: BasicBlock x [3,4,6,3]."""
    return ResNet_extract(BasicBlock, [3,4,6,3], num_classes=num_classes)

def ResNet50_extract(num_classes=10):
    """ResNet-50 variant: Bottleneck x [3,4,6,3]."""
    return ResNet_extract(Bottleneck, [3,4,6,3], num_classes=num_classes)

def ResNet101_extract(num_classes=10):
    """ResNet-101 variant: Bottleneck x [3,4,23,3]."""
    return ResNet_extract(Bottleneck, [3,4,23,3], num_classes=num_classes)

def ResNet152_extract(num_classes=10):
    """ResNet-152 variant: Bottleneck x [3,8,36,3]."""
    return ResNet_extract(Bottleneck, [3,8,36,3], num_classes=num_classes)
if __name__ == "__main__":
    # Smoke test: forward one random batch (requires a CUDA device).
    net = ResNet34_extract(num_classes=10).cuda()
    x = torch.randn(16, 3, 32, 32).cuda()
    out, features = net(x)
    print(out.size())
    print(features.size())

    def get_parameter_number(net):
        """Total vs. trainable parameter counts for a module."""
        total_num = sum(p.numel() for p in net.parameters())
        trainable_num = sum(p.numel() for p in net.parameters() if p.requires_grad)
        return {'Total': total_num, 'Trainable': trainable_num}
    print(get_parameter_number(net))
| [
"dingx92@gmail.com"
] | dingx92@gmail.com |
eee76809dcb3a715af04c8d2cd523e99f898851c | 0761cfe0ac0f1db4d37b3d57f7c293adbf16b2b3 | /MakeTrainingData.py | d4b20b9cdfd83ffcd5bc9056249ded3f580d1401 | [] | no_license | cearlUmass/TwitterHMC | a18ac4d9344fc49b4ecf2306dcfb897a586d7cc7 | 137eddebe9c9eb6ae01351bc66dff35486d69354 | refs/heads/main | 2023-08-08T03:10:20.094061 | 2021-09-08T01:41:50 | 2021-09-08T01:41:50 | 389,268,170 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 482 | py | import pickle
import os
import pprint as pp
def make_training_data(file):
    """Load and return the pickled follower-data object stored at `file`.

    SECURITY NOTE: pickle.load can execute arbitrary code from the file --
    only use on trusted data dumps.
    """
    with open(file, 'rb') as follower_Data:
        return pickle.load(follower_Data)
if __name__ == '__main__':
    # Pretty-print every follower dump found in the data directory.
    for filename in os.listdir('Data/Follower data dump'):
        data = make_training_data('Data/Follower data dump/{0}'.format(filename))
        pp.pprint(data)
# for user in data:
# d = len(data[user]['follows'])
# if d > 1:
# print(user) | [
"cearl@umass.edu"
] | cearl@umass.edu |
b03418c90f05f458148d1710dc8f13bbea4d0205 | dac73c1079d7f5a771fff9b20d4a42a329dbdad5 | /pygration/__version__.py | 2c1be0d9a485afcc95814cfdb6a21339130fc0cc | [
"MIT"
] | permissive | RafaelGSS/py.migration | 0e2de93e7a415980086e0a2d5258953670442dc3 | 327b8539479018bd6e08abc74ff6c72b0d3f0688 | refs/heads/master | 2020-03-27T16:59:02.793346 | 2018-09-05T13:55:03 | 2018-09-05T13:55:03 | 146,821,281 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 351 | py | # 8b d8 Yb dP 88""Yb db dP""b8 88 dP db dP""b8 888888
# 88b d88 YbdP 88__dP dPYb dP `" 88odP dPYb dP `" 88__
# 88YbdP88 8P 88""" dP__Yb Yb 88"Yb dP__Yb Yb "88 88""
# 88 YY 88 dP 88 dP""""Yb YboodP 88 Yb dP""""Yb YboodP 888888
VERSION = (0, 0, 2)
__version__ = '.'.join(map(str, VERSION)) | [
"rafael.nunu@hotmail.com"
] | rafael.nunu@hotmail.com |
f2d79542f326248beeeccdf981c0d6d9b68c644b | cc56916d3451a2c228fd5a4b649d4b37e4896665 | /dxb/libs/eslib.py | 0314fefce22303495f79e2a95dad6b7236a810c1 | [] | no_license | nyflxy/longan | 57ee63fe24720bfb6b6b1769acc5ba112dc08f18 | 5db84101dc11c4524e38f29f464ca522ec88aff4 | refs/heads/master | 2021-01-11T00:14:31.933873 | 2016-11-28T12:31:13 | 2016-11-28T12:31:13 | 70,554,505 | 0 | 1 | null | 2016-10-13T01:24:10 | 2016-10-11T04:00:19 | JavaScript | UTF-8 | Python | false | false | 15,071 | py | #coding=utf-8
import datetime
import elasticsearch
es = elasticsearch.Elasticsearch()
if __name__ == "__main__":
# 创建
es.index("newbie","link",{"name":"lxy","age":27,"create_date":datetime.datetime.now()},)
print es.search(index="my-index",doc_type="test-type")
print es.count("newbie","link",{
"query":{
"bool":{
"must":{"match":{"company.name":"qingdun"}},
# "must_not":{"match":{"name":"niyoufa"}},
}
}
})
es.delete("newbie","link",1)
es.delete_by_query("newbie",)
result = es.search("newbie","user",{
"query":{
"term":{
"age":25,
}
}
})
result = es.search("newbie", "user", {
"query": {
"terms": {
"age": [20,25,30],
}
}
})
result = es.search("newbie", "user", {
"query": {
"range": {
"age": {
"gte":25,
"lte":2
},
}
}
})
result = es.search("newbie", "user", {
"query": {
"exists": {
"field":"age"
}
}
})
result = es.search("newbie", "user", {
"query": {
"bool":{
"must":{"term":{"age":25}},
"must_not":{"term":{"name":"niyoufa"}},
"should":[
{"term":{"name":"lxy1"}},
]
}
}
})
result = es.search("newbie", "user", {
"query": {
"match_all":{}
}
})
result = es.search("newbie", "user", {
"query": {
"match": {"name":"niyoufa"}
}
})
result = es.search("newbie", "user", {
"query": {
"multi_match": {
"query":"full text search",
"fields":["name","age"]
}
}
})
filter
result = es.search("newbie","link",{
"query":{
"filtered": {
"query": {"match": {"name": "niyoufa"}},
"filter": {"term": {"age": 25}},
},
}
})
sort
result = es.search("newbie", "link", {
"query": {
"exists":{
"field":"age",
}
},
"sort":{"age":{"order":"desc"}}
})
result = es.search("newbie", "link", {
"query": {
"match": {"name":"niyoufa"}
}
})
print result.get("hits").get("hits")
print len(result.get("hits").get("hits"))
# coding=utf-8
import datetime, time, json, pdb
from es_settings import *
import cPickle as pickle
import logging
from xieli.models import *
from xieli.util.types import *
from django.conf import settings
import pyes
from pyes import *
from pyes.filters import GeoBoundingBoxFilter, GeoDistanceFilter, GeoPolygonFilter
from pyes.query import FilteredQuery, MatchAllQuery , Search
from pyes.sort import SortFactory, SortOrder, GeoSortOrder, ScriptSortOrder
from pyes.queryset import generate_model
ES_PATH = settings.ES_PATH
#ES_PATH = "http://dev.xielicheng.com:9200"
#ES_PATH ="http://192.168.1.113:9200"
#ES_PATH = "http://www.xieliapp.com:9200"
es_logger = logging.getLogger("utils")
# 连接es服务器
CONN_ES = pyes.ES(ES_PATH, timeout=200.0)
# Open a new pyes connection to the configured cluster.
def _connect_index():
    """Return a fresh pyes.ES client bound to ES_PATH."""
    return pyes.ES(ES_PATH, timeout=200.0)
# Create an ES index and register a typed mapping on it.
def create_index(name,index_type,FIELD_MAPPING):
    """Create index *name* and put FIELD_MAPPING under doc type *index_type*.

    Failures are swallowed and logged -- the caller is never interrupted.
    """
    try :
        conn = _connect_index()
        conn.indices.create_index(name)
        conn.indices.put_mapping(index_type, {'properties':FIELD_MAPPING}, [name])
        print "创建%s索引和%s表"%(name,index_type)
    except Exception,e :
        print "创建%s索引和%s表失败"%(name,index_type)
        es_logger.error(str(e))
# Drop an ES index entirely.
def delete_index(name):
    """Delete index *name*; failures are logged, not raised.

    NOTE(review): opens its own connection instead of using
    _connect_index() -- presumably just an inconsistency, confirm.
    """
    try :
        conn = pyes.ES(ES_PATH, timeout=200.0)
        conn.indices.delete_index(name)
        print "索引%s被删除"%name
    except Exception,e:
        print "删除索引%s失败"%name
        es_logger.error(str(e))
# Insert a document into ES, then force an index refresh.
def insert_into_es(params,index_name,index_type):
    """Best-effort insert of *params* into index_name/index_type.

    Refresh failures are ignored (the insert itself already succeeded);
    insert failures are logged.  Nothing is ever raised to the caller.
    """
    try :
        CONN_ES.index(params,index_name,index_type)
        try:
            # Refresh so the new document becomes searchable immediately.
            CONN_ES.indices.refresh(index_name)
        except Exception, e:
            pass
    except Exception ,e :
        es_logger.error(str(e))
# Build a Django-model-like accessor for an ES index/type pair.
def get_index_model(index_name,index_type) :
    """Return a pyes queryset model bound to index_name/index_type."""
    from pyes.queryset import generate_model
    return generate_model(index_name, index_type, es_url=ES_PATH)
# Fetch every hit matching *query* against *field*, across all indices.
def march_query_alltag(field,query) :
    """Run a plain MatchQuery(field, query) and return all results as a list."""
    match = MatchQuery(field, query)
    return list(CONN_ES.search(query=match))
# Bool query: must match sub_type exactly, should match *query* on *field*.
def march_query_tag(field,query,sub_type):
    """Return all hits for must(TermQuery sub_type) + should(MatchQuery field)."""
    bool_q = pyes.BoolQuery(must=pyes.TermQuery("sub_type", sub_type),
                            should=pyes.MatchQuery(field, query))
    return list(CONN_ES.search(query=bool_q))
# Exact-term search against one field of a specific index/type.
def search_term(field,query,index_name,index_type):
    """Return all hits for TermQuery(field, query) in index_name/index_type."""
    hits = CONN_ES.search(query=TermQuery(field, query),
                          indices=index_name, doc_types=index_type)
    return list(hits)
# Search on two (optionally three) exact-term fields at once.
def search_more_term(field1,query1,field2,query2,index_name,index_type,kw=None,*arg):
    """AND together TermQuery(field1, query1) and TermQuery(field2, query2).

    arg -- optional positional pair (field3, value3) appended as a third
           must clause.
    kw  -- optional dict with 'sort_field' and 'sort_type' keys; when given,
           the sorted search is issued against [index_name] *without* the
           doc-type restriction (quirk of the original -- TODO confirm
           this asymmetry is intended).
    """
    must1 = pyes.TermQuery(field1,query1)
    must2 = pyes.TermQuery(field2,query2)
    must= [must1,must2]
    if arg:
        must3 = pyes.TermQuery(arg[0],arg[1])
        must.append(must3)
    query = pyes.BoolQuery(must = must)
    if kw:
        search = search_add_sort(query,kw["sort_field"],kw["sort_type"])
        return [i for i in CONN_ES.search(search,indices=[index_name])]
    return [i for i in CONN_ES.search(query =query,indices=index_name,doc_types=index_type) ]
# Wrap *query* in a Search carrying a sort clause (e.g. sort_type="desc").
def search_add_sort(query,sort_field,sort_type):
    """Attach SortOrder(sort_field, sort_type) to *query*; return the Search."""
    srch = Search(query)
    srch.sort.add(SortOrder(sort_field, sort_type))
    return srch
# Query documents whose *field* value lies inside a day range.
def search_range_time(field,start_date,date_range,index_name,index_type):
    """Return hits with start_date <= field <= start_date + date_range days.

    start_date -- 'YYYY-MM-DD' string; date_range -- integer day count.
    Raises ValueError for a non-int or sentinel (-1) date_range.  The
    original executed a bare ``raise`` here, which itself blows up with a
    TypeError instead of giving callers something catchable.
    """
    if not isinstance(date_range, int) or date_range == -1:
        raise ValueError("date_range must be an int day count (not -1), got %r" % (date_range,))
    start_da = datetime.datetime.strptime(start_date, "%Y-%m-%d").date()
    end_date = (start_da + datetime.timedelta(days=date_range)).strftime('%Y-%m-%d')
    must = pyes.RangeQuery(pyes.ESRange(field, from_value=start_date, to_value=end_date))
    query = pyes.BoolQuery(must=must)
    return [hit for hit in CONN_ES.search(query=query, indices=index_name, doc_types=index_type)]
def get_data_id(data):
    """Return the internal ES document id of a pyes result object."""
    return data.get_id()
# Delete one document by its internal ES id (not an application-level id).
def delete_data(index_name,index_type,id):
    """Issue a single-document delete against index_name/index_type."""
    CONN_ES.delete(index=index_name, doc_type=index_type, id=id)
# Remove a document given the pyes result object itself: pull the id plus
# index/type out of the object's metadata, then issue the delete.
def delete_data_from_esobj(es_obj):
    """Delete the document backing *es_obj* from its own index/type."""
    doc_id = get_data_id(es_obj)
    meta = es_obj.get_meta()
    CONN_ES.delete(index=meta['index'], doc_type=meta['type'], id=doc_id)
def create_all_about_xieli_es_index():
try:
#create_index("messageglobalindex","MessageGlobal",GLOBAL_MESSAGE_FIELD_MAPPING)
#create_index("commentglobalindex","CommentGlobal",GLOBAL_COMMENT_FIELD_MAPPING)
#create_index("fileglobalindex","FileGlobal",GLOBAL_FILEIMAGE_FIELD_MAPPING)
#create_index("usernavigationglobalindex","UsernavigationgGlobal",GLOBAL_USERNAVIGATION_FIELD_MAPPING)
#create_index("participationglobalindex","ParticipationGlobal",GLOBAL_PARTICIPATION_FIELD_MAPPING)
delete_index("teamup")
create_index("teamup","CommonObject",ES_FIELD_MAPPING)
except Exception, e:
es_logger.error(str(e))
def delete_all_index():
    """Drop every index this module manages (order preserved from original)."""
    for idx in ("messageglobalindex", "commentglobalindex", "fileglobalindex",
                "usernavigationglobalindex", "participationglobalindex", "teamup"):
        delete_index(idx)
# author nyf
#根据条件获取从ES获取指定个数数据
#index_name : 索引名称
#index_type : 索引表名称
#query_params : 查询条件
#ordering : 排序字段
# start , end 数据标记
def get_document_from_es(index_name,index_type,query_params={},ordering="",start=0,end=1) :
try :
model = get_index_model(index_name,index_type)
except Exception ,e :
print e
return False
if ordering :
return model.objects.filter(**query_params).order_by(ordering)[start:end]
else :
return model.objects.filter(**query_params)[start:end]
#根据条件从ES中删除文档
#index_name : 索引名称
#index_type : 索引表名称
#query_params : 查询条件
def delete_document_from_es(index_name,index_type,query_params={}) :
try :
model = get_index_model(index_name,index_type)
except Exception ,e :
print e
return False
results = model.objects.filter(**query_params).all()
try :
for result in results :
result.delete()
except Exception ,e :
print e
return False
return True
#coding=utf8
#author = yxp
"""
配置elasticsearchp2.2
jdk1.8
本配置文件以 路径,loging为主
"""
from django.conf import settings
ES_PATH = settings.ES_PATH
#ES 定义index字段
"""
analyzed 使用分词器
analyzer 分词器类型
"""
ES_FIELD_MAPPING = {
"id" :
{"index":"no","type":u'integer'},
"sha1" :
{"index":"analyzed","type":u'string','store': 'yes'},
#标题
"title":
{"index":"analyzed","type":u'string','store': 'yes',},
#作者
"author" :
{"index":"analyzed","type":u'string','store': 'yes',},
#创建时间
"creation_time" :
{"index":"analyzed","type":u'date'},
#是否允许主动传播
"broadcast":
{"index":"no","type":u'boolean'},
#参与人数
"nb_participants" :
{"index":"analyzed","type":u'integer'},
#插件类型: 调查问卷,监督举报等
"plugin" :
{"index":"analyzed","type":u'string'},
#功能类别标签:排行榜,求安慰等
"func_tags":
{"index":"analyzed","type":u'string',},
#行业大标签 list
"topic_tags" :
{"index":"analyzed","type":'string','store': 'yes'},
#兴趣小标签 list
"interest_tag":
{"index":"analyzed","type":'string','store': 'yes'},
#描述
"description" :
{"index":"no","type":u'string'},
#版本
"_version_":
{"index":"analyzed","type":u'long'},
#地理位置,经纬度 [经度,纬度]
"geo":
{"index":"analyzed","type":u'geo_point','store': 'yes',},
#发布活动时的参与者限制条件列表
"limits" :
{"index":"analyzed","type":u'string'},
#参与类型 0 :所有用户 1:联系人
"participant_type" :
{"index":"no","type":u'integer'},
#图片列表
"image_sha1s":
{"index":"no","type":u'string'},
#分享设置 1:可以分享 0:不可以分享
"can_be_shared" :
{"index":"no","type":u'integer'},
#分享次数
"nb_shares" :
{"index":"analyzed","type":u'integer'},
#多少人已经完成任务或已签到
"nb_completes":
{"index":"analyzed","type":u'integer'},
#根据坐标反解析出的地理位置信息,比如海淀区学清路38号
"loc" :
{"index":"analyzed","type":u'string'},
#城市
"city" :
{"index":"analyzed","type":u'string'},
#百度地图对应的城市编码
"city_code":
{"index":"analyzed","type":u'integer'},
#发起人类型:0表示以个人名义发起,1表示以公司名义发起
"organizer_type" :
{"index":"analyzed","type":u'integer'},
#是否有红包, 缺省免费没有
"has_bonus" :
{"index":"no","type":u'boolean'},
#此项投票或是任务的红包总金额
"total_amount":
{"index":"analyzed","type":u'float'},
#红包派发给多少人
"nb_rewarded_people":
{"index":"analyzed","type":u'integer'},
#红包派发类型: 0:最先参与的若干个人;1:根据结果审批的若干个人;
"bonus_type" :
{"index":"analyzed","type":u'integer'},
#红包是否已经派发0 :未派发 1:已派发
"is_bonus_paid":
{"index":"analyzed","type":u'integer',},
#红包发放是否已经结算:0 :未结算 1:已结算
"is_account" :
{"index":"analyzed","type":u'integer',},
"creator_sha1" :
{"index":"analyzed","type":u'string',},
"sub_type" :
{"index":"analyzed","type":u'integer',},
"status" :
{"index":"analyzed","type":u'integer',},
}
#分布式comment全局id存储
GLOBAL_COMMENT_FIELD_MAPPING = {
"user_sha1" :
{"index":"not_analyzed","type":u'string','store': 'yes'},
"obj_sha1" :
{"index":"not_analyzed","type":u'string','store': 'yes'},
"global_object_id":
{"index":"not_analyzed","type":u'string','store': 'yes',},
"sha1":
{"index":"not_analyzed","type":u'string','store': 'yes',},
}
#分布式paticipation全局id存储
GLOBAL_PARTICIPATION_FIELD_MAPPING = {
"user_sha1" :
{"index":"analyzed","type":u'string','store': 'yes'},
"obj_sha1" :
{"index":"analyzed","type":u'string','store': 'yes'},
"global_object_id":
{"index":"analyzed","type":u'string','store': 'yes',},
}
#分布式usenav全局id存储
GLOBAL_USERNAVIGATION_FIELD_MAPPING = {
"user_sha1" :
{"index":"analyzed","type":u'string','store': 'yes'},
"global_object_id":
{"index":"analyzed","type":u'string','store': 'yes',},
"time":
{"index":"not_analyzed","type":u'date','store': 'yes',"format": "yyyy-MM-dd"},
}
#分布式fileimage全局id存储
GLOBAL_FILEIMAGE_FIELD_MAPPING = {
"sha1" :
{"index":"analyzed","type":u'string','store': 'yes'},
"global_object_id":
{"index":"analyzed","type":u'string','store': 'yes',},
}
#分布式message全局id存储
GLOBAL_MESSAGE_FIELD_MAPPING = {
"sha1" :
{"index":"not_analyzed","type":u'string','store': 'yes'},
"user_sha1" :
{"index":"not_analyzed","type":u'string','store': 'yes'},
"global_object_id":
{"index":"not_analyzed","type":u'string','store': 'yes',},
"obj_sha1":
{"index":"not_analyzed","type":u'string','store': 'yes',},
"comment_sha1":
{"index":"not_analyzed","type":u'string','store': 'yes',},
"type" :
{"index":"not_analyzed","type":u'integer'},
"creation_time" :
{"index":"not_analyzed","type":u'date'},
"already_read":
{"index":"not_analyzed","type":u'integer'},
}
| [
"niyoufa@tmlsystem.com"
] | niyoufa@tmlsystem.com |
8c3dff6729a6e03970fa298972d23866dabed387 | 8ea28cd0bc3f120faed01b0a2a5cd435a01ef809 | /liclient/__init__.py | 37bf8ec6e9ca51943c83086d42f59b7882242f33 | [
"MIT"
] | permissive | Work4Labs/LinkedIn-Client-Library | 9e02ce941e149f3113da936f96bd8401f74bc5c1 | eb08c78e35d6e37b415a9da9820a7977fd895f81 | refs/heads/master | 2021-01-20T16:41:53.968545 | 2017-07-07T21:21:36 | 2017-07-07T21:21:36 | 23,850,972 | 0 | 0 | null | 2017-07-07T21:21:37 | 2014-09-09T21:30:18 | Python | UTF-8 | Python | false | false | 20,308 | py | #! usr/bin/env python
import datetime
import re
import time
import urllib
import urlparse
import oauth2 as oauth
from httplib2 import HttpLib2ErrorWithResponse
import json
from parsers.lixml import LinkedInXMLParser
from lxml import etree
from lxml.builder import ElementMaker
class LinkedInAPI(object):
def __init__(self, ck, cs):
self.consumer_key = ck
self.consumer_secret = cs
self.api_profile_url = 'http://api.linkedin.com/v1/people/~'
self.api_profile_connections_url = 'http://api.linkedin.com/v1/people/~/connections'
self.api_network_update_url = 'http://api.linkedin.com/v1/people/~/network'
self.api_comment_feed_url = 'http://api.linkedin.com/v1/people/~/network/updates/' + \
'key={NETWORK UPDATE KEY}/update-comments'
self.api_update_status_url = 'http://api.linkedin.com/v1/people/~/current-status'
self.api_share = 'http://api.linkedin.com/v1/people/~/shares'
self.api_mailbox_url = 'http://api.linkedin.com/v1/people/~/mailbox'
self.base_url = 'https://api.linkedin.com'
self.li_url = 'http://www.linkedin.com'
self.request_token_path = '/uas/oauth/requestToken'
self.access_token_path = '/uas/oauth/accessToken'
self.authorize_path = '/uas/oauth/authorize'
self.consumer = oauth.Consumer(self.consumer_key, self.consumer_secret)
self.valid_network_update_codes = ['ANSW', 'APPS', 'CONN', 'JOBS',
'JGRP', 'PICT', 'RECU', 'PRFU',
'QSTN', 'STAT']
def get_request_token(self, redirect_url=None):
"""
Get a request token based on the consumer key and secret to supply the
user with the authorization URL they can use to give the application
access to their LinkedIn accounts
"""
client = oauth.Client(self.consumer)
request_token_url = self.base_url + self.request_token_path
additional_param = {}
if redirect_url:
additional_param = {
'body': "oauth_callback=%s" % urllib.quote_plus(redirect_url),
'headers': {'Content-Type': 'application/x-www-form-urlencoded'}
}
resp, content = client.request(request_token_url, 'POST', **additional_param)
request_token = dict(urlparse.parse_qsl(content))
return request_token
def get_access_token(self, request_token, verifier):
"""
Get an access token based on the generated request_token and the
oauth verifier supplied in the return URL when a user authorizes their
application
"""
token = oauth.Token(
request_token['oauth_token'],
request_token['oauth_token_secret']
)
token.set_verifier(verifier)
client = oauth.Client(self.consumer, token)
access_token_url = self.base_url + self.access_token_path
resp, content = client.request(access_token_url, 'POST')
access_token = dict(urlparse.parse_qsl(content))
return access_token
def get_user_profile(self, access_token, selectors=None, **kwargs):
"""
Get a user profile. If keyword argument "id" is not supplied, this
returns the current user's profile, else it will return the profile of
the user whose id is specificed. The "selectors" keyword argument takes
a list of LinkedIn compatible field selectors.
"""
# Now using json api - GL
kwargs['format'] = 'json'
url = self.api_profile_url
assert isinstance(selectors, (tuple, list)), '"Keyword argument "selectors" must be of type "list" or "tuple"'
if selectors:
url = self.prepare_field_selectors(selectors, url)
user_token, url = self.prepare_request(access_token, url, kwargs)
client = oauth.Client(self.consumer, user_token)
resp, content = client.request(url, 'GET')
if resp.status >= 500:
raise HttpLib2ErrorWithResponse(resp.reason, resp, content)
return resp, json.loads(content)
def get_user_connections(self, access_token, selectors=None, **kwargs):
"""
Get the connections of the current user. Valid keyword arguments are
"count" and "start" for the number of profiles you wish returned. Types
are automatically converted from integer to string for URL formatting
if necessary.
"""
# Now using json api - GL
kwargs['format'] = 'json'
if selectors:
url = self.prepare_field_selectors(selectors, self.api_profile_connections_url)
user_token, url = self.prepare_request(access_token, url, kwargs)
client = oauth.Client(self.consumer, user_token)
resp, content = client.request(url, 'GET')
if resp.status >= 500:
raise HttpLib2ErrorWithResponse(resp.reason, resp, content)
return resp, json.loads(content)
    def get_network_updates(self, access_token, **kwargs):
        """Get network updates for the current user.

        Supported kwargs: 'count' and 'start' (paging); 'type' (a list of
        update codes, each validated against valid_network_update_codes);
        'before'/'after' (datetime objects or epoch-ms timestamps bounding
        the query window -- normalised via dt_obj_to_string).
        Returns parsed results from LinkedInXMLParser.
        """
        if 'type' in kwargs.keys():
            assert isinstance(kwargs['type'], (tuple, list)), 'Keyword argument "type" must be of type "list"'
            # Reject unknown update codes early (raises ValueError).
            [self.check_network_code(c) for c in kwargs['type']]
        if 'before' in kwargs.keys():
            kwargs['before'] = self.dt_obj_to_string(kwargs['before']) if kwargs.get('before') else None
        if 'after' in kwargs.keys():
            kwargs['after'] = self.dt_obj_to_string(kwargs['after']) if kwargs.get('after') else None
        user_token, url = self.prepare_request(access_token, self.api_network_update_url, kwargs)
        client = oauth.Client(self.consumer, user_token)
        resp, content = client.request(url, 'GET')
        # Convert embedded epoch-ms values into human-readable dates.
        content = self.clean_dates(content)
        return LinkedInXMLParser(content).results
def get_comment_feed(self, access_token, network_key):
"""
Get a comment feed for a particular network update. Requires the update key
for the network update as returned by the API.
"""
url = re.sub(r'\{NETWORK UPDATE KEY\}', network_key, self.api_comment_feed_url)
user_token, url = self.prepare_request(access_token, url)
client = oauth.Client(self.consumer, user_token)
resp, content = client.request(url, 'GET')
content = self.clean_dates(content)
return LinkedInXMLParser(content).results
def submit_comment(self, access_token, network_key, bd):
"""
Submit a comment to a network update. Requires the update key for the network
update that you will be commenting on. The comment body is the last positional
argument. NOTE: The XML will be applied to the comment for you.
"""
bd_pre_wrapper = '<?xml version="1.0" encoding="UTF-8"?><update-comment><comment>'
bd_post_wrapper = '</comment></update-comment>'
xml_request = bd_pre_wrapper + bd + bd_post_wrapper
url = re.sub(r'\{NETWORK UPDATE KEY\}', network_key, self.api_comment_feed_url)
user_token, url = self.prepare_request(access_token, url)
client = oauth.Client(self.consumer, user_token)
return client.request(url, method='POST', body=xml_request, headers={'Content-Type': 'application/xml'})
def set_status_update(self, access_token, bd):
"""
Set the status for the current user. The status update body is the last
positional argument. NOTE: The XML will be applied to the status update
for you.
WARNING: the status to set should be utf-8 encoded before passing it to that function
"""
bd_pre_wrapper = '<?xml version="1.0" encoding="UTF-8"?><current-status>'
bd_post_wrapper = '</current-status>'
xml_request = bd_pre_wrapper + bd + bd_post_wrapper
user_token, url = self.prepare_request(access_token, self.api_update_status_url)
client = oauth.Client(self.consumer, user_token)
return client.request(url, method='PUT', body=xml_request)
def share(self, access_token, share_content):
'''
WARNING: all the parameter of the share content to set should be utf-8
encoded before passing it to that function
'''
user_token, url = self.prepare_request(access_token, self.api_share)
client = oauth.Client(self.consumer, user_token)
resp, content = client.request(
url,
method='POST',
body=json.dumps(share_content),
headers={
'x-li-format': 'json',
'Content-Type': 'application/json'
}
)
if resp.status >= 500:
raise HttpLib2ErrorWithResponse(resp.reason, resp, content)
return resp, json.loads(content)
def search(self, access_token, data, field_selector_string=None):
"""
Use the LinkedIn Search API to find users. The criteria for your search
should be passed as the 2nd positional argument as a dictionary of key-
value pairs corresponding to the paramters allowed by the API. Formatting
of arguments will be done for you (i.e. lists of keywords will be joined
with "+")
"""
srch = LinkedInSearchAPI(data, access_token, field_selector_string)
client = oauth.Client(self.consumer, srch.user_token)
rest, content = client.request(srch.generated_url, method='GET')
# print content # useful for debugging...
return LinkedInXMLParser(content).results
def send_message(self, access_token, recipients, subject, body):
"""
Send a message to a connection. "Recipients" is a list of ID numbers,
"subject" is the message subject, and "body" is the body of the message.
The LinkedIn API does not allow HTML in messages. All XML will be applied
for you.
"""
assert isinstance(recipients, (tuple, list)), '"Recipients argument" (2nd position) must be of type "list"'
mxml = self.message_factory(recipients, subject, body)
user_token, url = self.prepare_request(access_token, self.api_mailbox_url)
client = oauth.Client(self.consumer, user_token)
return client.request(url, method='POST', body=mxml, headers={'Content-Type': 'application/xml'})
def send_invitation(self, access_token, recipients, subject, body, **kwargs):
"""
Send an invitation to a user. "Recipients" is an ID number OR email address
(see below), "subject" is the message subject, and "body" is the body of the message.
The LinkedIn API does not allow HTML in messages. All XML will be applied
for you.
NOTE:
If you pass an email address as the recipient, you MUST include "first_name" AND
"last_name" as keyword arguments. Conversely, if you pass a member ID as the
recipient, you MUST include "name" and "value" as keyword arguments. Documentation
for obtaining those values can be found on the LinkedIn website.
"""
if 'first_name' in kwargs.keys():
mxml = self.invitation_factory(recipients, subject, body,
first_name=kwargs['first_name'], last_name=kwargs['last_name'])
else:
mxml = self.invitation_factory(recipients, subject, body,
name=kwargs['name'], value=kwargs['value'])
user_token, url = self.prepare_request(access_token, self.api_mailbox_url)
client = oauth.Client(self.consumer, user_token)
return client.request(url, method='POST', body=mxml, headers={'Content-Type': 'application/xml'})
def prepare_request(self, access_token, url, kws={}):
user_token = oauth.Token(access_token['oauth_token'],
access_token['oauth_token_secret'])
prep_url = url
if kws and 'id' in kws.keys():
prep_url = self.append_id_args(kws['id'], prep_url)
del kws['id']
for k in kws:
if kws[k]:
if '?' not in prep_url:
prep_url = self.append_initial_arg(k, kws[k], prep_url)
else:
prep_url = self.append_sequential_arg(k, kws[k], prep_url)
prep_url = re.sub('&&', '&', prep_url)
return user_token, prep_url
def append_id_args(self, ids, prep_url):
assert isinstance(ids, (tuple, list)), 'Keyword argument "id" must be a list'
if len(ids) > 1:
prep_url = re.sub('/~', '::(', prep_url) # sub out the ~ if a user wants someone else's profile
for i in ids:
prep_url += 'id=' + i + ','
prep_url = re.sub(',$', ')', prep_url)
else:
prep_url = re.sub('~', 'id=' + ids[0], prep_url)
return prep_url
def append_initial_arg(self, key, args, prep_url):
assert '?' not in prep_url, 'Initial argument has already been applied to %s' % prep_url
if isinstance(args, (tuple, list)):
prep_url += '?' + key + '=' + str(args[0])
if len(args) > 1:
prep_url += ''.join(['&' + key + '=' + str(arg) for arg in args[1:]])
else:
prep_url += '?' + key + '=' + str(args)
return prep_url
def append_sequential_arg(self, key, args, prep_url):
if isinstance(args, (tuple, list)):
prep_url += '&' + ''.join(['&' + key + '=' + str(arg) for arg in args])
else:
prep_url += '&' + key + '=' + str(args)
return prep_url
def prepare_field_selectors(self, selectors, url):
prep_url = url
selector_string = ':('
selector_string += ','.join(selectors)
selector_string += ')'
prep_url += selector_string
return prep_url
def check_network_code(self, code):
if code not in self.valid_network_update_codes:
raise ValueError('Code %s not a valid update code' % code)
    def clean_dates(self, content):
        """Rewrite epoch-millisecond text nodes in an XML payload as
        'MM/DD/YYYY HH:MM:SS' strings, returning the re-serialised XML.

        Elements whose text is not an integer (or shorter than 9 digits,
        i.e. not plausibly a millisecond timestamp) are left untouched --
        the bare except deliberately skips them.
        """
        data = etree.fromstring(content)
        for d in data.iter(tag=etree.Element):
            try:
                trial = int(d.text)
                if len(d.text) > 8:
                    # Milliseconds -> seconds before building the datetime.
                    dt = datetime.datetime.fromtimestamp(float(trial) / 1000)
                    d.text = dt.strftime('%m/%d/%Y %I:%M:%S')
            except:
                continue
        return etree.tostring(data)
def dt_obj_to_string(self, dtobj):
if isinstance(dtobj, (int, str, long)):
return dtobj
elif hasattr(dtobj, 'timetuple'):
return time.mktime(int(dtobj.timetuple()) * 1000)
else:
raise TypeError('Inappropriate argument type - use either a datetime object, \
string, or integer for timestamps')
def message_factory(self, recipients, subject, body):
rec_path = '/people/'
E = ElementMaker()
MAILBOX_ITEM = E.mailbox_item
RECIPIENTS = E.recipients
RECIPIENT = E.recipient
PERSON = E.person
SUBJECT = E.subject
BODY = E.body
recs = [RECIPIENT(PERSON(path=rec_path + r)) for r in recipients]
mxml = MAILBOX_ITEM(
RECIPIENTS(
*recs
),
SUBJECT(subject),
BODY(body)
)
return re.sub('mailbox_item', 'mailbox-item', etree.tostring(mxml))
def invitation_factory(self, recipient, subject, body, **kwargs):
id_rec_path = '/people/id='
email_rec_path = '/people/email='
E = ElementMaker()
MAILBOX_ITEM = E.mailbox_item
RECIPIENTS = E.recipients
RECIPIENT = E.recipient
PERSON = E.person
SUBJECT = E.subject
BODY = E.body
CONTENT = E.item_content
REQUEST = E.invitation_request
CONNECT = E.connect_type
FIRST = E.first_name
LAST = E.last_name
AUTH = E.authorization
NAME = E.name
VALUE = E.value
if not '@' in recipient:
recs = RECIPIENT(PERSON(path=id_rec_path + recipient))
auth = CONTENT(REQUEST(CONNECT('friend'), AUTH(NAME(kwargs['name']), VALUE(kwargs['value']))))
else:
recs = RECIPIENT(
PERSON(
FIRST(kwargs['first_name']),
LAST(kwargs['last_name']),
path=email_rec_path + recipient
)
)
auth = CONTENT(REQUEST(CONNECT('friend')))
mxml = MAILBOX_ITEM(
RECIPIENTS(
*recs
),
SUBJECT(subject),
BODY(body),
auth
)
return re.sub('_', '-', etree.tostring(mxml))
class LinkedInSearchAPI(LinkedInAPI):
def __init__(self, params, access_token, field_selector_string=None):
self.api_search_url = 'http://api.linkedin.com/v1/people-search'
if field_selector_string:
self.api_search_url += ':' + field_selector_string
self.routing = {
'keywords': self.keywords,
'name': self.name,
'current_company': self.current_company,
'current_title': self.current_title,
'location_type': self.location_type,
'network': self.network,
'sort_criteria': self.sort_criteria
}
self.user_token, self.generated_url = self.do_process(access_token, params)
def do_process(self, access_token, params):
assert type(params) == type(dict()), 'The passed parameters to the Search API must be a dictionary.'
user_token = oauth.Token(
access_token['oauth_token'],
access_token['oauth_token_secret']
)
url = self.api_search_url
for p in params:
if self.routing.get(p):
self.routing.get(p)(url, params[p])
del params[p]
url = self.process_remaining_params(url, params)
return user_token, url
def process_remaining_params(self, prep_url, ps):
for p in ps:
prep_url = self.append_arg(p, ps[p], prep_url)
return prep_url
def keywords(self, url, ps):
return self.list_argument(url, ps, 'keywords')
def name(self, url, ps):
return self.list_argument(url, ps, 'name')
def current_company(self, url, ps):
return self.true_false_argument(url, ps, 'current-company')
def current_title(self, url, ps):
return self.true_false_argument(url, ps, 'current-title')
def location_type(self, prep_url, ps):
assert ps in ('I', 'Y'), 'Valid parameter types for search-location-type are "I" and "Y"'
self.append_arg('search-location-type', ps, prep_url)
def network(self, prep_url, ps):
assert ps in ('in', 'out'), 'Valid parameter types for network are "in" and "out"'
return self.append_arg('network', ps, prep_url)
def sort_criteria(self, prep_url, ps):
assert ps in ('recommenders', 'distance', 'relevance'), 'Valid parameter types for sort-criteria \
are "recommenders", "distance", and "relevance"'
return self.append_arg('sort-criteria', ps, prep_url)
def true_false_argument(self, prep_url, ps, arg):
ps = 'true' if ps else 'false'
return self.append_arg(arg, ps, prep_url)
def list_argument(self, prep_url, ps, arg):
li = '+'.join(ps)
return self.append_arg(arg, li, prep_url)
def append_arg(self, key, arg, prep_url):
try:
prep_url = self.append_initial_arg(key, arg, prep_url)
except AssertionError:
prep_url = self.append_sequential_arg(key, arg, prep_url)
return prep_url
| [
"mvergerdelbove@work4labs.com"
] | mvergerdelbove@work4labs.com |
8bf8fcd3f39efc095e0fe222385967b76bddb28a | 1262804db6b8760ce60f98970ff142b4c69e12a3 | /models/fcn.py | 42e8a2af525be4785c333a6b2b89dfbbe4b667ef | [] | no_license | Defaultin/PSP-Net | 82537c5f21a34e3bde3c88a0084e2700bfad9f31 | 283acb7e2e4803e52a7a77d92faa58b0164de9c7 | refs/heads/master | 2020-09-22T00:17:59.151619 | 2019-11-30T09:09:48 | 2019-11-30T09:09:48 | 224,983,591 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,905 | py | from keras.models import *
from keras.layers import *
from .config import IMAGE_ORDERING
from .model_utils import get_segmentation_model
from .vgg16 import get_vgg_encoder
from .mobilenet import get_mobilenet_encoder
from .basic_models import vanilla_encoder
from .resnet50 import get_resnet50_encoder
# crop o1 wrt o2: trim whichever tensor is larger so both match spatially.
def crop( o1 , o2 , i ):
    """Return (o1, o2) cropped to a common height/width.

    o1, o2 -- Keras tensors; i -- the model input, used to resolve their
    static output shapes via throwaway Model objects.
    """
    # Static output shape of o2 (channel axis position depends on ordering).
    o_shape2 = Model( i , o2 ).output_shape
    if IMAGE_ORDERING == 'channels_first':
        output_height2 = o_shape2[2]
        output_width2 = o_shape2[3]
    else:
        output_height2 = o_shape2[1]
        output_width2 = o_shape2[2]
    # Static output shape of o1.
    o_shape1 = Model( i , o1 ).output_shape
    if IMAGE_ORDERING == 'channels_first':
        output_height1 = o_shape1[2]
        output_width1 = o_shape1[3]
    else:
        output_height1 = o_shape1[1]
        output_width1 = o_shape1[2]
    # Absolute width / height differences between the two tensors.
    cx = abs( output_width1 - output_width2 )
    cy = abs( output_height2 - output_height1 )
    # Trim the wider tensor by cx columns on the right edge.
    if output_width1 > output_width2:
        o1 = Cropping2D( cropping=((0,0) , ( 0 , cx )), data_format=IMAGE_ORDERING )(o1)
    else:
        o2 = Cropping2D( cropping=((0,0) , ( 0 , cx )), data_format=IMAGE_ORDERING )(o2)
    # Trim the taller tensor by cy rows on the bottom edge.
    if output_height1 > output_height2 :
        o1 = Cropping2D( cropping=((0,cy) , ( 0 , 0 )), data_format=IMAGE_ORDERING )(o1)
    else:
        o2 = Cropping2D( cropping=((0, cy ) , ( 0 , 0 )), data_format=IMAGE_ORDERING )(o2)
    return o1 , o2
def fcn_8( n_classes , encoder=vanilla_encoder , input_height=416, input_width=608 ):
    """Build an FCN-8 segmentation model on top of *encoder*.

    Fuses the encoder's f5 output with f4 and f3 skip connections via
    learned 2x transpose-conv upsampling, then a final 8x transpose-conv
    back toward input resolution.
    """
    img_input , levels = encoder( input_height=input_height , input_width=input_width )
    [f1 , f2 , f3 , f4 , f5 ] = levels
    o = f5
    # fc6/fc7-style convolutionalised dense layers on the coarsest features.
    o = ( Conv2D( 4096 , ( 7 , 7 ) , activation='relu' , padding='same', data_format=IMAGE_ORDERING))(o)
    o = Dropout(0.5)(o)
    o = ( Conv2D( 4096 , ( 1 , 1 ) , activation='relu' , padding='same', data_format=IMAGE_ORDERING))(o)
    o = Dropout(0.5)(o)
    # Per-class score map, then 2x upsample and fuse with the f4 skip.
    o = ( Conv2D( n_classes , ( 1 , 1 ) ,kernel_initializer='he_normal' , data_format=IMAGE_ORDERING))(o)
    o = Conv2DTranspose( n_classes , kernel_size=(4,4) , strides=(2,2) , use_bias=False, data_format=IMAGE_ORDERING )(o)
    o2 = f4
    o2 = ( Conv2D( n_classes , ( 1 , 1 ) ,kernel_initializer='he_normal' , data_format=IMAGE_ORDERING))(o2)
    # Shapes may differ by a few pixels; crop before the elementwise add.
    o , o2 = crop( o , o2 , img_input )
    o = Add()([ o , o2 ])
    # Upsample 2x again and fuse with the f3 skip.
    o = Conv2DTranspose( n_classes , kernel_size=(4,4) , strides=(2,2) , use_bias=False, data_format=IMAGE_ORDERING )(o)
    o2 = f3
    o2 = ( Conv2D( n_classes , ( 1 , 1 ) ,kernel_initializer='he_normal' , data_format=IMAGE_ORDERING))(o2)
    o2 , o = crop( o2 , o , img_input )
    o = Add()([ o2 , o ])
    # Final 8x upsampling back to (roughly) input resolution.
    o = Conv2DTranspose( n_classes , kernel_size=(16,16) , strides=(8,8) , use_bias=False, data_format=IMAGE_ORDERING )(o)
    model = get_segmentation_model(img_input , o )
    model.model_name = "fcn_8"
    return model
def fcn_32( n_classes , encoder=vanilla_encoder , input_height=416, input_width=608 ):
img_input , levels = encoder( input_height=input_height , input_width=input_width )
[f1 , f2 , f3 , f4 , f5 ] = levels
o = f5
o = ( Conv2D( 4096 , ( 7 , 7 ) , activation='relu' , padding='same', data_format=IMAGE_ORDERING))(o)
o = Dropout(0.5)(o)
o = ( Conv2D( 4096 , ( 1 , 1 ) , activation='relu' , padding='same', data_format=IMAGE_ORDERING))(o)
o = Dropout(0.5)(o)
o = ( Conv2D( n_classes , ( 1 , 1 ) ,kernel_initializer='he_normal' , data_format=IMAGE_ORDERING))(o)
o = Conv2DTranspose( n_classes , kernel_size=(64,64) , strides=(32,32) , use_bias=False , data_format=IMAGE_ORDERING )(o)
model = get_segmentation_model(img_input , o )
model.model_name = "fcn_32"
return model
def fcn_8_vgg(n_classes , input_height=416, input_width=608 ):
model = fcn_8( n_classes , get_vgg_encoder , input_height=input_height, input_width=input_width )
model.model_name = "fcn_8_vgg"
return model
def fcn_32_vgg(n_classes , input_height=416, input_width=608 ):
model = fcn_32( n_classes , get_vgg_encoder , input_height=input_height, input_width=input_width )
model.model_name = "fcn_32_vgg"
return model
def fcn_8_resnet50(n_classes , input_height=416, input_width=608 ):
model = fcn_8( n_classes , get_resnet50_encoder , input_height=input_height, input_width=input_width )
model.model_name = "fcn_8_resnet50"
return model
def fcn_32_resnet50(n_classes , input_height=416, input_width=608 ):
model = fcn_32( n_classes , get_resnet50_encoder , input_height=input_height, input_width=input_width )
model.model_name = "fcn_32_resnet50"
return model
def fcn_8_mobilenet(n_classes , input_height=416, input_width=608 ):
model = fcn_8( n_classes , get_mobilenet_encoder , input_height=input_height, input_width=input_width )
model.model_name = "fcn_8_mobilenet"
return model
def fcn_32_mobilenet(n_classes , input_height=416, input_width=608 ):
model = fcn_32( n_classes , get_mobilenet_encoder , input_height=input_height, input_width=input_width )
model.model_name = "fcn_32_mobilenet"
return model
if __name__ == '__main__':
m = fcn_8( 101 )
m = fcn_32( 101 ) | [
"gorynovich0101@gmail.com"
] | gorynovich0101@gmail.com |
edb27b8e4742f4f29adc0b8efd1e48b5aed3f29e | a30e64b8b7813c42e45e85243305e58152eda0de | /django_workshop/wsgi.py | fb54c791f0ca348107316a1c7f131bf2f12bddd6 | [] | no_license | DavideTonin99/django_workshop | ad7532835b201af567457a981177d6fe6f3ff7fa | 4d0d827babe07b87f1d9fdc29e2f5bc8cb6fa5ca | refs/heads/master | 2021-04-27T03:51:29.591838 | 2018-02-27T17:51:38 | 2018-02-27T17:51:38 | 122,720,853 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 408 | py | """
WSGI config for django_workshop project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_workshop.settings")
application = get_wsgi_application()
| [
"davide9935@gmail.com"
] | davide9935@gmail.com |
e4148f0f2cb3c1a939064361f891a0e0ccd8520d | 0fdd74280cb91e19cfd0d3ac2d87587ff991fefe | /helloworldasyncio.py | a787c718331c73dd0b5fc71ebc2e4219f6ba6f27 | [] | no_license | DonAfraidOfTiger/zmq-http-server | c5548d0701fdebc800f8e25f711fd3750e2b1520 | fffb67e3f693f5a2e5fbd76f123b2b62e220c8c3 | refs/heads/master | 2020-03-16T05:25:30.207628 | 2018-05-08T01:50:30 | 2018-05-08T01:50:30 | 132,531,967 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,592 | py | # coding=utf-8
import zmq
from zmq.asyncio import Context, Poller
import asyncio
import zlib
import pickle
context = zmq.asyncio.Context()
socket = context.socket(zmq.STREAM)
socket.bind('tcp://*:5555')
cnt=0
async def worker(name):
while True:
# Wait for next request from client
#print(1)
while True:
message =await socket.recv_multipart(copy=True)
#print(2)
#print("Received request: %s" % message)
if len(message[1])>0:
#print(name,message[1].decode())
break
if False:
#socket.send_multipart([message[0],b'hello world'])
global cnt
cnt+=1
body="Hello,World!,%03d,%012d\r\n" %(name,cnt)
header="HTTP/1.0 200 OK\r\n"+ \
"Content-Type: text/plain\r\n"+ \
"Content-Length: %s\r\n" %len(body.encode())
http_response=header+"\r\n" + body
await socket.send_multipart((message[0], http_response.encode()), flags=zmq.SNDMORE)
else:
http_response=b'HTTP/1.0 200 OK\r\nContent-Type: text/plain\r\nContent-Length: 13\r\n\r\nhello worlf\r\n'
await socket.send_multipart((message[0], http_response), flags=zmq.SNDMORE)
await socket.send_multipart((message[0], b''), flags=zmq.SNDMORE)
#await socket.send_multipart((message[0],b''), flags=zmq.SNDMORE)
asyncio.get_event_loop().run_until_complete(asyncio.wait([
worker(i) for i in range(120)
#sender(),
]))
| [
"noreply@github.com"
] | noreply@github.com |
e484863b83b18d2756248bd07c84329a4dcdeb44 | 72d888337ee2172f44a92050243e86c02d0d080b | /adventofcode_2015/day06_part1.py | 338591f47b27ed3af2d54dd3af4b0292d399c498 | [
"Unlicense"
] | permissive | vesche/snippets | a87a120c27cd74317bb6169397feb1be6b0c47eb | a619c6fd82bafaf2cd444ab1fc4e3095a27e3a71 | refs/heads/master | 2023-04-08T21:46:30.617356 | 2022-02-17T16:44:33 | 2022-02-17T16:44:33 | 41,107,034 | 7 | 1 | Unlicense | 2023-03-31T14:52:16 | 2015-08-20T16:35:42 | Python | UTF-8 | Python | false | false | 1,255 | py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# Advent of Code, Day 06 - Part 1
# https://github.com/vesche
#
coords = {}
for x in range(1000):
for y in range(1000):
coords[(x,y)] = "off"
def lights(update, a, b, c, d):
for X in range(int(a), int(c)+1):
for Y in range(int(b), int(d)+1):
if update == "on":
coords[(X,Y)] = "on"
elif update == "off":
coords[(X,Y)] = "off"
else:
if coords[(X,Y)] == "on":
coords[(X,Y)] = "off"
else:
coords[(X,Y)] = "on"
def main():
with open("day06_input.txt", 'r') as f:
for i in f.read().splitlines():
i = i.split()
if i[0] == "turn":
update = i[1]
a, b = i[2].split(',')
c, d = i[4].split(',')
lights(update, a, b, c, d)
else:
update = i[0]
a, b = i[1].split(',')
c, d = i[3].split(',')
lights(update, a, b, c, d)
count_on = 0
for i in coords:
if coords[i] == "on":
count_on += 1
print count_on
if __name__ == "__main__":
main()
| [
"austinjackson892@gmail.com"
] | austinjackson892@gmail.com |
694e2929e713631f5ceeec1de242dc66c9e42600 | 7aa0c57e728786ea2dd3b04a2ec4d070746f3af6 | /catkin_ws/src/bno_python/src/pwm.py | 3009b69ac7eaa9e352b640e173f7be9b9619de38 | [] | no_license | umblauka/mmWave_mapping | 76feec09f577720437e62facc368446e4b5839d9 | 6d7c90d06669b0f7e3dfe821adac9a731c3c53e1 | refs/heads/master | 2020-05-25T04:49:58.121060 | 2019-05-20T12:31:25 | 2019-05-20T12:31:25 | 187,636,258 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,188 | py | #!usr/bin/env python3
import RPi.GPIO as IO
import time
from simple_pid import PID
import rospy
from sensor_msgs.msg import Imu
import asciichartpy as asc
import os
from pyquaternion import Quaternion
control_input = [0]
first = 1
direction = 1;
def callback(data):
if first == 1:
bearing = data.orientation.z
pid.setpoint = bearing
global first
first=0
clear()
u=data.orientation.z
#quat = Quaternion(data.orientation.w, data.orientation.x, data.orientation.y, data.orientation.z)
#unit_quat = quat.normalised
print("control input: " +str(u))
print("setpoint: " + str(pid.setpoint))
# u_norm = (1-u)/2
#control_input.append(u)
control = pid(u)
control2 = pid1(u)
#flip control signals if turning left is closer to the goal
right.ChangeDutyCycle(control)
left.ChangeDutyCycle(control)
# print(asc.plot(control_input))
print ("error: " + str(abs(u-pid.setpoint)))
# forward()
#calibrate()
if (abs(u - pid.setpoint) < 0.05 ):
forward()
#backwards()
#if direction == 1:
# forward()
# direction = 0
#if (direction == 0):
# backwards()
# direction = 1
# print(control_input)
# print(u)
print("right:" + str(control) + " left:" + str(control2))
def forward():
right.ChangeDutyCycle(6.5)
left.ChangeDutyCycle(8.5)
def backwards():
right.ChangeDutyCycle(8.5)
left.ChangeDutyCycle(6.5)
def turn_right():
# right.ChangeDutyCycle(right_stop)
left.ChangeDutyCycle(7.5)
def spin_in_place():
left.ChangeDutyCycle(7.5)
right.ChangeDutyCycle(7.5)
def calibrate():
left.ChangeDutyCycle(7.5)
right.ChangeDutyCycle(7.5)
clear = lambda: os.system('clear')
setpoint = 0.59
left_stop = 7.1
right_stop = 7.08
left_backwards_max = 6.5
right_backwards_max = 8.5
left_forwards_max = 8.5
right_forwards_max = 6.5
#pid = PID (4,1.5,0.3, setpoint)
pid = PID (10,1.5,2, setpoint)
pid1 = PID(10,1.5,2, setpoint)
pid.output_limits = (6.5, 8.5)
pid1.output_limits = (6.5,8.5)
IO.setwarnings(False)
IO.setmode(IO.BCM)
IO.setup(13,IO.OUT)
IO.setup(19,IO.OUT)
right=IO.PWM(19,50)
left=IO.PWM(13,50)
right.start(0)
left.start(0)
#forward()
rospy.init_node('base_movement')
sub = rospy.Subscriber('/imu0',Imu,callback)
rospy.spin()
#while 1:
# calibrate()
| [
"11karl.laanemets@gmail.com"
] | 11karl.laanemets@gmail.com |
d2d53550d8562b31f2ef00de641a54b3c591e3fd | 5bb8b4c7faeebd16da16ecbcd4a98aabaf688e8f | /data_tools/walker/src-cikm/build_graph/preprocess_venue_word.py | 3d7fc4f53a23214e5cb8bba6ec763cd94551ca7c | [] | no_license | xiaoqinzhe/vrdetection | 014fc2b61c9b30dd2699fdba41089b18b7f060be | 604a812a21a98d72ba8e23a716eb72153bdaa7c4 | refs/heads/master | 2023-07-04T07:44:12.141404 | 2021-08-01T06:21:17 | 2021-08-01T06:21:17 | 150,063,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 942 | py | #coding:utf-8
file_name = '../dataset/paper_title_venue.txt'
venues = set()
word_df = {}
with open(file_name) as file:
for line in file:
paper_id, title, venue = line.strip().split()
words = title.split('-')
for word in words:
if word not in word_df:
word_df[word] = set()
word_df[word].add(venue)
venues.add(venue)
venues.remove('none')
for word, venue in word_df.items():
if 'none' in venue:
venue.remove('none')
venues = list(venues)
venues.sort()
with open('../dataset/venues.txt', 'w') as file:
for venue in venues:
file.write('{}\n'.format(venue))
words = list(word_df.keys())
words.sort()
with open('../dataset/word_df.txt', 'w') as file:
for word in words:
if len(word)==1 or len(word_df[word])<3:
continue
df = len(word_df[word])/len(venues)
file.write('{} {:.4f}\n'.format(word, df))
| [
"xiaoqinzhe@qq.com"
] | xiaoqinzhe@qq.com |
3369509fe59e3139cd5150e48548b19e8ef544dd | 782778e2093eb57324b4b0072f7f0f8d298de05a | /python/queue_with_2stacks.py | 8de8f5b026b456cf51f341684c3e852311953fed | [] | no_license | zyuma/algo | 249d4f6d0169cae9e950e69843a126cc7341dc06 | f6dbbad4b3873f08bc5a92cb3d7813b1e544b8fd | refs/heads/master | 2016-09-06T11:37:31.536799 | 2014-04-09T08:45:06 | 2014-04-09T08:45:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,193 | py | class Node:
def __init__(self, x):
self.val = x
self.next = None
class Stack:
def __init__(self):
self.top = None
def push(self, n):
n.next = self.top
self.top = n
def pop(self):
if(self.top == None):
return None
temp = self.top
self.top = self.top.next
return temp
def print_stack(self):
cursor = self.top
while cursor != None:
print cursor.val
cursor = cursor.next
"""
s = Stack()
s.push(Node(1))
s.push(Node(2))
s.push(Node(3))
s.print_stack()
s.pop()
s.print_stack()
"""
class queue_with_2stacks:
def __init__(self):
self.s1 = Stack()
self.s2 = Stack()
def enqueue(self, n):
self.s1.push(n)
if(self.s2.top == None):
while(self.s1.top != None):
self.s2.push(self.s1.pop())
def dequeue(self):
temp = self.s2.pop()
if(self.s2.top == None):
while(self.s1.top != None):
self.s2.push(self.s1.pop())
return temp
def status(self):
print "s1:"
self.s1.print_stack()
print "s2:"
self.s2.print_stack()
q = queue_with_2stacks()
n1 = Node(1)
n2 = Node(2)
n3 = Node(3)
q.enqueue(n3)
q.status()
q.enqueue(n2)
q.status()
q.enqueue(n1)
q.status()
q.dequeue()
q.status()
q.dequeue()
q.status()
q.enqueue(Node(5))
q.status()
| [
"yuma.tsuboi@gmail.com"
] | yuma.tsuboi@gmail.com |
40c2c6e92aa15b987b3c9d8cf88fb313c1bd848e | e299b7f08640f59f7322265be6626df88629e90e | /reorder_frames.py | 01f27d43da46b4210560f8a60fcab7a0defb48ff | [] | no_license | achntrl/reorder-frames | 3e7f1328222c2b3175e61a88d1f930c3bb1e57f3 | 9a62200d1d5db1b6f208fb68ec2a5f91fe7e1852 | refs/heads/master | 2021-09-02T03:35:51.547903 | 2017-12-30T00:38:06 | 2017-12-30T00:48:41 | 115,757,625 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,543 | py | import numpy as np
import json
from shutil import copy2
import os
from math import isnan, sqrt
from utils import full_size_image_path, new_image_path
def reorder_frames():
""" Cleans frames that shouldn't be include in the video and
reorder the frames. """
with open('matching_data.json', 'r') as f:
data = json.load(f)
N = len(os.listdir('full_size_frames/'))
# Filter out invalid frames:
# Invalid frames have a low number of feature matching other images which
# make the their score go high
to_plot = np.zeros([N, N])
for d in data:
to_plot[d['frames'][0], d['frames'][1]] = compute_score(d)
size = len(to_plot)
invalid_frames = []
for i in range(size):
if np.median(np.concatenate((to_plot[:i, i], to_plot[i, (i+1):]))) >= 1000:
invalid_frames.append(i)
print("Invalid frames:", invalid_frames)
clean_data = list(filter(lambda d: d['frames'][0] not in invalid_frames
and d['frames'][1] not in invalid_frames, data))
data = clean_data
for d in data[:]:
data.append(generate_data_for_transpose(d))
# Attempt 2
img_orders = []
median_weights = []
for i in range(N):
if i in invalid_frames:
continue
frames = list(filter(lambda x: x['frames'][0] == i, data)) + [{
"frames": [i, i],
"mean": 0,
"median": 0,
"matches": 0,
"fraction": 0,
"x": 0,
"y": 0,
}]
sorted_frames = sorted(frames, key=lambda x: np.sign(x['x']) * distance(x))
img_order = [x['frames'][1] for x in sorted_frames]
median_weight = [1/compute_score(x) if compute_score(x) != 0 else 0 for x in sorted_frames]
img_orders.append(img_order)
median_weights.append(median_weight)
np_orders = np.array(img_orders)
np_orders
np_weights = np.array(median_weights)
img_order = []
for i in range(len(np_orders)):
img_order += [most_common_weighted(list(np_orders[:, i]), list(np_weights[:, i]))]
# img_order += [most_common(list(np_orders[:, i]))]
print("# of misplaced images:", compare_results(img_order))
for i, n in enumerate(img_order):
copy2(full_size_image_path(n), new_image_path(i))
####################
# Helper functions #
####################
def compute_score(d):
""" Compute a score for a given pair of frame (the lower the
closer the frames). """
median = d['median'] if (not isnan(d['median']) and d['median'] != 0) else 1000
return 1 / ((d['fraction'] + 0.0001) * (1 + d['matches'])) * median
def generate_data_for_transpose(d):
""" Take a frame (i, j) and returns an entry for frame (j, i). """
new_d = dict(d)
new_d['frames'] = d['frames'][::-1]
new_d['x'] = - d['x']
new_d['y'] = - d['y']
return new_d
def compare_results(img_order):
""" Compute the number of frame in an incorrect position. The
correct sequence have been generated manually after a good enough
sequence was automatically generated. """
good_sequence = [
30, 1, 79, 23, 93, 66, 4, 51, 52, 54, 20, 72, 49, 59, 22, 32, 106, 64, 9, 104, 15, 18, 61,
35, 108, 62, 3, 68, 109, 92, 41, 14, 48, 65, 97, 84, 81, 8, 53, 50, 94, 26, 11, 95, 55, 91,
6, 110, 102, 98, 99, 34, 47, 74, 111, 40, 57, 85, 58, 16, 28, 86, 0, 25, 80, 21, 38, 19, 29,
113, 24, 107, 60, 43, 27, 75, 63, 39, 71, 70, 105, 82, 89, 37, 33, 112, 31, 7, 87, 2, 13,
77, 45, 103, 12, 44, 56, 5, 67, 96
]
count = 0
for i, order in enumerate(img_order):
if good_sequence[i] != order:
count += 1
print("frame", order, "is at position", i, "and should be at position", good_sequence.index(order))
return count
def distance(data):
""" Compute euclidian distance. """
return sqrt(data['x'] ** 2 + data['y'] ** 2)
def most_common(arr):
""" Return the most frequent occurence in an array. """
return max(set(arr), key=arr.count)
def most_common_weighted(arr, weights):
""" Returns the most frequent occurence in array. The frequency
is weighted by the array of weights. """
frequencies = {}
for i, e in enumerate(arr):
if e not in frequencies.keys():
frequencies[e] = weights[i]
else:
frequencies[e] += weights[i]
return max(frequencies, key=frequencies.get)
if __name__ == '__main__':
reorder_frames()
| [
"alexandre.chaintreuil@gmail.com"
] | alexandre.chaintreuil@gmail.com |
f16779f7b11a1e0e672840e1929dec5a51a2c82b | fb285569b73f6bd6724f276757efe8a6ab71d591 | /inference.py | 4c63acce228f70e1f182dbb575ed4692545a2184 | [
"BSD-3-Clause"
] | permissive | zrb250/tacotron2 | d0d6dc0b1ba3a1ec31a7bab42c965096bfba000f | 404f8a4a530776f215259b19aeb24a6e6f4c6da3 | refs/heads/master | 2020-09-10T02:25:48.601636 | 2019-12-05T06:53:00 | 2019-12-05T06:53:00 | 221,625,633 | 1 | 0 | BSD-3-Clause | 2019-11-14T06:15:03 | 2019-11-14T06:15:02 | null | UTF-8 | Python | false | false | 3,783 | py | import matplotlib
matplotlib.use('agg')
import matplotlib.pylab as plt
import sys
sys.path.append('waveglow/')
import numpy as np
import torch
from hparams import create_hparams
from scipy.io.wavfile import write
from train import load_model
from text import text_to_sequence
import os
# from denoiser import Denoiser
def plot_data(data, prename="0", figsize=(16, 4)):
fig, axes = plt.subplots(1, len(data), figsize=figsize)
for i in range(len(data)):
axes[i].imshow(data[i], aspect='auto', origin='bottom',
interpolation='none')
if not os.path.exists("img"):
os.mkdir("img")
plt.savefig(os.path.join("img", str(prename) + "_model_test.jpg"))
def get_WaveGlow():
waveglow_path = 'checkout'
print("load waveglow model !!")
waveglow_path = os.path.join(waveglow_path, "waveglow_256channels.pt")
wave_glow = torch.load(waveglow_path)['model']
wave_glow = wave_glow.remove_weightnorm(wave_glow)
wave_glow.cuda().eval()
for m in wave_glow.modules():
if 'Conv' in str(type(m)):
setattr(m, 'padding_mode', 'zeros')
return wave_glow
def get_Tacotron2(hparams):
checkpoint_path = "checkout"
checkpoint_path = os.path.join(checkpoint_path, "tacotron2_statedict.pt")
print("load tacotron2 model !!")
model = load_model(hparams)
model.load_state_dict(torch.load(checkpoint_path)['state_dict'])
_ = model.cuda().eval()
return model
def main():
hparams = create_hparams()
hparams.sampling_rate = 22050
model = get_Tacotron2(hparams);
waveglow = get_WaveGlow();
# text = "Waveglow is really awesome!"
texts = [
"PRIH1NTIH0NG , IH0N TH AO1NLIY0 SEH1NS WIH1TH HHWIH1CH W AA1R AE1T PRIY0ZEH1NT KAH0NSER1ND , DIH1FER0Z FRAH1M MOW2ST IH1F NAA1T FRAH1M AH0L TH AA1RTS AE1ND KRAE1FTS REH2PRIH0ZEH1NTIH0D IH0N TH EH2KSAH0BIH1SHAH0N",
"AE1ND DIH0TEY1LIH0NG PAH0LIY1S IH0N SAH0VIH1LYAH0N KLOW1DHZ TOW0 B SKAE1TER0D THRUW0AW1T TH SAY1ZAH0BAH0L KRAW1D .",
"AY1 LAH1V YUW1 VEH1RIY0 MAH1CH",
"SAY1AH0NTIH0STS AE1T TH SER1N LAE1BRAH0TAO2RIY0 SEY1 DHEY1 HHAE1V DIH0SKAH1VER0D AH0 NUW1 PAA1RTAH0KAH0L .",
"PREH1ZIH0DAH0NT TRAH1MP MEH1T WIH1TH AH1DHER0 LIY1DER0Z AE1T TH GRUW1P AH1V TWEH1NTIY0 KAA1NFER0AH0NS .",
"LEH1TS GOW1 AW2T TOW0 TH EH1RPAO2RT . TH PLEY1N LAE1NDAH0D TEH1N MIH1NAH0TS AH0GOW2 .",
"IH0N BIY1IH0NG KAH0MPEH1RAH0TIH0VLIY0 MAA1DER0N .",
"VIH1PKIH0D",
"VIH1P KIH0D"
]
if not os.path.exists("results"):
os.mkdir("results")
for text in texts:
sequence = np.array(text_to_sequence(text, ['english_cleaners']))[None, :]
sequence = torch.autograd.Variable(
torch.from_numpy(sequence)).cuda().long()
mel_outputs, mel_outputs_postnet, _, alignments = model.inference(sequence)
plot_data((mel_outputs.float().data.cpu().numpy()[0],
mel_outputs_postnet.float().data.cpu().numpy()[0],
alignments.float().data.cpu().numpy()[0].T), text[:10])
#print("mel_out:", mel_outputs)
#print("mel_out_postnet:", mel_outputs_postnet)
#print("alignments:", alignments)
with torch.no_grad():
audio = waveglow.infer(mel_outputs_postnet, sigma=0.666)
audio = audio * hparams.max_wav_value;
audio = audio.squeeze()
audio = audio.cpu().numpy()
audio = audio.astype('int16')
write("results/{}_synthesis.wav".format(text), hparams.sampling_rate, audio)
print("complete:",text)
# audio_denoised = denoiser(audio, strength=0.01)[:, 0]
# ipd.Audio(audio_denoised.cpu().numpy(), rate=hparams.sampling_rate)
if __name__ == "__main__":
main();
| [
"zhuribing@vipkid.com.cn"
] | zhuribing@vipkid.com.cn |
08378529a99ab4fa465e18b6e34e8649525886dd | 52f3a75f7c88df928d00e3c203b751a860bdf627 | /SimulationScripts/mlSimulator.py | d2179eaef3cc86c8269a9265acd643e6cd11695c | [] | no_license | meiwenPKU/ML-based-Flow-Table-Eviction | 03fe2680d4d12b3eeb02ce4a8f5a519c30649dea | 5027820986b52783a9e8087b5241a7e5e915537c | refs/heads/master | 2020-04-05T09:35:01.054156 | 2019-01-25T18:32:24 | 2019-01-25T18:32:24 | 156,764,434 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,534 | py | '''
This script is used to simulate the scenario where ml model is applied to pick which flow entry should be evicted from the table.
'''
import pandas as pd
import numpy as np
from sklearn.preprocessing import StandardScaler
from sklearn.ensemble import RandomForestClassifier, AdaBoostClassifier
from sklearn.externals import joblib
from datetime import datetime
from sklearn.metrics import recall_score
import os, sys, getopt
# the first element is for cross flows, and the second is for non-cross
def main(argv):
input_file = ''
try:
opts, args = getopt.getopt(argv,"hi:s:T:m:N:l:p:v:r:",["ifile=","statFile=","tableSize=","modelFile=","Nlast=","labelEncoder=","probThreshold=","interval=","timeRange"])
except getopt.GetoptError:
print 'test.py -i <inputfile> -s <statFile> -T <tableSize> -m <modelFile> -N <Nlast> -r <timeRange>'
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print 'test.py -i <inputfile> -s <statFile> -T <tableSize> -m <modelFile> -N <Nlast> -r <timeRange>'
sys.exit()
elif opt in ("-i", "--ifile"):
input_file = arg
elif opt in ("-s", "--statFile"):
stat_file = arg
elif opt in ("-T","--tableSize"):
tableSize = int(arg)
elif opt in ("-m", "--modelFile"):
modelfile = arg
elif opt in ("-N", "--Nlast"):
N_last = int(arg)
elif opt in ("-l", "--labelEncoder"):
labelEncoder = arg
elif opt in ("-p", "--probThreshold"):
pe = float(arg)
elif opt in ("-v", "--interval"):
interval = int(arg)
elif opt in ("-r", "--timeRange"):
timeRange = int(arg)
class flowEntry:
def __init__(self,numPkt,start,end):
self.numPkt = numPkt
self.start = start
self.duration = end - start
self.arrived = 0
class flowTableEntry:
def __init__(self,length,t_last_pkt,protocol):
self.t_last_pkt = t_last_pkt
self.v_interval = []
self.v_len = [length]
self.prob_end = 0
self.protocol = protocol
self.lastUpdate = 0
self.isActive = True
numPredictInactive = 2*[0]
numPredictActive = 2*[0]
numCorrectPredictActive = 2*[0]
numCorrectPredictInactive = 2*[0]
numCapMiss = 2*[0]
numActiveFlow = 2*[0]
#get the flow statistics from stat file
data_stat = pd.read_csv(stat_file)
data_stat['srcPort'] = data_stat['srcPort'].astype(int)
data_stat['dstPort'] = data_stat['dstPort'].astype(int)
data_stat['Start'] = data_stat['Start'].astype(float)
data_stat['End'] = data_stat['End'].astype(float)
v_flows = {}
trainedFlows = set()
for index, entry in data_stat.iterrows():
flowID = entry['srcAddr']+"-"+str(entry['srcPort'])+'-'+entry['dstAddr']+'-'+str(entry['dstPort'])+'-'+entry['Protocol']
v_flows[flowID] = flowEntry(entry['Packets'],entry['Start'], entry['End'])
if entry['Start'] < timeRange:
trainedFlows.add(flowID)
#load the scaler model and built RF model
rf = joblib.load(modelfile)
le = joblib.load(labelEncoder)
protocols = le.classes_
flowTable = {}
fullFlowTable = {}
def removeHPU(cur_time):
for key, entry in flowTable.iteritems():
if cur_time - entry.lastUpdate < interval and entry.lastUpdate != 0:
continue
# get the feature vector of the entry
sample = [cur_time - entry.t_last_pkt]
if len(entry.v_interval) != 0:
sample.append(np.mean(entry.v_interval))
sample.append(np.std(entry.v_interval))
else:
sample.append(0)
sample.append(0)
sample.append((le.transform([entry.protocol]))[0])
for i in range(0,N_last):
if i >= N_last - len(entry.v_len):
sample.append(entry.v_len[i-N_last+len(entry.v_len)])
else:
sample.append(-1)
# do the prediction
entry.prob_end = rf.predict_proba(np.array(sample).reshape(1,-1))[0,1]
# update the stats
index = int(key not in trainedFlows)
if entry.isActive:
numPredictActive[index] += 1
if entry.prob_end < 0.5:
numCorrectPredictActive[index] += 1
else:
numPredictInactive[index] += 1
if entry.prob_end > 0.5:
numCorrectPredictInactive[index] += 1
else:
print "negative false prediction: %s, %f" % (', '.join(map(str, sample)), entry.prob_end)
entry.lastUpdate = cur_time
if entry.prob_end > 0.9:
print "remove %r flow entry with id=%s, tLastVisit=%s, time=%s, confidence=%f" % (flowTable[key].isActive, key,entry.t_last_pkt, cur_time, entry.prob_end)
if flowTable[key].isActive:
numActiveFlow[index] -= 1
print flowTable[key].__dict__
del flowTable[key]
return
# get the flow entry with maximal prob_end
lru = flowTable.values()[0]
for key,x in flowTable.items():
if x.prob_end >= lru.prob_end:
lru = x
lru_key = key
if lru.prob_end < pe:
lru = flowTable.values()[0]
for key, x in flowTable.items():
if x.t_last_pkt <= lru.t_last_pkt:
lru = x
lru_key = key
print "remove %r flow entry with id=%s, tLastVisit=%s, time=%s, confidence=%f" % (flowTable[lru_key].isActive, lru_key,lru.t_last_pkt, cur_time, lru.prob_end)
index = int(lru_key not in trainedFlows)
if flowTable[lru_key].isActive:
numActiveFlow[index] -= 1
print flowTable[lru_key].__dict__
del flowTable[lru_key]
numMissHit = 0
# read the raw data from traces chunk by chunk
for chunk in pd.read_csv(input_file, usecols=['Time','Source','Destination','Protocol','Length','SrcPort','DesPort'], chunksize=1000000):
for index, entry in chunk.iterrows():
if entry['Time'] <= timeRange or (entry['Protocol'] != 'TCP' and entry['Protocol'] != 'UDP'):
continue
if type(entry['SrcPort']) is not str and type(entry['DesPort']) is not str and (np.isnan(entry['SrcPort']) or np.isnan(entry['DesPort'])):
continue
entry['SrcPort'] = str(int(entry['SrcPort']))
entry['DesPort'] = str(int(entry['DesPort']))
flowID = entry['Source']+"-"+entry['SrcPort']+'-'+entry['Destination']+'-'+entry['DesPort']+'-'+entry['Protocol']
v_flows[flowID].arrived += 1
index = int(flowID not in trainedFlows)
if flowID not in flowTable:
#this is a new flow
numActiveFlow[index] += 1
if len(flowTable) == tableSize:
removeHPU(entry['Time'])
flowTable[flowID] = flowTableEntry(entry['Length'],entry['Time'],entry['Protocol'])
numMissHit +=1
if flowID in fullFlowTable:
numCapMiss[index] += 1
fullFlowTable[flowID] += 1
else:
fullFlowTable[flowID] = 0
if numMissHit % 100 == 0:
print "TableSize=%d, numMissHit=%d, numCapMissCross=%d, numCapMissNonCross=%d, numActiveFlowCross=%d, numActiveFlowNonCross=%d, numActivePredictCross=%d, numActivePredictNonCross=%d, numInactivePredictCross=%d, numInactivePredictNonCross=%d, numActiveCorrectPredictCross=%d, numActiveCorrectPredictNonCross=%d, numInactiveCorrectPredictCross=%d, numInactiveCorrectPredictNonCross=%d, time=%f" % (len(flowTable),numMissHit,numCapMiss[0],numCapMiss[1],numActiveFlow[0], numActiveFlow[1], numPredictActive[0], numPredictActive[1], numPredictInactive[0], numPredictInactive[1], numCorrectPredictActive[0], numCorrectPredictActive[1], numCorrectPredictInactive[0], numCorrectPredictInactive[1], entry['Time'])
numPredictActive = 2*[0]
numPredictInactive = 2*[0]
numCorrectPredictActive = 2*[0]
numCorrectPredictInactive = 2*[0]
else:
# this is not a new flow
flowTable[flowID].v_interval.append(entry['Time']-flowTable[flowID].t_last_pkt)
if len(flowTable[flowID].v_interval) > N_last-1:
flowTable[flowID].v_interval = flowTable[flowID].v_interval[1:]
flowTable[flowID].t_last_pkt = entry['Time']
flowTable[flowID].lastUpdate = 0
flowTable[flowID].v_len.append(entry['Length'])
if len(flowTable[flowID].v_len) > N_last:
flowTable[flowID].v_len = flowTable[flowID].v_len[1:]
if flowTable[flowID].isActive:
if flowTable[flowID].t_last_pkt >= v_flows[flowID].start + v_flows[flowID].duration:
flowTable[flowID].isActive = False
numActiveFlow[index] -= 1
print "numMissHit=%d" % numMissHit
print "numCapMissCross=%d" % numCapMiss[0]
print "numCapMissNonCross=%d" % numCapMiss[1]
print fullFlowTable
if __name__ == "__main__":
main(sys.argv[1:])
| [
"yhmpku@live.cn"
] | yhmpku@live.cn |
6b1c0c3eb554c65e899485fe8633016f842b2573 | 72bd16274c66d5ad218b15a22e398ff48e1524d6 | /primary/C048.py | 5f448d3bda472787cdd69fc01acc9db9c577f06b | [] | no_license | DearZack/LearnPython | baaa5468c7e8e96665d1c777581ef81635730ca1 | 8baf170b75d9186384f4404cd001e50fc0b70ff2 | refs/heads/master | 2021-01-16T19:37:34.974170 | 2018-01-14T13:24:36 | 2018-01-14T13:24:36 | 100,184,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4 | py | #略 | [
"zack4work@gmail.com"
] | zack4work@gmail.com |
5a0f58aac33d8bad2c16cd0bc92a93704417daad | 4cdc9ba739f90f6ac4bcd6f916ba194ada77d68c | /剑指offer/第五遍/32-2.分行从上到下打印二叉树.py | cac8702eac2082f33f6071a4d95e0ccd60552e50 | [] | no_license | leilalu/algorithm | bee68690daf836cc5807c3112c2c9e6f63bc0a76 | 746d77e9bfbcb3877fefae9a915004b3bfbcc612 | refs/heads/master | 2020-09-30T15:56:28.224945 | 2020-05-30T03:28:39 | 2020-05-30T03:28:39 | 227,313,730 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,022 | py | """
从上到下按层打印二叉树,同一层的节点按从左到右的顺序打印,每一层打印到一行。
例如:
给定二叉树: [3,9,20,null,null,15,7],
3
/ \
9 20
/ \
15 7
返回其层次遍历结果:
[
[3],
[9,20],
[15,7]
]
"""
class Solution:
def levelOrder(self, root):
# 首先判断输入为空的情况
if not root:
return []
res = []
queue = [root]
thisLevel = 1
nextLevel = 0
level = []
while queue:
node = queue.pop(0)
level.append(node.val)
thisLevel -= 1
if node.left:
queue.append(node.left)
nextLevel += 1
if node.right:
queue.append(node.right)
nextLevel += 1
if thisLevel == 0:
res.append(level)
level = []
thisLevel = nextLevel
nextLevel = 0
return res
| [
"244492644@qq.com"
] | 244492644@qq.com |
059d63260d55c64602ebf59de0199850c1cd57bb | 56362ab3daa28f80c6732d392464dd48c7373b7a | /segment_config_api/models/workspaces.py | 29e9190ab11ed87d2eab750693332a44933bd512 | [
"MIT"
] | permissive | isabella232/segment-config-api | 749a34bfb6ed4cacb176b95bf8cb4f5a92ca0412 | 52b6fa41546f593d0ae08b8616bb54f04a84b29a | refs/heads/master | 2023-08-27T18:05:33.231026 | 2021-11-02T16:47:56 | 2021-11-02T16:47:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,908 | py | from segment_config_api.models.base_model import BaseModel
from segment_config_api.models.sources import SourcesModel
from segment_config_api.models.tracking_plans import TrackingPlansModel
from segment_config_api.models.deletion_and_suppression import RegulationsModel, \
SuppressedUsersModel
from segment_config_api.models.iam import RolesModel, InvitesModel
from segment_config_api.models.event_delivery_metrics import WorkSpaceEventDeliveryMetricsModel
class WorkspacesModel(BaseModel):
def __init__(self, api):
super().__init__(api, f'workspaces')
def workspace(self, name):
return WorkspaceModel(self, name)
def list(self):
return self.send_request('GET')
class WorkspaceModel(BaseModel):
    """Model for one workspace and its nested resources (sources, tracking
    plans, regulations, IAM roles/invites, delivery metrics)."""
    def __init__(self, workspaces, name):
        super().__init__(workspaces.api, f'{workspaces.model_path}/{name}')
    @property
    def sources(self):
        return SourcesModel(self)
    def source(self, name):
        return self.sources.source(name)
    @property
    def tracking_plans(self):
        return TrackingPlansModel(self)
    def tracking_plan(self, plan_id):
        return self.tracking_plans.tracking_plan(plan_id)
    @property
    def regulations(self):
        return RegulationsModel(self)
    def regulation(self, regulation_id):
        # Bug fix: `regulations` is a property, so it must not be called --
        # the original `self.regulations()` invoked the RegulationsModel
        # instance itself and raised TypeError.
        return self.regulations.regulation(regulation_id)
    @property
    def roles(self):
        return RolesModel(self)
    @property
    def invites(self):
        return InvitesModel(self)
    def invite(self, invite_id):
        # Bug fix: propagate the result like source()/tracking_plan() do;
        # the original dropped it and always returned None.
        return self.invites.invite(invite_id)
    def batch_get_summary_metrics(self, source_destination_pairs):
        """Fetch event-delivery summary metrics for (source, destination) pairs."""
        return WorkSpaceEventDeliveryMetricsModel(self) \
            .batch_get_summary(source_destination_pairs)
    @property
    def suppressed_users(self):
        return SuppressedUsersModel(self)
    def get(self):
        """Fetch this workspace's metadata."""
        return self.send_request('GET')
"hi@michaelerasm.us"
] | hi@michaelerasm.us |
0d7b99ef5458910e5dd77581ccf277ae57f85510 | 64e3864b2b422ca4b24519e278d72e2801b2b892 | /WSTradeEngine/btdemo.py | 737682ab5f549b15bdb5908b15efaed15f9a4907 | [] | no_license | jpgithub/jprepost | fec6863ac9aa79ce94e66f5d2a9760e4ecab4acd | 54aa322bacaf9f278cf38f80073b8f596aa417a4 | refs/heads/master | 2023-06-09T07:53:28.521629 | 2022-04-23T23:41:30 | 2022-04-23T23:41:30 | 6,928,161 | 0 | 0 | null | 2023-05-31T18:08:14 | 2012-11-29T20:39:54 | Python | UTF-8 | Python | false | false | 537 | py | from backtesting import Backtest, Strategy
from backtesting.lib import crossover
from backtesting.test import SMA, GOOG
class SmaCross(Strategy):
    """Classic moving-average crossover: long when the fast SMA crosses
    above the slow one, short on the opposite cross."""
    def init(self):
        close = self.data.Close
        # Fast (10-bar) and slow (20-bar) simple moving averages.
        self.fast_sma = self.I(SMA, close, 10)
        self.slow_sma = self.I(SMA, close, 20)
    def next(self):
        if crossover(self.fast_sma, self.slow_sma):
            self.buy()
        elif crossover(self.slow_sma, self.fast_sma):
            self.sell()
# Run the SMA-cross strategy over the bundled GOOG data: 0.2% commission
# per trade; exclusive_orders closes any open position before a new one.
bt = Backtest(GOOG, SmaCross, commission=.002, exclusive_orders=True)
stats = bt.run()
bt.plot()
"jqpan00@gmail.com"
] | jqpan00@gmail.com |
19d921d73586dd898f2432195f2c2e8cdc1662c8 | a9003f17bc0ac4c4bc9347c6a1a646b3cf45298b | /LearningLog/fibo.py | 0386b62642776285ec0834346647b7d97d82b841 | [] | no_license | peterluo/LearningPythonDiary | 7ab5ec0988fa1fb5987022247394ce0124e93600 | cbdd976b88e15b5c45c0f667a2181442e7be5acc | refs/heads/master | 2021-01-10T05:33:25.525685 | 2018-05-15T13:52:39 | 2018-05-15T13:52:39 | 8,559,040 | 22 | 29 | null | null | null | null | UTF-8 | Python | false | false | 357 | py | #! /usr/bin/env python
#coding=utf-8
#Fibonacci numbers module
def fib(n):
    # Print the Fibonacci numbers strictly below n, space-separated.
    # Python 2 print statement: the trailing comma suppresses the newline.
    a, b=0, 1
    while b< n:
        print b,
        a, b=b, a+b
def fib2(n):
    """Return a list of the Fibonacci numbers strictly less than n."""
    previous, current = 0, 1
    series = []
    while current < n:
        series.append(current)
        previous, current = current, previous + current
    return series
if __name__ == '__main__':
    # Script mode: first CLI argument is the (exclusive) upper bound.
    import sys
    fib(int(sys.argv[1]))
| [
"peterluo2010@gmail.com"
] | peterluo2010@gmail.com |
ac3f0b3550b4fd6ceba4b487c04ea89b68b66f38 | 9f41fb8612193d546715801e958137cf821e4979 | /generate_data_array.py | 2a86aefb098d2e3aba0e0b08a3431343f1011c83 | [] | no_license | wsimkins/NS-F-W | aef8cd5d1168b83be6adf5544ba30e13bf2ae044 | 62eb8073a86db4ee1396b0e50cc5b245e2bca1a8 | refs/heads/master | 2021-05-01T05:19:10.418276 | 2017-03-15T19:44:17 | 2017-03-15T19:44:17 | 79,740,185 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,815 | py | #CS122: Group Project - Chess Heatmaps
# for generating heatmaps
#Names: Will Simkins, Natalie Gray, Steven Cooklev
import numpy as np
import re
# Board file letter -> 1-based file number (a=1 ... h=8).
LETTER_TO_NUM = {"a":1, "b":2, "c":3, "d":4, "e":5, "f":6, "g":7, "h":8}
# Piece name -> algebraic-notation letter ("P" used internally for pawns).
PIECE_TO_LETTER = {"rook":"R", "knight":"N", "bishop":"B", "queen":"Q", "king":"K", "pawn":"P"}
# Every (file, rank) offset a knight can move by.
KNIGHT_DIFFS = [(1, 2), (1, -2), (-1, 2), (-1, -2), (2, 1), (2, -1), (-2, 1), (-2, -1)]
def generate_moved_to_data(move_list, color, piece):
    """Build an 8x8 heatmap of destination squares over *move_list*.

    move_list holds 1-tuples of moves in algebraic notation; castling
    ("0-0" / "0-0-0") is translated into the implied king/rook destination
    squares for *color* and *piece* ("rook", "king" or "all").

    Returns (heatmap, kingside): the heatmap is rotated so row 0 is rank 8,
    and kingside counts kingside-flagged moves.
    """
    heatmap_data = np.zeros((8, 8))
    kingside = 0
    for move_tup in move_list:
        move = move_tup[0]
        destination_tuples = []
        destination = re.search("[a-h][1-8]", move)
        if destination:
            destination = destination.group()
            destination_tuples = [(LETTER_TO_NUM[destination[0]], int(destination[1]))]
            # Bug fix: the original read an undefined `destination_tuple`
            # here, raising NameError on the first move with a destination.
            # NOTE(review): index [1] is the rank; if "kingside" means files
            # e-h, index [0] may have been intended -- confirm.
            if destination_tuples[0][1] >= 5:
                kingside += 1
        elif move == "0-0":
            kingside += 1
            if piece == "rook":
                if color == "white":
                    destination_tuples = [(6, 1)]
                else:
                    destination_tuples = [(6, 8)]
            if piece == "king":
                if color == "white":
                    destination_tuples = [(7, 1)]
                else:
                    destination_tuples = [(7, 8)]
            if piece == "all":
                if color == "white":
                    destination_tuples = [(6, 1), (7, 1)]
                else:
                    destination_tuples = [(6, 8), (7, 8)]
        elif move == "0-0-0":
            if piece == "rook":
                if color == "white":
                    destination_tuples = [(4, 1)]
                else:
                    destination_tuples = [(4, 8)]
            if piece == "king":
                if color == "white":
                    destination_tuples = [(3, 1)]
                else:
                    destination_tuples = [(3, 8)]
            if piece == "all":
                if color == "white":
                    destination_tuples = [(3, 1), (4, 1)]
                else:
                    destination_tuples = [(3, 8), (4, 8)]
        for destination_tuple in destination_tuples:
            # Shift the 1-based (file, rank) to 0-based array indices.
            heatmap_data[tuple(np.subtract(destination_tuple, (1, 1)))] += 1
    return np.rot90(heatmap_data).astype("int"), kingside
def generate_time_spent_data(white_move_list, black_move_list):
    """Replay a game from paired algebraic-notation move lists.

    Tracks every piece's square over the whole game and returns
    (white_data, black_data, white_aggression, black_aggression), where each
    data value maps piece name -> 8x8 int array counting, per move of the
    game, how long each square was occupied by that piece, and the
    aggression totals count moves made toward the opponent's side.
    """
    STARTING_SQUARES = {"white": {"rook": [(1, 1), (8, 1)], "knight": [(2, 1), (7, 1)],
                                  "bishop": [(3, 1), (6, 1)], "queen": [(4, 1)], "king": [(5, 1)],
                                  "pawn": [(1, 2), (2, 2), (3, 2), (4, 2), (5, 2), (6, 2), (7, 2), (8, 2)],
                                  "all": [(1, 1), (2, 1), (3, 1), (4, 1), (5, 1), (6, 1), (7, 1), (8, 1),
                                          (1, 2), (2, 2), (3, 2), (4, 2), (5, 2), (6, 2), (7, 2), (8, 2)]},
                        "black": {"rook": [(1, 8), (8, 8)], "knight": [(2, 8), (7, 8)],
                                  "bishop": [(3, 8), (6, 8)], "queen": [(4, 8)], "king": [(5, 8)],
                                  "pawn": [(1, 7), (2, 7), (3, 7), (4, 7), (5, 7), (6, 7), (7, 7), (8, 7)],
                                  "all": [(1, 7), (2, 7), (3, 7), (4, 7), (5, 7), (6, 7), (7, 7), (8, 7),
                                          (1, 8), (2, 8), (3, 8), (4, 8), (5, 8), (6, 8), (7, 8), (8, 8)]}}
    white_data = {"rook": np.zeros((8, 8)), "knight": np.zeros((8, 8)), "bishop": np.zeros((8, 8)), "queen": np.zeros((8, 8)), "king": np.zeros((8, 8)), "pawn": np.zeros((8, 8))}
    black_data = {"rook": np.zeros((8, 8)), "knight": np.zeros((8, 8)), "bishop": np.zeros((8, 8)), "queen": np.zeros((8, 8)), "king": np.zeros((8, 8)), "pawn": np.zeros((8, 8))}
    # Board state as 2-char codes: color letter + piece letter, "e " = empty.
    cur_board = np.array([["BR", "BN", "BB", "BQ", "BK", "BB", "BN", "BR"],
                          ["BP", "BP", "BP", "BP", "BP", "BP", "BP", "BP"],
                          ["e ", "e ", "e ", "e ", "e ", "e ", "e ", "e "],
                          ["e ", "e ", "e ", "e ", "e ", "e ", "e ", "e "],
                          ["e ", "e ", "e ", "e ", "e ", "e ", "e ", "e "],
                          ["e ", "e ", "e ", "e ", "e ", "e ", "e ", "e "],
                          ["WP", "WP", "WP", "WP", "WP", "WP", "WP", "WP"],
                          ["WR", "WN", "WB", "WQ", "WK", "WB", "WN", "WR"]])
    cur_locs = STARTING_SQUARES
    # Seed each heatmap with a 1 on every starting square.
    for piece in white_data.keys():
        white_ss = STARTING_SQUARES["white"][piece]
        black_ss = STARTING_SQUARES["black"][piece]
        for i in range(len(white_ss)):
            white_data[piece][8 - white_ss[i][1]][white_ss[i][0] - 1] = 1
            black_data[piece][8 - black_ss[i][1]][black_ss[i][0] - 1] = 1
    white_aggression = 0
    black_aggression = 0
    # (Debug prints of the full move lists removed.)
    for move_num in range(len(white_move_list)):
        en_passant = False
        white_move = white_move_list[move_num][0]
        black_move = None
        if move_num < len(black_move_list):
            black_move = black_move_list[move_num][0]
        # -- white's move --
        if white_move[0] == "K":
            destination = re.search("[a-h][1-8]", white_move).group()
            destination_tuple = (LETTER_TO_NUM[destination[0]], int(destination[1]))
            prev_loc = cur_locs["white"]["king"][0]
            cur_locs["white"]["king"][0] = destination_tuple
            cur_board[convert_tup(prev_loc)] = "e "
            cur_board[convert_tup(destination_tuple)] = "WK"
        elif white_move[0] == "Q":
            destination = re.search("[a-h][1-8]", white_move).group()
            destination_tuple = (LETTER_TO_NUM[destination[0]], int(destination[1]))
            prev_loc = cur_locs["white"]["queen"][0]
            cur_locs["white"]["queen"][0] = destination_tuple
            cur_board[convert_tup(prev_loc)] = "e "
            cur_board[convert_tup(destination_tuple)] = "WQ"
        elif white_move[0] == "B":
            destination = re.search("[a-h][1-8]", white_move).group()
            destination_tuple = (LETTER_TO_NUM[destination[0]], int(destination[1]))
            bishop_locs = cur_locs["white"]["bishop"]
            if len(bishop_locs) == 1:
                prev_loc = bishop_locs[0]
                cur_locs["white"]["bishop"] = []
            else:
                # The moving bishop is the one on the same diagonal.
                for i in range(len(bishop_locs)):
                    loc = bishop_locs[i]
                    diff = tuple(np.subtract(destination_tuple, loc))
                    if diff[1] != 0:
                        div = diff[0] / diff[1]
                        if div == 1 or div == -1:
                            prev_loc = loc
                            del bishop_locs[i]
                            break
            cur_locs["white"]["bishop"].append(destination_tuple)
            cur_board[convert_tup(prev_loc)] = "e "
            cur_board[convert_tup(destination_tuple)] = "WB"
        elif white_move[0] == "N":
            destination = re.search("[a-h][1-8]", white_move).group()
            destination_tuple = (LETTER_TO_NUM[destination[0]], int(destination[1]))
            knight_locs = cur_locs["white"]["knight"]
            disambig_let = re.search("[a-h]{2}", white_move)
            disambig_num = re.search("[1-8][a-h]", white_move)
            if len(knight_locs) == 1:
                prev_loc = knight_locs[0]
                cur_locs["white"]["knight"] = []
            elif disambig_let:
                # Disambiguation by file, e.g. "Nbd2".
                letter = disambig_let.group()[0]
                file_num = LETTER_TO_NUM[letter]
                for i in range(len(knight_locs)):
                    loc = knight_locs[i]
                    if loc[0] == file_num:
                        prev_loc = loc
                        del knight_locs[i]
                        break
            elif disambig_num:
                # NOTE(review): this feeds a file letter through
                # LETTER_TO_NUM and compares it to the rank -- looks
                # suspicious for rank disambiguation; confirm intent.
                rank_num = int(LETTER_TO_NUM[disambig_num.group()[1]])
                for i in range(len(knight_locs)):
                    loc = knight_locs[i]
                    if loc[1] == rank_num:
                        prev_loc = loc
                        del knight_locs[i]
                        break
            else:
                for i in range(len(knight_locs)):
                    loc = knight_locs[i]
                    diff = tuple(np.subtract(destination_tuple, loc))
                    if diff in KNIGHT_DIFFS:
                        prev_loc = loc
                        del knight_locs[i]
                        break
            cur_locs["white"]["knight"].append(destination_tuple)
            cur_board[convert_tup(prev_loc)] = "e "
            cur_board[convert_tup(destination_tuple)] = "WN"
        elif white_move[0] == "R":
            destination = re.search("[a-h][1-8]", white_move).group()
            destination_tuple = (LETTER_TO_NUM[destination[0]], int(destination[1]))
            rook_locs = cur_locs["white"]["rook"]
            disambig_let = re.search("[a-h]{2}", white_move)
            disambig_num = re.search("[1-8][a-h]", white_move)
            if len(rook_locs) == 1:
                prev_loc = rook_locs[0]
                cur_locs["white"]["rook"] = []
            elif disambig_let:
                letter = disambig_let.group()[0]
                file_num = LETTER_TO_NUM[letter]
                for i in range(len(rook_locs)):
                    loc = rook_locs[i]
                    if loc[0] == file_num:
                        prev_loc = loc
                        del rook_locs[i]
                        break
            elif disambig_num:
                rank_num = int(LETTER_TO_NUM[disambig_num.group()[1]])
                for i in range(len(rook_locs)):
                    loc = rook_locs[i]
                    if loc[1] == rank_num:
                        prev_loc = loc
                        del rook_locs[i]
                        break
            else:
                # Pick the rook with an unblocked path along the file/rank.
                for i in range(len(rook_locs)):
                    loc = rook_locs[i]
                    if loc[0] == destination_tuple[0]:
                        blocked = False
                        for j in range(min(loc[1], destination_tuple[1]) + 1, max(loc[1], destination_tuple[1])):
                            if cur_board[8 - loc[0]][j - 1] != "e ":
                                blocked = True
                                break
                        if not blocked:
                            prev_loc = loc
                            del rook_locs[i]
                            break
                    if loc[1] == destination_tuple[1]:
                        blocked = False
                        for j in range(min(loc[0], destination_tuple[0]), max(loc[0], destination_tuple[0])):
                            if cur_board[8 - loc[1]][j - 1] != "e ":
                                blocked = True
                                break
                        if not blocked:
                            prev_loc = loc
                            del rook_locs[i]
                            break
            cur_locs["white"]["rook"].append(destination_tuple)
            cur_board[convert_tup(prev_loc)] = "e "
            cur_board[convert_tup(destination_tuple)] = "WR"
        elif white_move[0].islower():
            # Pawn move (plain push, double push, capture, en passant,
            # promotion).
            destination = re.search("[a-h][1-8]", white_move).group()
            destination_tuple = (LETTER_TO_NUM[destination[0]], int(destination[1]))
            pawn_locs = cur_locs["white"]["pawn"]
            if "x" not in white_move:
                for i in range(len(pawn_locs)):
                    loc = pawn_locs[i]
                    if loc[0] == destination_tuple[0] and loc[1] == destination_tuple[1] - 1:
                        prev_loc = loc
                        del pawn_locs[i]
                        break
                for i in range(len(pawn_locs)):
                    loc = pawn_locs[i]
                    if loc[0] == destination_tuple[0] and loc[1] == 2 and destination_tuple[1] == 4:
                        prev_loc = loc
                        del pawn_locs[i]
                        break
            else:
                letter = white_move[0]
                file_num = LETTER_TO_NUM[letter]
                for i in range(len(pawn_locs)):
                    loc = pawn_locs[i]
                    if loc[0] == file_num and loc[1] == destination_tuple[1] - 1:
                        prev_loc = loc
                        del pawn_locs[i]
                        break
                # Capture onto an empty square can only be en passant.
                if cur_board[convert_tup(destination_tuple)] == "e ":
                    en_passant = True
                    captured_square = convert_tup((destination_tuple[0], destination_tuple[1] - 1))
                    cur_board[captured_square] = "e "
                    cur_locs["black"]["pawn"].remove((destination_tuple[0], destination_tuple[1] - 1))
            if destination_tuple[1] != 8:
                cur_locs["white"]["pawn"].append(destination_tuple)
                cur_board[convert_tup(prev_loc)] = "e "
                cur_board[convert_tup(destination_tuple)] = "WP"
            else:
                # Promotion on rank 8.
                cur_board[convert_tup(prev_loc)] = "e "
                if "Q" in white_move:
                    cur_locs["white"]["queen"].append(destination_tuple)
                    cur_board[convert_tup(destination_tuple)] = "WQ"
                elif "R" in white_move:
                    cur_locs["white"]["rook"].append(destination_tuple)
                    cur_board[convert_tup(destination_tuple)] = "WR"
                elif "B" in white_move:
                    cur_locs["white"]["bishop"].append(destination_tuple)
                    cur_board[convert_tup(destination_tuple)] = "WB"
                elif "N" in white_move:
                    cur_locs["white"]["knight"].append(destination_tuple)
                    cur_board[convert_tup(destination_tuple)] = "WN"
        if white_move == "0-0":
            prev_loc = (0, 0)
            destination_tuple = (0, 0)
            cur_locs["white"]["rook"].remove((8, 1))
            cur_locs["white"]["rook"].append((6, 1))
            cur_locs["white"]["king"] = [(7, 1)]
            cur_board[7][7] = "e "
            cur_board[7][4] = "e "
            cur_board[7][6] = "WK"
            cur_board[7][5] = "WR"
        if white_move == "0-0-0":
            prev_loc = (0, 0)
            destination_tuple = (0, 0)
            cur_locs["white"]["rook"].remove((1, 1))
            cur_locs["white"]["rook"].append((4, 1))
            cur_locs["white"]["king"] = [(3, 1)]
            cur_board[7][0] = "e "
            cur_board[7][4] = "e "
            cur_board[7][2] = "WK"
            cur_board[7][3] = "WR"
        # Normal captures: drop whichever black piece sat on the square
        # (en-passant captures were already removed above).
        if "x" in white_move and not en_passant:
            for piece in cur_locs["black"].keys():
                for i in range(len(cur_locs["black"][piece])):
                    loc = cur_locs["black"][piece][i]
                    if loc == destination_tuple:
                        del cur_locs["black"][piece][i]
                        break
        if determine_aggression(prev_loc, destination_tuple, "white"):
            white_aggression += 1
        en_passant = False
        # -- black's reply (absent on the final half-move) --
        if black_move:
            if black_move[0] == "K":
                destination = re.search("[a-h][1-8]", black_move).group()
                destination_tuple = (LETTER_TO_NUM[destination[0]], int(destination[1]))
                prev_loc = cur_locs["black"]["king"][0]
                cur_locs["black"]["king"][0] = destination_tuple
                cur_board[convert_tup(prev_loc)] = "e "
                cur_board[convert_tup(destination_tuple)] = "BK"
            elif black_move[0] == "Q":
                destination = re.search("[a-h][1-8]", black_move).group()
                destination_tuple = (LETTER_TO_NUM[destination[0]], int(destination[1]))
                prev_loc = cur_locs["black"]["queen"][0]
                cur_locs["black"]["queen"][0] = destination_tuple
                cur_board[convert_tup(prev_loc)] = "e "
                cur_board[convert_tup(destination_tuple)] = "BQ"
            elif black_move[0] == "B":
                destination = re.search("[a-h][1-8]", black_move).group()
                destination_tuple = (LETTER_TO_NUM[destination[0]], int(destination[1]))
                bishop_locs = cur_locs["black"]["bishop"]
                if len(bishop_locs) == 1:
                    prev_loc = bishop_locs[0]
                    cur_locs["black"]["bishop"] = []
                else:
                    for i in range(len(bishop_locs)):
                        loc = bishop_locs[i]
                        diff = tuple(np.subtract(destination_tuple, loc))
                        # Bug fix: guard diff[1] != 0 BEFORE dividing (the
                        # original divided first and could raise
                        # ZeroDivisionError); now mirrors the white branch.
                        if diff[1] != 0:
                            div = diff[0] / diff[1]
                            if div == 1 or div == -1:
                                prev_loc = loc
                                del bishop_locs[i]
                                break
                cur_locs["black"]["bishop"].append(destination_tuple)
                cur_board[convert_tup(prev_loc)] = "e "
                cur_board[convert_tup(destination_tuple)] = "BB"
            elif black_move[0] == "N":
                destination = re.search("[a-h][1-8]", black_move).group()
                destination_tuple = (LETTER_TO_NUM[destination[0]], int(destination[1]))
                knight_locs = cur_locs["black"]["knight"]
                disambig_let = re.search("[a-h]{2}", black_move)
                disambig_num = re.search("[1-8][a-h]", black_move)
                if len(knight_locs) == 1:
                    prev_loc = knight_locs[0]
                    cur_locs["black"]["knight"] = []
                elif disambig_let:
                    letter = disambig_let.group()[0]
                    file_num = LETTER_TO_NUM[letter]
                    for i in range(len(knight_locs)):
                        loc = knight_locs[i]
                        if loc[0] == file_num:
                            prev_loc = loc
                            del knight_locs[i]
                            break
                elif disambig_num:
                    rank_num = int(LETTER_TO_NUM[disambig_num.group()[1]])
                    for i in range(len(knight_locs)):
                        loc = knight_locs[i]
                        if loc[1] == rank_num:
                            prev_loc = loc
                            del knight_locs[i]
                            break
                else:
                    for i in range(len(knight_locs)):
                        loc = knight_locs[i]
                        diff = tuple(np.subtract(destination_tuple, loc))
                        if diff in KNIGHT_DIFFS:
                            prev_loc = loc
                            del knight_locs[i]
                            break
                cur_locs["black"]["knight"].append(destination_tuple)
                cur_board[convert_tup(prev_loc)] = "e "
                cur_board[convert_tup(destination_tuple)] = "BN"
            elif black_move[0] == "R":
                destination = re.search("[a-h][1-8]", black_move).group()
                destination_tuple = (LETTER_TO_NUM[destination[0]], int(destination[1]))
                rook_locs = cur_locs["black"]["rook"]
                disambig_let = re.search("[a-h]{2}", black_move)
                disambig_num = re.search("[1-8][a-h]", black_move)
                if len(rook_locs) == 1:
                    prev_loc = rook_locs[0]
                    cur_locs["black"]["rook"] = []
                elif disambig_let:
                    letter = disambig_let.group()[0]
                    file_num = LETTER_TO_NUM[letter]
                    for i in range(len(rook_locs)):
                        loc = rook_locs[i]
                        if loc[0] == file_num:
                            prev_loc = loc
                            del rook_locs[i]
                            break
                elif disambig_num:
                    rank_num = int(LETTER_TO_NUM[disambig_num.group()[1]])
                    for i in range(len(rook_locs)):
                        loc = rook_locs[i]
                        if loc[1] == rank_num:
                            prev_loc = loc
                            del rook_locs[i]
                            break
                else:
                    for i in range(len(rook_locs)):
                        loc = rook_locs[i]
                        if loc[0] == destination_tuple[0]:
                            blocked = False
                            for j in range(min(loc[1], destination_tuple[1]), max(loc[1], destination_tuple[1])):
                                if cur_board[8 - loc[0]][j - 1] != "e ":
                                    blocked = True
                                    break
                            if not blocked:
                                prev_loc = loc
                                del rook_locs[i]
                                break
                        elif loc[1] == destination_tuple[1]:
                            blocked = False
                            for j in range(min(loc[0], destination_tuple[0]), max(loc[0], destination_tuple[0])):
                                if cur_board[8 - loc[1]][j - 1] != "e ":
                                    blocked = True
                                    break
                            if not blocked:
                                prev_loc = loc
                                del rook_locs[i]
                                break
                cur_locs["black"]["rook"].append(destination_tuple)
                cur_board[convert_tup(prev_loc)] = "e "
                cur_board[convert_tup(destination_tuple)] = "BR"
            elif black_move[0].islower():
                destination = re.search("[a-h][1-8]", black_move).group()
                destination_tuple = (LETTER_TO_NUM[destination[0]], int(destination[1]))
                pawn_locs = cur_locs["black"]["pawn"]
                if "x" not in black_move:
                    for i in range(len(pawn_locs)):
                        loc = pawn_locs[i]
                        if loc[0] == destination_tuple[0] and loc[1] == destination_tuple[1] + 1:
                            prev_loc = loc
                            del pawn_locs[i]
                            break
                    for i in range(len(pawn_locs)):
                        loc = pawn_locs[i]
                        if loc[0] == destination_tuple[0] and loc[1] == 7 and destination_tuple[1] == 5:
                            prev_loc = loc
                            del pawn_locs[i]
                            break
                else:
                    letter = black_move[0]
                    file_num = LETTER_TO_NUM[letter]
                    for i in range(len(pawn_locs)):
                        loc = pawn_locs[i]
                        if loc[0] == file_num and loc[1] == destination_tuple[1] + 1:
                            prev_loc = loc
                            del pawn_locs[i]
                            break
                    if cur_board[convert_tup(destination_tuple)] == "e ":
                        en_passant = True
                        captured_square = convert_tup((destination_tuple[0], destination_tuple[1] + 1))
                        cur_board[captured_square] = "e "
                        cur_locs["white"]["pawn"].remove((destination_tuple[0], destination_tuple[1] + 1))
                if destination_tuple[1] != 1:
                    cur_locs["black"]["pawn"].append(destination_tuple)
                    cur_board[convert_tup(prev_loc)] = "e "
                    cur_board[convert_tup(destination_tuple)] = "BP"
                else:
                    # Promotion on rank 1.
                    cur_board[convert_tup(prev_loc)] = "e "
                    if "Q" in black_move:
                        cur_locs["black"]["queen"].append(destination_tuple)
                        cur_board[convert_tup(destination_tuple)] = "BQ"
                    elif "R" in black_move:
                        cur_locs["black"]["rook"].append(destination_tuple)
                        cur_board[convert_tup(destination_tuple)] = "BR"
                    elif "B" in black_move:
                        cur_locs["black"]["bishop"].append(destination_tuple)
                        cur_board[convert_tup(destination_tuple)] = "BB"
                    elif "N" in black_move:
                        cur_locs["black"]["knight"].append(destination_tuple)
                        cur_board[convert_tup(destination_tuple)] = "BN"
            if black_move == "0-0":
                prev_loc = (0, 0)
                destination_tuple = (0, 0)
                cur_locs["black"]["rook"].remove((8, 8))
                cur_locs["black"]["rook"].append((6, 8))
                cur_locs["black"]["king"] = [(7, 8)]
                cur_board[0][7] = "e "
                cur_board[0][4] = "e "
                cur_board[0][6] = "BK"
                cur_board[0][5] = "BR"
            if black_move == "0-0-0":
                prev_loc = (0, 0)
                destination_tuple = (0, 0)
                cur_locs["black"]["rook"].remove((1, 8))
                cur_locs["black"]["rook"].append((4, 8))
                cur_locs["black"]["king"] = [(3, 8)]
                cur_board[0][0] = "e "
                cur_board[0][4] = "e "
                cur_board[0][2] = "BK"
                cur_board[0][3] = "BR"
            if "x" in black_move and not en_passant:
                for piece in cur_locs["white"].keys():
                    for i in range(len(cur_locs["white"][piece])):
                        loc = cur_locs["white"][piece][i]
                        if loc == destination_tuple:
                            del cur_locs["white"][piece][i]
                            break
            if determine_aggression(prev_loc, destination_tuple, "black"):
                black_aggression += 1
        # Per-move occupancy accumulation ("all" is tracked in cur_locs but
        # has no heatmap, hence the guard).
        for piece in cur_locs["white"].keys():
            for loc in cur_locs["white"][piece]:
                if piece != "all":
                    white_data[piece][convert_tup(loc)] += 1
            for loc in cur_locs["black"][piece]:
                if piece != "all":
                    black_data[piece][convert_tup(loc)] += 1
    # Convert the float accumulators to int arrays for the caller.
    for piece in white_data.keys():
        white_data[piece] = white_data[piece].astype("int")
        black_data[piece] = black_data[piece].astype("int")
    return white_data, black_data, white_aggression, black_aggression
def generate_captures_heatmap(move_list):
    """Return an 8x8 array counting capture destinations over *move_list*."""
    heatmap_data = np.zeros((8, 8))
    for move_tup in move_list:
        notation = move_tup[0]
        if "x" not in notation:
            continue
        square = re.search("[a-h][1-8]", notation).group()
        file_rank = (LETTER_TO_NUM[square[0]], int(square[1]))
        heatmap_data[convert_tup(file_rank)] += 1
    return heatmap_data
def calculate_trade_statistics(white_move_lists, black_move_lists, num_moves_white, num_moves_black):
    """Compute capture/recapture frequencies across paired game move lists.

    white_move_lists and black_move_lists are parallel lists of games; each
    game is a list of 1-tuples of moves in algebraic notation.
    num_moves_white / num_moves_black are the total move counts supplied by
    the caller.  Returns (white_capture_percent, black_capture_percent,
    white_recapture_percent, black_recapture_percent); ratios with an empty
    denominator are 0.0.
    """
    white_captures = 0
    black_captures = 0
    white_recaptures = 0
    black_recaptures = 0
    for i in range(len(white_move_lists)):
        white_move_list = white_move_lists[i]
        black_move_list = black_move_lists[i]
        for move_num in range(len(white_move_list)):
            white_move = white_move_list[move_num][0]
            if "x" in white_move:
                white_captures += 1
                # A white recapture answers black's PREVIOUS capture on the
                # same square.  Bug fix: guard move_num == 0, where the
                # original's [move_num - 1] wrapped around to black's last
                # move of the game.
                if move_num > 0:
                    black_move = black_move_list[move_num - 1][0]
                    if "x" in black_move:
                        white_capture_loc = re.search("[a-h][1-8]", white_move).group()
                        black_capture_loc = re.search("[a-h][1-8]", black_move).group()
                        if white_capture_loc == black_capture_loc:
                            white_recaptures += 1
        for move_num in range(len(black_move_list)):
            black_move = black_move_list[move_num][0]
            if "x" in black_move:
                black_captures += 1
                # Black recaptures answer white's capture of the SAME move
                # number (white moves first).
                white_move = white_move_list[move_num][0]
                if "x" in white_move:
                    black_capture_loc = re.search("[a-h][1-8]", black_move).group()
                    white_capture_loc = re.search("[a-h][1-8]", white_move).group()
                    if white_capture_loc == black_capture_loc:
                        black_recaptures += 1
    # Bug fix: the original divided by local move counters that were never
    # incremented (always zero -> ZeroDivisionError) while ignoring the
    # caller-supplied totals; it also crashed whenever a side had no
    # captures.  Use the parameters and guard empty denominators.
    white_capture_percent = white_captures / num_moves_white if num_moves_white else 0.0
    black_capture_percent = black_captures / num_moves_black if num_moves_black else 0.0
    white_recapture_percent = white_recaptures / white_captures if white_captures else 0.0
    black_recapture_percent = black_recaptures / black_captures if black_captures else 0.0
    return white_capture_percent, black_capture_percent, white_recapture_percent, black_recapture_percent
def convert_tup(tup):
    """Map a 1-based (file, rank) square to (row, col) indices of an 8x8
    board array where rank 8 is row 0."""
    file_num, rank = tup
    return (8 - rank, file_num - 1)
def determine_aggression(prev_loc, destination_tuple, color):
    """Return True when the move advances toward the opponent: rank
    increases for white, decreases for any other color."""
    forward = destination_tuple[1] - prev_loc[1]
    return forward > 0 if color == "white" else forward < 0
| [
"wsimkins@cs.uchicago.edu"
] | wsimkins@cs.uchicago.edu |
34291debc85164d7bdf03f1960026837e992523d | ad54a035bb0b0ec73853516e4b62daf3598b601b | /django_fullstack/library_revisited/manage.py | 045355e8b7f85761526cd3505808d46bf113c5a1 | [] | no_license | odionfross/django | ce34c29fdc9f828ca0544826c9ef1b35efd324fb | 759d2ddf9322984dd97a967dd1f08e7ee644d9c9 | refs/heads/master | 2022-12-11T15:29:17.506133 | 2020-09-16T04:22:10 | 2020-09-16T04:22:10 | 295,921,376 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 637 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'library_revisited.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"https://www.linkedin.com/in/odionfross/"
] | https://www.linkedin.com/in/odionfross/ |
ea6c48d22148088e90850dbf412004e72a4b7fb4 | 63944e6a5694fe55190681915d0df9e041e7944a | /src/settings.py | 072d0fc52eb8d84b1edcd0cbedf511d3ec923dd4 | [] | no_license | vchristodoulou/WSNM-WebService | 2771e154bb499b3f525f602f1418289ed7e55108 | 9b399092437c750b771deda34885171601e8fc36 | refs/heads/master | 2022-12-25T21:18:00.981979 | 2020-10-08T08:43:31 | 2020-10-08T08:46:54 | 299,975,452 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 174 | py | import os
from dotenv import load_dotenv
# Populate os.environ from a local .env file before reading settings.
load_dotenv()
# Deployment settings, all sourced from the environment (None when unset).
APP_SETTINGS = os.getenv("APP_SETTINGS")
SERVER_IP = os.getenv("SERVER_IP")
SERVER_PORT = os.getenv("SERVER_PORT")
| [
"vchristodoulou1@gmail.com"
] | vchristodoulou1@gmail.com |
cbdcdfa01186aa01782a59fec2f927d1407ab164 | 87ababaa3cf1cb427a59ccca0739193be3386519 | /endopy/utils/mlutils.py | 253da73dd11eeb86aa010c07d56881613d516ffb | [
"MIT"
] | permissive | ikestar99/endopy | f54a6567d218c72ed976faa8aed60107f46bb9ed | f8d330d93ad12fa1654f717fef8a0ce28540a1c9 | refs/heads/main | 2023-07-26T00:19:28.325543 | 2021-09-09T20:59:26 | 2021-09-09T20:59:26 | 349,279,633 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 870 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 8 03:04:49 2021
@author: ike
"""
import numpy as np
import torch
# Preferred compute device: CUDA when available, otherwise CPU.
DEVICE = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# def normalizeProbArray(probArray):
# negMins = -np.min(probArray, axis=0)
# probArray += (negMins * (negMins > 0))
# rowSums = np.sum(probArray, axis=0) + (np.sum(probArray, axis=0) == 0)
# return probArray / rowSums
def makePrediction(model, batch, pbar=None):
    """Run one batch through *model* and return the prediction averaged
    over the batch dimension; ticks *pbar* once when given."""
    inputs = batch["In"].double().to(DEVICE)
    outputs = model(inputs).cpu().detach().numpy()
    if pbar is not None:
        pbar.update(1)
    return np.mean(outputs, axis=0)
def getAveragePrediction(model, loader, pbar):
    """Average makePrediction over every batch in *loader*, with the model
    in eval mode and gradients globally disabled."""
    model.eval()
    torch.set_grad_enabled(False)
    batch_preds = [makePrediction(model, batch, pbar) for batch in loader]
    return np.mean(np.array(batch_preds), axis=0)
| [
"noreply@github.com"
] | noreply@github.com |
616bc2f9eedb173fae2c4e924b031aca3eaed1e1 | a2ac73af04a07bb070cd85c88778608b561dd3e4 | /addons/account_check_writing/account_voucher.py | d5cb90cc89990e5b9c4c9de82486dc995d9007a3 | [] | no_license | sannareddy/openerp-heimai | c849586d6099cc7548dec8b3f1cc7ba8be49594a | 58255ecbcea7bf9780948287cf4551ed6494832a | refs/heads/master | 2021-01-15T21:34:46.162550 | 2014-05-13T09:20:37 | 2014-05-13T09:20:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py | /usr/share/pyshared/openerp/addons/account_check_writing/account_voucher.py | [
"549636719@qq.com"
] | 549636719@qq.com |
02c7b63353fb98d8d93122e25b126b28ddccf128 | d97398d74e7c0ac8fa9d0259bf67909c6921d9fa | /Desafio78.py | f41d3f96c20aeac9e63d0427ba3e0616467c4666 | [] | no_license | robertolopesmendonca/Exercicios-Python | 228550690e8e857895de3bd4749a6fdfee5e111a | 8357f033f5a54dbcf54a395755f1254b10d4fc6f | refs/heads/master | 2023-01-05T14:57:58.894445 | 2020-10-18T15:38:00 | 2020-10-18T15:38:00 | 274,805,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 894 | py | print("""
078) Faça um programa que leia 5 valores numéricos de guarde-os em uma lista. No final,
mostre qual foi o maior e o menor valor digitado e suas respectivas posições na lista.
""")
# Read five integers, then report the extreme values and every index where
# each occurs.  (Prompts and output are user-facing Portuguese strings and
# are kept verbatim.)
listanum = []
for n in range(0, 5):
    listanum.append(int(input(f'Entre com o valor para a posição {n}: ')))
maior = max(listanum)
menor = min(listanum)
print('=-' * 30)
print(f'Você digitou os valores {listanum}')
posicoes_maior = ''.join(f'{i}...' for i, v in enumerate(listanum) if v == maior)
print(f'O maior valor é {maior} nas posições {posicoes_maior}')
posicoes_menor = ''.join(f'{i}...' for i, v in enumerate(listanum) if v == menor)
print(f'O menor valor é {menor} nas posições {posicoes_menor}')
| [
"42980513+robertolopesmendonca@users.noreply.github.com"
] | 42980513+robertolopesmendonca@users.noreply.github.com |
835ca7c6367f26216698af5590c4115896ebb101 | 3abcde3ca444d7612e24a0faf9b89af61d9bad6d | /backend/core/process_QTL_scoring.py | dd3f5a8b86566535f8a4e91cc33fd0522df3d444 | [] | no_license | cbib/COBRA | fa43d5600beaf36d3dcab98bc7b8faa940a02aea | 54f43d3d2867b4f228dccc6630416808e258be77 | refs/heads/master | 2022-09-25T00:20:57.688447 | 2021-02-16T21:53:28 | 2021-02-16T21:53:28 | 29,740,646 | 2 | 3 | null | 2022-09-01T22:15:54 | 2015-01-23T16:10:43 | HTML | UTF-8 | Python | false | false | 11,387 | py | #!/usr/bin/env python
# encoding: utf-8
import sys
# Make the parent package (config, helpers) importable when run directly.
sys.path.append("..")
sys.path.append(".")
from config import *
from helpers.basics import load_config
from helpers.logger import Logger
from helpers.db_helpers import *
from helpers.path import data_dir
from bson.son import SON
# Script supposed to be run in the background to populate the DB with available datasets
## Setup
from numbers import Number
import collections
#from math import logc
# Reuse an existing logger when the module is re-executed in one process.
if "log" not in globals():
    logger = Logger.init_logger('DATA_PROCESSOR_%s'%(cfg.language_code), load_config())
logger.info("Running %s",sys.argv[0])
# Score all genes colocalizing with QTL markers (Python 2 script).
#score all genes in interactions tables !!!
#score all genes in litterature interaction tables !!!
species_to_process=species_col.find({},{"full_name":1})
new_results=[]
for species in species_to_process:
    # need to rebuild the markers file by type:
    #SSR pos_start pos end
    #SNP pos
    # markers_to_process=genetic_markers_col.find({"species":species['full_name']}{})
    print species['full_name']
    # Only these species have the expected mapping-file schema.
    if species['full_name'] == "Prunus persica" or species['full_name']== "Prunus armeniaca":
        markers_to_process=list(genetic_markers_col.find({'mapping_file.Species':species['full_name']},{"mapping_file.Start":1,"mapping_file.Marker ID":1,"mapping_file.Map ID":1,"mapping_file.Chromosome":1,"_id":0} ))
        counter=0
        for markers in markers_to_process:
            counter+=1
            for m in markers['mapping_file']:
                #check if column exists
                if 'Start' in m and 'Chromosome' in m:
                    #skip the first line
                    if m['Start']!="Location":
                        #test if value is set
                        if m['Start']!="" and m['Chromosome']!="":
                            logger.info("markers position %s chrom %s id %s map id %s",m['Start'],m['Chromosome'],m['Marker ID'],m['Map ID'])
                            #pos=int(m['Position'])
                            # QTL traits whose (colocalizing) marker matches this marker ID.
                            qtl_to_process=list(qtls_col.aggregate([
                                #{'$match' : {'species': species['full_name']}},
                                {'$project' : {'mapping_file':1,'_id':0}},
                                {'$unwind':'$mapping_file'},
                                {'$match' : {"$or": [ { 'mapping_file.Colocalizing marker':{'$regex':m['Marker ID'], '$options': 'xi' }}, {'mapping_file.Marker ID':{'$regex':m['Marker ID'], '$options': 'xi' }} ]}},
                                #{'$match' : {'mapping_file.Map ID': m['Map ID'],"$or": [ { 'mapping_file.Colocalizing marker':{'$regex':m['Marker ID'], '$options': 'xi' }}, {'mapping_file.Marker ID':{'$regex':m['Marker ID'], '$options': 'xi' }} ]}},
                                {
                                    '$project':
                                    {
                                        'mapping_file.Trait Name':1,
                                        'mapping_file.Trait Alias':1,
                                        'mapping_file.Study':1,
                                        '_id': 0
                                    }
                                }
                            ]
                            , useCursor=False))
                            cursor_to_table(qtl_to_process)
                            gene_list=[]
                            # Genes whose span contains the marker position on the same chromosome.
                            gene_to_process=list(full_mappings_col.aggregate(
                                [
                                    {'$match' : {'type':'full_table', 'species': species['full_name']}},
                                    {'$project' : {'mapping_file':1,'_id':0}},
                                    {'$unwind':'$mapping_file'},
                                    {'$match' : {'mapping_file.Chromosome': m['Chromosome'],"$and": [ { "mapping_file.End": { "$gt": m['Start'] } }, { "mapping_file.Start": { "$lt": m['Start'] } } ]}},
                                    {
                                        '$project':
                                        {
                                            'mapping_file.Gene ID':1,
                                            'mapping_file.Start':1,
                                            'mapping_file.End':1,
                                            '_id': 0
                                        }
                                    }
                                ]
                                , useCursor=False))
                            # Collect the distinct gene IDs overlapping the marker.
                            for s in gene_to_process:
                                for l in s.keys():
                                    q=s.get('mapping_file',"NA")
                                    logger.info(q['Gene ID'])
                                    if q['Gene ID'] not in gene_list:
                                        gene_list.append(q['Gene ID'])
                            if len(gene_to_process)>0:
                                logger.info("count: %d marker : %s gene number %d",counter, m['Marker ID'],len(gene_list))
                                cursor_to_table(qtl_to_process)
                                cursor_to_table(gene_to_process)
                                for gene in gene_list:
                                    # Weight resistance traits higher (score updates currently disabled).
                                    for s in qtl_to_process:
                                        for l in s.keys():
                                            q=s.get('mapping_file',"NA")
                                            #if q['Trait Name'].contains "resistance" +3
                                            if "resistance" in q['Trait Name'] or "Resistance" in q['Trait Name']:
                                                logger.info("resistance--- %s",q['Trait Name'])
                                                #full_mappings_col.update({'species':species["full_name"],"mapping_file.Gene ID":gene},{'$inc': {'mapping_file.$.Score_QTL': 2 } })
                                            else:
                                                logger.info("other--- %s",q['Trait Name'])
                                                #full_mappings_col.update({'species':species["full_name"],"mapping_file.Gene ID":gene},{'$inc': {'mapping_file.$.Score_QTL': 1 } })
                                    # Propagate half a point to every ortholog of the gene's Plaza ID.
                                    plaza_results=full_mappings_col.find({'species':"Prunus persica",'mapping_file.Gene ID':gene},{'mapping_file.$.Plaza ID': 1 } )
                                    for p in plaza_results:
                                        for values in p['mapping_file']:
                                            plaza_id=values['Plaza ID']
                                            ortholog_result=orthologs_col.find({'species':species["full_name"],'mapping_file.Plaza gene id':plaza_id},{'mapping_file.$':1,'_id':0});
                                            for ortholog in ortholog_result:
                                                ortholog_list=ortholog['mapping_file'][0]['orthologs_list_identifier']
                                                # The identifier field is either one ID or a comma-separated list.
                                                if ortholog_list.find(",") != -1:
                                                    ortholog_split_list=ortholog_list.split(',')
                                                    for ortholog_id in ortholog_split_list:
                                                        if ortholog_id!=plaza_id:
                                                            full_mappings_col.update({"mapping_file.Plaza ID":ortholog_id},{"$inc": {'mapping_file.$.Score_orthologs': 0.5 } })
                                                else:
                                                    if ortholog_list!=plaza_id:
                                                        full_mappings_col.update({"mapping_file.Plaza ID":ortholog_list},{"$inc": {'mapping_file.$.Score_orthologs': 0.5 } })
                            #cursor_to_table(gene_to_process)
                            #for features in gene_to_process:
                            #    for pos in features['mapping_file']:
                            #        logger.info("gene: %s",pos['Gene ID'])
                            #logger.info("gene: %s start: %s end: %s",pos[0],pos[1],pos[2])
    # Disabled draft for Cucumis melo (different marker/QTL schema).
    #elif species['full_name']=="Cucumis melo":
    #    markers_to_process=list(genetic_markers_col.find({'species':species['full_name']},{"mapping_file.Start":1,"mapping_file.Marker ID":1,"mapping_file.Chromosome":1,"_id":0} ))
    #    for markers in markers_to_process:
    #        for m in markers['mapping_file']:
    #            if 'Start' in m and 'Chromosome' in m:
    #                if m['Start']!="" and m['Chromosome']!="":
    #                    logger.info("markers position %s chrom %s id %s",m['Start'],m['Chromosome'],m['Marker ID'])
    #qtl_to_process=list(qtls_col.aggregate([
    #    {'$project' : {'mapping_file':1,'_id':0}},
    #    {'$unwind':'$mapping_file'},
    #    {'$match' : {"$or": [ { 'mapping_file.Marker ID':{'$regex':m['Marker ID'], '$options': 'xi' }}, {'mapping_file.Marker ID 2':{'$regex':m['Marker ID'], '$options': 'xi' }} ]}},
    #    {
    #        '$project':
    #        {
    #            'mapping_file.QTL Name':1,
    #            'mapping_file.Alias':1,
    #            'mapping_file.QTL ID':1,
    #            'mapping_file.Chromosome':1,
    #            'mapping_file.Map ID':1,
    #            'mapping_file.Start':1,
    #            'mapping_file.End':1,
    #            'mapping_file.Marker ID':1,
    #            'mapping_file.Marker ID 2':1,
    #            '_id': 0
    #        }
    #    }
    #]
    #, useCursor=False))
    #cursor_to_table(qtl_to_process)
    #    gene_to_process=list(full_mappings_col.aggregate(
    #        [
    #            {'$match' : {'type':'full_table', 'species': species['full_name']}},
    #            {'$project' : {'mapping_file':1,'_id':0}},
    #            {'$unwind':'$mapping_file'},
    #            {'$match' : {'mapping_file.Chromosome': m['Chromosome'],"$and": [ { "mapping_file.End": { "$gt": m['Start'] } }, { "mapping_file.Start": { "$lt": m['Start'] } } ]}},
    #            {
    #                '$project':
    #                {
    #                    'mapping_file.Gene ID':1,
    #                    'mapping_file.Start':1,
    #                    'mapping_file.End':1,
    #                    '_id': 0
    #                }
    #            }
    #        ]
    #        , useCursor=False))
    #    cursor_to_table(gene_to_process)
    #else:
    #    print "this species QTL is not described"
"bdartigues@gmail.com"
] | bdartigues@gmail.com |
311729967843c5ec8099011965d0fc07f899187d | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-iotedge/huaweicloudsdkiotedge/v2/model/container_configs_dto.py | 5f77f61d48e99914c232542c25d08d3c747de972 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 6,284 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ContainerConfigsDTO:
    """Auto-generated model describing container runtime configuration.

    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    sensitive_list = []
    openapi_types = {
        'privileged': 'bool',
        'host_network': 'bool',
        'restart_policy': 'str',
        'container_port_list': 'list[ContainerPortDTO]'
    }
    attribute_map = {
        'privileged': 'privileged',
        'host_network': 'host_network',
        'restart_policy': 'restart_policy',
        'container_port_list': 'container_port_list'
    }
    def __init__(self, privileged=None, host_network=None, restart_policy=None, container_port_list=None):
        """ContainerConfigsDTO
        The model defined in huaweicloud sdk
        :param privileged: Whether the container runs in privileged mode
        :type privileged: bool
        :param host_network: Whether to use the host network mode
        :type host_network: bool
        :param restart_policy: Restart policy applied when the container health check fails
        :type restart_policy: str
        :param container_port_list: Container port mappings
        :type container_port_list: list[:class:`huaweicloudsdkiotedge.v2.ContainerPortDTO`]
        """
        self._privileged = None
        self._host_network = None
        self._restart_policy = None
        self._container_port_list = None
        self.discriminator = None
        # Only restart_policy is always assigned; the others keep None unless given.
        if privileged is not None:
            self.privileged = privileged
        if host_network is not None:
            self.host_network = host_network
        self.restart_policy = restart_policy
        if container_port_list is not None:
            self.container_port_list = container_port_list
    @property
    def privileged(self):
        """Gets the privileged of this ContainerConfigsDTO.
        Whether the container runs in privileged mode.
        :return: The privileged of this ContainerConfigsDTO.
        :rtype: bool
        """
        return self._privileged
    @privileged.setter
    def privileged(self, privileged):
        """Sets the privileged of this ContainerConfigsDTO.
        Whether the container runs in privileged mode.
        :param privileged: The privileged of this ContainerConfigsDTO.
        :type privileged: bool
        """
        self._privileged = privileged
    @property
    def host_network(self):
        """Gets the host_network of this ContainerConfigsDTO.
        Whether to use the host network mode.
        :return: The host_network of this ContainerConfigsDTO.
        :rtype: bool
        """
        return self._host_network
    @host_network.setter
    def host_network(self, host_network):
        """Sets the host_network of this ContainerConfigsDTO.
        Whether to use the host network mode.
        :param host_network: The host_network of this ContainerConfigsDTO.
        :type host_network: bool
        """
        self._host_network = host_network
    @property
    def restart_policy(self):
        """Gets the restart_policy of this ContainerConfigsDTO.
        Restart policy applied when the container health check fails.
        :return: The restart_policy of this ContainerConfigsDTO.
        :rtype: str
        """
        return self._restart_policy
    @restart_policy.setter
    def restart_policy(self, restart_policy):
        """Sets the restart_policy of this ContainerConfigsDTO.
        Restart policy applied when the container health check fails.
        :param restart_policy: The restart_policy of this ContainerConfigsDTO.
        :type restart_policy: str
        """
        self._restart_policy = restart_policy
    @property
    def container_port_list(self):
        """Gets the container_port_list of this ContainerConfigsDTO.
        Container port mappings.
        :return: The container_port_list of this ContainerConfigsDTO.
        :rtype: list[:class:`huaweicloudsdkiotedge.v2.ContainerPortDTO`]
        """
        return self._container_port_list
    @container_port_list.setter
    def container_port_list(self, container_port_list):
        """Sets the container_port_list of this ContainerConfigsDTO.
        Container port mappings.
        :param container_port_list: The container_port_list of this ContainerConfigsDTO.
        :type container_port_list: list[:class:`huaweicloudsdkiotedge.v2.ContainerPortDTO`]
        """
        self._container_port_list = container_port_list
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                # Mask attributes listed in sensitive_list when serializing.
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
    def __repr__(self):
        """For `print`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ContainerConfigsDTO):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
b686b39238f5f30b0276c7fce01f77cfec98e68b | ed711369a8b390099934f8bc261b4e4a14094f16 | /works/migrations/0001_initial.py | 50f1094aab81627e70920f7c96b649fc3f4407bd | [] | no_license | merlinsbeard/artwork-portfolio | edb434418c86c7f68436950fa4bf58a3be17d3c3 | 1881651263b505310ec78e48da15dc4b48760f2c | refs/heads/master | 2022-12-16T04:56:36.486848 | 2020-04-11T14:37:36 | 2020-04-11T14:37:36 | 79,799,221 | 1 | 0 | null | 2022-12-08T00:39:31 | 2017-01-23T11:42:14 | Python | UTF-8 | Python | false | false | 823 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-17 09:34
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial migration for the works app: creates the Work table.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Work',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                ('description', models.TextField()),
                ('slug', models.SlugField()),
                ('link', models.URLField()),
                ('image', models.ImageField(upload_to='image/')),
                ('hidden', models.BooleanField(default=False)),
            ],
        ),
    ]
| [
"bjpaat@dailywarrior.ph"
] | bjpaat@dailywarrior.ph |
db26b2cdc544678e8d28dc70d4b26e50d0d81efa | 6f76defa97ade798a805c93a002847f01e0223af | /683.py | 00e5976ef52b6d9fcc4c6c1f052f4c6a8ad81eca | [] | no_license | Mr-Deason/leetcode | f8b68a11f7766bfd3767c8ec61a6648a4848c075 | e0211f6388f637e5aa81f3233b9a5b0f9f1c1268 | refs/heads/master | 2021-07-13T20:23:13.411943 | 2017-10-17T06:08:25 | 2017-10-17T06:08:25 | 107,225,117 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,303 | py | class Solution(object):
def kEmptySlots(self, flowers, k):
"""
:type flowers: List[int]
:type k: int
:rtype: int
"""
#TLE
ans = -1
n = len(flowers)
import numpy as np
bloom = np.array([flowers[0]])
for day in range(2, n+1):
pos = flowers[day-1]
find = np.searchsorted(bloom, pos)
if find > 0 and bloom[find-1] == pos-k-1:
return day
if find < day-1 and bloom[find] == pos+k+1:
return day
bloom = np.insert(bloom, find, pos)
return ans
def stringToIntegerList(input):
    """Parse a bracketed list such as "[1,2,3]" into a list of ints."""
    inner = input.strip()[1:-1]
    return [int(token) for token in inner.split(",")] if inner else []
def main():
    # Python 2 only: uses generator .next() and the `print` statement.
    # Reads alternating lines from stdin: a flowers list, then k.
    import sys
    def readlines():
        # Yield stdin lines with the trailing newline removed.
        for line in sys.stdin:
            yield line.strip('\n')
    lines = readlines()
    while True:
        try:
            line = lines.next()
            flowers = stringToIntegerList(line)
            line = lines.next()
            k = int(line)
            ret = Solution().kEmptySlots(flowers, k)
            out = str(ret)
            print out
        except StopIteration:
            # End of input reached.
            break
if __name__ == '__main__':
    main()
"mr.d_style@hotmail.com"
] | mr.d_style@hotmail.com |
f8ca76fd22f085e062460354e8d995add278d7e1 | 589ac0a71099f4ee6857a31986305f0df2c16ede | /Bio/Phylo/NewickIO.py | f0579a6eca4eaf30aceb11ae08827e40072c94c4 | [
"LicenseRef-scancode-biopython"
] | permissive | barendt/biopython | 802aad89005b302b6523a934071796edbd8ac464 | 391bcdbee7f821bff3e12b75c635a06bc1b2dcea | refs/heads/rna | 2021-11-09T19:11:56.345314 | 2010-05-01T02:44:42 | 2010-05-01T02:44:42 | 636,700 | 0 | 0 | NOASSERTION | 2021-11-05T13:10:14 | 2010-04-29T02:35:46 | Python | UTF-8 | Python | false | false | 9,781 | py | # Copyright (C) 2009 by Eric Talevich (eric.talevich@gmail.com)
# Based on Bio.Nexus, copyright 2005-2008 by Frank Kauff & Cymon J. Cox.
# All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""I/O function wrappers for the Newick file format.
See: U{ http://evolution.genetics.washington.edu/phylip/newick_doc.html }
"""
__docformat__ = "epytext en"
from cStringIO import StringIO
from Bio.Phylo import Newick
# Definitions retrieved from Bio.Nexus.Trees
NODECOMMENT_START = '[&'
NODECOMMENT_END = ']'
class NewickError(Exception):
    """Raised when a Newick tree cannot be constructed from the input."""
# ---------------------------------------------------------
# Public API
def parse(handle):
    """Iterate over the trees in a Newick file handle.
    @return: a generator of Bio.Phylo.Newick.Tree objects.
    """
    parser = Parser(handle)
    return parser.parse()
def write(trees, handle, plain=False, **kwargs):
    """Write trees in Newick format to the given file handle.
    @return: number of trees written.
    """
    writer = Writer(trees)
    return writer.write(handle, plain=plain, **kwargs)
# ---------------------------------------------------------
# Input
class Parser(object):
    """Parse a Newick tree given a file handle.
    Based on the parser in Bio.Nexus.Trees.
    """
    def __init__(self, handle):
        self.handle = handle
    @classmethod
    def from_string(cls, treetext):
        # Alternate constructor: parse directly from an in-memory string.
        handle = StringIO(treetext)
        return cls(handle)
    def parse(self, values_are_support=False, rooted=False):
        """Parse the text stream this object was initialized with."""
        self.values_are_support = values_are_support
        self.rooted = rooted
        # Accumulate lines until a ';' terminates one tree, then emit it.
        buf = ''
        for line in self.handle:
            buf += line.rstrip()
            if buf.endswith(';'):
                yield self._parse_tree(buf)
                buf = ''
        if buf:
            # Last tree is missing a terminal ';' character -- that's OK
            yield self._parse_tree(buf)
    def _parse_tree(self, text):
        """Parses the text representation into an Tree object."""
        # XXX what global info do we have here? Any? Use **kwargs?
        return Newick.Tree(root=self._parse_subtree(text))
    def _parse_subtree(self, text):
        """Parse (a,b,c...)[[[xx]:]yy] into subcomponents, recursively."""
        text = text.strip().rstrip(';')
        if text.count('(')!=text.count(')'):
            raise NewickError("Parentheses do not match in (sub)tree: " + text)
        # Text is now "(...)..." (balanced parens) or "..." (leaf node)
        if text.count('(') == 0:
            # Leaf/terminal node -- recursion stops here
            return self._parse_tag(text)
        # Handle one layer of the nested subtree
        # XXX what if there's a paren in a comment or other string?
        close_posn = text.rfind(')')
        subtrees = []
        # Locate subtrees by counting nesting levels of parens
        plevel = 0
        prev = 1
        for posn in range(1, close_posn):
            if text[posn] == '(':
                plevel += 1
            elif text[posn] == ')':
                plevel -= 1
            elif text[posn] == ',' and plevel == 0:
                # Top-level comma separates sibling subtrees.
                subtrees.append(text[prev:posn])
                prev = posn + 1
        subtrees.append(text[prev:close_posn])
        # Construct a new clade from trailing text, then attach subclades
        clade = self._parse_tag(text[close_posn+1:])
        clade.clades = [self._parse_subtree(st) for st in subtrees]
        return clade
    def _parse_tag(self, text):
        """Extract the data for a node from text.
        @return: Clade instance containing any available data
        """
        # Extract the comment
        comment_start = text.find(NODECOMMENT_START)
        if comment_start != -1:
            comment_end = text.find(NODECOMMENT_END)
            if comment_end == -1:
                raise NewickError('Error in tree description: '
                                  'Found %s without matching %s'
                                  % (NODECOMMENT_START, NODECOMMENT_END))
            comment = text[comment_start+len(NODECOMMENT_START):comment_end]
            text = text[:comment_start] + text[comment_end+len(NODECOMMENT_END):]
        else:
            comment = None
        clade = Newick.Clade(comment=comment)
        # Extract name (taxon), and optionally support, branch length
        # Float values are support and branch length, the string is name/taxon
        values = []
        for part in (t.strip() for t in text.split(':')):
            if part:
                try:
                    values.append(float(part))
                except ValueError:
                    assert clade.name is None, "Two string taxonomies?"
                    clade.name = part
        if len(values) == 1:
            # Real branch length, or support as branch length
            if self.values_are_support:
                clade.support = values[0]
            else:
                clade.branch_length = values[0]
        elif len(values) == 2:
            # Two non-taxon values: support comes first. (Is that always so?)
            clade.support, clade.branch_length = values
        elif len(values) > 2:
            raise NewickError("Too many colons in tag: " + text)
        return clade
# ---------------------------------------------------------
# Output
class Writer(object):
    """Based on the writer in Bio.Nexus.Trees (str, to_string)."""
    def __init__(self, trees):
        self.trees = trees
    def write(self, handle, **kwargs):
        """Write this instance's trees to a file handle."""
        count = 0
        for treestr in self.to_strings(**kwargs):
            handle.write(treestr + '\n')
            count += 1
        return count
    def to_strings(self, support_as_branchlengths=False,
                   branchlengths_only=False, plain=False,
                   plain_newick=True, ladderize=None,
                   max_support=1.0):
        """Return an iterable of PAUP-compatible tree lines."""
        # If there's a conflict in the arguments, we override plain=True
        if support_as_branchlengths or branchlengths_only:
            plain = False
        make_info_string = self._info_factory(plain, support_as_branchlengths,
                                              branchlengths_only, max_support)
        def newickize(clade):
            """Convert a node tree to a Newick tree string, recursively."""
            if clade.is_terminal(): #terminal
                return ((clade.name or '')
                        + make_info_string(clade, terminal=True))
            else:
                subtrees = (newickize(sub) for sub in clade)
                return '(%s)%s' % (','.join(subtrees),
                                   make_info_string(clade))
        # Convert each tree to a string
        for tree in self.trees:
            if ladderize in ('left', 'LEFT', 'right', 'RIGHT'):
                # Nexus compatibility shim, kind of
                tree.ladderize(reverse=(ladderize in ('right', 'RIGHT')))
            rawtree = newickize(tree.root) + ';'
            if plain_newick:
                yield rawtree
                continue
            # Nexus-style (?) notation before the raw Newick tree
            treeline = ['tree', (tree.name or 'a_tree'), '=']
            if tree.weight != 1:
                treeline.append('[&W%s]' % round(float(tree.weight), 3))
            if tree.rooted:
                treeline.append('[&R]')
            treeline.append(rawtree)
            yield ' '.join(treeline)
    def _info_factory(self, plain, support_as_branchlengths,
                      branchlengths_only, max_support):
        """Return a function that creates a nicely formatted node tag."""
        if plain:
            # Plain tree only. That's easy.
            def make_info_string(clade, terminal=False):
                return ''
        elif support_as_branchlengths:
            # Support as branchlengths (eg. PAUP), ignore actual branchlengths
            def make_info_string(clade, terminal=False):
                if terminal:
                    # terminal branches have 100% support
                    return ':%1.2f' % max_support
                else:
                    return ':%1.2f' % (clade.support)
        elif branchlengths_only:
            # write only branchlengths, ignore support
            def make_info_string(clade, terminal=False):
                return ':%1.5f' % (clade.branch_length)
        else:
            # write support and branchlengths (e.g. .con tree of mrbayes)
            def make_info_string(clade, terminal=False):
                if terminal:
                    return ':%1.5f' % (clade.branch_length or 1.0)
                else:
                    if (clade.branch_length is not None
                        and hasattr(clade, 'support')
                        and clade.support is not None):
                        # we have blen and suppport
                        return '%1.2f:%1.5f' % (clade.support,
                                                clade.branch_length)
                    elif clade.branch_length is not None:
                        # we have only blen
                        return '0.00000:%1.5f' % clade.branch_length
                    elif (hasattr(clade, 'support')
                          and clade.support is not None):
                        # we have only support
                        return '%1.2f:0.00000' % clade.support
                    else:
                        return '0.00:0.00000'
        return make_info_string
| [
"eric.talevich@gmail.com"
] | eric.talevich@gmail.com |
2eda8ed8809c5294bac47f44d7072943089f4409 | 1496c7886cf599cc2168aba4948b4928018c3639 | /0x06-python-classes/4-square.py | 33f9226b7afa2a62db48cef609de50cf8231cd67 | [] | no_license | RhysMurage/alx-higher_level_programming | 5365e3617bdba54d8093de7f0869f89bf89ccebd | 08c50efb3602ce7d4ff1782351fac8103fd38e12 | refs/heads/main | 2023-03-16T02:01:40.105333 | 2022-09-24T13:51:40 | 2022-09-24T13:51:40 | 497,946,455 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 923 | py | #!/usr/bin/python3
"""Creates a square class"""
class Square():
    """
    Define a square by its (non-negative integer) side length.
    """
    def __init__(self, size=0):
        """
        size initialization.
        Args:
            size: integer value
        Raises:
            TypeError: if size is not an integer
            ValueError: if size is negative
        """
        # Route through the property setter so validation lives in one place
        # instead of being duplicated here and in the setter.
        self.size = size
    def area(self):
        """
        calculate area of square
        """
        return self.__size**2
    @property
    def size(self):
        """
        return size of square
        """
        return self.__size
    @size.setter
    def size(self, value):
        """
        set size of square
        Raises:
            TypeError: if value is not an integer
            ValueError: if value is negative
        """
        if type(value) != int:
            raise TypeError('size must be an integer')
        if value < 0:
            raise ValueError('size must be >= 0')
        self.__size = value
| [
"rhysmwangi96@gmail.com"
] | rhysmwangi96@gmail.com |
e5f657f8585b64e8ca97392387cbc8e5ea4a0f7d | 4c9c2940ef3a07e2756fcceddf01acd384ebde01 | /Python/[7 kyu] Ordered count of characters.py | da36015440ad03be1c025a725b9cca4d2ae3af47 | [
"MIT"
] | permissive | KonstantinosAng/CodeWars | 7d3501a605f7ffecb7f0b761b5ffe414e2f1983a | 157818ece648454e882c171a71b4c81245ab0214 | refs/heads/master | 2023-04-11T09:44:27.480064 | 2023-03-26T21:37:07 | 2023-03-26T21:37:07 | 245,296,762 | 6 | 6 | null | null | null | null | UTF-8 | Python | false | false | 578 | py | # see https://www.codewars.com/kata/57a6633153ba33189e000074/solutions/python
def ordered_count(inp):
    """Return (char, count) pairs ordered by first appearance in *inp*."""
    # collections.Counter is a dict subclass, and dicts preserve insertion
    # order since Python 3.7, so this yields first-appearance order for free.
    from collections import Counter
    return list(Counter(inp).items())
# Smoke tests: print True for each case where ordered_count matches.
tests = (
    ('abracadabra', [('a', 5), ('b', 2), ('r', 2), ('c', 1), ('d', 1)]),
    ('Code Wars', [('C', 1), ('o', 1), ('d', 1), ('e', 1), (' ', 1), ('W', 1), ('a', 1), ('r', 1), ('s', 1)])
)
for t in tests:
    inp, exp = t
    print(ordered_count(inp) == exp)
"kwstantinos.agelopoulos@outlook.com"
] | kwstantinos.agelopoulos@outlook.com |
e928926682f988569eeebca471ebde4c6d562a77 | d075f3ee406a7d79ea507ce82af2f77d542dda9b | /todo_app/trello.py | b0bf8117481df203c5e3e030dbc6d571a7cf7b99 | [] | no_license | kjnvarma/DevOps-Course-Starter | fad5b4f48c00cdbcd8a7388baf5f800256585501 | b0a80ddcce52e24981df45b05fe91d71df31ce34 | refs/heads/master | 2023-05-11T18:10:45.462425 | 2021-05-26T16:08:49 | 2021-05-26T16:08:49 | 302,264,024 | 0 | 0 | null | 2021-05-26T16:08:51 | 2020-10-08T07:29:43 | Python | UTF-8 | Python | false | false | 2,545 | py | import requests
import json
import os
from todo_app.Task import Task
headers = {
"Accept": "application/json"
}
def get_all_tasks():
    """Fetch every card on the configured Trello board as Task objects."""
    auth = (
        ('key', os.environ['TRELLO_KEY']),
        ('token', os.environ['TRELLO_TOKEN']),
    )
    board_id = os.environ['TRELLO_BOARD_ID']
    cards = requests.get('https://api.trello.com/1/boards/' + board_id + '/cards', params=auth).json()
    tasks = []
    for card in cards:
        # Translate known Trello list ids into human-readable statuses.
        if card['idList'] == os.environ['TRELLO_TODO_LIST_ID']:
            card['idList'] = 'To Do'
        elif card['idList'] == os.environ['TRELLO_DOING_LIST_ID']:
            card['idList'] = 'Doing'
        elif card['idList'] == os.environ['TRELLO_DONE_LIST_ID']:
            card['idList'] = 'Done'
        tasks.append(Task(id=card['id'], status=card['idList'], title=card['name'], last_modified=card['dateLastActivity']))
    return tasks
def create_todo_task(title):
    """Create a new Trello card named *title* in the 'To Do' list.

    Parameters
    ----------
    title : str
        Name for the new card.
    """
    params = (
        ('key', os.environ['TRELLO_KEY']),
        ('token', os.environ['TRELLO_TOKEN']),
        ('name', title),
        ('idList', os.environ['TRELLO_TODO_LIST_ID'])
    )
    # Removed debug prints that wrote TRELLO_KEY and TRELLO_TOKEN to stdout,
    # leaking API credentials into logs.
    requests.post('https://api.trello.com/1/cards', params=params)
def move_to_doing(id):
    """Move the card with the given id into the 'Doing' list."""
    query = (
        ('key', os.environ['TRELLO_KEY']),
        ('token', os.environ['TRELLO_TOKEN']),
        ('idList', os.environ['TRELLO_DOING_LIST_ID'])
    )
    requests.put("https://api.trello.com/1/cards/" + id, params=query)
def move_to_done(id):
    """Move the card with the given id into the 'Done' list."""
    query = (
        ('key', os.environ['TRELLO_KEY']),
        ('token', os.environ['TRELLO_TOKEN']),
        ('idList', os.environ['TRELLO_DONE_LIST_ID'])
    )
    requests.put("https://api.trello.com/1/cards/" + id, params=query)
def delete_task(id):
    """Delete the Trello card with the given id."""
    query = (
        ('key', os.environ['TRELLO_KEY']),
        ('token', os.environ['TRELLO_TOKEN'])
    )
    requests.delete("https://api.trello.com/1/cards/" + id, params=query)
def create_board():
    """Create a board named 'TestBoard1' and return its Trello id."""
    query = (
        ('key', os.environ['TRELLO_KEY']),
        ('token', os.environ['TRELLO_TOKEN']),
        ('name', 'TestBoard1')
    )
    created = requests.post("https://api.trello.com/1/boards/", params=query)
    return created.json()['id']
def delete_board(id):
    """Delete the Trello board with the given id."""
    query = (
        ('key', os.environ['TRELLO_KEY']),
        ('token', os.environ['TRELLO_TOKEN'])
    )
    requests.delete("https://api.trello.com/1/boards/" + id, params=query)
"kjnvarma@gmail.com"
] | kjnvarma@gmail.com |
4dcb2f40b532b5bd84a8b6f1c3ec478e443adaf6 | 5be744f908ea25bd5442dfb4cb8a24a0d7941e14 | /timepiece/context_processors.py | 7a51dd0a6549cf8ff01954c75e00c32076b99ad2 | [] | no_license | CalebMuhia/JobsBoard | f986d7c4af939dded0a3e2f8305a444f3502bad3 | 66c40dd5151261bc7e4fb8309a6139d11604f215 | refs/heads/master | 2022-07-07T22:49:06.733313 | 2022-06-23T20:52:20 | 2022-06-23T20:52:20 | 4,616,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 434 | py | from django.conf import settings
from timepiece.forms import QuickSearchForm
def timepiece_settings(request):
    """Context processor exposing the famfamfam icon base URL to templates."""
    fallback = settings.STATIC_URL + 'images/icons/'
    famfamfam_url = getattr(settings, 'FAMFAMFAM_URL', fallback)
    return {'FAMFAMFAM_URL': famfamfam_url}
def quick_search(request):
    """Context processor providing an unbound QuickSearchForm instance."""
    form = QuickSearchForm()
    return {'quick_search_form': form}
| [
"clbnjoroge@gmail.com"
] | clbnjoroge@gmail.com |
c5378d49721305da88e40db5fe94428711285626 | 47789957e8dd6ba944bd4c04341cf8add431d64f | /shoppingcart/home/migrations/0003_address.py | b5190e37fe901c886c8a352e359b0abcbaae6b90 | [] | no_license | binnib/EshopApp | 8c93c518d09b5dbe9e3e9f357e9a68fa2d2d4585 | 4846b7f48e4224f2a0aee8942e9c758a9f890081 | refs/heads/main | 2023-03-28T17:04:48.063283 | 2021-04-12T05:52:37 | 2021-04-12T05:52:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,059 | py | # Generated by Django 3.0.6 on 2021-03-31 07:08
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Creates the Address model with an optional link to home.Customer.
    dependencies = [
        ('home', '0002_cartitem_total_price'),
    ]
    operations = [
        migrations.CreateModel(
            name='Address',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('address_details', models.CharField(max_length=250)),
                ('state', models.CharField(max_length=100)),
                ('city', models.CharField(max_length=100)),
                ('country', models.CharField(max_length=100)),
                ('pincode', models.CharField(max_length=10)),
                ('landmark', models.CharField(max_length=50)),
                ('is_active', models.BooleanField(default=False)),
                ('customer', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='home.Customer')),
            ],
        ),
    ]
| [
"maheshsharma171226@gmail.com"
] | maheshsharma171226@gmail.com |
558e9b855ff65427de3dea11f5eb47c5c6c56325 | c28c56b2f0bd987b627e9c59537307323470876a | /blotter/views.py | a250ab02ba0769f4d15d07e074059fa643ecc7e6 | [] | no_license | wider/django-crimemap | 4e26f4435b7f770de0f3dfec11e7780a1476ea73 | 93bb908c2f8f4b82e544ebd72baca4530dd2c5db | refs/heads/master | 2016-09-16T10:38:02.346989 | 2008-10-16T01:44:01 | 2008-10-16T01:44:01 | 114,336 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,140 | py | from django.views.generic.list_detail import object_list
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from crime_map.blotter.models import Crime
from crime_map.blotter.forms import DateRangeForm
def by_type(request, crime_type):
    """Render the crime list filtered by crime-type slug."""
    matching = Crime.objects.filter(crime_type__slug=crime_type)
    return object_list(request, queryset=matching)
def by_agency(request, agency):
    """Render the crime list filtered by agency slug."""
    matching = Crime.objects.filter(agency__slug=agency)
    return object_list(request, queryset=matching)
def in_date_range(request, start_date, end_date):
    """Render crimes whose date falls within [start_date, end_date]."""
    matching = Crime.objects.filter(date__range=(start_date, end_date))
    return object_list(request, queryset=matching)
def filter_by_date(request):
    """Filter crimes by a POSTed date range; otherwise redirect to the list."""
    if request.method == "POST":
        form = DateRangeForm(request.POST)
        if form.is_valid():
            start = form.cleaned_data['start_date']
            end = form.cleaned_data['end_date']
            crimes = Crime.objects.filter(date__range=(start, end))
            return object_list(request, queryset=crimes)
    # Invalid form or non-POST request: back to the full crime list.
    return HttpResponseRedirect(reverse('crime-list'))
| [
"justinlilly@gmail.com"
] | justinlilly@gmail.com |
d8003e5a564f39faa18cfb54ada2ed74bad26175 | 1df04380fbb799ea658201ff9083db550cd0a090 | /restaurants/urls.py | 79ce86478bcbc4b6b31e7b111e877681bed61f31 | [] | no_license | ivanneychev/try_django | 21739e9f5ed9afb0213d58c719141fb898468a4b | 0d6e1048899850234bed148b0ce8d30f2b1369df | refs/heads/master | 2021-04-27T21:27:52.404794 | 2018-02-21T22:57:02 | 2018-02-21T22:57:02 | 122,401,025 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 799 | py | """picky URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from .views import about as about_view
# URL namespace for reversing, e.g. reverse('restaurants:about').
app_name = 'restaurants'
urlpatterns = [
    url(r'^$', about_view, name='about'),
]
| [
"ivan_tn@abv.bg"
] | ivan_tn@abv.bg |
6f9a19fbcf2f777df543ea2ca0800eafd2406087 | 18d3fa331288470a5872bfe71ef3f610b18c0588 | /jaqs/research/signaldigger/digger.py | 6f58b77e4d8b28f7e3adbdc7677ba66e5d0411ce | [] | no_license | Jerrychen94/sustecher | 6ca1238cf494df0a3435f5602000334e0c4b328f | 52a8b82f35f9a7ac33a2fab4e5f2e6bce5732369 | refs/heads/master | 2023-07-24T12:38:46.086619 | 2021-09-06T09:17:03 | 2021-09-06T09:17:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25,113 | py | # encoding: utf-8
import os
from collections import OrderedDict
import numpy as np
import pandas as pd
from . import performance as pfm
from . import plotting
import jaqs.util as jutil
TRADE_DAYS_PER_YEAR = 242
class SignalDigger(object):
"""
Attributes
----------
signal_data : pd.DataFrame - MultiIndex
Index is pd.MultiIndex ['trade_date', 'symbol'], columns = ['signal', 'return', 'quantile']
period : int
Horizon used to calculate return.
n_quantiles : int
output_format : str
output_folder : str
"""
    def __init__(self, output_folder=".", output_format='pdf'):
        """Store output configuration and initialize result holders.

        output_format: 'pdf'/'png'/'jpg' save figures to disk, 'base64'
        keeps encoded images in memory, 'plot' shows them interactively
        (see show_fig).
        """
        self.output_format = output_format
        self.output_folder = os.path.abspath(output_folder)
        self.signal_data = None  # MultiIndex DataFrame built by process_signal_before_analysis
        self.period = None  # return-calculation horizon, set later
        self.n_quantiles = 5
        self.benchmark_ret = None
        self.returns_report_data = dict()
        self.ic_report_data = dict()
        self.fig_data = dict()  # base64 payloads keyed by figure file name
        self.fig_objs = dict()  # matplotlib Figure objects keyed by file name
self.fig_objs = dict()
    def process_signal_before_analysis(self, signal, price=None, ret=None, benchmark_price=None, period=5, n_quantiles=5, mask=None, forward=False):
        """
        Prepare for signal analysis.
        Parameters
        ----------
        signal : pd.DataFrame
            Index is date, columns are stocks.
        price : pd.DataFrame
            Index is date, columns are stocks.
        ret : pd.DataFrame
            Index is date, columns are stocks.
        benchmark_price : pd.DataFrame or pd.Series or None
            Price of benchmark.
        mask : pd.DataFrame
            Data cells that should NOT be used.
        n_quantiles : int
        period : int
            periods to compute forward returns on.
        forward : bool
            If True, align point-in-time signal with forward return;
            otherwise shift the signal back by `period`.
        Returns
        -------
        res : pd.DataFrame
            Index is pd.MultiIndex ['trade_date', 'symbol'], columns = ['signal', 'return', 'quantile']
        """
        """
        Deal with suspensions:
            If the period of calculating return is d (from T to T+d), then
            we do not use signal values of those suspended on T,
            we do not calculate return for those suspended on T+d.
        """
        # ----------------------------------------------------------------------
        # parameter validation: exactly one of price / ret must be supplied
        if price is None and ret is None:
            raise ValueError("One of price / ret must be provided.")
        if price is not None and ret is not None:
            raise ValueError("Only one of price / ret should be provided.")
        if ret is not None and benchmark_price is not None:
            raise ValueError("You choose 'return' mode but benchmark_price is given.")
        if not (n_quantiles > 0 and isinstance(n_quantiles, int)):
            raise ValueError("n_quantiles must be a positive integer. Input is: {}".format(n_quantiles))
        # ensure inputs are aligned
        data = price if price is not None else ret
        assert np.all(signal.index == data.index)
        assert np.all(signal.columns == data.columns)
        if mask is not None:
            assert np.all(signal.index == mask.index)
            assert np.all(signal.columns == mask.columns)
            mask = jutil.fillinf(mask)
            mask = mask.astype(int).fillna(0).astype(bool)  # dtype of mask could be float. So we need to convert.
        else:
            mask = pd.DataFrame(index=signal.index, columns=signal.columns, data=False)
        signal = jutil.fillinf(signal)
        data = jutil.fillinf(data)
        # ----------------------------------------------------------------------
        # save data
        self.n_quantiles = n_quantiles
        self.period = period
        # ----------------------------------------------------------------------
        # Get dependent variables: period returns, optionally net of benchmark
        if price is not None:
            df_ret = pfm.price2ret(price, period=self.period, axis=0)
            if benchmark_price is not None:
                benchmark_price = benchmark_price.loc[signal.index]
                bench_ret = pfm.price2ret(benchmark_price, self.period, axis=0)
                self.benchmark_ret = bench_ret
                residual_ret = df_ret.sub(bench_ret.values.flatten(), axis=0)
            else:
                residual_ret = df_ret
        else:
            residual_ret = ret
        # Get independent varibale
        signal = signal.shift(1)  # avoid forward-looking bias
        # forward or not
        if forward:
            # point-in-time signal and forward return
            residual_ret = residual_ret.shift(-self.period)
        else:
            # past signal and point-in-time return
            signal = signal.shift(self.period)
        # ----------------------------------------------------------------------
        # get masks: drop cells where signal or return is missing, or masked
        # mask_prices = data.isnull()
        # Because we use FORWARD return, if one day's price is broken, the day that is <period> days ago is also broken.
        # mask_prices = np.logical_or(mask_prices, mask_prices.shift(self.period))
        mask_price_return = residual_ret.isnull()
        mask_signal = signal.isnull()
        mask_tmp = np.logical_or(mask_signal, mask_price_return)
        mask_all = np.logical_or(mask, mask_tmp)
        # if price is not None:
        #     mask_forward = np.logical_or(mask, mask.shift(self.period).fillna(True))
        #     mask = np.logical_or(mask, mask_forward)
        # ----------------------------------------------------------------------
        # calculate quantile of the (masked) signal cross-sections
        signal_masked = signal.copy()
        signal_masked = signal_masked[~mask_all]
        if n_quantiles == 1:
            df_quantile = signal_masked.copy()
            df_quantile.loc[:, :] = 1.0
        else:
            df_quantile = jutil.to_quantile(signal_masked, n_quantiles=n_quantiles)
        # ----------------------------------------------------------------------
        # stack each wide frame into a (trade_date, symbol) MultiIndex column
        def stack_td_symbol(df):
            df = pd.DataFrame(df.stack(dropna=False))  # do not dropna
            df.index.names = ['trade_date', 'symbol']
            df.sort_index(axis=0, level=['trade_date', 'symbol'], inplace=True)
            return df
        mask_all = stack_td_symbol(mask_all)
        df_quantile = stack_td_symbol(df_quantile)
        residual_ret = stack_td_symbol(residual_ret)
        # ----------------------------------------------------------------------
        # concat signal value
        res = stack_td_symbol(signal)
        res.columns = ['signal']
        res['return'] = residual_ret
        res['quantile'] = df_quantile
        res = res.loc[~(mask_all.iloc[:, 0]), :]
        print("Nan Data Count (should be zero) : {:d};  " \
              "Percentage of effective data: {:.0f}%".format(res.isnull().sum(axis=0).sum(), len(res) * 100. / signal.size))
        res = res.astype({'signal': float, 'return': float, 'quantile': int})
        self.signal_data = res
def show_fig(self, fig, file_name):
    """Persist or display *fig* according to ``self.output_format``.

    The figure object is always cached in ``self.fig_objs`` under
    *file_name*.  Depending on the configured format it is then saved to
    ``self.output_folder`` ('pdf'/'png'/'jpg'), base64-encoded into
    ``self.fig_data`` ('base64'), or shown interactively ('plot').

    Parameters
    ----------
    fig : matplotlib.figure.Figure
    file_name : str
        Name (without extension) used as cache key and output file stem.
    """
    self.fig_objs[file_name] = fig
    fmt = self.output_format
    if fmt in ('pdf', 'png', 'jpg'):
        out_path = os.path.join(self.output_folder, '.'.join([file_name, fmt]))
        jutil.create_dir(out_path)
        fig.savefig(out_path)
        print("Figure saved: {}".format(out_path))
    elif fmt == 'base64':
        self.fig_data[file_name] = jutil.fig2base64(fig, 'png')
        print("Base64 data of figure {} will be stored in dictionary.".format(file_name))
    elif fmt == 'plot':
        fig.show()
    else:
        raise NotImplementedError("output_format = {}".format(fmt))
@plotting.customize
def create_returns_report(self):
    """
    Creates a tear sheet for returns analysis of a signal.

    Consumes ``self.signal_data`` (built by the pre-processing step) and
    stores every intermediate series in ``self.returns_report_data``.
    When ``self.output_format`` is truthy, six standard return plots are
    rendered into one figure and handed to ``show_fig``.
    """
    # Highest quantile label doubles as "top" bucket for the TMB spread.
    n_quantiles = self.signal_data['quantile'].max()

    # ----------------------------------------------------------------------------------
    # Daily Signal Return Time Series
    # Use regression or weighted average to calculate.
    period_wise_long_ret = pfm.calc_period_wise_weighted_signal_return(self.signal_data, weight_method='long_only')
    period_wise_short_ret = pfm.calc_period_wise_weighted_signal_return(self.signal_data, weight_method='short_only')
    # Cumulative curves use simple (non-compound) aggregation throughout.
    cum_long_ret = pfm.period_wise_ret_to_cum(period_wise_long_ret, period=self.period, compound=False)
    cum_short_ret = pfm.period_wise_ret_to_cum(period_wise_short_ret, period=self.period, compound=False)
    # period_wise_ret_by_regression = perf.regress_period_wise_signal_return(signal_data)
    # period_wise_ls_signal_ret = \
    #     pfm.calc_period_wise_weighted_signal_return(signal_data, weight_method='long_short')
    # daily_ls_signal_ret = pfm.period2daily(period_wise_ls_signal_ret, period=period)
    # ls_signal_ret_cum = pfm.daily_ret_to_cum(daily_ls_signal_ret)

    # ----------------------------------------------------------------------------------
    # Period-wise Quantile Return Time Series
    # We calculate quantile return using equal weight or market value weight.
    # Quantile is already obtained according to signal values.

    # quantile return
    period_wise_quantile_ret_stats = pfm.calc_quantile_return_mean_std(self.signal_data, time_series=True)
    cum_quantile_ret = pd.concat({k: pfm.period_wise_ret_to_cum(v['mean'], period=self.period, compound=False)
                                  for k, v in period_wise_quantile_ret_stats.items()},
                                 axis=1)

    # top quantile minus bottom quantile return
    period_wise_tmb_ret = pfm.calc_return_diff_mean_std(period_wise_quantile_ret_stats[n_quantiles],
                                                        period_wise_quantile_ret_stats[1])
    cum_tmb_ret = pfm.period_wise_ret_to_cum(period_wise_tmb_ret['mean_diff'], period=self.period, compound=False)

    # ----------------------------------------------------------------------------------
    # Alpha and Beta
    # Calculate using regression.
    '''
    weighted_portfolio_alpha_beta
    tmb_alpha_beta =
    '''

    # start plotting
    if self.output_format:
        vertical_sections = 6
        gf = plotting.GridFigure(rows=vertical_sections, cols=1)
        gf.fig.suptitle("Returns Tear Sheet\n\n(no compound)\n (period length = {:d} days)".format(self.period))

        plotting.plot_quantile_returns_ts(period_wise_quantile_ret_stats, ax=gf.next_row())
        plotting.plot_cumulative_returns_by_quantile(cum_quantile_ret, ax=gf.next_row())
        plotting.plot_cumulative_return(cum_long_ret,
                                        title="Signal Weighted Long Only Portfolio Cumulative Return",
                                        ax=gf.next_row())
        plotting.plot_cumulative_return(cum_short_ret,
                                        title="Signal Weighted Short Only Portfolio Cumulative Return",
                                        ax=gf.next_row())
        plotting.plot_mean_quantile_returns_spread_time_series(period_wise_tmb_ret, self.period,
                                                               bandwidth=0.5,
                                                               ax=gf.next_row())
        plotting.plot_cumulative_return(cum_tmb_ret,
                                        title="Top Minus Bottom (long top, short bottom) Portfolio Cumulative Return",
                                        ax=gf.next_row())
        self.show_fig(gf.fig, 'returns_report')

    # Expose intermediates so create_full_report can merge them into its result.
    self.returns_report_data = {'period_wise_quantile_ret': period_wise_quantile_ret_stats,
                                'cum_quantile_ret': cum_quantile_ret,
                                'cum_long_ret': cum_long_ret,
                                'cum_short_ret': cum_short_ret,
                                'period_wise_tmb_ret': period_wise_tmb_ret,
                                'cum_tmb_ret': cum_tmb_ret}
@plotting.customize
def create_information_report(self):
    """Build the information-coefficient (IC) tear sheet.

    Computes daily ICs from ``self.signal_data`` and their monthly means;
    when ``self.output_format`` is truthy the IC summary table, time
    series, histogram and monthly heatmap are drawn and passed to
    ``show_fig``.  Results are stored in ``self.ic_report_data``.
    """
    daily_ic = pfm.calc_signal_ic(self.signal_data)
    # Index arrives as integer dates (YYYYMMDD); convert for resampling.
    daily_ic.index = pd.to_datetime(daily_ic.index, format="%Y%m%d")
    monthly_ic = pfm.mean_information_coefficient(daily_ic, "M")

    if self.output_format:
        summary = pfm.calc_ic_stats_table(daily_ic)
        plotting.plot_information_table(summary)

        n_cols = 2
        n_ic_cols = len(daily_ic.columns)
        n_wide_rows = (n_ic_cols - 1) // n_cols + 1
        n_rows = n_ic_cols + 3 * n_wide_rows + 2 * n_ic_cols
        grid = plotting.GridFigure(rows=n_rows, cols=n_cols)
        grid.fig.suptitle("Information Coefficient Report\n\n(period length = {:d} days)"
                          "\ndaily IC = rank_corr(period-wise forward return, signal value)".format(self.period))

        plotting.plot_ic_ts(daily_ic, self.period, ax=grid.next_row())
        plotting.plot_ic_hist(daily_ic, self.period, ax=grid.next_row())
        plotting.plot_monthly_ic_heatmap(monthly_ic, period=self.period, ax=grid.next_row())

        self.show_fig(grid.fig, 'information_report')

    self.ic_report_data = {'daily_ic': daily_ic, 'monthly_ic': monthly_ic}
def create_binary_event_report(self, signal, price, mask, benchmark_price, periods, join_method_periods='inner', group_by=None):
    """
    Analyze returns around binary (0/1) signal events for several horizons.

    Parameters
    ----------
    signal : pd.DataFrame
        Event indicator per date/symbol; truthy values mark event days.
    price : pd.DataFrame
    mask : pd.DataFrame
    benchmark_price : pd.DataFrame
    periods : list of int
        Holding horizons (in days) to evaluate.
    join_method_periods : {'inner', 'outer'}.
        Whether to take intersection or union of data of different periods.
    group_by : {'year', 'month', None}
        Calculate various statistics within each year/month/whole sample.

    Returns
    -------
    res : dict
    """
    import scipy.stats as scst

    # Raw Data: run the generic pre-processing once per holding period.
    # n_quantiles=1 because a binary event has no quantile structure.
    dic_signal_data = OrderedDict()
    for my_period in periods:
        self.process_signal_before_analysis(signal, price=price, mask=mask,
                                            n_quantiles=1, period=my_period,
                                            benchmark_price=benchmark_price,
                                            forward=True)
        dic_signal_data[my_period] = self.signal_data

    # Processed Data: split returns into the event-day sample vs. all days.
    dic_events = OrderedDict()
    dic_all = OrderedDict()
    for period, df in dic_signal_data.items():
        ser_ret = df['return']
        ser_sig = df['signal'].astype(bool)
        events_ret = ser_ret.loc[ser_sig]
        dic_events[period] = events_ret
        dic_all[period] = ser_ret
    df_events = pd.concat(dic_events, axis=1, join=join_method_periods)
    df_all = pd.concat(dic_all, axis=1, join=join_method_periods)

    # Data Statistics
    def _calc_statistics(df):
        # Column-wise (one column per holding period) annualized mean/vol,
        # one-sample t-test against zero mean, skewness and kurtosis.
        df_res = pd.DataFrame(index=periods,
                              columns=['Annu. Ret.', 'Annu. Vol.', 't-stat', 'p-value', 'skewness', 'kurtosis', 'occurance'],
                              data=np.nan)
        df_res.index.name = 'Period'
        ser_periods = pd.Series(index=df.columns, data=df.columns.values)
        # Annualization factor: trading days per year / holding period.
        ratio = (1.0 * TRADE_DAYS_PER_YEAR / ser_periods)
        mean = df.mean(axis=0)
        std = df.std(axis=0)
        annual_ret, annual_vol = mean * ratio, std * np.sqrt(ratio)
        t_stats, p_values = scst.ttest_1samp(df.values, np.zeros(df.shape[1]), axis=0)
        df_res.loc[:, 't-stat'] = t_stats
        df_res.loc[:, 'p-value'] = np.round(p_values, 5)
        df_res.loc[:, "skewness"] = scst.skew(df, axis=0)
        df_res.loc[:, "kurtosis"] = scst.kurtosis(df, axis=0)
        df_res.loc[:, 'Annu. Ret.'] = annual_ret
        df_res.loc[:, 'Annu. Vol.'] = annual_vol
        df_res.loc[:, 'occurance'] = len(df)
        # dic_res[period] = df
        return df_res

    # Choose the calendar bucketing used for the statistics.
    if group_by == 'year':
        grouper_func = jutil.date_to_year
    elif group_by == 'month':
        grouper_func = jutil.date_to_month
    else:
        grouper_func = get_dummy_grouper

    idx_group = grouper_func(df_events.index.get_level_values('trade_date'))
    df_stats = df_events.groupby(idx_group).apply(_calc_statistics)
    idx_group_all = grouper_func(df_all.index.get_level_values('trade_date'))
    df_all_stats = df_all.groupby(idx_group_all).apply(_calc_statistics)
    # Keep only the whole-sample benchmark columns next to the event stats.
    df_all_stats = df_all_stats.loc[df_stats.index, ['Annu. Ret.', 'Annu. Vol.']]
    df_all_stats.columns = ['Annu. Ret. (all samp)', 'Annu. Vol. (all samp)']
    df_stats = pd.concat([df_stats, df_all_stats], axis=1)
    # return df_all, df_events, df_stats
    ser_signal_raw, monthly_signal, yearly_signal = calc_calendar_distribution(signal)
    # return

    # plot
    gf = plotting.GridFigure(rows=len(np.unique(idx_group)) * len(periods) + 3, cols=2, height_ratio=1.2)
    gf.fig.suptitle("Event Return Analysis (annualized)")
    plotting.plot_calendar_distribution(ser_signal_raw,
                                        monthly_signal=monthly_signal, yearly_signal=yearly_signal,
                                        ax1=gf.next_row(), ax2=gf.next_row())
    plotting.plot_event_bar(df_stats.reset_index(), x='Period', y='Annu. Ret.', hue='trade_date', ax=gf.next_row())
    # plotting.plot_event_pvalue(df_stats['p-value'], ax=gf.next_subrow())

    def _plot_dist(df):
        # Draw per-period return distributions for one calendar bucket.
        # NOTE: consumes cells from the shared GridFigure via gf.next_cell(),
        # so the call order here determines plot placement.
        date = grouper_func(df.index.get_level_values('trade_date'))[0]
        plotting.plot_event_dist(df, group_by.title() + ' ' + str(date), axs=[gf.next_cell() for _ in periods])
    if group_by is not None:
        df_events.groupby(idx_group).apply(_plot_dist)
    else:
        plotting.plot_event_dist(df_events, "", axs=[gf.next_cell() for _ in periods])

    self.show_fig(gf.fig, 'event_report')
    # dic_res['df_res'] = df_res
    return df_all, df_events, df_stats
@plotting.customize
def create_full_report(self):
    """Run the complete tear sheet for a single alpha signal.

    Plots the quantile description table (when output is enabled), then
    delegates to the returns and information reports, and finally returns
    their merged result dictionaries together with any base64 figure data.
    """
    # signal quantile description statistics
    quantile_table = calc_quantile_stats_table(self.signal_data)
    if self.output_format:
        plotting.plot_quantile_statistics_table(quantile_table)

    self.create_returns_report()
    self.create_information_report()
    # Turnover analysis is intentionally skipped for now.
    # self.create_turnover_report(signal_data)

    merged = {}
    for part in (self.returns_report_data, self.ic_report_data, self.fig_data):
        merged.update(part)
    return merged
def create_single_signal_report(self, signal, price, periods, n_quantiles, mask=None, trade_condition=None):
    """
    Analyze one instrument's signal against its own forward returns.

    Parameters
    ----------
    signal : pd.Series
        index is integer date, values are signals
    price : pd.Series
        index is integer date, values are prices
    mask : pd.Series or None, optional
        index is integer date, values are bool
    periods : list of int
    trade_condition : dict , optional
        {'cond_name1': {'col_name': str, 'hold': int, 'filter': func, 'direction': 1},
         'cond_name2': {'col_name': str, 'hold': int, 'filter': func, 'direction': -1},
         }

    Returns
    -------
    res : dict
    """
    # Accept single-column DataFrames for convenience.
    if isinstance(signal, pd.DataFrame):
        signal = signal.iloc[:, 0]
    if isinstance(price, pd.DataFrame):
        price = price.iloc[:, 0]

    # calc return: one forward-return column per requested horizon.
    ret_l = {period: pfm.price2ret(price, period=period, axis=0) for period in periods}
    df_ret = pd.concat(ret_l, axis=1)

    # ----------------------------------------------------------------------
    # calculate quantile
    if n_quantiles == 1:
        df_quantile = signal.copy()
        df_quantile.loc[:] = 1.0
    else:
        df_quantile = jutil.to_quantile(signal, n_quantiles=n_quantiles, axis=0)

    # ----------------------------------------------------------------------
    # concat signal value
    # shift(1) lags the signal one bar to avoid forward-looking bias.
    res = pd.DataFrame(signal.shift(1))
    res.columns = ['signal']
    res['quantile'] = df_quantile
    res = pd.concat([res, df_ret], axis=1)
    res = res.dropna()

    print("Nan Data Count (should be zero) : {:d}; " \
          "Percentage of effective data: {:.0f}%".format(res.isnull().sum(axis=0).sum(), len(res) * 100. / signal.size))

    # calc quantile stats: mean/std of each horizon's return per quantile.
    gp = res.groupby(by='quantile')
    dic_stats = OrderedDict()
    for q, df in gp:
        df_stat = pd.DataFrame(index=['mean', 'std'], columns=df_ret.columns, data=np.nan)
        df_stat.loc['mean', :] = df.loc[:, df_ret.columns].mean(axis=0)
        df_stat.loc['std', :] = df.loc[:, df_ret.columns].std(axis=0)
        dic_stats[q] = df_stat

    # calculate IC
    ics = calc_various_ic(res, ret_cols=df_ret.columns)

    # backtest
    if trade_condition is not None:
        def sim_backtest(df, dic_of_cond):
            # For each named condition: select rows via the filter, hold for
            # n_hold bars, and accumulate the signed return into one curve.
            # NOTE(review): keys read here are 'column'/'filter'/'hold'/
            # 'direction', while the docstring above says 'col_name' --
            # confirm which spelling callers actually pass.
            dic_cum_ret = dict()
            for key, dic in dic_of_cond.items():
                col_name = dic['column']
                func = dic['filter']
                n_hold = dic['hold']
                direction = dic['direction']
                mask = df[col_name].apply(func).astype(int)
                dic_cum_ret[key] = (df[n_hold] * mask).cumsum() * direction
            df_cumret = pd.concat(dic_cum_ret, axis=1)
            return df_cumret
        df_backtest = sim_backtest(res, trade_condition)

    # plot
    gf = plotting.GridFigure(rows=3, cols=1, height_ratio=1.2)
    gf.fig.suptitle("Event Return Analysis (annualized)")
    plotting.plot_ic_decay(ics, ax=gf.next_row())
    plotting.plot_quantile_return_mean_std(dic_stats, ax=gf.next_row())
    if trade_condition is not None:
        plotting.plot_batch_backtest(df_backtest, ax=gf.next_row())
    self.show_fig(gf.fig, 'single_inst.pdf')
def calc_ic(x, y, method='rank'):
    """Information coefficient (correlation) between two samples.

    Parameters
    ----------
    x : np.ndarray
    y : np.ndarray
    method : {'rank', 'normal'}
        'rank' uses Spearman rank correlation, 'normal' uses Pearson
        correlation.

    Returns
    -------
    corr : float

    Raises
    ------
    NotImplementedError
        If *method* is not one of the supported values.
    """
    import scipy.stats as scst
    if method == 'rank':
        return scst.spearmanr(x, y)[0]
    if method == 'normal':
        return np.corrcoef(x, y)[0, 1]
    raise NotImplementedError("method = {}".format(method))
def calc_various_ic(df, ret_cols):
    """Compute four IC flavours for every return column.

    Columns of the result (in order): 'normal' and 'rank' correlate the raw
    signal value with each return column; 'normal_q' and 'rank_q' use the
    signal quantile instead.  The result is indexed by *ret_cols*.
    """
    spec = [('normal', 'signal', 'normal'),
            ('rank', 'signal', 'rank'),
            ('normal_q', 'quantile', 'normal'),
            ('rank_q', 'quantile', 'rank')]
    data = {name: [calc_ic(df[src], df[col], method=m) for col in ret_cols]
            for name, src, m in spec}
    return pd.DataFrame(index=ret_cols, data=data)
def calc_quantile_stats_table(signal_data):
    """Per-quantile descriptive statistics of the 'signal' column.

    Groups *signal_data* by its 'quantile' column and returns min, max,
    mean, std and count of 'signal' per quantile, plus a 'count %' column
    with each quantile's share of the total observation count.
    """
    grouped = signal_data.groupby('quantile')
    stats = grouped.agg(['min', 'max', 'mean', 'std', 'count'])['signal']
    total_count = stats['count'].sum()
    stats['count %'] = stats['count'] / total_count * 100.
    return stats
def get_dummy_grouper(ser):
    """Constant grouper: one 'all_sample' label per element of *ser*.

    Used when no year/month bucketing is requested, so a groupby over the
    result collapses everything into a single whole-sample group.  The
    returned Index keeps *ser*'s name.
    """
    labels = np.full(len(ser), 'all_sample')
    return pd.Index(labels, name=ser.name)
def calc_calendar_distribution(df_signal):
    """Count signal occurrences per day, month and year.

    Sums *df_signal* across columns (symbols) to get a daily count, then
    aggregates that count by calendar month and by year using jutil's
    date helpers on the integer-date index.
    """
    daily_signal = df_signal.sum(axis=1)
    daily_signal = daily_signal.fillna(0).astype(int)
    idx = daily_signal.index.values
    month = jutil.date_to_month(idx)
    year = jutil.date_to_year(idx)
    monthly_signal = daily_signal.groupby(by=month).sum()
    yearly_signal = daily_signal.groupby(by=year).sum()
    # NOTE(review): pd.DataFrame(series, columns=['Time']) re-indexes on the
    # column label; for an unnamed Series this can yield an all-NaN 'Time'
    # column on recent pandas -- consider series.to_frame('Time').  TODO
    # confirm against the pandas version this project pins.
    monthly_signal = pd.DataFrame(monthly_signal, columns=['Time'])
    yearly_signal = pd.DataFrame(yearly_signal, columns=['Time'])
    monthly_signal.index.name = 'Month'
    yearly_signal.index.name = 'Year'
    return daily_signal, monthly_signal, yearly_signal
| [
"8342537@qq.com"
] | 8342537@qq.com |
a60cc6b2ee0c67040088e9b8635511a8e481d5ab | 9ebed060fb79f468504030d545501711230fe743 | /venv/bin/rst2html.py | 219c73b8e81865a06af0da1402f0c3a8ba7797db | [] | no_license | CarlosKim94/100daysweb-site | 458ff8f2338abb5c8215893a7af5d8333e81f8e5 | 8e07494e76ed5b58e49a2dec78e5cd7f59adbb2f | refs/heads/master | 2021-06-20T04:16:54.447742 | 2019-06-12T02:41:11 | 2019-06-12T02:41:11 | 191,472,386 | 0 | 0 | null | 2021-03-29T19:48:19 | 2019-06-12T01:04:50 | Python | UTF-8 | Python | false | false | 662 | py | #!/Users/carloskim/Developer/100daysweb/5-static-sites/100daysweb-site/venv/bin/python
# $Id: rst2html.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.

"""
A minimal front end to the Docutils Publisher, producing HTML.
"""

try:
    import locale
    locale.setlocale(locale.LC_ALL, '')
except Exception:
    # Locale setup is best-effort; an unsupported locale must not prevent
    # the converter from running.  (Was a bare 'except:', which also
    # swallowed SystemExit and KeyboardInterrupt.)
    pass

from docutils.core import publish_cmdline, default_description

description = ('Generates (X)HTML documents from standalone reStructuredText '
               'sources. ' + default_description)

# Parse command-line arguments and run the HTML writer.
publish_cmdline(writer_name='html', description=description)
| [
"carlos.kim94@gmail.com"
] | carlos.kim94@gmail.com |
a4ad9d37d8916e0b9997b849d394f54ae71f9098 | 20b57ed25fcfb6eda1ea262228e0c53838cb61e1 | /registration.py | 442e95986fe800769867b2f88c6bca2dcfdac724 | [] | no_license | Rutuja999/covid_future_forcasting | 6c47864adca448c25a23a5b2c2fadbec68c4a244 | 9816616b9248dd3ab019b89ee02ae47334dbec12 | refs/heads/master | 2023-04-08T14:16:07.364621 | 2021-04-24T08:26:56 | 2021-04-24T08:26:56 | 361,109,015 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,356 | py | import tkinter as tk
# from tkinter import *
from tkinter import messagebox as ms
import sqlite3
from PIL import Image, ImageTk
import re
import random
import os
import cv2

# Main application window for the registration form.
window = tk.Tk()
window.geometry("700x700")
window.title("REGISTRATION FORM")
window.configure(background="grey")

# Tk variables backing the form fields (read by insert()).
Fullname = tk.StringVar()
address = tk.StringVar()
username = tk.StringVar()
Email = tk.StringVar()
Phoneno = tk.IntVar()
var = tk.IntVar()  # gender: 1 = male, 2 = female (see the Radiobuttons below), 0 = unset
age = tk.IntVar()
password = tk.StringVar()
password1 = tk.StringVar()

# NOTE(review): 'value' is printed once and never used afterwards.
value = random.randint(1, 1000)
print(value)

# database code: create the registration table on first run.
db = sqlite3.connect('evaluation.db')
cursor = db.cursor()
cursor.execute("CREATE TABLE IF NOT EXISTS registration"
               "(Fullname TEXT, address TEXT, username TEXT, Email TEXT, Phoneno TEXT,Gender TEXT,age TEXT , password TEXT)")
db.commit()
def password_check(passwd):
    """Validate a password against the registration policy.

    Rules: 6-20 characters, at least one digit, one uppercase letter, one
    lowercase letter and one of the symbols $ @ # %.  Prints a message for
    every rule that fails.

    Returns True when all rules pass, False otherwise.  (The original fell
    through to an implicit None on failure; callers compared the result
    with ``!= True`` so an explicit False is backward compatible.)
    """
    special_symbols = ['$', '@', '#', '%']
    val = True
    if len(passwd) < 6:
        print('length should be at least 6')
        val = False
    if len(passwd) > 20:
        # Message fixed: the limit checked here is 20, not 8.
        print('length should not be greater than 20')
        val = False
    if not any(char.isdigit() for char in passwd):
        print('Password should have at least one numeral')
        val = False
    if not any(char.isupper() for char in passwd):
        print('Password should have at least one uppercase letter')
        val = False
    if not any(char.islower() for char in passwd):
        print('Password should have at least one lowercase letter')
        val = False
    if not any(char in special_symbols for char in passwd):
        # Message fixed to list every accepted symbol.
        print('Password should have at least one of the symbols $@#%')
        val = False
    # Always return a boolean instead of falling through to None.
    return val
def insert():
    """Validate the registration form and insert a new user row.

    Reads every Tk variable, runs the field validations in order and shows
    a message box for the first failing check; on success writes the row
    into the 'registration' table and closes the window.
    """
    fname = Fullname.get()
    addr = address.get()
    un = username.get()
    email = Email.get()
    mobile = Phoneno.get()
    gender = var.get()
    time = age.get()
    pwd = password.get()
    cnpwd = password1.get()

    with sqlite3.connect('evaluation.db') as db:
        c = db.cursor()
        # Find any existing row with this username so duplicates can be
        # rejected below (parameterized query, not string formatting).
        find_user = ('SELECT * FROM registration WHERE username = ?')
        c.execute(find_user, [(username.get())])

    # to check mail
    # regex = '^\w+([\.-]?\w+)*@\w+([\.-]?\w+)*(\.\w{2,3})+$'
    regex = '^[a-z0-9]+[\._]?[a-z0-9]+[@]\w+[.]\w{2,3}$'
    if (re.search(regex, email)):
        a = True
    else:
        a = False

    # validation: first failing check wins, mirroring the original order.
    if (fname.isdigit() or (fname == "")):
        ms.showinfo("Message", "please enter valid name")
    elif (addr == ""):
        ms.showinfo("Message", "Please Enter Address")
    elif (email == "") or (a == False):
        ms.showinfo("Message", "Please Enter valid email")
    elif ((len(str(mobile))) < 10 or len(str((mobile))) > 10):
        ms.showinfo("Message", "Please Enter 10 digit mobile number")
    elif ((time > 100) or (time == 0)):
        ms.showinfo("Message", "Please Enter valid age")
    elif (c.fetchall()):
        ms.showerror('Error!', 'Username Taken Try a Diffrent One.')
    elif (pwd == ""):
        ms.showinfo("Message", "Please Enter valid password")
    elif (gender == 0):
        # Fixed: the original compared the IntVar object itself to False
        # ('var == False'), which is never true, so a missing gender was
        # silently accepted.  0 is the IntVar default when nothing is picked.
        ms.showinfo("Message", "Please Enter gender")
    elif (pwd == "") or (password_check(pwd)) != True:
        ms.showinfo("Message", "password must contain atleast 1 Uppercase letter,1 symbol,1 number")
    elif (pwd != cnpwd):
        ms.showinfo("Message", "Password Confirm password must be same")
    else:
        conn = sqlite3.connect('evaluation.db')
        with conn:
            cursor = conn.cursor()
            cursor.execute(
                'INSERT INTO registration(Fullname, address, username, Email, Phoneno, Gender, age , password) VALUES(?,?,?,?,?,?,?,?)',
                (fname, addr, un, email, mobile, gender, time, pwd))
            conn.commit()
        db.close()
        ms.showinfo('Success!', 'Account Created Successfully !')
        window.destroy()
#####################################################################################################################################################
# from subprocess import call
# call(["python", "lecture_login.py"])
# def login():

# ---- Background image ----
image2 = Image.open('r7.jpg')
image2 = image2.resize((700, 700), Image.ANTIALIAS)
background_image = ImageTk.PhotoImage(image2)
background_label = tk.Label(window, image=background_image)
# Keep a reference on the widget so the PhotoImage is not garbage-collected.
background_label.image = background_image
background_label.place(x=0, y=0)  # , relwidth=1, relheight=1)

# ---- Form title ----
l1 = tk.Label(window, text="Registration Form", font=("Times new roman", 30, "bold"), bg="#306EFF", fg="white")
l1.place(x=190, y=50)

# ---- Full name ----
l2 = tk.Label(window, text="Full Name :", width=12, font=("Times new roman", 15, "bold"), bg="snow")
l2.place(x=130, y=150)
t1 = tk.Entry(window, textvar=Fullname, width=20, font=('', 15))
t1.place(x=330, y=150)

# ---- Address ----
l3 = tk.Label(window, text="Address :", width=12, font=("Times new roman", 15, "bold"), bg="snow")
l3.place(x=130, y=200)
t2 = tk.Entry(window, textvar=address, width=20, font=('', 15))
t2.place(x=330, y=200)

# ---- E-mail ----
l5 = tk.Label(window, text="E-mail :", width=12, font=("Times new roman", 15, "bold"), bg="snow")
l5.place(x=130, y=250)
t4 = tk.Entry(window, textvar=Email, width=20, font=('', 15))
t4.place(x=330, y=250)

# ---- Phone number ----
l6 = tk.Label(window, text="Phone number :", width=12, font=("Times new roman", 15, "bold"), bg="snow")
l6.place(x=130, y=300)
t5 = tk.Entry(window, textvar=Phoneno, width=20, font=('', 15))
t5.place(x=330, y=300)

# ---- Gender: radio buttons write 1 (male) / 2 (female) into 'var' ----
l7 = tk.Label(window, text="Gender :", width=12, font=("Times new roman", 15, "bold"), bg="snow")
l7.place(x=130, y=350)
tk.Radiobutton(window, text="Male", padx=5, width=5, bg="snow", font=("bold", 15), variable=var, value=1).place(x=330,
                                                                                                                y=350)
tk.Radiobutton(window, text="Female", padx=20, width=4, bg="snow", font=("bold", 15), variable=var, value=2).place(
    x=440, y=350)

# ---- Age ----
l8 = tk.Label(window, text="Age :", width=12, font=("Times new roman", 15, "bold"), bg="snow")
l8.place(x=130, y=400)
t6 = tk.Entry(window, textvar=age, width=20, font=('', 15))
t6.place(x=330, y=400)

# ---- Username ----
l4 = tk.Label(window, text="User Name :", width=12, font=("Times new roman", 15, "bold"), bg="snow")
l4.place(x=130, y=450)
t3 = tk.Entry(window, textvar=username, width=20, font=('', 15))
t3.place(x=330, y=450)

# ---- Password and confirmation (masked input) ----
l9 = tk.Label(window, text="Password :", width=12, font=("Times new roman", 15, "bold"), bg="snow")
l9.place(x=130, y=500)
t9 = tk.Entry(window, textvar=password, width=20, font=('', 15), show="*")
t9.place(x=330, y=500)
l10 = tk.Label(window, text="Confirm Password:", width=13, font=("Times new roman", 15, "bold"), bg="snow")
l10.place(x=130, y=550)
t10 = tk.Entry(window, textvar=password1, width=20, font=('', 15), show="*")
t10.place(x=330, y=550)

# ---- Submit button: runs insert() defined above ----
btn = tk.Button(window, text="Register", bg="#306EFF", font=("", 20), fg="white", width=9, height=1, command=insert)
btn.place(x=260, y=620)

# tologin=tk.Button(window , text="Go To Login", bg ="dark green", fg = "white", width=15, height=2, command=login)
# tologin.place(x=330, y=600)

# Enter the Tk event loop; blocks until the window is destroyed.
window.mainloop()
"resmmm99@gmail.com"
] | resmmm99@gmail.com |
4264a34b29a88e2d471136659bc04171310f792e | a943f3aa41e7f3b4bf8e10d4121fdfce99a207b9 | /CTCI/Arrays and Strings/One_Away.py | 037b711364cfa37141d5ae6cd1091de57e57c1ef | [] | no_license | noahjpark/Algorithms | 3160ebce01c833de94223fc9fd90432a879cdc02 | fb4159370f96dbe48af44186b9bbe0e577ca1ce7 | refs/heads/master | 2023-06-18T21:00:35.328470 | 2021-07-14T13:40:47 | 2021-07-14T13:40:47 | 218,333,384 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,157 | py | '''
Noah Park
There are three types of edits that can be performed on strings:
insert a character, remove a character, or replace a character.
Given two strings, write a function to check if they are one
edit (or zero edits) away.
'''
def oneAway(str1, str2):
if(len(str1) == len(str2)):
return checkEqualLength(str1, str2)
elif(len(str1) - len(str2) == 1):
return checkNonEqualLength(str2, str1)
elif(len(str2) - len(str1) == 1):
return checkNonEqualLength(str1, str2)
else:
return False
def checkNonEqualLength(str1, str2):
i = 0
j = 0
while i < len(str1) and j < len(str2):
if str1[i] != str2[j]:
if i != j:
return False
j += 1
else:
i += 1
j += 1
return True
def checkEqualLength(str1, str2):
numDifference = 0
for i in range(0, len(str1)):
if str1[i] != str2[i]:
numDifference += 1
return numDifference <= 1
def main():
str1 = "pale"
str2 = "ple"
str3 = "pales"
str4 = "bale"
str5 = "bake"
print(oneAway(str1, str2))
print(oneAway(str3, str1))
print(oneAway(str1, str4))
print(oneAway(str1, str5))
if __name__ == '__main__':
main()
| [
"noreply@github.com"
] | noreply@github.com |
179481b626143f7f8d7b3e9a868b7138a4363cbd | b3d1761947d0223388bf720e86517ebea2310269 | /thesite/admin.py | 7ffb6ccec56c45035487b700ea1d3c9843280062 | [] | no_license | brica1000/Hrimnir | 4b632c70900a0a88ff8a61731ac4c7e25414e0cc | 05db6189fd72ab9f151e2f500f9e0b3e6b7faa45 | refs/heads/master | 2020-06-12T23:03:19.462437 | 2017-07-22T12:42:41 | 2017-07-22T12:42:41 | 75,422,702 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 485 | py | from django.contrib import admin
from .models import Conglomerate, Product, Cert, Verification
class ProductAdmin(admin.ModelAdmin):
    # Columns shown on the change-list page.  'approved_edit' is resolved by
    # Django on the model this admin class is registered with (Product, via
    # the admin.site.register(Product, ProductAdmin) call below) -- that
    # registration is the connection the original comment asked about.
    # NOTE(review): confirm 'approved_edit' exists on Product (field, method
    # or ModelAdmin callable); it is not defined in this file.
    list_display = ('name', 'approved_edit')
class ConglomerateAdmin(admin.ModelAdmin):
    # Same change-list layout as ProductAdmin; bound to the Conglomerate
    # model at registration time (see admin.site.register below).
    list_display = ('name', 'approved_edit')
# Attach each ModelAdmin to its model; Cert and Verification use the
# default ModelAdmin since no custom class is supplied.
admin.site.register(Conglomerate, ConglomerateAdmin)
admin.site.register(Product, ProductAdmin)
admin.site.register(Cert)
admin.site.register(Verification)
| [
"brica1000@msn.com"
] | brica1000@msn.com |
072478a3ee547f9f85be9b2c1e6a1cc17608e576 | 83ea820cb8902128ac5544565f07ef29e1ed8eae | /aiqiyi/lushi.py | 822393fc34b8ef4cbb2f2ce9ba1273f3f7ffaf4c | [] | no_license | LeoEatle/python_leetcode_practice | a48a17ba5d512bb7ecd141ab875d63993cc43d5b | 34b3e508be271aa5f52f8a3c34b0524747358978 | refs/heads/master | 2020-12-24T05:50:02.279914 | 2017-01-14T15:28:48 | 2017-01-14T15:28:48 | 73,447,100 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 633 | py | # coding=utf-8
def bag(n, c, w, v):
res = [[-1 for j in range(c + 1)] for i in range(n + 1)]
for j in range(c + 1):
res[0][j] = 0
for i in range(1, n + 1):
for j in range(1, c + 1):
res[i][j] = res[i - 1][j]
if j >= w[i - 1] and res[i][j] < res[i - 1][j - w[i - 1]] + v[i - 1]:
res[i][j] = res[i - 1][j - w[i - 1]] + v[i - 1]
return res
# Input line 1: "<capacity> <number_of_cards>"; then one "<weight> <value>"
# line per card.  NOTE: raw_input means this script targets Python 2.
user = raw_input().split(" ")
n = int(user[1])
c = int(user[0])
w = []
v = []
for i in range(n):
    card = raw_input().split(" ")
    w.append(int(card[0]))
    v.append(int(card[1]))
# Best total value achievable within capacity c (bottom-right DP cell).
res = bag(n, c, w, v)
print(res[n][c])
"liuyitao811@hotmail.com"
] | liuyitao811@hotmail.com |
7de2ec3667cf0defb17f51e3619a7db12f4d067a | 6440557df22018c670c39a606037d65ceec6e94a | /myjokes/myjokes/settings.py | 39e5fa2c3633300892fdb96bb881d9301f5e3a1b | [] | no_license | Imran-Gasanov/joke-project | 05dd15e4664b8dbcb13d3ea018f2053ba3d60887 | 327df2ac294d8d682204ed1fc66cacbcbc67a743 | refs/heads/master | 2023-05-05T18:55:09.619192 | 2021-05-28T14:15:32 | 2021-05-28T14:15:32 | 368,822,261 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,778 | py | """
"""
Django settings for myjokes project.

Generated by 'django-admin startproject' using Django 3.2.3.

For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""

from pathlib import Path

# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = 'django-insecure-%c=&r_v@+$@a%so4n13acn2l)y4mq7_s4jb!s#zyrbdvl+p6in'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# '*' accepts any Host header -- development convenience only.
ALLOWED_HOSTS = ['*']


# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # REST API + token auth / registration stack.
    'rest_framework',
    'rest_framework.authtoken',
    'rest_auth',
    'django.contrib.sites',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
    'rest_auth.registration',
    # Project app.
    'joke',
]

# Required by django.contrib.sites (and used by allauth).
SITE_ID = 1

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'myjokes.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'myjokes.wsgi.application'


# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}


# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]


# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/

STATIC_URL = '/static/'

# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field

DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'

# Print outgoing mail (e.g. registration confirmations) to the console.
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'

REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': [
        'rest_framework.authentication.TokenAuthentication',
    ],
    'DEFAULT_PERMISSION_CLASSES': [
        'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
    ],
}
| [
"imrangasanov1999@gmail.com"
] | imrangasanov1999@gmail.com |
a80cf6d1ddfc46a4bc219908bc8145a82db73edb | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_naturalism.py | 8c5e9015f19160ff616ae4d4cd686e3352b59c9f | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 442 | py |
# class header
class _NATURALISM():
    """Auto-generated vocabulary entry for the noun "naturalism"."""

    def __init__(self,):
        # Word metadata. `childen` keeps the original (misspelled) attribute
        # name so existing callers continue to work.
        self.name = "NATURALISM"
        self.definitions = [u'showing people and experiences as they really are, instead of suggesting that they are better than they really are or representing them in a fixed style: ']
        self.parents = []
        self.childen = []
        self.properties = []
        self.jsondata = {}
        self.specie = 'nouns'

    def run(self, obj1=None, obj2=None):
        """Return the entry's JSON payload.

        `obj1` and `obj2` are accepted for interface compatibility but are
        unused. The defaults were changed from mutable `[]` literals (which
        are shared across all calls) to `None`.
        """
        return self.jsondata
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
90b1b6c60dc890fdf2ea41bef43d217f4dc941ad | a72c46e03d6ed9824e56da0f9dce36260bdec992 | /mmcv-0.4.4/tests/test_registry.py | f25d193a8d19f7de00cdd7bb5915206e4989d33b | [
"Apache-2.0"
] | permissive | SirPok/Mask-mmdetection | f1869343dc807cd677f89b88f6e3abc7f73ea01f | 36cb0cc5783546d091de29e22b6fd44aaf3a1cbf | refs/heads/master | 2023-03-04T03:30:41.044719 | 2021-02-17T19:33:41 | 2021-02-17T19:33:41 | 339,834,205 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,266 | py | import pytest
import mmcv
def test_registry():
    """Exercise mmcv.Registry: registration, lookup, force-overwrite, repr."""
    registry_name = 'cat'
    CATS = mmcv.Registry(registry_name)
    assert CATS.name == registry_name
    assert CATS.module_dict == {}
    assert len(CATS) == 0

    # Register via decorator.
    @CATS.register_module
    class BritishShorthair:
        pass
    assert len(CATS) == 1
    assert CATS.get('BritishShorthair') is BritishShorthair

    # Register via a plain call.
    class Munchkin:
        pass
    CATS.register_module(Munchkin)
    assert len(CATS) == 2
    assert CATS.get('Munchkin') is Munchkin
    assert 'Munchkin' in CATS

    # Re-registering the same name is rejected unless force=True.
    with pytest.raises(KeyError):
        CATS.register_module(Munchkin)
    CATS.register_module(Munchkin, force=True)
    assert len(CATS) == 2
    with pytest.raises(KeyError):
        @CATS.register_module
        class BritishShorthair:
            pass

    @CATS.register_module(force=True)
    class BritishShorthair:
        pass
    assert len(CATS) == 2

    # Unknown keys resolve to None / not-contained.
    assert CATS.get('PersianCat') is None
    assert 'PersianCat' not in CATS

    # The order of dict keys is not preserved in Python 3.5.
    assert repr(CATS) in [
        "Registry(name=cat, items=['BritishShorthair', 'Munchkin'])",
        "Registry(name=cat, items=['Munchkin', 'BritishShorthair'])"
    ]

    # The registered module must be a class.
    with pytest.raises(TypeError):
        CATS.register_module(0)
def test_build_from_cfg():
    """Exercise mmcv.build_from_cfg with valid and invalid configurations."""
    BACKBONES = mmcv.Registry('backbone')

    @BACKBONES.register_module
    class ResNet:
        def __init__(self, depth, stages=4):
            self.depth = depth
            self.stages = stages

    @BACKBONES.register_module
    class ResNeXt:
        def __init__(self, depth, stages=4):
            self.depth = depth
            self.stages = stages

    # Build by type name; constructor defaults apply.
    cfg = dict(type='ResNet', depth=50)
    model = mmcv.build_from_cfg(cfg, BACKBONES)
    assert isinstance(model, ResNet)
    assert model.depth == 50 and model.stages == 4

    # default_args fills in missing constructor arguments.
    cfg = dict(type='ResNet', depth=50)
    model = mmcv.build_from_cfg(cfg, BACKBONES, default_args={'stages': 3})
    assert isinstance(model, ResNet)
    assert model.depth == 50 and model.stages == 3

    cfg = dict(type='ResNeXt', depth=50, stages=3)
    model = mmcv.build_from_cfg(cfg, BACKBONES)
    assert isinstance(model, ResNeXt)
    assert model.depth == 50 and model.stages == 3

    # cfg['type'] may also be the class object itself.
    cfg = dict(type=ResNet, depth=50)
    model = mmcv.build_from_cfg(cfg, BACKBONES)
    assert isinstance(model, ResNet)
    assert model.depth == 50 and model.stages == 4

    # non-registered class
    with pytest.raises(KeyError):
        cfg = dict(type='VGG')
        model = mmcv.build_from_cfg(cfg, BACKBONES)

    # cfg['type'] should be a str or class
    with pytest.raises(TypeError):
        cfg = dict(type=1000)
        model = mmcv.build_from_cfg(cfg, BACKBONES)

    # cfg should contain the key "type"
    with pytest.raises(TypeError):
        cfg = dict(depth=50, stages=4)
        model = mmcv.build_from_cfg(cfg, BACKBONES)

    # incorrect registry type
    # BUG FIX: the dict used to be created without being assigned, so the
    # call below silently reused the previous cfg; it is now assigned.
    with pytest.raises(TypeError):
        cfg = dict(type='ResNet', depth=50)
        model = mmcv.build_from_cfg(cfg, 'BACKBONES')

    # incorrect default_args type
    # BUG FIX: same missing assignment as above.
    with pytest.raises(TypeError):
        cfg = dict(type='ResNet', depth=50)
        model = mmcv.build_from_cfg(cfg, BACKBONES, default_args=0)
| [
"s6030211031@phuket.psu.ac.th"
] | s6030211031@phuket.psu.ac.th |
6c903615d78a14c4d885bcb3dbc9ea600c0e8b51 | 5b1095dbf692f0f92fe0eb8ea4718977349c2138 | /3_MyNoDecreasingSort.py | f7cf3f3632d1c605d8e45fd6049bbf93900623de | [] | no_license | Maoxiaoying/MY_hogwarts_python-2- | d5a9fe8f9740d0dd1d4a6200b770880a694949c1 | b091e1ac735194f4d1ac68538cf45918530b7c0b | refs/heads/master | 2022-05-26T06:38:30.146063 | 2020-05-01T00:37:46 | 2020-05-01T00:37:46 | 258,985,277 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,066 | py | """
Assignment 3:
Use a list comprehension for the following problem.
Given an integer array A sorted in non-decreasing order, return a new array of the squares of each number, also sorted in non-decreasing order.
"""
# Conventional two-pointer approach.
# Analysis: after squaring, a non-decreasingly sorted array is largest at
# both ends, so walk one index in from each end and always collect the
# bigger square first, then reverse for non-decreasing order.
from ModuleSort.mysort import bubble_sort  # project-local sorting helper

A = [-4, -1, 0, 3, 10]  # input array, sorted non-decreasingly
B = []                   # squares, collected largest-first
i = 0                    # left cursor, starts at the head
j = len(A) - 1           # right cursor, starts at the tail
while i <= j:            # stop once the cursors cross
    if A[i] ** 2 < A[j] ** 2:
        # Right end holds the bigger square; take it and move inward.
        B.append(A[j] ** 2)
        j = j - 1
    else:
        B.append(A[i] ** 2)
        i = i + 1
B.reverse()
print(B)

# List-comprehension approach: square everything, then sort.
C = [x * x for x in A]  # squares of every element of A
bubble_sort(C)          # sort the squared list in place
print(C)                # print the sorted squares
| [
"1151293557@qq.com"
] | 1151293557@qq.com |
a8efa3d067fabf99c931a123bd34c575d677a2ea | 591ae6348e53bf797f21c7e3ed4a26fb975422c2 | /lightdataparser/parsing.py | 3b3a2f480872d98a810eb393918edf4615168416 | [
"MIT"
] | permissive | xenking/lightdataparser | 415d1f7cb27b68c874a96907a5da7e1202adfb6b | 299cc188dcfd40c5b1a7573ada2281e8cbdc065d | refs/heads/master | 2020-05-17T09:11:03.369273 | 2019-05-16T08:26:14 | 2019-05-16T08:26:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,951 | py | """
Data conversion and processing helpers.
"""
import csv
import json
import re
from functools import singledispatch
from typing import List, Optional, Tuple
from xml.etree import ElementTree

from lightdataparser import config
from lightdataparser.datatype import DataNode, CsvObject, JsonObject, XmlObject
@singledispatch
def parse(file: object) -> Tuple[dict, list]:
    """Generic dispatch entry point for converting a data file.

    Concrete handlers are registered per file-object type; this fallback
    fires for any unsupported type.

    :param file: object describing the file to parse
    :return: tuple of (header, rows); empty for unsupported types
    """
    print("Unsupported file type: {}".format(file))
    empty_header, empty_rows = {}, []
    return empty_header, empty_rows
@parse.register(CsvObject)
def _parse_csv(file) -> Tuple[dict, list]:
    """Parse a delimited text file: first row is the header, rest are data."""
    with open(file.path, 'r') as f:
        rows = list(csv.reader(f, delimiter=file.delimeter))
    raw_header = rows.pop(0)
    return compose_header(raw_header), rows
@parse.register(JsonObject)
def _parse_json(file) -> Optional[Tuple[dict, list]]:
    """Parse a JSON file shaped as {key: [{column: value, ...}, ...]}.

    Integers are kept as strings (parse_int=str) so cell values stay textual.

    :param file: JsonObject with the path to read
    :return: (header, rows), or None when the file is not valid JSON.
        The return annotation was corrected to Optional: the original
        already returned None on a decode error despite claiming Tuple.
    """
    with open(file.path, 'r') as f:
        try:
            data = json.load(f, parse_int=str)
        except json.JSONDecodeError as e:
            print("Can't load json data in %s: %s" % (file.path, e))
            return None
    # Split payload into header and data: take the first top-level value
    # (a list of row dicts) and read column names from its first row.
    data = next(iter(data.values()))
    nodes = list(next(iter(data)).keys())
    header = compose_header(nodes)
    data = [list(raw.values()) for raw in data]
    return header, data
@parse.register(XmlObject)
def _parse_xml(file) -> Tuple[dict, list]:
    """Parse an XML file laid out as repeated groups of named items."""
    root = ElementTree.parse(file.path).getroot()
    # Column names come from the item-name attributes of the first group.
    first_group = root.find(config.xml_node_group)
    nodes = [t.attrib[config.xml_node_header] for t in first_group.findall(config.xml_node_item)]
    # One data row per group, taking each item's data-node text.
    data = [
        list(t.find(config.xml_node_data).text for t in tag.findall(config.xml_node_item))
        for tag in root.findall(config.xml_node_group)
    ]
    return compose_header(nodes), data
# parse xml with regex
# re.match(r"<(\S+)(?:\s+(\w+)=\"(\w+)?\")?[^>]*>\s*(\S*)\s*<\/\1>",str, re.I)
def compose_header(header: list) -> dict:
    """Group indexed column names like "Name12" into {"Name": [12, ...]}.

    Each element is matched against letters-then-digits (case-insensitive,
    anchored at the start of the string); elements that do not match are
    silently skipped.

    :param header: list of raw column-name strings
    :return: mapping of column base name to the list of its integer indices
    """
    ids: dict = {}
    for el in header:
        match = re.match(r"([a-z]+)([0-9]+)", el, re.I)
        if match:
            key, index = match.groups()
            # setdefault replaces the original separate `keys`-set
            # bookkeeping with a single idiomatic lookup.
            ids.setdefault(key, []).append(int(index))
    return ids
def split_nodes(header: dict) -> Tuple[List[DataNode], List[Tuple[int, int]]]:
    """Turn a parsed header into DataNode objects plus row-slice boundaries.

    :param header: mapping produced by compose_header
    :return: (nodes, bounds) where bounds[i] is the half-open column span
        of nodes[i] within a raw data row
    """
    nodes = []
    bounds = []
    offset = 0
    for key, value in header.items():
        width = len(value)
        nodes.append(DataNode(key, value))
        bounds.append((offset, offset + width))
        offset += width
    return nodes, bounds
def parse_nodes(nodes: list, bounds: list, data: list):
    """Distribute every raw row across the nodes, slice by slice.

    Runs sequentially; the per-row work is independent, so it could be
    parallelised later.

    :param nodes: list of node objects supporting append()
    :param bounds: per-node (start, end) column spans
    :param data: list of raw rows
    """
    for raw in data:
        for node, bound in zip(nodes, bounds):
            start, end = bound
            node.append(raw[start:end])
| [
"xenkings@gmail.com"
] | xenkings@gmail.com |
80c838c33d1f7524627c025af19bf3861fc45233 | de419e535b782ce72d8808b31ab51b7e5893013f | /etf50-main.py | 7d437cce04666d3be84c2b664bf6a2b715148fb0 | [] | no_license | wyj0613/StockWeb | 24c57835504011884cc7e8fe27589a10ed612d46 | b8cc79194b3ed50b9d9c7a3a806fd8d091b797d7 | refs/heads/master | 2023-01-31T16:53:16.622827 | 2020-12-16T08:55:45 | 2020-12-16T08:55:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,617 | py | ## For more details, please see the example 9-8 in book of
# "Python Programming" by ChenChunHui.
# JamesYe 2019-9-10 Happy Teacher's Day
# This is main enterance of project.
# visit http://127.0.0.1:5000/, you can see the graphic
from flask import Flask
from flask import render_template
from flask import request
from flask import url_for
from flask import redirect
from chart_plot import Chart_Plot
import datetime
# Flask application and module-level shared state.
app = Flask(__name__)
chart = Chart_Plot("20200101","20201101")  # plotter for the default date range (YYYYMMDD)
context = {}  # dict shared across routes; holds the rendered graph under 'graph'
# Date helper: beforeOfDay=1 -> yesterday, beforeOfDay=N -> N days ago.
def getdate(beforeOfDay):
    """Return the date `beforeOfDay` days before today, as 'YYYY-MM-DD'."""
    target = datetime.datetime.now() - datetime.timedelta(days=beforeOfDay)
    return target.strftime('%Y-%m-%d')
def get_value():
    """Read the chart form fields, substituting defaults for blank values."""
    form = request.form
    graph_type = form.get("graph_type")
    stock_id = form.get("stock_id")
    stock_id2 = form.get("stock_id2")
    start_date = form.get("start_date")
    end_date = form.get("end_date")
    period = form.get("period")
    # Blank dates default to roughly the last year (365 days ago .. yesterday).
    if start_date == "":
        start_date = getdate(365)
    if end_date == "":
        end_date = getdate(1)
    # Blank stock ids fall back to fixed defaults.
    if stock_id == "":
        stock_id = "000001"
    if stock_id2 == "":
        stock_id2 = "000002"
    return {
        "graph_type": graph_type,
        "stock_id": stock_id,
        "stock_id2": stock_id2,
        "start_date": start_date,
        "end_date": end_date,
        "period": period
    }
@app.route('/',methods=["POST","GET"])
def index():
    """Home page: render the default two-line chart."""
    graph = chart.twoline_graph()
    context['graph'] = graph
    return render_template("chars.html", title='Home', context=context)
@app.route('/search',methods=["POST","GET"])
def search():
    """Render the chart requested by the submitted search form."""
    # Collect the user's form input (with defaults filled in).
    values = get_value()
    print(values)
    graph_type = values.get("graph_type")
    # Build a plotter for the requested date range and stock pair.
    plotter = Chart_Plot(start_date=values.get("start_date"), end_date=values.get("end_date"),stock1=values.get("stock_id"), stock2=values.get("stock_id2"))
    # Dispatch on the requested chart type (labels are user-facing Chinese).
    if graph_type == "半年线图":
        context['graph'] = plotter.twoline_graph()
    elif graph_type == "k线图":
        context['graph'] = plotter.candle_stick(values.get("period"))
    elif graph_type == "高低点":
        context['graph'] = plotter.high_low()
    return render_template("chars.html", title='Home', context=context)
# Run the development server when executed directly.
if __name__ == '__main__':
    app.run(debug=True)
| [
"52725224+Yourdaylight@users.noreply.github.com"
] | 52725224+Yourdaylight@users.noreply.github.com |
287b74a838cf6babd96618f2096a2deafef82b8d | 4625ba2aa432799bc25bacc5b9994c163693ad5b | /_profile.py | d069c2d6a3e75537790f3bdce1f77e43b56ee870 | [] | no_license | echo28OoO/bbs | f0ab0e35e93d1ad170ae000c89a5c3693a084b8f | 5c1824b4ff5275ee26a9451d6b741aa7265728a1 | refs/heads/master | 2023-05-01T08:16:35.013274 | 2019-09-05T16:27:03 | 2019-09-05T16:27:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 794 | py | import app
from routes.index import profile
import cProfile
from pstats import Stats
def profile_request(path, cookie, f):
    """Profile view function `f` inside a fake request context for `path`.

    Dumps the collected stats to 'gua_profile.pstat' and prints the
    cumulative-time entries whose name matches '.*web19.*'.
    """
    flask_app = app.configured_app()
    profiler = cProfile.Profile()
    headers = {'Cookie': cookie}
    # Only the view call itself runs with the profiler enabled.
    with flask_app.test_request_context(path, headers=headers):
        profiler.enable()
        f()
        profiler.disable()
    profiler.create_stats()
    stats = Stats(profiler).sort_stats('cumulative')
    stats.dump_stats('gua_profile.pstat')
    stats.print_stats('.*web19.*')
# Manual entry point: profile the /profile view with a fixed session cookie.
if __name__ == '__main__':
    path = '/profile'
    cookie = 'session=eyJ1c2VyX2lkIjoyfQ.XWPM5g.3acfbweSNyouMk7BM91Zq-jdy-Y'
    profile_request(path, cookie, profile)
| [
"50176865+siO-X@users.noreply.github.com"
] | 50176865+siO-X@users.noreply.github.com |
bf5dc29023067b377e9be2c8a51b47247ca9a81a | 5da5473ff3026165a47f98744bac82903cf008e0 | /packages/google-cloud-dialogflow-cx/samples/generated_samples/dialogflow_v3_generated_environments_deploy_flow_async.py | ade1585fbfce343c1ef8b1d490219dca511c0ddb | [
"Apache-2.0"
] | permissive | googleapis/google-cloud-python | ed61a5f03a476ab6053870f4da7bc5534e25558b | 93c4e63408c65129422f65217325f4e7d41f7edf | refs/heads/main | 2023-09-04T09:09:07.852632 | 2023-08-31T22:49:26 | 2023-08-31T22:49:26 | 16,316,451 | 2,792 | 917 | Apache-2.0 | 2023-09-14T21:45:18 | 2014-01-28T15:51:47 | Python | UTF-8 | Python | false | false | 1,997 | py | # -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for DeployFlow
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dialogflow-cx
# [START dialogflow_v3_generated_Environments_DeployFlow_async]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google.cloud import dialogflowcx_v3
async def sample_deploy_flow():
    """Deploy a flow version to an environment and await the long-running operation."""
    # Create a client
    client = dialogflowcx_v3.EnvironmentsAsyncClient()

    # Initialize request argument(s)
    request = dialogflowcx_v3.DeployFlowRequest(
        environment="environment_value",
        flow_version="flow_version_value",
    )

    # Make the request
    operation = client.deploy_flow(request=request)

    print("Waiting for operation to complete...")

    response = (await operation).result()

    # Handle the response
    print(response)
# [END dialogflow_v3_generated_Environments_DeployFlow_async]
| [
"noreply@github.com"
] | noreply@github.com |
f3fb7e68e6ab6946084708c29c3acc4018c11d41 | 5aed34c47e330fbdb04f2bbc8a3bd8dcb02421ba | /3.MilestoneProject-2/BlackJack/package/sub_modules/EvaluateResult.py | 4a910d625ea352e9967ed7db7ff93d546df88fa7 | [] | no_license | rraghu214/Complete-Python-3-Bootcamp | 5535cab7c39211051e99fc88a5ecac792d5067f5 | 2212590a2f8ff7303be8ab79220f33e1b61d89c1 | refs/heads/master | 2022-11-28T14:46:05.634348 | 2020-08-09T10:10:24 | 2020-08-09T10:10:24 | 266,571,998 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,075 | py | #import
class Evaluate():
    """Blackjack hand evaluation: full-round and per-actor partial results."""

    def __init__(self):
        pass

    def full_eval(self, p_score, d_score, p_iter, d_iter):
        '''
        Full evaluation of the current round, used for the initial deal and
        after both sides have drawn.

        :param p_score: player's hand total
        :param d_score: dealer's hand total
        :param p_iter: player's draw count (1 == initial deal)
        :param d_iter: dealer's draw count (1 == initial deal)
        :return: one of 'PUSH', 'PLAYER BLACKJACK', 'DEALER BLACKJACK',
            'PLAYER WINS', 'PLAYER BUST' or 'CONTINUE'
        '''
        if p_score == d_score == 21:
            return 'PUSH'
        elif p_score == 21 and d_score != 21 and p_iter == 1:
            return 'PLAYER BLACKJACK'
        elif p_score != 21 and d_score == 21 and d_iter == 1:
            return 'DEALER BLACKJACK'
        elif (p_score <= 21 and d_score < p_score and d_score >= 17) or d_score > 21:
            return 'PLAYER WINS'
        # NOTE(review): this branch also fires when the dealer merely
        # outscores the player, so the 'PLAYER BUST' label can be
        # misleading - confirm intended before renaming the result string.
        elif (d_score <= 21 and d_score > p_score and d_score >= 17) or p_score > 21:
            return 'PLAYER BUST'
        elif p_score == d_score:
            return 'PUSH'
        else:
            return 'CONTINUE'

    def dealer_eval(self, dealer_score):
        '''
        Check whether the dealer may keep drawing: the dealer stops at 17.

        BUG FIX: the original compared the method object (`dealer_eval`)
        against 17, which raises TypeError in Python 3; it now compares
        the `dealer_score` argument.
        '''
        if dealer_score >= 17:
            return 'BREAK'
        else:
            return 'CONTINUE'

    def player_eval(self, player_score):
        '''
        Check whether the player may keep drawing (only while below 21).
        Score == 21 --> 'WINS'; score > 21 --> 'BUST'; otherwise 'CONTINUE'.
        '''
        if player_score > 21:
            return 'BUST'
        elif player_score == 21:
            return 'WINS'
        else:
            return 'CONTINUE'
# Ad-hoc manual check when run directly.
# NOTE: `player_eval` / `dealer_eval` below shadow the Evaluate method
# names of the same spelling - harmless in this script scope.
if __name__ == '__main__':
    player_eval = Evaluate()
    player_score = 19
    #player_res = player_eval.initial_eval(player_score)
    #print(player_res)
    #player_res = player_eval.evaluate_result(is_player=True,player_score=player_score)
    #print(player_res)
    dealer_eval = Evaluate()
    dealer_score = 22
    #dealer_res=dealer_eval.evaluate_result(is_player=False,player_score=player_score,dealer_score=dealer_score)
#print(dealer_res) | [
"rraghu214@outlook.com"
] | rraghu214@outlook.com |
79afdf13c61a200d338ede0d864a956c63fabe3f | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /little_person_or_time/year/be_long_woman/part_and_thing/same_fact.py | 4a49f93faba8dbcc80ef785b249febee2ff44e24 | [] | no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 226 | py |
#! /usr/bin/env python
def public_company(str_arg):
life_or_long_week(str_arg)
print('tell_part')
def life_or_long_week(str_arg):
print(str_arg)
if __name__ == '__main__':
public_company('want_next_thing')
| [
"jingkaitang@gmail.com"
] | jingkaitang@gmail.com |
21dc338240315e8019b82bc2685fd02ffe26bea0 | 4589f8036582d6acc2286c773ae0d5997d8d01d8 | /app/tests.py | 66b5c78d92fde4f8fdbe163aef124a0a6e4bf7d9 | [] | no_license | pgossman/chore-manager | c35c13933f6cabf84f7aa34621e296d62cc3ee80 | a63ae4139874f3d4993fb3fc32e0eceb0da8a085 | refs/heads/master | 2023-05-07T03:24:21.054664 | 2021-05-23T22:07:51 | 2021-05-23T22:07:51 | 289,840,963 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,411 | py | from typing import Callable
from django.test import Client, TestCase
from django.contrib.auth.models import User
from django.utils import timezone
from .models import (
Chore,
ChoreAssignment,
ChoreInstance,
DayOfWeek,
PartOfDay,
ChoreStatus,
)
class InstanceProcessTests(TestCase):
USER1_NAME = "johnny"
USER1_PASS = "abc123"
USER2_NAME = "grace"
USER2_PASS = "def123987"
def setUp(self):
self.client = Client()
self.user1 = User.objects.create_user(
username=self.USER1_NAME,
email=f"{self.USER1_NAME}@rocket.gov",
password=self.USER1_PASS,
)
self.user2 = User.objects.create_user(
username=self.USER2_NAME,
email=f"{self.USER2_NAME}@rocket.gov",
password=self.USER2_PASS,
)
def _login_user1(self) -> None:
assert self.client.login(username=self.USER1_NAME, password=self.USER1_PASS)
def _login_user2(self) -> None:
assert self.client.login(username=self.USER2_NAME, password=self.USER2_PASS)
def _create_chore(self, name: str) -> Chore:
return Chore.objects.create(name=name)
def _create_assignment(
self, user: User, chore: Chore, dow: DayOfWeek, time: PartOfDay
) -> ChoreAssignment:
assignment = ChoreAssignment.objects.create(chore=chore, dow=dow, time=time,)
assignment.users.add(user)
assignment.save()
return assignment
def _create_instance(
self, user: User, assignment: ChoreAssignment
) -> ChoreInstance:
return ChoreInstance.objects.create(
user=user,
assignment=assignment,
status=ChoreStatus.ASSIGNED,
due_date=timezone.now(),
)
def _get_content(self, url: str) -> str:
response = self.client.get(url)
return response.content.decode("utf-8")
def test_view_assignments_on_homepage(self):
self._login_user1()
chore1_name = "sweep kitchen floors"
dow1 = DayOfWeek.MONDAY
time1 = PartOfDay.AFTERNOON
chore2_name = "dishes"
dow2 = DayOfWeek.TUESDAY
time2 = PartOfDay.EVENING
chore1 = self._create_chore(chore1_name)
chore2 = self._create_chore(chore2_name)
self._create_assignment(self.user1, chore1, dow1, time1)
self._create_assignment(self.user1, chore2, dow2, time2)
content = self._get_content("/")
self.assertTrue(f"{chore1_name} {dow1.label} {time1.label}" in content)
self.assertTrue(f"{chore2_name} {dow2.label} {time2.label}" in content)
def test_view_instances_on_homepage(self):
self._login_user1()
# TODO: add multiple instances
chore_name = "sweep kitchen floors"
dow = DayOfWeek.MONDAY
time = PartOfDay.AFTERNOON
chore = self._create_chore(chore_name)
assignment = self._create_assignment(self.user1, chore, dow, time)
# Creating an instance gives the user the option to submit
self.assertTrue("No chores available for submission" in self._get_content("/"))
instance = self._create_instance(self.user1, assignment)
self.assertFalse("No chores available for submission" in self._get_content("/"))
self.assertTrue("Submit a chore" in self._get_content("/"))
# New instance is marked as assigned
self.assertEqual(ChoreStatus.ASSIGNED, instance.status)
def test_submit_assigned_instance(self):
self._login_user1()
chore_name = "sweep kitchen floors"
dow = DayOfWeek.MONDAY
time = PartOfDay.AFTERNOON
chore = self._create_chore(chore_name)
assignment = self._create_assignment(self.user1, chore, dow, time)
instance = self._create_instance(self.user1, assignment)
self.assertEqual(ChoreStatus.ASSIGNED, instance.status)
instance_notes = "Hello sorry its a little late"
self.client.post(
"/instance/submit", {"instance": instance.id, "notes": instance_notes}
)
# Submitted chore is marked as submitted, notes are attached
instance = ChoreInstance.objects.filter(id=instance.id).get()
self.assertEqual(ChoreStatus.SUBMITTED, instance.status)
self.assertEqual(instance_notes, instance.notes)
def test_review_submitted_instance(self):
pass
| [
"me@paulgossman.com"
] | me@paulgossman.com |
dce33266677a3e01c6ff99c2c720c7dfc65d296c | d7d7873d0bea9185a252916e3599b33e301d394c | /setup.py | 8f0a378e044d453b35d69a16563e88fab08a6dcc | [] | no_license | KennethJHan/pip_test | b16a3248a50025075cc3db916d07ee9761cc9b9f | 89e957d7059e303e5b640a1f2e514c437b616c10 | refs/heads/main | 2023-01-12T02:00:35.976500 | 2020-11-18T07:36:52 | 2020-11-18T07:36:52 | 313,856,656 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 570 | py | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="gitandpip",
version="0.0.1",
author="kenneth joohyun han",
author_email="kenneth.jh.han@snu.ac.kr",
description="It's pip... with git.",
long_description=long_description,
url="https://github.com/KennethJHan/pip_test",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
| [
"kenneth.jh.han@gmail.com"
] | kenneth.jh.han@gmail.com |
9a608545f0f62d2b6aee9b76e96f8b7b1d1a2bbb | e3ba772036723637c55a548aa8fd0807151b4130 | /domain/mouse.py | dd8ce49f2e87b3c8acd150aca4bcaed9e17feaf7 | [
"MIT"
] | permissive | hesslink111/neuralnetworknibbles | d711527fcdb98826b89bb3a9630aa1b759872f89 | a902a845eddbb6ed43ab4f8b1f6ae71660cc1e21 | refs/heads/master | 2020-07-01T08:13:30.010892 | 2016-12-14T22:17:08 | 2016-12-14T22:17:08 | 74,090,023 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 687 | py | from domain.position import Position
class Mouse:
"""Domain class for representing a mouse in a game."""
def __init__(self, board, xorshift, position=None):
self.position = position
self.board = board
self.xorshift = xorshift
def randomize_position(self, hideprev=True):
if hideprev:
self.board.clear_piece_at(self.position)
self.position = Position(self.xorshift.randrange(0, 8), self.xorshift.randrange(0, 8))
while self.board.piece_at(self.position) == -1:
self.position = Position(self.xorshift.randrange(0, 8), self.xorshift.randrange(0, 8))
self.board.set_piece_at(self.position, 1)
| [
"hesslink111@gmail.com"
] | hesslink111@gmail.com |
f982e7aca4bbe4938af7ef1ab99c9793bdb6a151 | f2319ccd8ac62ee27330c28a199dda869ab4ce44 | /string/medium/0093_restore_ip_address.py | 231e6d5d492da5ef56f8462b67375938f3d542d6 | [] | no_license | mike-chesnokov/leetcode | 0159f9620cc2caa2310867df31cf0cc80ebad167 | fad2fefe333d405ac440a3e69ffd510f5a4184d4 | refs/heads/master | 2023-01-22T07:51:14.235744 | 2023-01-15T19:59:36 | 2023-01-15T19:59:36 | 177,213,912 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,519 | py | """
93. Restore IP Addresses
A valid IP address consists of exactly four integers separated by single dots.
Each integer is between 0 and 255 (inclusive) and cannot have leading zeros.
For example, "0.1.2.201" and "192.168.1.1" are valid IP addresses,
but "0.011.255.245", "192.168.1.312" and "192.168@1.1" are invalid IP addresses.
Given a string s containing only digits,
return all possible valid IP addresses that can be formed by inserting dots into s.
You are not allowed to reorder or remove any digits in s. You may return the valid IP addresses in any order.
Example 1:
Input: s = "25525511135"
Output: ["255.255.11.135","255.255.111.35"]
Example 2:
Input: s = "0000"
Output: ["0.0.0.0"]
Example 3:
Input: s = "101023"
Output: ["1.0.10.23","1.0.102.3","10.1.0.23","10.10.2.3","101.0.2.3"]
Constraints:
1 <= s.length <= 20
s consists of digits only.
"""
class Solution:
def __init__(self):
self.result = []
@staticmethod
def insert_dot(string: str, ind: int) -> str:
return string[:ind] + '.' + string[ind:]
@staticmethod
def get_ind_candidates(string: int) -> List[int]:
"""
Return candidates from right dot to the end of string
"""
string_len = len(string)
# find the most right dot position
right_dot_ind = string.rfind('.')
# in case there is no dot
if right_dot_ind == -1:
start_ind = 1
else:
start_ind = right_dot_ind + 2
# print("start_ind = ", start_ind)
return [ind for ind in range(start_ind, string_len)]
@staticmethod
def is_valid(string: str) -> bool:
for value in string.split('.'):
if len(value) > 1 and value[0] == '0':
return False
if int(value) > 255:
return False
return True
def backtracking(self, string: str) -> List[str]:
"""
Bcktracking with dot position candidates
"""
# ip has 3 dots
if string.count('.') == 3 and self.is_valid(string):
self.result.append(string)
return
# print("string = ", string)
for ind in self.get_ind_candidates(string):
# print("ind = ", ind)
new_string = self.insert_dot(string, ind)
# print("new_string = ", new_string)
self.backtracking(new_string)
def restoreIpAddresses(self, s: str) -> List[str]:
self.backtracking(s)
return self.result
| [
"mchesnokov@uma.tech"
] | mchesnokov@uma.tech |
fcc6cc291f03c38e6c6f5ca02d7a95e7ee6d6f7d | 5739a5b4fc28447dedbc2de07ad2a086118f9e27 | /mysite/settings.py | b94b560b9d6e1a7b4c080d80e605095bb914b637 | [] | no_license | 2ez4salt/djangoblog | 07357bd2941e6a2e4b1afd0db14f4c55fe493395 | d48e04bbb130f5daa52be2fcbd9ea997e0e5a4fe | refs/heads/master | 2021-05-04T01:32:58.924732 | 2018-02-05T19:50:54 | 2018-02-05T19:50:54 | 120,357,599 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,118 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'djs3bx!^q@g9g$8))=smq*4r--ez0)879h+*eygh5-6o66aqk5'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Istanbul'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
| [
"talhaslt@gmail.com"
] | talhaslt@gmail.com |
9f3a4c72756e26bb17b1fe4a87c755b5e04cd441 | ab174d6a1c5effdaab4a49015987c44909680792 | /p4/solve.py | cf24db71cb7964f30b8b21b561e3433d28b73124 | [] | no_license | carrdelling/AdventOfCode2018 | 2b26ed6cae8e48f473243e156d528b17fcb71584 | c42f29d684ca7fb1954c3c1d45031e837d8c818a | refs/heads/master | 2022-01-14T10:01:14.460444 | 2022-01-02T16:35:59 | 2022-01-02T16:35:59 | 160,434,068 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,569 | py | import datetime as dt
from collections import defaultdict, Counter

# Advent of Code 2018, day 4.
# Parse the guard-shift log into (timestamp, action) pairs and replay it.
log = []
with open('input_data') as in_f:
    for row in in_f:
        timestamp, action = row.strip().split(']')
        _time = dt.datetime.strptime(timestamp[1:], "%Y-%m-%d %H:%M")
        log.append((_time, action.strip()))
log.sort()

# Per guard: total minutes asleep, and the exact minute list of each nap.
# NOTE: only start.minute is used, which assumes each nap stays within a
# single hour (the midnight hour in this puzzle's input) - confirm.
guard_id = None
start = None
sleep_time = None
sum_sleep = defaultdict(int)
sleep_periods = defaultdict(list)
for _time, action in log:
    if 'Guard' in action:
        guard_id = action.split()[1]
        start = None
    if 'falls' in action:
        start = _time
    if 'wakes' in action:
        sleep_time = int((_time - start).total_seconds() / 60.0)
        start_minute = start.minute
        sum_sleep[guard_id] += sleep_time
        sleep_periods[guard_id].append([start_minute + i for i in range(sleep_time)])

# Part 1: guard with the most total sleep; multiply his id by his
# most-frequently-slept minute. Guard ids look like '#123': [1:] strips '#'.
lazy_guard = sorted(sum_sleep.items(), key=lambda x: -x[1])[0]
sleep_pattern = Counter(minute for night in sleep_periods[lazy_guard[0]] for minute in night)
quiet_minute = sleep_pattern.most_common(1)[0][0]
plan = int(lazy_guard[0][1:]) * quiet_minute

# Part 2: for every guard find his most-slept minute, then take the guard
# whose favourite minute has the highest count.
all_quiet_minutes = []
for guard, sleep_patterns in sleep_periods.items():
    sleep_pattern = Counter(minute for night in sleep_patterns for minute in night)
    quiet_minute, times = sleep_pattern.most_common(1)[0]
    all_quiet_minutes.append((guard, quiet_minute, times))
laziest_guard, quiet_minute, zzz_times = sorted(all_quiet_minutes, key=lambda x: -x[2])[0]
second_plan = int(laziest_guard[1:]) * quiet_minute

print(f'P4-1: {plan}')
print(f'P4-2: {second_plan}')
| [
"carrdelling@gmail.com"
] | carrdelling@gmail.com |
b5430eddf82635dbf665e9178008d4abf5933bc3 | 1e0ed7e82ed271067510c30c003b9bb672bae000 | /apps/belt1_app/apps.py | 689162a090d07296391d6d3829fc79bf699d4572 | [] | no_license | harshi-agarwal/python_beltexam | 990a3fe5525cc505d98df5388754250ab3542cb6 | 5811d0178661651bcf655e810f6e6a0f98ed905b | refs/heads/master | 2021-01-12T16:04:03.874975 | 2016-10-25T19:18:14 | 2016-10-25T19:18:14 | 71,929,960 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 133 | py | from __future__ import unicode_literals
from django.apps import AppConfig
class Belt1AppConfig(AppConfig):
name = 'belt1_app'
| [
"harshita.agarwal211@gmail.com"
] | harshita.agarwal211@gmail.com |
3aa42dfe75937de11efba3950fb0301198e88b6a | 3d4fcc7cbfafc4aaebea8e08d3a084ed0f0d06a1 | /Programme_2/Creation_donnees/MIDI/bk_xmas4fMidiSimple.py | cc8ced8f4e27729a020952c8b025d1842f14d469 | [] | no_license | XgLsuLzRMy/Composition-Musicale-par-Reseau-de-Neurones | 0421d540efe2d9dc522346810f6237c5f24fa3bf | 518a6485e2ad44e8c7fbae93c94a9dc767454a83 | refs/heads/master | 2021-09-03T20:43:01.218089 | 2018-01-11T20:02:00 | 2018-01-11T20:02:00 | 106,448,584 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 151,641 | py | import midi
pattern=midi.Pattern(format=1, resolution=480, tracks=\
[midi.Track(\
[ midi.NoteOnEvent(tick=1200, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=230, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=10, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 69]),
midi.NoteOnEvent(tick=360, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 62]),
midi.NoteOnEvent(tick=120, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 69]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=230, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=10, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 69]),
midi.NoteOnEvent(tick=360, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 62]),
midi.NoteOnEvent(tick=120, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 69]),
midi.NoteOnEvent(tick=480, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=230, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=10, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 56]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 56]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 67]),
midi.NoteOnEvent(tick=960, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 50]),
midi.NoteOnEvent(tick=480, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[66, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 67]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 70]),
midi.NoteOnEvent(tick=720, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 63]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 63]),
midi.NoteOnEvent(tick=360, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 59]),
midi.NoteOnEvent(tick=120, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 63]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=360, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 63]),
midi.NoteOnEvent(tick=360, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 57]),
midi.NoteOnEvent(tick=120, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 61]),
midi.NoteOnEvent(tick=480, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 64]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 78]),
midi.NoteOnEvent(tick=360, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 66]),
midi.NoteOnEvent(tick=120, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 70]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 70]),
midi.NoteOnEvent(tick=360, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[75, 59]),
midi.NoteOnEvent(tick=120, channel=0, data=[75, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 61]),
midi.NoteOnEvent(tick=480, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 51]),
midi.NoteOnEvent(tick=360, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 52]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 36]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 51]),
midi.NoteOnEvent(tick=1440, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 36]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=230, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=10, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 52]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 56]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 56]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[82, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[82, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[70, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[70, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[86, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[86, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 52]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[73, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[73, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 61]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 55]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 61]),
midi.NoteOnEvent(tick=360, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 61]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 57]),
midi.NoteOnEvent(tick=230, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=10, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 61]),
midi.NoteOnEvent(tick=360, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 61]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 55]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 51]),
midi.NoteOnEvent(tick=216, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[71, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 59]),
midi.NoteOnEvent(tick=720, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 49]),
midi.NoteOnEvent(tick=216, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[66, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=216, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[69, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=216, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 59]),
midi.NoteOnEvent(tick=720, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=216, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[69, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=216, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 56]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 61]),
midi.NoteOnEvent(tick=360, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 61]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 57]),
midi.NoteOnEvent(tick=216, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[62, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 61]),
midi.NoteOnEvent(tick=360, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 56]),
midi.NoteOnEvent(tick=120, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 61]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 55]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 51]),
midi.NoteOnEvent(tick=216, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[71, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=216, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[69, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 50]),
midi.NoteOnEvent(tick=216, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 57]),
midi.NoteOnEvent(tick=720, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=216, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[69, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 61]),
midi.NoteOnEvent(tick=360, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 56]),
midi.NoteOnEvent(tick=108, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=12, channel=0, data=[64, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 61]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 64]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 71]),
midi.NoteOnEvent(tick=360, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 63]),
midi.NoteOnEvent(tick=108, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=12, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 71]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 62]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 70]),
midi.NoteOnEvent(tick=480, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 52]),
midi.NoteOnEvent(tick=240, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 44]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[66, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=960, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 49]),
midi.NoteOnEvent(tick=720, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 49]),
midi.NoteOnEvent(tick=960, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 44]),
midi.NoteOnEvent(tick=470, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=10, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 50]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[59, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 47]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 37]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 37]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 44]),
midi.NoteOnEvent(tick=1920, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=960, channel=0, data=[59, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 33]),
midi.NoteOnEvent(tick=720, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 39]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 37]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 39]),
midi.NoteOnEvent(tick=720, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 28]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 34]),
midi.NoteOnEvent(tick=960, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 35]),
midi.NoteOnEvent(tick=720, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 42]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 40]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=720, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 39]),
midi.NoteOnEvent(tick=720, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 45]),
midi.NoteOnEvent(tick=720, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 55]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 59]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 69]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 71]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 71]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 70]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 69]),
midi.NoteOnEvent(tick=240, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 67]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 67]),
midi.NoteOnEvent(tick=960, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 62]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 78]),
midi.NoteOnEvent(tick=960, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 78]),
midi.NoteOnEvent(tick=960, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 79]),
midi.NoteOnEvent(tick=720, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 70]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 63]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 75]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 61]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 72]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 78]),
midi.NoteOnEvent(tick=960, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 78]),
midi.NoteOnEvent(tick=960, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 79]),
midi.NoteOnEvent(tick=720, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 70]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 63]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 75]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 61]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 72]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 78]),
midi.NoteOnEvent(tick=960, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 72]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 85]),
midi.NoteOnEvent(tick=960, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 69]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 69]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 82]),
midi.NoteOnEvent(tick=960, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 69]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 69]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 82]),
midi.NoteOnEvent(tick=480, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 68]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 68]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 80]),
midi.NoteOnEvent(tick=480, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 75]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 75]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 89]),
midi.NoteOnEvent(tick=960, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 74]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 81]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 66]),
midi.NoteOnEvent(tick=480, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 66]),
midi.NoteOnEvent(tick=480, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 78]),
midi.NoteOnEvent(tick=1440, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 45]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 56]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 56]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 62]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 88]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[78, 65]),
midi.NoteOnEvent(tick=240, channel=0, data=[78, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 62]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 81]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 72]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 84]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 68]),
midi.NoteOnEvent(tick=240, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 72]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 79]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 65]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 69]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 81]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 72]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 79]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 78]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 81]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 81]),
midi.NoteOnEvent(tick=960, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 72]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 67]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=720, channel=0, data=[64, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 45]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 53]),
midi.NoteOnEvent(tick=720, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 55]),
midi.NoteOnEvent(tick=720, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 55]),
midi.NoteOnEvent(tick=960, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 58]),
midi.NoteOnEvent(tick=720, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 58]),
midi.NoteOnEvent(tick=480, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 56]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 61]),
midi.NoteOnEvent(tick=720, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 64]),
midi.NoteOnEvent(tick=960, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 62]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 88]),
midi.NoteOnEvent(tick=480, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 79]),
midi.NoteOnEvent(tick=480, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 72]),
midi.NoteOnEvent(tick=0, channel=0, data=[81, 84]),
midi.NoteOnEvent(tick=480, channel=0, data=[81, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[83, 79]),
midi.NoteOnEvent(tick=480, channel=0, data=[83, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 69]),
midi.NoteOnEvent(tick=0, channel=0, data=[84, 81]),
midi.NoteOnEvent(tick=480, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 78]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 62]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 62]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 79]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[77, 78]),
midi.NoteOnEvent(tick=480, channel=0, data=[77, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 64]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 64]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 77]),
midi.NoteOnEvent(tick=960, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 61]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 61]),
midi.NoteOnEvent(tick=7, channel=0, data=[72, 61]),
midi.NoteOnEvent(tick=459, channel=0, data=[71, 55]),
midi.NoteOnEvent(tick=21, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=459, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=14, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 38]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 38]),
midi.NoteOnEvent(tick=960, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 38]),
midi.NoteOnEvent(tick=960, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 38]),
midi.NoteOnEvent(tick=960, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 38]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 41]),
midi.NoteOnEvent(tick=960, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 50]),
midi.NoteOnEvent(tick=960, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 60]),
midi.NoteOnEvent(tick=1440, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 60]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 70]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 70]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 70]),
midi.NoteOnEvent(tick=1440, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 59]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 59]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 59]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 59]),
midi.NoteOnEvent(tick=480, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 67]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 67]),
midi.NoteOnEvent(tick=1440, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=720, channel=0, data=[52, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=960, channel=0, data=[84, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[84, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[88, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[88, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[91, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[91, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[88, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[91, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[96, 39]),
midi.NoteOnEvent(tick=1440, channel=0, data=[96, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[91, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[88, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 36]),
midi.NoteOnEvent(tick=951, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=9, channel=0, data=[64, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 36]),
midi.NoteOnEvent(tick=951, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=9, channel=0, data=[64, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 30]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 36]),
midi.NoteOnEvent(tick=1440, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 35]),
midi.NoteOnEvent(tick=1440, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[52, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 35]),
midi.NoteOnEvent(tick=1440, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.EndOfTrackEvent(tick=0, data=[])]),
midi.Track(\
[ midi.NoteOnEvent(tick=1440, channel=0, data=[64, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 49]),
midi.NoteOnEvent(tick=360, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 52]),
midi.NoteOnEvent(tick=120, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 49]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 49]),
midi.NoteOnEvent(tick=360, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 52]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 49]),
midi.NoteOnEvent(tick=480, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 57]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 48]),
midi.NoteOnEvent(tick=960, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[63, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 37]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 35]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 35]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 41]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 42]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 47]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 49]),
midi.NoteOnEvent(tick=120, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 41]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 50]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 48]),
midi.NoteOnEvent(tick=360, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 44]),
midi.NoteOnEvent(tick=120, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 46]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[61, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[61, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 51]),
midi.NoteOnEvent(tick=360, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[74, 58]),
midi.NoteOnEvent(tick=360, channel=0, data=[74, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 46]),
midi.NoteOnEvent(tick=120, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 56]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[63, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 47]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 51]),
midi.NoteOnEvent(tick=360, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 40]),
midi.NoteOnEvent(tick=120, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 40]),
midi.NoteOnEvent(tick=480, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[45, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[45, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 36]),
midi.NoteOnEvent(tick=480, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 40]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=1920, channel=0, data=[55, 40]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 47]),
midi.NoteOnEvent(tick=360, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 46]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 47]),
midi.NoteOnEvent(tick=360, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 31]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 38]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 46]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 63]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 60]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 50]),
midi.NoteOnEvent(tick=480, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 62]),
midi.NoteOnEvent(tick=360, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 58]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 60]),
midi.NoteOnEvent(tick=460, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=20, channel=0, data=[59, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 62]),
midi.NoteOnEvent(tick=360, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 58]),
midi.NoteOnEvent(tick=120, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 60]),
midi.NoteOnEvent(tick=460, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=20, channel=0, data=[55, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 62]),
midi.NoteOnEvent(tick=360, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 58]),
midi.NoteOnEvent(tick=120, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[51, 60]),
midi.NoteOnEvent(tick=460, channel=0, data=[51, 0]),
midi.NoteOnEvent(tick=20, channel=0, data=[59, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 62]),
midi.NoteOnEvent(tick=360, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 58]),
midi.NoteOnEvent(tick=120, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 60]),
midi.NoteOnEvent(tick=460, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=20, channel=0, data=[55, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 62]),
midi.NoteOnEvent(tick=360, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 58]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 58]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 62]),
midi.NoteOnEvent(tick=360, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 58]),
midi.NoteOnEvent(tick=120, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 58]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 59]),
midi.NoteOnEvent(tick=960, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 41]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 39]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 41]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 33]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 33]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 41]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 50]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 51]),
midi.NoteOnEvent(tick=1440, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 56]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[47, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[47, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[45, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[45, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=360, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 47]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 49]),
midi.NoteOnEvent(tick=230, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=17, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=353, channel=0, data=[43, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 47]),
midi.NoteOnEvent(tick=7, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=113, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[48, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[47, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[47, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[45, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[45, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 47]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 55]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=14, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=226, channel=0, data=[50, 30]),
midi.NoteOnEvent(tick=14, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=226, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=14, channel=0, data=[55, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=226, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=360, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 47]),
midi.NoteOnEvent(tick=120, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 49]),
midi.NoteOnEvent(tick=216, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=31, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=353, channel=0, data=[43, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 47]),
midi.NoteOnEvent(tick=7, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=113, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[52, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[47, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[47, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[45, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[45, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 47]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 55]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=216, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=24, channel=0, data=[67, 46]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 41]),
midi.NoteOnEvent(tick=720, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[57, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 41]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 51]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 60]),
midi.NoteOnEvent(tick=360, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 56]),
midi.NoteOnEvent(tick=108, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=12, channel=0, data=[48, 51]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 60]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[49, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 58]),
midi.NoteOnEvent(tick=480, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 70]),
midi.NoteOnEvent(tick=360, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 63]),
midi.NoteOnEvent(tick=108, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=12, channel=0, data=[49, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 70]),
midi.NoteOnEvent(tick=240, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 47]),
midi.NoteOnEvent(tick=120, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 51]),
midi.NoteOnEvent(tick=120, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 56]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=240, channel=0, data=[38, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 47]),
midi.NoteOnEvent(tick=480, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[54, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[54, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 38]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=960, channel=0, data=[43, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 58]),
midi.NoteOnEvent(tick=240, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 57]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 52]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 59]),
midi.NoteOnEvent(tick=480, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[60, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 79]),
midi.NoteOnEvent(tick=480, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 79]),
midi.NoteOnEvent(tick=480, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 70]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 78]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 70]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 66]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 75]),
midi.NoteOnEvent(tick=480, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 79]),
midi.NoteOnEvent(tick=480, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 70]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 77]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 66]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 60]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 70]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 55]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 66]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 76]),
midi.NoteOnEvent(tick=480, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[31, 79]),
midi.NoteOnEvent(tick=480, channel=0, data=[31, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 62]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 62]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 73]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 80]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 66]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 77]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 61]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 61]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 73]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 61]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 61]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 72]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 82]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[38, 72]),
midi.NoteOnEvent(tick=480, channel=0, data=[38, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 58]),
midi.NoteOnEvent(tick=480, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[31, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[31, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[55, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 53]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 40]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 53]),
midi.NoteOnEvent(tick=480, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[59, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 44]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 51]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 56]),
midi.NoteOnEvent(tick=720, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 45]),
midi.NoteOnEvent(tick=240, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 56]),
midi.NoteOnEvent(tick=960, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 59]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 59]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 45]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 55]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 59]),
midi.NoteOnEvent(tick=480, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 57]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 60]),
midi.NoteOnEvent(tick=720, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 59]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 50]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 60]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 69]),
midi.NoteOnEvent(tick=480, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[71, 64]),
midi.NoteOnEvent(tick=480, channel=0, data=[71, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 56]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 67]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 52]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 62]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 64]),
midi.NoteOnEvent(tick=960, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[58, 63]),
midi.NoteOnEvent(tick=480, channel=0, data=[58, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[49, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 49]),
midi.NoteOnEvent(tick=0, channel=0, data=[57, 58]),
midi.NoteOnEvent(tick=480, channel=0, data=[57, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[50, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 38]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[52, 45]),
midi.NoteOnEvent(tick=240, channel=0, data=[52, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 50]),
midi.NoteOnEvent(tick=240, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=720, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 41]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 42]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[68, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[68, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 44]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[69, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[69, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 48]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 48]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[64, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[66, 46]),
midi.NoteOnEvent(tick=480, channel=0, data=[66, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[63, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 47]),
midi.NoteOnEvent(tick=0, channel=0, data=[65, 47]),
midi.NoteOnEvent(tick=480, channel=0, data=[65, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[62, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 49]),
midi.NoteOnEvent(tick=480, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 58]),
midi.NoteOnEvent(tick=0, channel=0, data=[67, 69]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 56]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 67]),
midi.NoteOnEvent(tick=960, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[60, 64]),
midi.NoteOnEvent(tick=960, channel=0, data=[60, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 53]),
midi.NoteOnEvent(tick=0, channel=0, data=[53, 63]),
midi.NoteOnEvent(tick=960, channel=0, data=[53, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[41, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 46]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 37]),
midi.NoteOnEvent(tick=960, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 39]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 39]),
midi.NoteOnEvent(tick=960, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 36]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 37]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 38]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 39]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 40]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 41]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 44]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 46]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 47]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 48]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 49]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 42]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[36, 29]),
midi.NoteOnEvent(tick=228, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=12, channel=0, data=[36, 28]),
midi.NoteOnEvent(tick=240, channel=0, data=[36, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[40, 29]),
midi.NoteOnEvent(tick=240, channel=0, data=[40, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[43, 30]),
midi.NoteOnEvent(tick=240, channel=0, data=[43, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=960, channel=0, data=[67, 31]),
midi.NoteOnEvent(tick=240, channel=0, data=[67, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 33]),
midi.NoteOnEvent(tick=240, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[76, 34]),
midi.NoteOnEvent(tick=240, channel=0, data=[76, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 35]),
midi.NoteOnEvent(tick=240, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=720, channel=0, data=[72, 34]),
midi.NoteOnEvent(tick=0, channel=0, data=[79, 34]),
midi.NoteOnEvent(tick=1440, channel=0, data=[79, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[72, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[48, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=951, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=9, channel=0, data=[48, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=951, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=9, channel=0, data=[48, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=1440, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[48, 24]),
midi.NoteOnEvent(tick=0, channel=0, data=[55, 28]),
midi.NoteOnEvent(tick=1440, channel=0, data=[55, 0]),
midi.NoteOnEvent(tick=0, channel=0, data=[48, 0]),
midi.NoteOnEvent(tick=480, channel=0, data=[36, 28]),
midi.NoteOnEvent(tick=1440, channel=0, data=[36, 0]),
midi.EndOfTrackEvent(tick=0, data=[])])])
midi.write_midifile("creationMidi.mid", pattern) | [
"jeremy.catelain@insa-rouen.fr"
] | jeremy.catelain@insa-rouen.fr |
f04b76cbb17426a78874440a2c7113515590e5f8 | 92de9445b33f2c0d596b5fc96c9b8558d205fec1 | /venv/Lib/site-packages/pandas/core/frame.pyi | ffe631cef03f8e776e53cec3880e320bd197bede | [] | no_license | bcstarke/spotify-ETL | 13967444cf2ff5fdc7b01d5b5a263c7ef2977a4f | a1fd6df6e7f4298bf421e17f6af5373ec3deddf9 | refs/heads/master | 2023-04-04T20:52:57.648203 | 2021-04-21T23:33:22 | 2021-04-21T23:33:22 | 353,499,809 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,238 | pyi | import sys
import numpy.ma as np
from pandas import datetime
from pandas._typing import Axes, Axis, Dtype as Dtype, FilePathOrBuffer, Level, Renamer, Column, Label, FrameOrSeries, \
ArrayLike, AnyArrayLike, GoogleCredentials, Scalar, ReplaceMethod, ToReplace, ReplaceValue, Frequency, AxisOption, \
Orientation, Function, AggregationFunction, GroupByObject, GeneralDuplicatesKeepStrategy, InterpolationMethod, \
CorrelationMethod, SortKind, JoinType, FillMethod, ErrorsStrategy, NaSortPosition, FillValue, TimestampMethod
from pandas.core.accessor import CachedAccessor
from pandas.core.base import PandasObject
from pandas.core.generic import NDFrame
from pandas.core.groupby import generic as groupby_generic
from pandas.core.groupby.grouper import Grouper
from pandas.core.indexes.api import Index
from pandas.core.series import Series
from pandas.io.formats import format as fmt
from pandas.io.formats.format import formatters_type, VALID_JUSTIFY_PARAMETERS, FloatFormatType
from pandas.io.formats.style import Styler
from typing import Any, Hashable, IO, Iterable, List, Optional, Sequence, Tuple, Union, Dict, Mapping, Type, \
overload, Iterator, Callable, AnyStr
# ``typing.Literal`` was only introduced in Python 3.8 (PEP 586).  On 3.8+
# these aliases narrow string parameters to the exact set of accepted values;
# on older interpreters each alias degrades to plain ``str`` so the
# annotations in this stub remain importable (no static narrowing, but valid).
if sys.version_info >= (3, 8):
    from typing import Literal
    # ``orient`` values accepted by DataFrame.to_dict().
    ExportOrientation = Literal['dict', 'list', 'series', 'split', 'records', 'index']
    # Compression codecs accepted by DataFrame.to_parquet().
    CompressionType = Literal['snappy', 'gzip', 'brotli']
    # Behaviour of DataFrame.to_gbq() when the destination table exists.
    IfExistStrategy = Literal['fail', 'replace', 'append']
    # ``engine`` values for DataFrame.to_parquet().
    ParquetEngine = Literal['auto', 'pyarrow', 'fastparquet']
    # ``how`` values for DataFrame.dropna().
    DropTypes = Literal['any', 'all']
    # ``keep`` values for DataFrame.nlargest()/nsmallest().
    KeepStrategy = Literal['first', 'last', 'all']
    # DataFrame.update() only supports a left join.
    UpdateJoinType = Literal['left']
    UpdateErrorsStrategy = Literal['raise', 'ignore']
    # ``result_type`` values for DataFrame.apply().
    ApplyResultType = Literal['expand', 'reduce', 'broadcast']
    # DataFrame.merge() reuses the join vocabulary from pandas._typing.
    MergeType = JoinType
    MergeValidationMethod = Literal["one_to_one", "1:1", "one_to_many", "1:m", "many_to_one", "m:1", "many_to_many", "m:m"]
else:
    # Pre-3.8 fallback: every alias collapses to an unconstrained ``str``.
    ExportOrientation = str
    CompressionType = str
    IfExistStrategy = str
    ParquetEngine = str
    DropTypes = str
    KeepStrategy = str
    UpdateJoinType = str
    UpdateErrorsStrategy = str
    ApplyResultType = str
    MergeType = str
    MergeValidationMethod = str
# A single array-like usable as an index level (see DataFrame.set_index below).
IndexArray = Union[Series, Index, np.ndarray, Iterator]
# Mapping shapes that can be coerced column-wise into a DataFrame
# (accepted by DataFrame.update and DataFrame.append below).
CoercibleIntoDataFrame = Union[Dict[str, Scalar], Dict[str, Series], Dict[str, Tuple[Scalar, ...]], Dict[str, Iterable[Scalar]]]
# DataFrame.transform accepts the same callable forms as aggregate().
TransformFunction = AggregationFunction
class DataFrame(NDFrame):
    """Type stub for ``pandas.DataFrame``.

    Declarations only (every body is ``...``); this file exists purely for
    static type checkers and carries no runtime behaviour.
    """
    # Accessor attributes; their signatures are too dynamic to spell out here,
    # so they are declared as cached accessors / generic callables.
    plot: CachedAccessor = ...
    hist: Callable[..., Any] = ...
    boxplot: Callable[..., Any] = ...
    sparse: CachedAccessor = ...
    def __init__(self, data: Any = ..., index: Optional[Axes[Any]] = ..., columns: Optional[Axes[Any]] = ..., dtype: Optional[Dtype] = ..., copy: bool = ...) -> None: ...
    def __len__(self) -> int: ...
    # Comparison dunders are declared against scalars only; arithmetic
    # dunders below additionally accept another DataFrame where elementwise
    # frame-vs-frame operation is declared.
    def __le__(self, other: Scalar) -> DataFrame: ...
    def __lt__(self, other: Scalar) -> DataFrame: ...
    def __ge__(self, other: Scalar) -> DataFrame: ...
    def __gt__(self, other: Scalar) -> DataFrame: ...
    def __mul__(self, other: Scalar) -> DataFrame: ...
    def __truediv__(self, other: Union[Scalar, DataFrame]) -> DataFrame: ...
    def __floordiv__(self, other: Union[Scalar, DataFrame]) -> DataFrame: ...
    def __mod__(self, other: Union[Scalar, DataFrame]) -> DataFrame: ...
    # Boolean combination only makes sense frame-vs-frame in this stub.
    def __and__(self, other: DataFrame) -> DataFrame: ...
    def __or__(self, other: DataFrame) -> DataFrame: ...
    def __add__(self, other: Union[Scalar, DataFrame]) -> DataFrame: ...
    def __sub__(self, other: Union[Scalar, DataFrame]) -> DataFrame: ...
    # Matrix multiplication (``@``) returns a Series or DataFrame depending
    # on the operand, hence the FrameOrSeries return.
    def __matmul__(self, other: Union[FrameOrSeries, ArrayLike]) -> FrameOrSeries: ...
    def __rmatmul__(self, other: Union[FrameOrSeries, ArrayLike]) -> FrameOrSeries: ...
    # Overload 1 (single column label -> Series) needs a ``type: ignore``
    # because mypy considers np.ndarray to overlap the Column alias, making
    # the overloads ambiguous from its point of view.
    @overload
    def __getitem__(self, key: Column) -> Series: ... # type: ignore
    @overload
    def __getitem__(self, key: Union[Series, Index, DataFrame, List[Column], slice, np.ndarray]) -> DataFrame: ...
    @overload
    def __setitem__(self, key: Column, value: Any) -> DataFrame: ...
    @overload
    def __setitem__(self, key: Union[Series, Index, DataFrame, List[Column], slice, np.ndarray], value: Any) -> DataFrame: ...
    @property
    def axes(self) -> List[Index]: ...
    # shape is always the 2-tuple (n_rows, n_columns).
    @property
    def shape(self) -> Tuple[int, int]: ...
def to_string(self, buf: Optional[FilePathOrBuffer[str]] = ..., columns: Optional[Sequence[str]] = ..., col_space: Optional[int] = ..., header: Union[bool, Sequence[str]] = ..., index: bool = ..., na_rep: str = ..., formatters: Optional[fmt.formatters_type] = ..., float_format: Optional[fmt.float_format_type] = ..., sparsify: Optional[bool] = ..., index_names: bool = ..., justify: Optional[str] = ..., max_rows: Optional[int] = ..., min_rows: Optional[int] = ..., max_cols: Optional[int] = ..., show_dimensions: bool = ..., decimal: str = ..., line_width: Optional[int] = ..., max_colwidth: Optional[int] = ..., encoding: Optional[str] = ...) -> Optional[str]: ...
@property
def style(self) -> Styler: ...
def items(self) -> Iterable[Tuple[Label, Series]]: ...
def iteritems(self) -> Iterable[Tuple[Label, Series]]: ...
def iterrows(self) -> Iterable[Tuple[Label, Series]]: ...
# This isn't exact, first argument could(!) be an Index, the rest column values
def itertuples(self, index: bool = ..., name: str = ...) -> Iterable[Tuple[Any, ...]]: ...
def dot(self, other: Union[FrameOrSeries, ArrayLike]) -> FrameOrSeries: ...
@classmethod
def from_dict(cls: Any, data: Dict[str, Union[AnyArrayLike, Series, Dict[Column, Dtype]]], orient: Orientation = ..., dtype: Optional[Dtype] = ..., columns: Optional[Sequence[str]] = ...) -> DataFrame: ...
def to_numpy(self, dtype: Union[str, np.dtype] = ..., copy: bool = ...) -> np.ndarray: ...
def to_dict(self, orient: ExportOrientation = ..., into: Type[Mapping[Column, Any]] = ...) -> Union[Mapping[Column, Any], List[Any]]: ...
def to_gbq(self, destination_table: str, project_id: Optional[str] = ..., chunksize: Optional[int] = ..., reauth: bool = ..., if_exists: IfExistStrategy = ..., auth_local_webserver: bool = ..., table_schema: Optional[List[Dict[str, Any]]] = ..., location: Optional[str] = ..., progress_bar: bool = ..., credentials: Optional[GoogleCredentials] = ...) -> None: ...
@classmethod
def from_records(cls: Any, data: Union[np.ndarray, List[Tuple[Any, ...]], Dict[Any, Any], DataFrame], index: Union[Sequence[str], ArrayLike] = ..., exclude: Sequence[Column] = ..., columns: Sequence[Column] = ..., coerce_float: bool = ..., nrows: Optional[int] = ...) -> DataFrame: ...
def to_records(self, index: bool = ..., column_dtypes: Optional[Union[str, type, Dict[Column, Dtype]]] = ..., index_dtypes: Optional[Union[str, type, Dict[Column, Dtype]]] = ...) -> np.recarray: ...
def to_stata(self, path: FilePathOrBuffer[AnyStr], convert_dates: Optional[Dict[Label, str]] = ..., write_index: bool = ..., byteorder: Optional[str] = ..., time_stamp: Optional[datetime.datetime] = ..., data_label: Optional[str] = ..., variable_labels: Optional[Dict[Label, str]] = ..., version: int = ..., convert_strl: Optional[Sequence[Label]] = ...) -> None: ...
def to_feather(self, path: str) -> None: ...
def to_markdown(self, buf: Optional[IO[str]] = ..., mode: Optional[str] = ..., **kwargs: Any) -> Optional[str]: ...
def to_parquet(self, path: str, engine: ParquetEngine = ..., compression: Optional[CompressionType] = ..., index: Optional[bool] = ..., partition_cols: Optional[List[Column]] = ..., **kwargs: Any) -> None: ...
def to_html(self, buf: Optional[Any] = ..., columns: Optional[Sequence[str]] = ..., col_space: Optional[Union[str, int]] = ..., header: Union[bool, Sequence[str]] = ..., index: bool = ..., na_rep: str = ..., formatters: Optional[formatters_type] = ..., float_format: Optional[FloatFormatType] = ..., sparsify: Optional[bool] = ..., index_names: bool = ..., justify: Optional[VALID_JUSTIFY_PARAMETERS] = ..., max_rows: Optional[int] = ..., max_cols: Optional[int] = ..., show_dimensions: Union[bool, str] = ..., decimal: str = ..., bold_rows: bool = ..., classes: Optional[Sequence[str]] = ..., escape: bool = ..., notebook: bool = ..., border: Optional[int] = ..., table_id: Optional[str] = ..., render_links: bool = ..., encoding: Optional[str] = ...) -> str: ...
def info(self, verbose: Optional[bool] = ..., buf: Optional[IO[str]] = ..., max_cols: Optional[int] = ..., memory_usage: Optional[Union[bool, str]] = ..., null_counts: Optional[bool] = ...) -> None: ...
def memory_usage(self, index: Optional[bool] = ..., deep: Optional[bool] = ...) -> Series: ...
def transpose(self, *args: Any, copy: bool = ...) -> DataFrame: ...
@property
def T(self) -> DataFrame: ...
def query(self, expr: str, inplace: bool = ..., **kwargs: Any) -> Optional[DataFrame]: ...
def eval(self, expr: str, inplace: bool = ..., **kwargs: Any) -> Union[None, np.ndarray, int, float, PandasObject]: ...
def select_dtypes(self, include: Optional[Sequence[Union[str, Dtype]]] = ..., exclude: Optional[Sequence[Union[str, Dtype]]] = ...) -> DataFrame: ...
def insert(self, loc: int, column: Union[Column, Hashable], value: Union[int, Series, ArrayLike], allow_duplicates: Optional[bool] = ...) -> None: ...
def assign(self, **kwargs: Any) -> DataFrame: ...
def lookup(self, row_labels: Sequence[Any], col_labels: Sequence[Column]) -> np.ndarray: ...
def align(self, other: FrameOrSeries, join: JoinType = ..., axis: AxisOption = ..., level: Level = ..., copy: bool = ..., fill_value: Scalar = ..., method: Optional[FillMethod] = ..., limit: Optional[int] = ..., fill_axis: AxisOption = ..., broadcast_axis: AxisOption = ...) -> DataFrame: ...
def reindex(self, *args: Any, **kwargs: Any) -> DataFrame: ...
def drop(self, labels: Optional[Sequence[Label]] = ..., axis: AxisOption = ..., index: Optional[Sequence[Label]] = ..., columns: Optional[Sequence[Label]] = ..., level: Optional[Level] = ..., inplace: bool = ..., errors: ErrorsStrategy = ...) -> Optional[DataFrame]: ...
def rename(self, mapper: Optional[Renamer] = ..., *, index: Optional[Renamer] = ..., columns: Optional[Renamer] = ..., axis: Optional[Axis] = ..., copy: bool = ..., inplace: bool = ..., level: Optional[Level] = ..., errors: ErrorsStrategy = ...) -> Optional[DataFrame]: ...
def fillna(self, value: FillValue = ..., method: Optional[FillMethod] = ..., axis: Optional[Axis] = ..., inplace: Optional[bool] = ..., limit: int = ..., downcast: Optional[Dict[Any, Dtype]] = ...) -> Optional[DataFrame]: ...
def replace(self, to_replace: Optional[ToReplace] = ..., value: Optional[ReplaceValue] = ..., inplace: bool = ..., limit: Optional[int] = ..., regex: bool = ..., method: ReplaceMethod = ...) -> DataFrame: ...
def shift(self, periods: int = ..., freq: Optional[Frequency] = ..., axis: AxisOption = ..., fill_value: Scalar = ...) -> DataFrame: ...
def set_index(self, keys: Union[Label, IndexArray, List[Union[Label, IndexArray]]], drop: bool = ..., append: bool = ..., inplace: bool = ..., verify_integrity: bool = ...) -> Optional[DataFrame]: ...
def reset_index(self, level: Optional[Union[Hashable, Sequence[Hashable]]] = ..., drop: bool = ..., inplace: bool = ..., col_level: Hashable = ..., col_fill: Optional[Hashable] = ...) -> Optional[DataFrame]: ...
def isna(self) -> DataFrame: ...
def isnull(self) -> DataFrame: ...
def notna(self) -> DataFrame: ...
def notnull(self) -> DataFrame: ...
def dropna(self, axis: AxisOption = ..., how: DropTypes = ..., thresh: Optional[int] = ..., subset: Optional[Any] = ..., inplace: bool = ...) -> Optional[DataFrame]: ...
def drop_duplicates(self, subset: Optional[Union[Hashable, Sequence[Hashable]]] = ..., keep: GeneralDuplicatesKeepStrategy = ..., inplace: bool = ..., ignore_index: bool = ...) -> Optional[DataFrame]: ...
def duplicated(self, subset: Optional[Union[Hashable, Sequence[Hashable]]] = ..., keep: Union[str, bool] = ...) -> Series: ...
# Parent allowed by to be None - that's the reason for override
def sort_values(self, by: Union[str, List[str]], axis: AxisOption = ..., ascending: bool = ..., inplace: bool = ..., kind: SortKind = ..., na_position: NaSortPosition = ..., ignore_index: bool = ...) -> Optional[DataFrame]: ... # type: ignore[override]
def sort_index(self, axis: AxisOption = ..., level: Optional[Union[Level, List[Level]]] = ..., ascending: bool = ..., inplace: bool = ..., kind: SortKind = ..., na_position: NaSortPosition = ..., sort_remaining: bool = ..., ignore_index: bool = ...) -> Optional[DataFrame]: ...
def nlargest(self, n: int, columns: Union[Label, List[Label]], keep: KeepStrategy = ...) -> DataFrame: ...
def nsmallest(self, n: int, columns: Union[Label, List[Label]], keep: KeepStrategy = ...) -> DataFrame: ...
def swaplevel(self, i: Level = ..., j: Level = ..., axis: AxisOption = ...) -> DataFrame: ...
def reorder_levels(self, order: Union[List[int], List[str]], axis: AxisOption = ...) -> DataFrame: ...
def combine(self, other: DataFrame, func: Union[np.func, Callable[[Series, Series], Union[Series, Scalar]]], fill_value: Optional[Scalar] = ..., overwrite: bool = ...) -> DataFrame: ...
def combine_first(self, other: DataFrame) -> DataFrame: ...
def update(self, other: Union[DataFrame, CoercibleIntoDataFrame], join: UpdateJoinType = ..., overwrite: bool = ..., filter_func: Optional[Callable[..., bool]] = ..., errors: UpdateErrorsStrategy = ...) -> None: ...
def groupby(self, by: Optional[GroupByObject] = ..., axis: AxisOption = ..., level: Optional[Sequence[Level]] = ..., as_index: bool = ..., sort: bool = ..., group_keys: bool = ..., squeeze: bool = ..., observed: bool = ...) -> groupby_generic.DataFrameGroupBy: ...
def pivot(self, index: Optional[Union[Label, Sequence[Label]]] = ..., columns: Optional[Union[Label, Sequence[Label]]] = ..., values: Optional[Union[Label, Sequence[Label]]] = ...) -> DataFrame: ...
def pivot_table(self, values: Optional[Sequence[Column]] = ..., index: Optional[Union[Column, Grouper, np.ndarray, List[Union[Column, Grouper, np.ndarray]]]] = ..., columns: Optional[Union[Column, Grouper, np.ndarray, List[Union[Column, Grouper, np.ndarray]]]] = ..., aggfunc: AggregationFunction = ..., fill_value: Scalar = ..., margins: bool = ..., dropna: bool = ..., margins_name: str = ..., observed: bool = ...) -> DataFrame: ...
def stack(self, level: Union[Level, List[Level]] = ..., dropna: bool = ...) -> FrameOrSeries: ...
def explode(self, column: Union[Column, Tuple[Column, ...]]) -> DataFrame: ...
def unstack(self, level: Union[Level, List[Level]] = ..., fill_value: Optional[Scalar] = ...) -> FrameOrSeries: ...
def melt(self, id_vars: Optional[Union[Tuple[Column], List[Column], np.ndarray]] = ..., value_vars: Optional[Union[Sequence[Column], np.ndarray]] = ..., var_name: Optional[Scalar] = ..., value_name: Scalar = ..., col_level: Optional[Level] = ...) -> DataFrame: ...
def diff(self, periods: int = ..., axis: AxisOption = ...) -> DataFrame: ...
def aggregate(self, func: AggregationFunction, axis: AxisOption = ..., *args: Any, **kwargs: Any) -> Union[Scalar, FrameOrSeries]: ...
def agg(self, func: AggregationFunction, axis: AxisOption = ..., *args: Any, **kwargs: Any) -> Union[Scalar, FrameOrSeries]: ...
def transform(self, func: TransformFunction, axis: AxisOption = ..., *args: Any, **kwargs: Any) -> DataFrame: ...
def apply(self, func: Function, axis: AxisOption = ..., raw: bool = ..., result_type: Optional[ApplyResultType] = ..., args: Any = ..., **kwds: Any) -> FrameOrSeries: ...
def applymap(self, func: Callable[[Any], Any]) -> DataFrame: ...
# --- Combining / joining stubs ---
def append(self, other: Union[FrameOrSeries, Dict[Column, Any], List[Union[FrameOrSeries, Dict[Column, Any]]]], ignore_index: bool = ..., verify_integrity: bool = ..., sort: bool = ...) -> DataFrame: ...
def join(self, other: Union[FrameOrSeries, List[DataFrame]], on: Optional[Union[str, List[str], ArrayLike]] = ..., how: JoinType = ..., lsuffix: str = ..., rsuffix: str = ..., sort: bool = ...) -> DataFrame: ...
def merge(self, right: FrameOrSeries, how: MergeType = ..., on: Optional[Union[Label, List[Label]]] = ..., left_on: Optional[Union[Label, List[Label], ArrayLike]] = ..., right_on: Optional[Union[Label, List[Label], ArrayLike]] = ..., left_index: bool = ..., right_index: bool = ..., sort: bool = ..., suffixes: Tuple[str, str] = ..., copy: bool = ..., indicator: Union[bool, str] = ..., validate: Optional[MergeValidationMethod] = ...) -> DataFrame: ...
# --- Statistics, rounding and period/timestamp conversion stubs ---
def round(self, decimals: Union[int, Dict[Column, int], Series] = ..., *args: Any, **kwargs: Any) -> DataFrame: ...
def corr(self, method: CorrelationMethod = ..., min_periods: Optional[int] = ...) -> DataFrame: ...
def cov(self, min_periods: Optional[int] = ...) -> DataFrame: ...
def corrwith(self, other: FrameOrSeries, axis: AxisOption = ..., drop: bool = ..., method: CorrelationMethod = ...) -> Series: ...
def count(self, axis: AxisOption = ..., level: Optional[Level] = ..., numeric_only: bool = ...) -> FrameOrSeries: ...
def nunique(self, axis: AxisOption = ..., dropna: Optional[bool] = ...) -> Series: ...
def idxmin(self, axis: AxisOption = ..., skipna: Optional[bool] = ...) -> Series: ...
def idxmax(self, axis: AxisOption = ..., skipna: Optional[bool] = ...) -> Series: ...
def mode(self, axis: AxisOption = ..., numeric_only: bool = ..., dropna: Optional[bool] = ...) -> DataFrame: ...
def quantile(self, q: Union[float, ArrayLike] = ..., axis: AxisOption = ..., numeric_only: bool = ..., interpolation: InterpolationMethod = ...) -> FrameOrSeries: ...
def to_timestamp(self, freq: Optional[str] = ..., how: TimestampMethod = ..., axis: AxisOption = ..., copy: bool = ...) -> DataFrame: ...
def to_period(self, freq: Optional[str] = ..., axis: AxisOption = ..., copy: bool = ...) -> DataFrame: ...
def isin(self, values: Union[Sequence[Scalar], FrameOrSeries, Dict[Column, Scalar], np.ndarray]) -> DataFrame: ...
| [
"bstarke0225@gmail.com"
] | bstarke0225@gmail.com |
b10af2bd41f12df19680a59d837580264ddaf9bf | d60759fa4b28ff211fb47bff89f7875cc776ba64 | /great_lakes/notebooks/total_n_seqs.py | 89a52bf27b03a1311a646634a7c6f200d9d61eec | [] | no_license | CooperStansbury/rotation_project | 8e40c4ca9b296ad78efd58ca0b3fc4d7cc55f1d3 | 6685bb787fb541474f64551dc5fa96b9d89992ea | refs/heads/main | 2023-05-15T10:11:57.507313 | 2021-06-02T17:04:44 | 2021-06-02T17:04:44 | 335,704,476 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,541 | py |
"""
To count the total number of seqs per donor and cell type
"""
# %%
import sys
import os
import pandas
sys.path.append('../')
from get_distance_pairs import file_loader
# %%
# Root of the B-cell dataset; one subdirectory per donor/cell type.
ROOT_DIR = '/scratch/indikar_root/indikar1/shared_data/cstansbu_rotation/b_cell_data/'

# Report how many files each subdirectory of ROOT_DIR contains.
# os.path.join is used instead of f-string concatenation so the path is
# built correctly even if ROOT_DIR lacks a trailing separator.
for _dir in os.listdir(ROOT_DIR):
    subdir_path = os.path.join(ROOT_DIR, _dir)
    if os.path.isdir(subdir_path):
        print(f"{_dir} has {len(os.listdir(subdir_path))} files ")
    else:
        # skip plain files at the top level
        continue
"""
Output:
>>> D1-Na has 188 files 
>>> outputs has 0 files 
>>> D2-M has 188 files 
>>> D2-N has 188 files 
>>> test_data has 2 files 
>>> D1-M has 188 files 
>>> D3-N has 188 files 
>>> D1-Nb has 188 files 
>>> D3-M has 188 files 
"""
# %%
# Count the total number of sequences (rows) per donor/cell-type directory
# by summing the row counts of every .tsv result file in each subdirectory.
for _dir in os.listdir(ROOT_DIR):
    # os.path.join is robust to a missing trailing separator on ROOT_DIR,
    # unlike the previous f-string concatenation.
    subdir_path = os.path.join(ROOT_DIR, _dir)
    subdir_count = 0
    if os.path.isdir(subdir_path):
        for file in os.listdir(subdir_path):
            if not file.endswith('.tsv'):
                continue
            file_path = os.path.join(subdir_path, file)
            # only one column is needed to count rows; keeps memory low
            df = file_loader.read_file(file_path, usecols=['nucleotide'])
            subdir_count += df.shape[0]
    # NOTE(review): non-directories fall through with a count of 0 and are
    # still printed (see the stray .tsv entry in the recorded output below).
    print(f"{_dir} has {subdir_count} sequences")
"""
Output:
>>> D1-Na has 7995966 sequences
>>> outputs has 0 sequences
>>> D2-M has 8418156 sequences
>>> D2-N has 6006650 sequences
>>> D1-M_0_BRR_D1-M-001.adap.txt.results.tsv has 0 sequences
>>> test_data has 39656 sequences
>>> D1-M has 8223221 sequences
>>> D3-N has 8431449 sequences
>>> D1-Nb has 7400302 sequences
>>> D3-M has 9785485 sequences
"""
# %%
| [
"cstansbu@gl-login2.arc-ts.umich.edu"
] | cstansbu@gl-login2.arc-ts.umich.edu |
61aa6ac03dc078f592be45003537818b83f76714 | ecff48edc09a503923d9466262c67f656bcfa559 | /Stanford-Alg/Week 1/InversionCount.py | 38473f0633d87ec1b92077a681e1e1dd9d0e63f3 | [] | no_license | szywind/algorithm | d5c17ef6d73b86611032999ff4c0a9ce34a81ad9 | 98203a697cde769e18a38be1a8eee412546d634c | refs/heads/master | 2021-03-19T13:35:56.693515 | 2017-03-17T17:50:39 | 2017-03-17T17:50:39 | 70,362,823 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,475 | py | '''
Week 1's Programming Assignment
Divide and Conquer
'''
class CountInversion:
    """Count inversions in an integer array via merge sort (O(n log n)).

    An inversion is a pair of indices (i, j) with i < j and array[i] > array[j].
    """

    def __init__(self, array=None):
        # None sentinel instead of a mutable default argument: a shared []
        # default would be aliased across instances.
        self.array = [] if array is None else array

    def read_data(self, fileName="IntegerArray.txt"):
        """Load one integer per line from fileName into self.array."""
        with open(fileName, 'r') as fl:
            self.array = [int(i.strip()) for i in fl]

    def mergeSortAndCount(self, start, end):
        """Sort self.array[start:end+1] and return (sorted_list, inversion_count).

        Indices are inclusive. Safe for empty ranges (end < start).
        """
        # Base case: ranges of length <= 1 contain no inversions.
        # '<=' (not '== 0') also terminates for an empty input array, where
        # countInverse calls with (0, -1) and would otherwise recurse forever.
        if end <= start:
            return self.array[start: end+1], 0
        # '//' keeps mid an int; '/' is float division in Python 3 and would
        # break the recursion and the list slicing.
        mid = start + (end - start) // 2
        leftArray, countLeft = self.mergeSortAndCount(start, mid)
        rightArray, countRight = self.mergeSortAndCount(mid + 1, end)
        ans = []
        countSplit = 0
        i = j = 0
        # Merge the sorted halves; each time a right-half element is emitted
        # before remaining left-half elements, every one of those left-half
        # elements forms a split inversion with it.
        while i < len(leftArray) and j < len(rightArray):
            if leftArray[i] <= rightArray[j]:
                ans.append(leftArray[i])
                i += 1
            else:
                ans.append(rightArray[j])
                j += 1
                countSplit += len(leftArray) - i
        # Append whichever half still has elements left.
        if i < len(leftArray):
            ans.extend(leftArray[i:])
        else:
            ans.extend(rightArray[j:])
        return ans, countSplit + countLeft + countRight

    def countInverse(self):
        """Compute the total inversion count, store it in self.result, and print it."""
        self.result = self.mergeSortAndCount(0, len(self.array) - 1)[1]
        print("Total inversion #: {0}".format(self.result))
if __name__ == '__main__':
    # Small sanity-check input (currently unused; kept for quick experiments).
    input1 = [3,2,5,1,1,4,4]
    # ci = InversionCount(input1)
    # Count inversions over the assignment's integer file on disk.
    counter = CountInversion()
    counter.read_data()
    counter.countInverse() # Total inversion #: 2407905288
| [
"szywind@163.com"
] | szywind@163.com |
228f735a222ca42202717414edd6aa7d3e266659 | bf1d46f0e8e42df1137875d4b72d34cb96f979f6 | /neuralnet/optimizers.py | 117a1385d3afaa9935410c60d38aec160842d339 | [] | no_license | FerdinandEiteneuer/NeuralNet | 6f0ee94d4551db8e9e129d13c137146f210df834 | f486ba328ffc5314ef023bb021293c6aaed20fea | refs/heads/master | 2023-04-07T08:49:15.543940 | 2021-04-10T14:25:55 | 2021-04-10T14:25:55 | 119,675,574 | 0 | 0 | null | 2021-02-03T00:07:32 | 2018-01-31T10:59:33 | Python | UTF-8 | Python | false | false | 4,190 | py | """
Optimizers for gradient descent (SGD and Nadam)
"""
import numpy as np
class SGD:
    """
    Stochastic gradient descent with optional momentum and bias correction.
    """

    def __init__(self, learning_rate=0.01, momentum=0, bias_correction=True):
        self.lr = learning_rate
        self.beta_1 = momentum              # momentum coefficient
        self.bias_correction = bias_correction
        self.decay = 0.04                   # NOTE(review): not used in the update below
        self.updates = 0                    # number of update steps taken so far
        # first-moment (momentum) accumulators, keyed by layer id
        self.mom_w = {}
        self.mom_b = {}
        self.network = None

    def prepare_params(self, network):
        """
        Allocate zeroed momentum buffers for every weight layer of `network`.
        Returns self so the call can be chained.
        """
        self.network = network
        for layer in self.network.weight_layers():
            key = layer.layer_id
            self.mom_w[key] = np.zeros(layer.w.shape)
            self.mom_b[key] = np.zeros(layer.b.shape)
        return self

    def __str__(self):
        return (
            f'Optimizer: SGD(lr={self.lr}, momentum={self.beta_1}, '
            f'bias_correction={self.bias_correction})'
        )

    def update_weights(self):
        """
        Apply one (momentum) SGD step to all weight and batchnorm layers.
        """
        beta = self.beta_1
        for layer in self.network.weight_layers():
            key = layer.layer_id
            # exponential moving average of the gradients
            self.mom_w[key] = beta * self.mom_w[key] + (1 - beta) * layer.dw
            self.mom_b[key] = beta * self.mom_b[key] + (1 - beta) * layer.db
            # undo the zero-initialisation bias of the moving average
            denom = 1 - beta ** (1 + self.updates) if self.bias_correction else 1
            layer.w -= self.lr * self.mom_w[key] / denom
            layer.b -= self.lr * self.mom_b[key] / denom
        # batchnorm scale/shift parameters take plain gradient steps
        for layer in self.network.batchnorm_layers():
            layer.γ -= self.lr * layer.dγ
            layer.β -= self.lr * layer.dβ
        self.updates += 1
class Nadam:
    """
    Nadam Optimizer. Combines nesterov, momentum, RMS prop step.
    Algorithm from https://openreview.net/pdf?id=OM0jvwB8jIp57ZJjtNEZ
    """

    def __init__(self, learning_rate=0.01, beta_1=0.9, beta_2=0.999, eps=10**(-8), bias_correction=True):
        self.lr = learning_rate
        self.beta_1 = beta_1            # first-moment decay rate
        self.beta_2 = beta_2            # second-moment decay rate
        self.eps = eps                  # numerical stability term
        # NOTE(review): bias_correction is stored but the update rule below
        # always bias-corrects; confirm whether the flag should be honoured.
        self.bias_correction = bias_correction
        self.updates = 1                # time step t (starts at 1)
        self.network = None
        # first moments (momentum), keyed by layer id
        self.mom_w = {}
        self.mom_b = {}
        # second moments (RMSprop-style), keyed by layer id
        self.rms_w = {}
        self.rms_b = {}

    def prepare_params(self, network):
        """
        Allocate zeroed first/second moment buffers for every weight layer.
        Returns self so the call can be chained.
        """
        self.network = network
        for layer in self.network.weight_layers():
            key = layer.layer_id
            shape_w, shape_b = layer.w.shape, layer.b.shape
            self.mom_w[key] = np.zeros(shape_w)
            self.mom_b[key] = np.zeros(shape_b)
            self.rms_w[key] = np.zeros(shape_w)
            self.rms_b[key] = np.zeros(shape_b)
        return self

    def __str__(self):
        return (
            f'Optimizer: Nadam(lr={self.lr}, beta_1={self.beta_1}, '
            f'beta_2={self.beta_2}, eps={self.eps})'
        )

    def update_weights(self):
        """
        Apply one Nadam step to all weight and batchnorm layers.
        """
        b1, b2, eps = self.beta_1, self.beta_2, self.eps
        t = self.updates
        for layer in self.network.weight_layers():
            key = layer.layer_id
            # first moment: EMA of the gradients
            self.mom_w[key] = b1 * self.mom_w[key] + (1 - b1) * layer.dw
            self.mom_b[key] = b1 * self.mom_b[key] + (1 - b1) * layer.db
            # second moment: EMA of the squared gradients
            self.rms_w[key] = b2 * self.rms_w[key] + (1 - b2) * layer.dw ** 2
            self.rms_b[key] = b2 * self.rms_b[key] + (1 - b2) * layer.db ** 2
            # bias-corrected Nesterov lookahead first moments
            m_hat_w = (b1 * self.mom_w[key] / (1 - b1 ** (t + 1))
                       + (1 - b1) * layer.dw / (1 - b1 ** t))
            m_hat_b = (b1 * self.mom_b[key] / (1 - b1 ** (t + 1))
                       + (1 - b1) * layer.db / (1 - b1 ** t))
            # bias-corrected second moments
            n_hat_w = b2 * self.rms_w[key] / (1 - b2 ** t)
            n_hat_b = b2 * self.rms_b[key] / (1 - b2 ** t)
            # parameter step
            layer.w -= self.lr * m_hat_w / np.sqrt(n_hat_w + eps)
            layer.b -= self.lr * m_hat_b / np.sqrt(n_hat_b + eps)
        # batchnorm scale/shift parameters take plain gradient steps
        for layer in self.network.batchnorm_layers():
            layer.γ -= self.lr * layer.dγ
            layer.β -= self.lr * layer.dβ
        self.updates += 1
| [
"ferdinand.eiteneuer@rwth-aachen.de"
] | ferdinand.eiteneuer@rwth-aachen.de |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.