from django.contrib.auth.mixins import LoginRequiredMixin
from django.views import generic
class HomeView(generic.TemplateView):
template_name = 'index.html'
class SignUpView(generic.TemplateView):
template_name = 'sign_up.html'
class LoginView(generic.TemplateView):
template_name = 'sign_in.html'
class LogoutView(generic.TemplateView):
template_name = 'logout.html'
class AlbumView(LoginRequiredMixin, generic.TemplateView):
login_url = '/login/'
template_name = 'albums.html'
class AlbumDetailView(LoginRequiredMixin, generic.TemplateView):
login_url = '/login/'
template_name = 'album_detail.html'
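# A minimal urls.py sketch wiring these views (route names and paths are
# illustrative assumptions, not taken from this project):
#
#   from django.urls import path
#   from .views import (HomeView, SignUpView, LoginView, LogoutView,
#                       AlbumView, AlbumDetailView)
#
#   urlpatterns = [
#       path('', HomeView.as_view(), name='home'),
#       path('signup/', SignUpView.as_view(), name='signup'),
#       path('login/', LoginView.as_view(), name='login'),
#       path('logout/', LogoutView.as_view(), name='logout'),
#       path('albums/', AlbumView.as_view(), name='albums'),
#       path('albums/<int:pk>/', AlbumDetailView.as_view(), name='album_detail'),
#   ]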
|
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
__author__ = 'wangjianfeng'
import requests
import re
import time
import http.cookiejar as cookielib
from selenium import webdriver
from bs4 import BeautifulSoup
# Build the request headers
agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36'
headers = {
'User-Agent': agent,
'Content-Type': 'text/plain;charset=UTF-8',
'Cache-Control': 'max-age=0',
'Accept': 'application/json, text/javascript, */*; q=0.01',
'Connection': 'Keep-Alive',
}
# Log in using saved cookie data
session = requests.session()
session.cookies = cookielib.LWPCookieJar(filename='cookies_unicom')
try:
    session.cookies.load(ignore_discard=True)
except (FileNotFoundError, cookielib.LoadError):
    print('Failed to load cookies')
class unicom_parse(object):
def __init__(self, username, password):
self.username, self.password = str(username), str(password)
print(self.username, self.password)
def Call_detail_parse(self):
post_url = 'http://iservice.10010.com/e3/static/query/callDetail?_=1479795143774&menuid=000100030001'
post_data = {
'pageNo': '1', 'pageSize': '20', 'beginDate': '2016-11-01', 'endDate': '2016-11-22'
}
# page_source=requests.get(url,cookies=cookies)
# page_source=requests.post(url,data=cookies)
# print(page_source.text)
    def Is_login(self):
        url = "http://iservice.10010.com/e3/static/check/checklogin/?_=1479963186816"
        check_page = session.get(url, headers=headers, allow_redirects=False)
        print(check_page)
        return check_page.status_code == 200
def User_login(self):
post_url = "https://uac.10010.com/portal/Service/MallLogin?callback=jQuery17209332841114299033_" \
"1420279331097&redirectURL=http%3A%2F%2Fwww.10010.com&userName=" + self.username + "&password=" \
+ self.password + "&pwdType=01&productType=04&redirectType=01&rememberMe=1&areaCode=841" \
"&arrcity=%E8%A5%BF%E5%AE%89"
# post_data = {
# 'userName': '18665961559', 'userPwd': '066530'
# }
# login_page=session.post(post_url,data=post_data,headers=headers)
login_page = session.get(post_url, headers=headers)
login_code = login_page.text
print(login_page.status_code, login_code)
session.cookies.save()
        return self.Is_login()  # check whether the login succeeded
if __name__ == '__main__':
ac = unicom_parse(18665961559, '066530')
ac.User_login()
# print(temp)
'https://uac.10010.com/oauth2/new_auth?display=wap&page_type=05&real_ip=106.39.79.162'
'http://iservice.10010.com/e3/static/check/checklogin/?_=1479789247653'
"https://uac.10010.com/portal/Service/MallLogin?callback=?"
'''
CommonConstants.LOGIN_URL = UacPrefix.PRXFIX_HTTPS_URL + "/portal/Service/MallLogin?callback=?";
CommonConstants.LOGIN_UNICOM_URL = UacPrefix.PRXFIX_HTTPS_URL + "/portal/Service/LoginUnicom?callback=?";
UacPrefix.PRXFIX_HTTPS_URL = "https://uac.10010.com"
Check and compare against 4648
url: CommonConstants.LOGIN_URL + "?req_time=" + new Date().getTime()
CommonConstants.LOGIN_URL = "/oauth2/new_auth"
'https://uac.10010.com/oauth2/new_auth'+'?req_time='+
loginCommon.getLoginParas = function() {
var params = {};
params.app_code = $.query.get("app_code");
params.user_id = $("#userName").val().trim();
params.user_pwd = $("#userPwd").val().trim();
params.user_type = $("#userType").val();
params.pwd_type = $("#pwdType").val();
params.display = "web";
params.response_type = "code";
params.redirect_uri = $.query.get("redirect_uri");
params.is_check = "1";
if (loginCommon.isShowVerify == "0") {
params.verify_code = $("#verifyCode").val();
params.uvc = $("#uvc").val();
}
params.state = $.query.get("state");
return params;
}
'''
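# A minimal sketch of how Call_detail_parse could issue its query through the
# shared session (assumption: the endpoint and fields above come from the
# notes and may be stale):
#
#   resp = session.post(post_url, data=post_data, headers=headers)
#   print(resp.status_code, resp.text)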
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Purchase Date Planned Update module for Odoo
# Copyright (C) 2015 Akretion (http://www.akretion.com)
# @author Alexis de Lattre <alexis.delattre@akretion.com>
# @author Sébastien Beau <sebastien.beau@akretion.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
class PurchaseOrderLine(orm.Model):
_inherit = 'purchase.order.line'
def write(self, cr, uid, ids, vals, context=None):
if vals.get('date_planned'):
if not isinstance(ids, list):
ids = [ids]
polines = self.browse(cr, uid, ids, context=context)
move_ids = []
for poline in polines:
# Add msg in chatter
poline.order_id.message_post(_(
"Updated Scheduled Date of line <b>%s</b> from %s "
"to <b>%s</b>"
% (poline.name, poline.date_planned,
vals['date_planned'])))
move_ids += [
sm.id for sm in poline.move_ids if sm.state != 'done']
if move_ids:
# update related stock move
self.pool['stock.move'].write(cr, SUPERUSER_ID, move_ids, {
'date_expected': vals['date_planned'],
}, context=context)
return super(PurchaseOrderLine, self).write(
cr, uid, ids, vals, context=context)
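# Note: the related stock moves are written as SUPERUSER_ID so the
# date_expected update applies even when the purchase user lacks write access
# on stock.move; moves already in state 'done' are left untouched.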
|
from django.db import models
from stdimage.models import StdImageField
# import uuid
# def get_file_path(_instance, filename):
# ext = filename.split('.')[-1]
# filename = f'{uuid.uuid4()}.{ext}'
# return filename
"""
Função para Criptografar o caminho das imagens...
trocar o upload_to='serviços/imagens por get_file_path'
"""
class Base(models.Model):
""" A classe BASE é uma class abstrata e não é criada no banco de addos
servirá apenas de rascunho para outras classes. """
    # Set only once, when the object is created
DataCriacao = models.DateField(
verbose_name='Data de Criação', auto_now_add=True)
    # Updated on every change
DataAlteracao = models.DateField(
verbose_name='Data de Alteração', auto_now=True)
Ativo = models.BooleanField(default=True, verbose_name='Ativo?')
class Meta:
abstract = True
class Categoria(Base):
NomeCategoria = models.CharField(max_length=50, verbose_name='Nome da Categoria')
class Meta:
verbose_name = ("Categoria")
verbose_name_plural = ("Categorias")
def __str__(self):
return self.NomeCategoria
class Servico(Base):
NomeServico = models.CharField(
max_length=35, verbose_name='Nome do Serviço')
DescricaoServico = models.CharField(
max_length=255, verbose_name='Descrição')
FkCategoria = models.ForeignKey(
'core.Categoria', verbose_name='Categoria', on_delete=models.DO_NOTHING)
ImageServico = StdImageField(
'Image', upload_to='servicos/images', variations={'thumb': {'width': 400, 'height': 400, 'crop': True}})
SlugServico = models.SlugField(
max_length=150, blank=True, editable=False, verbose_name='Slug')
Instagram = models.CharField(
max_length=255, null=True, blank=True, verbose_name='Instagram')
Facebook = models.CharField(
max_length=255, null=True, blank=True, verbose_name='Facebook')
class Meta:
verbose_name = ("Serviço")
verbose_name_plural = ("Serviços")
def __str__(self):
return self.NomeServico
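# SlugServico is editable=False and is never assigned above; a common pattern
# is to populate it on save (a sketch under that assumption, not code from
# this project):
#
#   from django.utils.text import slugify
#
#   def save(self, *args, **kwargs):
#       if not self.SlugServico:
#           self.SlugServico = slugify(self.NomeServico)
#       super().save(*args, **kwargs)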
|
from django.urls import path
from .views import index, list_of_recommendations
urlpatterns = [
path('', index),
path('recommend/Stuff', list_of_recommendations),
]
|
from sage.categories.category_with_axiom import CategoryWithAxiom, all_axioms
from sage.misc.cachefunc import cached_method
class Magmas:
class GAP(CategoryWithAxiom):
class ElementMethods:
def _mul_(self, other):
r"""
                Return the product of ``self`` by ``other``.
EXAMPLES::
sage: from mygap import mygap
sage: G = mygap.FreeGroup(3)
sage: f1, f2, f3 = G.group_generators()
sage: f1 * f3 * f2
f1*f3*f2
"""
                return self.parent(self.gap() * other.gap())  # TODO: call the GAP operation directly
class Unital:
class GAP(CategoryWithAxiom):
class ParentMethods:
def one(self):
return self(self.gap().One())
class ElementMethods:
def __invert__(self):
r"""
Return the inverse of this element.
EXAMPLES::
sage: from mygap import mygap
sage: G = mygap.FreeGroup("a")
sage: a, = G.group_generators()
sage: a.__invert__()
a^-1
sage: a^-1
a^-1
sage: ~a
a^-1
sage: ~a * a
<identity ...>
This also works when inverses are defined everywhere but for zero::
sage: F = mygap.FiniteField(3)
sage: a = F.one(); a
Z(3)^0
sage: ~a
Z(3)^0
sage: ~(a+a)
Z(3)
sage: a = F.zero()
sage: ~a
Traceback (most recent call last):
...
ValueError: 0*Z(3) is not invertible
                    .. WARNING::
In other cases, GAP may return the inverse in
a larger domain without this being noticed by
Sage at this point::
sage: N = mygap.eval("Integers")
sage: x = N.one()
Probably acceptable::
sage: y = ~(x + x); y
1/2
Not acceptable::
sage: y.parent()
Integers
Should we have a category for the analogue of
MagmasWithInverseIfNonZero, and move this method
there?
"""
from sage.libs.gap.libgap import libgap
fail = libgap.eval("fail")
inverse = self.gap().Inverse()
if inverse == fail:
                    raise ValueError("%s is not invertible" % self)
return self.parent()(inverse)
|
# vim: set fileencoding=utf-8 :
"""
Test L{gbp.deb.changelog.ChangeLog}
"""
cl_debian = """git-buildpackage (0.5.32) unstable; urgency=low
* [efe9220] Use known_compressions in guess_upstream_version too
(Closes: #645477)
* [e984baf] git-import-orig: fix --filter
-- Guido Günther <agx@sigxcpu.org> Mon, 17 Oct 2011 10:15:22 +0200
git-buildpackage (0.5.31) unstable; urgency=low
[ Guido Günther ]
* [3588d88] Fix pristine-tar error message
* [8da98da] gbp-pq: don't fail on missing series file but create an empty
branch instead
[ Salvatore Bonaccorso ]
* [b33cf74] Fix URL to cl2vcs service.
Refer to https://honk.sigxcpu.org/cl2vcs instead of
https://honk.sigxcpu.org/cl2vcs for the cl2vcs service. (Closes: #640141)
-- Guido Günther <agx@sigxcpu.org> Wed, 28 Sep 2011 20:21:34 +0200
"""
cl_upstream="""python-dateutil (1.0-1) unstable; urgency=low
* Initial release (Closes: #386256)
-- Guido Günther <agx@sigxcpu.org> Wed, 6 Sep 2006 10:33:06 +0200
"""
cl_epoch="""xserver-xorg-video-nv (1:1.2.0-3) unstable; urgency=low
[ Steve Langasek ]
* Upload to unstable
-- David Nusinow <dnusinow@debian.org> Mon, 18 Sep 2006 19:57:45 -0400
"""
def test_parse_debian_only():
"""
    Parse the changelog of a Debian-only package
Methods tested:
- L{gbp.deb.changelog.ChangeLog.__init__}
- L{gbp.deb.changelog.ChangeLog.is_native}
Properties tested:
- L{gbp.deb.changelog.ChangeLog.version}
- L{gbp.deb.changelog.ChangeLog.debian_version}
- L{gbp.deb.changelog.ChangeLog.upstream_version}
- L{gbp.deb.changelog.ChangeLog.epoch}
- L{gbp.deb.changelog.ChangeLog.noepoch}
>>> import gbp.deb.changelog
>>> cl = gbp.deb.changelog.ChangeLog(cl_debian)
>>> cl.version
'0.5.32'
>>> cl.version == cl['Version']
True
>>> cl.debian_version
'0.5.32'
>>> cl.debian_version == cl['Debian-Version']
True
>>> cl.noepoch
'0.5.32'
>>> cl.noepoch == cl['NoEpoch-Version']
True
>>> cl.epoch
>>> cl.upstream_version
>>> cl.is_native()
True
"""
def test_parse_no_epoch():
    """
    Parse the changelog of a package without an epoch
Methods tested:
- L{gbp.deb.changelog.ChangeLog.__init__}
- L{gbp.deb.changelog.ChangeLog.has_epoch}
- L{gbp.deb.changelog.ChangeLog.is_native}
Properties tested:
- L{gbp.deb.changelog.ChangeLog.version}
- L{gbp.deb.changelog.ChangeLog.debian_version}
- L{gbp.deb.changelog.ChangeLog.upstream_version}
- L{gbp.deb.changelog.ChangeLog.epoch}
- L{gbp.deb.changelog.ChangeLog.noepoch}
>>> import gbp.deb.changelog
>>> cl = gbp.deb.changelog.ChangeLog(cl_upstream)
>>> cl.version
'1.0-1'
>>> cl.version == cl['Version']
True
>>> cl.debian_version
'1'
>>> cl.debian_version == cl['Debian-Version']
True
>>> cl.noepoch
'1.0-1'
>>> cl.noepoch == cl['NoEpoch-Version']
True
>>> cl.epoch
>>> cl.upstream_version
'1.0'
>>> cl.has_epoch()
False
>>> cl.is_native()
False
"""
def test_parse_epoch():
    """
    Parse the changelog of a package with an epoch
Methods tested:
- L{gbp.deb.changelog.ChangeLog.__init__}
- L{gbp.deb.changelog.ChangeLog.has_epoch}
- L{gbp.deb.changelog.ChangeLog.is_native}
Properties tested:
- L{gbp.deb.changelog.ChangeLog.version}
- L{gbp.deb.changelog.ChangeLog.debian_version}
- L{gbp.deb.changelog.ChangeLog.upstream_version}
- L{gbp.deb.changelog.ChangeLog.epoch}
- L{gbp.deb.changelog.ChangeLog.noepoch}
>>> import gbp.deb.changelog
>>> cl = gbp.deb.changelog.ChangeLog(cl_epoch)
>>> cl.version
'1:1.2.0-3'
>>> cl.version == cl['Version']
True
>>> cl.debian_version
'3'
>>> cl.debian_version == cl['Debian-Version']
True
>>> cl.noepoch
'1.2.0-3'
>>> cl.noepoch == cl['NoEpoch-Version']
True
>>> cl.epoch
'1'
>>> cl.upstream_version
'1.2.0'
>>> cl.has_epoch()
True
>>> cl.is_native()
False
"""
def test_parse_name():
"""
Methods tested:
- L{gbp.deb.changelog.ChangeLog.__init__}
Properties tested:
- L{gbp.deb.changelog.ChangeLog.name}
>>> import gbp.deb.changelog
>>> cl = gbp.deb.changelog.ChangeLog(cl_debian)
>>> cl.name
'git-buildpackage'
"""
def test_parse_last_mod():
"""
Test author, email and date of last modification
Methods tested:
- L{gbp.deb.changelog.ChangeLog.__init__}
Properties tested:
- L{gbp.deb.changelog.ChangeLog.name}
- L{gbp.deb.changelog.ChangeLog.email}
- L{gbp.deb.changelog.ChangeLog.date}
>>> import gbp.deb.changelog
>>> cl = gbp.deb.changelog.ChangeLog(cl_debian)
>>> cl.author.startswith('Guido')
True
>>> cl.email
'agx@sigxcpu.org'
>>> cl.date
'Mon, 17 Oct 2011 10:15:22 +0200'
"""
def test_parse_sections():
"""
Test if we can parse sections out of the changelog
Methods tested:
- L{gbp.deb.changelog.ChangeLog.__init__}
- L{gbp.deb.changelog.ChangeLogSection.__init__}
- L{gbp.deb.changelog.ChangeLogSection.parse}
Properties tested:
- L{gbp.deb.changelog.ChangeLog.sections}
>>> import gbp.deb.changelog
>>> cl = gbp.deb.changelog.ChangeLog(cl_debian)
>>> cl.sections[0].package
'git-buildpackage'
>>> cl.sections[0].version
'0.5.32'
>>> cl.sections[1].package
'git-buildpackage'
>>> cl.sections[1].version
'0.5.31'
"""
|
"""Defines URL patterns for offers app."""
from django.urls import path
from . import views
##from django.conf import settings
##from django.conf.urls.static import static
app_name = 'offers'
urlpatterns = [
    # Home page for offers app.
path('', views.index, name='index'),
    # New_offer page for adding new offers.
path('new_offer/', views.new_offer, name='new_offer'),
path('available_to_me/', views.available_to_me, name='available_to_me'),
path('available_to_me/add_dib/', views.add_dib, name='add_dib'),
path('all_my_dibs/', views.all_my_dibs, name='all_my_dibs'),
path('dibs_on_my_stuff/', views.dibs_on_my_stuff, name='dibs_on_my_stuff'),
]
|
def main():
t = int(input()) # read a line with a single integer
for i in range(1, t + 1):
r, c = map(int, input().split())
matrix = []
for j in range(r):
matrix.append(input())
print("Case #{}: {}".format(i, str(solve_problem(r, c, matrix))))
def solve_problem(r, c, matrix):
required_order = []
char_set = []
execution_order = ''
for i in range(c):
working_order = []
for j in range(r-1, -1, -1):
if matrix[j][i] not in char_set:
char_set.append(matrix[j][i])
if len(working_order) == 0:
working_order.append(matrix[j][i])
elif (matrix[j][i] != working_order[-1]) and (matrix[j][i] in working_order):
return -1
elif matrix[j][i] != working_order[-1]:
working_order.append(matrix[j][i])
required_order.append(working_order)
for k in range(len(char_set)):
target = list(filter(lambda x: sum([column.index(x) for column in required_order if x in column]) == 0, char_set))[0]
execution_order += target
char_set.remove(target)
for _column in required_order:
if target in _column:
_column.remove(target)
return execution_order
if __name__ == '__main__':
main()
|
# coding: utf-8
# A rather poor sample
from atexit import register
from random import randrange
from threading import BoundedSemaphore,Lock,Thread
from time import ctime,sleep
lock = Lock()
MAX = 5
candytray = BoundedSemaphore(MAX)
def refill():
    lock.acquire()
    print('Refilling candy ...')
    try:
        candytray.release()
    except ValueError:  # releasing past the bound raises ValueError
        print('full, skipping')
    else:
        print('Refill Successfully !')
    lock.release()
def buy():
    lock.acquire()
    print('Buying candy ...')
    if candytray.acquire(False):
        print('You can buy candy...')
    else:
        print('lack of stock, skipping')
    lock.release()
def producer(loops):
    for i in range(loops):
        refill()
        sleep(randrange(3))
def consumer(loops):
    for i in range(loops):
        buy()
        sleep(randrange(3))
def _main():
    print('Starting at :', ctime())
    nloops = randrange(2, 6)
    print('nloops:', nloops)
    print('The Candy Machine (full with %d bars)!' % MAX)
    Thread(target=consumer, args=(randrange(nloops, nloops + MAX + 2),)).start()
    Thread(target=producer, args=(nloops,)).start()
@register
def _atexit():
    print('All Done at : ', ctime())
if __name__ == '__main__':
    print(candytray)
    _main()
|
import itertools
import numpy as np
import pandas as pd
import scaa
import scipy.sparse as ss
import scipy.stats as st
import torch
def simulate_pois(n, p, rank, eta_max=None, holdout=None, seed=0):
np.random.seed(seed)
l = np.random.normal(size=(n, rank))
f = np.random.normal(size=(rank, p))
eta = l.dot(f)
if eta_max is not None:
# Scale the maximum value
eta *= eta_max / eta.max()
x = np.random.poisson(lam=np.exp(eta))
if holdout is not None:
mask = np.random.uniform(size=(n, p)) < holdout
x = np.ma.masked_array(x, mask=mask)
return x, eta
def training_score_oracle(x, eta):
return st.poisson(mu=np.exp(eta)).logpmf(x).sum()
def training_score_nmf(x, rank=10):
from wlra.nmf import nmf
return st.poisson(mu=nmf(x, rank)).logpmf(x).sum()
def training_score_nmf_kl(x, rank=10):
import sklearn.decomposition
m = sklearn.decomposition.NMF(n_components=rank, solver='mu', beta_loss=1).fit(x)
return st.poisson(mu=m.transform(x).dot(m.components_)).logpmf(x).sum()
def training_score_grad(x, rank):
import torch
import wlra.grad
with torch.autograd.set_grad_enabled(True):
m = (wlra.grad.PoissonFA(n_samples=x.shape[0], n_features=x.shape[1], n_components=rank)
.fit(x, atol=1e-3, max_epochs=10000))
return st.poisson(mu=np.exp(m.L.dot(m.F))).logpmf(x).sum()
def training_score_plra(x, rank):
import wlra
return st.poisson(mu=np.exp(wlra.plra(x, rank=rank, max_outer_iters=100, check_converged=True))).logpmf(x).sum()
def training_score_plra1(x, rank=10):
import wlra
lam = np.exp(wlra.plra(x, rank=rank))
return st.poisson(mu=lam).logpmf(x).sum()
def training_score_lda(x, rank=10, learning_method='online', batch_size=100, **kwargs):
import sklearn.decomposition
model = sklearn.decomposition.LatentDirichletAllocation(n_components=rank, learning_method=learning_method, batch_size=batch_size, **kwargs)
L = model.fit_transform(x)
F = model.components_
lam = (L / L.sum(axis=0)).dot(F)
return st.poisson(mu=lam).logpmf(x).sum()
def training_score_maptpx(x, rank=10, **kwargs):
import rpy2.robjects.packages
import rpy2.robjects.numpy2ri
rpy2.robjects.numpy2ri.activate()
maptpx = rpy2.robjects.packages.importr('maptpx')
res = maptpx.topics(x, K=rank, **kwargs)
L = np.array(res.rx2('omega'))
F = np.array(res.rx2('theta'))
return st.poisson(mu=x.sum(axis=1, keepdims=True) * L.dot(F.T)).logpmf(x).sum()
def training_score_hpf(x, rank=50, **kwargs):
try:
import tensorflow as tf
except ImportError:
return np.nan
import scHPF.preprocessing
import scHPF.train
import tempfile
with tempfile.TemporaryDirectory(prefix='/scratch/midway2/aksarkar/ideas/') as d:
tf.reset_default_graph()
# scHPF assumes genes x cells
scHPF.preprocessing.split_dataset_hpf(x.T, outdir=d)
# Set bp, dp as in scHPF.train
bp = x.sum(axis=1).mean() / x.sum(axis=1).var()
dp = x.sum(axis=0).mean() / x.sum(axis=0).var()
opt = scHPF.train.run_trials(
indir=d, outdir=d, prefix='',
nfactors=rank, a=0.3, ap=1, bp=bp, c=0.3, cp=1, dp=dp,
# This is broken when we call the API directly
logging_options={'log_phi': False})
L = np.load(f'{opt}/beta_shape.npy') / np.load(f'{opt}/beta_invrate.npy')
F = np.load(f'{opt}/theta_shape.npy') / np.load(f'{opt}/theta_invrate.npy')
# We assume cells x genes
return st.poisson(mu=F.dot(L.T)).logpmf(x).sum()
def training_score_scvi(train, **kwargs):
from scvi.dataset import GeneExpressionDataset
from scvi.inference import UnsupervisedTrainer
from scvi.models import VAE
data = GeneExpressionDataset(*GeneExpressionDataset.get_attributes_from_matrix(train))
vae = VAE(n_input=train.shape[1])
m = UnsupervisedTrainer(vae, data, verbose=False)
m.train(n_epochs=100)
# Training permuted the data for minibatching. Unpermute before "imputing"
# (estimating lambda)
lam = np.vstack([m.train_set.sequential().imputation(),
m.test_set.sequential().imputation()])
return st.poisson(mu=lam).logpmf(train).sum()
def training_score_zipvae(train, lr=1e-2, max_epochs=100, **kwargs):
import scaa
import torch
if not torch.cuda.is_available():
return np.nan
# scVI does not play nicely
with torch.autograd.set_grad_enabled(True):
training_data = get_data_loader(train, **kwargs)
with torch.cuda.device(0):
model = scaa.modules.ZIPVAE(train.shape[1], 10).fit(training_data, lr=lr, max_epochs=max_epochs)
lam = model.denoise(training_data)
return st.poisson(mu=lam).logpmf(train).sum()
def evaluate_training(rank=3, eta_max=2, num_trials=10):
result = []
for trial in range(num_trials):
x, eta = simulate_pois(n=200, p=300, rank=rank, eta_max=eta_max, seed=trial)
result.append([
trial,
training_score_oracle(x, eta),
training_score_nmf(x, rank),
training_score_grad(x, rank),
training_score_plra(x, rank),
training_score_plra1(x, rank)
])
result = pd.DataFrame(result)
result.columns = ['trial', 'Oracle', 'NMF', 'Grad', 'PLRA', 'PLRA1']
return result
def rmse(pred, true):
return np.sqrt(np.square(pred - true).mean())
def pois_loss(pred, true):
return (pred - true * np.log(pred + 1e-8)).mean()
losses = [rmse, pois_loss]
def loss(pred, true):
return [f(pred, true) for f in losses]
def imputation_score_mean(x):
"""Mean-impute the data"""
return loss(x.mean(), x.data[x.mask])
def imputation_score_nmf(x, rank):
try:
from wlra.nmf import nmf
res = nmf(x, rank, atol=1e-3)
return loss(res[x.mask], x.data[x.mask])
except RuntimeError:
return [np.nan for f in losses]
def imputation_score_plra1(x, rank):
try:
import wlra
res = np.exp(wlra.plra(x, rank=rank, max_outer_iters=1))
return loss(res[x.mask], x.data[x.mask])
except RuntimeError:
return [np.nan for f in losses]
def imputation_score_plra(x, rank):
try:
import wlra
res = np.exp(wlra.plra(x, rank=rank, max_outer_iters=100, check_converged=True))
return loss(res[x.mask], x.data[x.mask])
except RuntimeError:
return [np.nan for f in losses]
def evaluate_pois_imputation(rank=3, holdout=0.25, eta_max=None, num_trials=10):
result = []
for trial in range(num_trials):
x, eta = simulate_pois(n=200, p=300, rank=rank, eta_max=eta_max,
holdout=holdout, seed=trial)
result.append(list(itertools.chain.from_iterable(
[[trial],
imputation_score_mean(x),
imputation_score_nmf(x, rank),
imputation_score_plra(x, rank),
imputation_score_plra1(x, rank),
])))
result = pd.DataFrame(result)
result.columns = ['trial', 'rmse_mean', 'pois_loss_mean', 'rmse_nmf',
'pois_loss_nmf', 'rmse_plra', 'pois_loss_plra',
'rmse_plra1', 'pois_loss_plra1']
return result
def pois_llik(lam, train, test):
if ss.issparse(train):
raise NotImplementedError
else:
lam *= test.sum(axis=1, keepdims=True) / train.sum(axis=1, keepdims=True)
return st.poisson(mu=lam).logpmf(test).sum()
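# train_test_split below thins counts binomially: each entry x_ij is split
# into train ~ Binomial(x_ij, p) and test = x_ij - train, so both halves
# remain Poisson with proportionally scaled rates; pois_llik rescales lambda
# by the test/train size-factor ratio before scoring.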
def train_test_split(x, p=0.5):
if ss.issparse(x):
data = np.random.binomial(n=x.data.astype(np.int), p=p, size=x.data.shape)
if ss.isspmatrix_csr(x):
train = ss.csr_matrix((data, x.indices, x.indptr), shape=x.shape)
elif ss.isspmatrix_csc(x):
train = ss.csc_matrix((data, x.indices, x.indptr), shape=x.shape)
else:
raise NotImplementedError('sparse matrix type not supported')
else:
train = np.random.binomial(n=x, p=p, size=x.shape)
test = x - train
return train, test
def generalization_score_oracle(train, test, eta):
return pois_llik(np.exp(eta), train, test)
def generalization_score_plra1(train, test, rank=10, **kwargs):
import wlra
lam = np.exp(wlra.plra(train, rank=rank))
return pois_llik(lam, train, test)
def generalization_score_nmf(train, test, rank=10, **kwargs):
from wlra.nmf import nmf
lam = nmf(train, rank=rank)
return pois_llik(lam, train, test)
def generalization_score_nmf_kl(train, test, n_components=10, **kwargs):
import sklearn.decomposition
m = sklearn.decomposition.NMF(n_components=n_components, solver='mu', beta_loss=1).fit(train)
return pois_llik(m.transform(train).dot(m.components_), train, test)
def generalization_score_grad(train, test, rank=10, **kwargs):
import torch
from wlra.grad import PoissonFA
with torch.autograd.set_grad_enabled(True):
model = PoissonFA(n_samples=train.shape[0], n_features=train.shape[1], n_components=rank).fit(train, atol=1e-3, max_epochs=10000)
lam = np.exp(model.L.dot(model.F))
return pois_llik(lam, train, test)
def generalization_score_hpf(train, test, rank=50, **kwargs):
try:
import tensorflow as tf
  except ImportError:
return np.nan
import scHPF.preprocessing
import scHPF.train
import tempfile
with tempfile.TemporaryDirectory(prefix='/scratch/midway2/aksarkar/ideas/') as d:
tf.reset_default_graph()
# scHPF assumes genes x cells
scHPF.preprocessing.split_dataset_hpf(train.T, outdir=d)
# Set bp, dp as in scHPF.train
bp = train.sum(axis=1).mean() / train.sum(axis=1).var()
dp = train.sum(axis=0).mean() / train.sum(axis=0).var()
opt = scHPF.train.run_trials(
indir=d, outdir=d, prefix='',
nfactors=rank, a=0.3, ap=1, bp=bp, c=0.3, cp=1, dp=dp,
# This is broken when we call the API directly
logging_options={'log_phi': False})
L = np.load(f'{opt}/beta_shape.npy') / np.load(f'{opt}/beta_invrate.npy')
F = np.load(f'{opt}/theta_shape.npy') / np.load(f'{opt}/theta_invrate.npy')
# We assume cells x genes
return pois_llik(F.dot(L.T), train, test)
def generalization_score_scvi(train, test, **kwargs):
from scvi.dataset import GeneExpressionDataset
from scvi.inference import UnsupervisedTrainer
from scvi.models import VAE
data = GeneExpressionDataset(*GeneExpressionDataset.get_attributes_from_matrix(train))
vae = VAE(n_input=train.shape[1])
m = UnsupervisedTrainer(vae, data, verbose=False)
m.train(n_epochs=100)
# Training permuted the data for minibatching. Unpermute before "imputing"
# (estimating lambda)
with torch.autograd.set_grad_enabled(False):
lam = np.vstack([m.train_set.sequential().imputation(),
m.test_set.sequential().imputation()])
return pois_llik(lam, train, test)
def generalization_score_dca(train, test, **kwargs):
import anndata
import scanpy.api
data = anndata.AnnData(X=train)
# "Denoising" is estimating lambda
scanpy.api.pp.dca(data, mode='denoise')
lam = data.X
return pois_llik(lam, train, test)
def get_data_loader(x, dtype=torch.float, batch_size=25, shuffle=False, **kwargs):
import scaa
import torch.utils.data
if ss.issparse(x):
x = scaa.dataset.SparseDataset(x)
else:
x = torch.tensor(x, dtype=dtype)
return torch.utils.data.DataLoader(x, batch_size=batch_size, shuffle=shuffle)
def generalization_score_zipvae(train, test, lr=1e-2, max_epochs=100, **kwargs):
import scaa
import torch
if not torch.cuda.is_available():
return np.nan
# scVI does not play nicely
with torch.autograd.set_grad_enabled(True):
training_data = get_data_loader(train, **kwargs)
with torch.cuda.device(0):
model = scaa.modules.ZIPVAE(train.shape[1], 10).fit(training_data, lr=lr, max_epochs=max_epochs)
lam = model.denoise(training_data)
return pois_llik(lam, train, test)
def generalization_score_zipaae(train, test, y, lr=1e-2, max_epochs=10, **kwargs):
import scaa
import torch
import torch.utils.data
if not torch.cuda.is_available():
return np.nan
# scVI does not play nicely
with torch.autograd.set_grad_enabled(True):
training_data = get_data_loader(train, **kwargs)
labels = get_data_loader(y, dtype=torch.long, **kwargs)
with torch.cuda.device(0):
model = scaa.modules.ZIPAAE(train.shape[1], 10, num_classes=(y.max() + 1)).fit(training_data, labels, lr=lr, max_epochs=max_epochs)
lam = model.denoise(training_data)
return pois_llik(lam, train, test)
def generalization_score_lda(train, test, n_components=10, learning_method='online', batch_size=100, **kwargs):
import sklearn.decomposition
model = sklearn.decomposition.LatentDirichletAllocation(n_components=n_components, learning_method=learning_method, batch_size=batch_size, **kwargs)
L = model.fit_transform(train)
F = model.components_
lam = (L / L.sum(axis=0)).dot(F)
return pois_llik(lam, train, test)
def generalization_score_maptpx(train, test, rank=10, **kwargs):
import rpy2.robjects.packages
import rpy2.robjects.numpy2ri
rpy2.robjects.numpy2ri.activate()
maptpx = rpy2.robjects.packages.importr('maptpx')
res = maptpx.topics(train, K=rank, **kwargs)
L = np.array(res.rx2('omega'))
F = np.array(res.rx2('theta'))
lam = train.sum(axis=1, keepdims=True) * L.dot(F.T)
return pois_llik(lam, train, test)
|
from django.contrib import messages
from django.core.paginator import Paginator
from django.contrib.auth import authenticate, login, logout
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
# allauth decorator @verified_email_required
from allauth.account.decorators import verified_email_required
from allauth.account.views import SignupView, LoginView, PasswordResetView
from tests.models import TestOrder
from report_processing.models import PaymentValidation
from .models import Profile
from .forms import ProfileUpdateForm
# class MySignupView(SignupView):
# template_name = 'account/custom_users/custom_signup.html'
# class MyLoginView(LoginView):
# template_name = 'account/login.html'
#
#
# class MyPasswordResetView(PasswordResetView):
# template_name = 'account/password_reset.html'
#
#
# class MyPasswordChangeView(PasswordResetView):
# template_name = 'account/password_change.html'
@login_required()
def profile(request, template_name='account/custom_users/profile.html'):
return render(request, template_name)
########################################################################################
@login_required()
def profile_edit(request, template_name='account/custom_users/profile_edit.html'):
existing_profile = get_object_or_404(Profile, user=request.user)
profile_form = ProfileUpdateForm(instance=existing_profile)
if request.method == 'POST':
profile_form = ProfileUpdateForm(request.POST, request.FILES, instance=existing_profile)
if profile_form.is_valid():
profile = profile_form.save(commit=False)
profile.user = request.user
profile.save()
messages.success(request, 'Profile Updated for {}'.format(request.user.username), extra_tags='html_safe')
# return redirect('custom_users:profile')
return redirect('custom_users:orders-by-user')
context = {
'profile_form': profile_form,
'existing_profile': existing_profile,
}
return render(request, template_name, context)
########################################################################################
def orders_by_user(request):
    if request.user.is_authenticated:
        user = get_object_or_404(User, id=request.user.id)
        user_profile = Profile.objects.filter(user=user.id)
        if user_profile:
            profile = get_object_or_404(Profile, user=request.user.id)
            orders = TestOrder.objects.filter(client_info=profile.id).order_by('-id')
            paginator = Paginator(orders, 5)
            page = request.GET.get('page')
            paginator_data = paginator.get_page(page)
            template = 'account/custom_users/orders_by_user.html'
            context = {'orders': paginator_data}
            return render(request, template, context)
    # Fall back to allauth's login page when the user is anonymous or has no
    # profile; otherwise the view would return None and raise an error.
    return redirect('account_login')
########################################################################################
def filtered_report(request, id=None):
user = get_object_or_404(User, id=id)
filtered_reports = PaymentValidation.objects.filter(approved_order__client_info__user=user)
# Filtered reports Paginator
paginator = Paginator(filtered_reports, 20)
page = request.GET.get('page')
filtered_reports_paginator = paginator.get_page(page)
template = 'account/custom_users/filtered_reports.html'
context = {'filtered_reports': filtered_reports_paginator}
return render(request, template, context)
########################################################################################
|
import xml.etree.ElementTree as et
import sys
def get_depth_rec(el: et.Element, depth: int) -> int:
    # Elements with no children and no attributes do not add a level;
    # without this early return the function fell through and returned None.
    if not (len(el) or el.attrib):
        return depth
    dep = depth + 1
    for child in el:
        if child.attrib:  # iterating an Element already yields Elements
            d = get_depth_rec(child, depth + 1)
            if d > dep:
                dep = d
    return dep
def get_depth(path):
root = et.parse(path).getroot()
return get_depth_rec(root, 0)
if __name__ == "__main__":
if len(sys.argv) > 1:
print(get_depth(sys.argv[1]))
else:
print("Pass the path to *.xml as the first parameter") |
# coding=UTF-8
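# Request handlers for the ABLOG-MAIN database. Values are interpolated
# straight into the SQL text via format-style placeholders (see
# db._make_request), so callers must supply trusted or pre-escaped values;
# only the SET-USER-DATA handler below quotes its inputs itself.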
@dbRequestHandler('ABLOG-MAIN','GET-USER-INFO')
def request__get_user_info(db,userid,**kwargs):
reqres = db._make_request(
"""
SELECT
user_id,user_name,
user_passwd,user_email,
user_reg_time,user_avatar_path,
user_about_text
FROM
{db_name}.USERS
WHERE
user_id = {user_id}
""",
('id','name','passwd','email','reg_time','avapath','about'),
user_id=str(userid) )
    if reqres is None:
return None
if len(reqres) != 1:
return None
return reqres[0]
@dbRequestHandler('ABLOG-MAIN','GET-USERS')
def request__get_users(db,first,count,**kwargs):
reqres = db._make_request(
"""
SELECT
user_id,user_name
FROM
{db_name}.USERS
ORDER
BY user_id
LIMIT {first},{count}
""",
('id','name'),
first=first,
        count=count )
return reqres
@dbRequestHandler('ABLOG-MAIN','GET-USERS-COUNT')
def request__get_users_count(db,**kwargs):
reqres = db._make_request(
"""
SELECT
COUNT(*)
FROM
{db_name}.USERS;
""");
    if reqres is None:
return None
return reqres[0][0]
@dbRequestHandler('ABLOG-MAIN','GET-USER-NAME')
def request__get_user_name(db, userid, **kwargs):
reqres = db._make_request(
"""
SELECT
user_name
FROM
{db_name}.USERS
WHERE
user_id = {user_id}
""",
user_id=str(userid) )
    if reqres is None:
return None
if len(reqres) != 1:
return None
return reqres[0][0]
@dbRequestHandler('ABLOG-MAIN','GET-USER-ID-BY-NAME')
def request__get_user_id_by_name(db, name, **kwargs):
reqres = db._make_request(
"""
SELECT
user_id
FROM
{db_name}.USERS
WHERE
user_name = "{name}";
""",
name=name)
    if reqres is None:
return None
if len(reqres) != 1:
return None
return reqres[0][0]
@dbRequestHandler('ABLOG-MAIN','NEW-USER')
def request__new_user(db, name, passwd, **kwargs):
db._make_request(
"""
INSERT INTO
{db_name}.USERS
(user_name,user_passwd,user_reg_time)
VALUES
("{name}","{passwd}",CURRENT_TIMESTAMP);
""",
name=name,
passwd=passwd)
@dbRequestHandler('ABLOG-MAIN','SET-USER-AVATAR-PATH')
def request__set_user_avatar_path(db,userid,path,**kwargs):
db._make_request(
"""
UPDATE
{db_name}.USERS
SET user_avatar_path="{path}"
WHERE
user_id={userid};
""",
userid=userid,
path=path)
@dbRequestHandler('ABLOG-MAIN','SET-USER-DATA')
def request__set_user_data(db, userid, email, about, **kwargs):
    if about is None:
        about = 'NULL'
    else:
        about = '"' + about.replace('\\', '\\\\').replace('"', '\\"') + '"'
    #
    if email is None or email == '':
        email = 'NULL'
    else:
        email = '"' + email.replace('\\', '\\\\').replace('"', '\\"') + '"'
#
db._make_request(
"""
UPDATE
        {db_name}.USERS
SET
user_email={email},
user_about_text={about}
WHERE
user_id={userid};
""",
userid=userid,
about=about,
email=email)
|
from numpy import genfromtxt
from sklearn import linear_model
path = r'./dataset1.csv'
data = genfromtxt(path, delimiter=',')
print(data)
x_data = data[:, :-1]  # all rows, every column but the last (the last column is the delivery time)
print("x_data:\n %s" % format(x_data))
y_data = data[:, -1]  # all rows, last column only
print("y_data:\n %s " % format(y_data))
# Create the linear regression model
regr = linear_model.LinearRegression()
# Train
regr.fit(x_data, y_data)
# Intercept b0
b0 = regr.intercept_
print(b0) # -0.868701466781709
# Coefficients
b1 = regr.coef_
print(b1) # [0.0611346 0.92342537]
# Formula: y = -0.86870 + 0.0611346*miles + 0.92342537*trips
# If a delivery job covers 102 miles with 6 deliveries, how many hours should it take?
x_pred = [[102, 6]]  # feature vector
y_pred = regr.predict(x_pred)
print(y_pred)
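# Hand check: -0.8687 + 0.0611346*102 + 0.92342537*6
#           = -0.8687 + 6.2357 + 5.5406 ≈ 10.91 hours, which should match y_pred.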
|
from rest_framework import serializers
from .models import Notification
class NotifSerializer(serializers.ModelSerializer):
class Meta:
model = Notification
fields = ('to', 'by', 'answer',)
|
from django.db import models
import datetime
# Create your models here.
class Todo(models.Model):
title = models.CharField(max_length=200)
description = models.CharField(max_length=300)
    due_date = models.DateField("Date", default=datetime.date.today)
|
"""
Configure test suite
Test run command:
py.test --cov-report term-missing --cov=api tests/
"""
import pytest
from api.app import create_app
from api.database import db as _db
from api.config import TestConfig
@pytest.fixture(scope='function')
def app():
_app = create_app(TestConfig)
ctx = _app.test_request_context()
ctx.push()
yield _app
# Code after yield executes on teardown
ctx.pop()
@pytest.fixture(scope='function')
def db(app):
_db.app = app
_db.create_all()
yield _db
# Code after yield executes on teardown
_db.session.close()
_db.drop_all()
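# A minimal sketch of a test using these fixtures (the '/' route check is a
# hypothetical placeholder, not part of this suite):
#
#   def test_app(app, db):
#       client = app.test_client()
#       assert client.get('/').status_code in (200, 404)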
|
import httplib
import time
from datetime import datetime
from base64 import b64encode,b64decode
import hmac
from hashlib import sha512
from urllib import urlencode
import urllib2
import json
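# Thin client for the (long-defunct) MtGox v0 HTTP API. Authenticated calls
# sign the urlencoded POST body with HMAC-SHA512 using an increasing
# microsecond nonce; __query retries until self.timeOut seconds elapse.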
class MtgoxHttpInterface(object):
def __init__(self, key, secret):
self.key = key
self.secret = secret
self.count = 0
self.timeOut = 30
self.refreshRate = 3
def __query(self,path,data,auth = True):
jsonData = {}
attempts = 1
while attempts <= self.timeOut/self.refreshRate:
if auth:
data['nonce'] = int(time.time()*1000000)+self.count
self.count += 1
post_data = urlencode(data)
signature = b64encode(str(hmac.new(
b64decode(self.secret),
post_data,
sha512).digest()))
headers = ({'User_Agent':'tradr',
'Rest_Key':self.key,
'Rest_Sign':signature})
else:
post_data = urlencode(data)
headers = {}
url = 'https://mtgox.com/api/0/'+path
req = urllib2.Request(url,post_data,headers)
try:
res = urllib2.urlopen(req)
except urllib2.URLError:
pass
except httplib.BadStatusLine:
pass
else:
if path.endswith('csv'):
return res.read()
else:
                    try:
                        jsonData = json.load(res)
                    except ValueError:
                        pass
                    else:
                        # only return payloads that carry no error key
                        if 'error' not in jsonData:
                            return jsonData
attempts += 1
time.sleep(self.refreshRate)
if jsonData:
self.__log('bad response: '+str(jsonData['error']))
else:
self.__log('connection timed out')
return {'error':'something went horribly wrong'}
def get_ticker(self):
attempts = 1
tickerQry = {}
while attempts <= 5:
tickerQry = self.__query('data/ticker.php', {}, auth=False)
if 'error' not in tickerQry:
return tickerQry
attempts += 1
time.sleep(2)
self.__log('Bad Ticker Response:'+tickerQry['error'])
return tickerQry
def get_depth(self):
return self.__query('data/getDepth.php?Currency=USD', {}, auth=False)
def get_info(self):
return self.__query('info.php', {})
def get_orders(self):
return self.__query('getOrders.php', {})
def get_history(self,cType):
if cType == 'BTC' or cType == 'USD':
return self.__query('history_'+cType+'.csv', {})
def buy_btc(self, amt, price = 0):
if not price:
data = {'amount':amt}
else:
data = {'amount':amt,'price':price}
buyQry = self.__query('buyBTC.php',data)
if 'error' not in buyQry:
msg = 'buy: '+str(amt)+' at '+str(price)+' '+buyQry['oid']
self.__log(msg)
return buyQry
def sell_btc(self, amt, price = 0):
if not price:
data = {'amount':amt}
else:
data = {'amount':amt,'price':price}
sellQry = self.__query('sellBTC.php',data)
if 'error' not in sellQry:
msg = 'sell: '+str(amt)+' at '+str(price)+' '+sellQry['oid']
self.__log(msg)
return sellQry
def cancel_order(self, oid, otype):
# oType = 'Sell' if order['type'] == '1' else 'Buy'
data = {'oid':oid,'type':otype}
cancelQry = self.__query('cancelOrder.php',data)
if 'error' not in cancelQry:
msg = 'cancel: '+oid
self.__log(msg)
return cancelQry
def __log(self, message):
tStamp = datetime.today().strftime('%y-%m-%d %H:%M:%S')
logMsg = tStamp+' '+str(message)
f = open('log/mtgox.log', 'a')
f.write(logMsg+'\n')
f.close()
|
from lib import hashmap
from nose.tools import *
def test_add():
h = hashmap.HashMap()
h.add('john', 'Google')
assert h.get('john') == 'Google'
def test_negative_capacity():
h = hashmap.HashMap(-10)
assert h.size() == 0
h.add('john', 'Google')
assert h.get('john') == 'Google'
def test_add_with_resize():
h = hashmap.HashMap(3)
h.add('john', 'Google')
h.add('jane', 'Amazon')
h.add('victor', 12345)
h.add('mark', 'Facebook')
assert h.get('john') == 'Google'
assert h.get('jane') == 'Amazon'
assert h.get('mark') == 'Facebook'
assert h.get('victor') == 12345
@raises(KeyError)
def test_remove():
h = hashmap.HashMap()
h.add('john', 'Google')
assert h.size() == 1
assert h.remove('john') == 'Google'
assert h.size() == 0
h.get('john')
@raises(KeyError)
def test_remove2():
h = hashmap.HashMap(2)
h.add('john', 'Google')
h.remove('jim')
def test_get():
h = hashmap.HashMap()
h.add('john', 'Google')
assert h.get('john') == 'Google'
h.add('john', 'Facebook')
assert h.get('john') == 'Facebook'
    assert h.get('john') != 'Google'
@raises(KeyError)
def test_get_fail():
h = hashmap.HashMap()
h.get('john')
def test_size():
h = hashmap.HashMap()
assert h.size() == 0
h.add('john', 'Google')
h.add('jane', 'Amazon')
h.add('victor', 12345)
h.add('mark', 'Facebook')
h.add('bill', 'Microsoft')
h.add('elon', ('tesla', 'spacex'))
assert h.size() == 6
assert h.remove('elon') == ('tesla', 'spacex')
assert h.remove('victor') == 12345
assert h.size() == 4
def test_items():
    h = hashmap.HashMap()
    data = range(10)
    for i in data:
        h.add(i, i)
    for k, v in h.items():
        assert k == v and k == data[k]
def test_keys():
    h = hashmap.HashMap()
    data = range(10)
    for i in data:
        h.add(i, None)
    for key in h.keys():
        assert key == data[key]
def test_values():
    h = hashmap.HashMap()
    data = range(10)
    for i in data:
        h.add(i, i)
    for value in h.values():
        assert value == data[value]
def test_iterkeys():
    h = hashmap.HashMap()
    data = range(10)
    for i in data:
        h.add(i, i)
    for key in h.iterkeys():
        assert key == data[key]
def test_itervalues():
    h = hashmap.HashMap()
    data = range(10)
    for i in data:
        h.add(i, i)
    for value in h.itervalues():
        assert value == data[value]
def test_iteritems():
    h = hashmap.HashMap()
    data = range(10)
    for i in data:
        h.add(i, i)
    for k, v in h.iteritems():
        assert k == v and k == data[k]
|
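# validBraces: repeatedly delete innermost matched pairs ("[]", "{}", "()")
# until none remain; the input is valid iff the string ends up empty.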
def validBraces(string):
a = "[]"
b = "{}"
c = "()"
while (string.find(a) != -1) or (string.find(b) != -1) or (string.find(c) != -1):
if (string.find(a) != -1):
string=string.replace(a,"")
if (string.find(b) != -1):
string=string.replace(b,"")
if (string.find(c) != -1):
string=string.replace(c,"")
    return not len(string)
|
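# The decorator below "primes" a coroutine: it calls next() once so execution
# advances to the first yield and the caller can send() immediately.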
def coroutine(func):
def start(*args, **kwargs):
cr = func(*args, **kwargs)
next(cr)
return cr
return start
@coroutine
def grep(pattern):
print("Looking for %s" % pattern)
while True:
line = (yield)
if pattern in line:
print(line)
g = grep("python")
g.send("python")
g.close()  # close the coroutine; it is then eligible for garbage collection
@coroutine
def grep_close(pattern):
print("Looking for %s" % pattern)
try:
while True:
line = (yield)
if pattern in line:
print(line)
except GeneratorExit:
print("Going away. Goodbye.")
g = grep_close("python")  # use the variant that catches GeneratorExit
g.send("python")
g.close()  # triggers GeneratorExit inside the coroutine
g = grep("python")
g.send("python")
g.throw(RuntimeError, "You're hosed")  # how to throw a custom exception into a generator
|
from discord_webhook import DiscordWebhook
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import WebDriverException
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.keys import Keys
from oauth2client.service_account import ServiceAccountCredentials
import requests
from time import gmtime, strftime, sleep
from random import randint
import subprocess
import pathlib
import ctypes
import os
import datetime
import gspread
import json
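# Flow: prompt for the product URL, flash-sale time, credentials, and payment
# method; log in with Selenium; poll the product page until the sale starts;
# add the item (and chosen variant) to the cart; then check out. Progress can
# optionally be mirrored to a Discord webhook.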
dateTimeObj = datetime.datetime.now()
timestampStr = dateTimeObj.strftime("%H:%M:%S")
def newtime():
global dateTimeObj
global timestampStr
dateTimeObj = datetime.datetime.now()
timestampStr = dateTimeObj.strftime("%H:%M:%S")
def main():
webhook = 0
response = 0
# STARTUP EVENT
# os.system('cls' if os.name == 'nt' else 'clear')
print("Flashsale Sniper [Platform : Shopee Edition Flash Sale]")
    producturl = input("Enter the product URL: ")
    flash_sale = input("Enter the flash-sale time *format (hour:minute): ")
    username = input("Enter your Shopee username: ")
    password = input("Enter your Shopee password: ")
os.system('cls' if os.name == 'nt' else 'clear')
print("Flashsale Sniper [Platform : Shopee Edition Flash Sale]")
print("");
print("Mohon masukkan metode pembayaran yang ingin dipakai")
print("[1] : ShopeePay (Pastikan Saldo Cukup)")
print("[2] : Bank BCA (Cek Otomatis)")
print("[3] : Bank Mandiri (Cek Otomatis)")
print("[4] : Bank BNI (Cek Otomatis)")
print("[5] : Bank BRI (Cek Otomatis)")
print("[6] : Bank Syariah Mandiri (Cek Otomatis)")
print("[7] : Bank Permata (Dicek Otomatis)")
pembayaran = int(input("Pilihan metode pembayaran (1-6) > "))
pakelog = input("Apakah anda ingin memantau aktivitas Sniping via Discord? (y/n) > ")
if pakelog == "y":
print("Masukkan URL Webhook Discord untuk Aktivitas Pemantauan")
logs = input("Webhook URL > ")
# PREPARATION EVENT
print("Pemantauan sniping telah diaktifkan.")
sleep(2)
else:
logs = "fuck off lol"
print("Pemantauan sniping telah dinonaktifkan.")
sleep(2)
os.system('cls' if os.name == 'nt' else 'clear')
print("Flashsale Sniper [Platform : Shopee Edition]")
print("")
print("The date and time now is",strftime("%Y-%m-%d %H:%M:%S", gmtime()))
print("Please ensure you're using the best quality proxy possible!.")
print("")
print("Awaiting for the time to manual prevent bot detection...");
# MAIN EVENTS
# LOGGING IN TO THE ACCOUNT
option = webdriver.ChromeOptions()
    option.add_experimental_option("excludeSwitches", ['enable-automation'])
option.add_argument('--disable-notifications')
# option.add_argument("--headless")
PATH = 'data/chromedriver.exe'
# browser = webdriver.Chrome(PATH, chrome_options=chrome_options)
browser = webdriver.Chrome(options=option)
browser.get("https://shopee.co.id/buyer/login")
try:
element = WebDriverWait(browser, 10).until(
EC.presence_of_element_located((By.XPATH, "/html/body/div[1]/div/div[2]/div/div/form/div/div[2]/button"))
)
finally:
newtime()
print("[",timestampStr,"]""[INFO :] SNIPING START!")
dateTimeObj = datetime.datetime.now()
print("[",timestampStr,"]""[INFO :] LOGGING INTO ACCOUNT")
user = browser.find_element_by_name('loginKey')
user.send_keys(username)
passwd = browser.find_element_by_name('password')
passwd.send_keys(password)
passwd.send_keys(Keys.RETURN)
sleep(5)
# START SNIPING
newtime()
print("[",timestampStr,"]""[INFO :] LOGIN ACTIVITY DONE, NOW SNIPING...")
webhook = DiscordWebhook(url=logs, content='[INFO :] LOGIN ACTIVITY DONE, NOW SNIPING...')
if pakelog == "y":
response = webhook.execute()
newtime()
print("[",timestampStr,"]""[INFO :] REDIRECTING INTO THE SPECIFIED PRODUCT URL...")
webhook = DiscordWebhook(url=logs, content='[INFO :] REDIRECTING INTO THE SPECIFIED PRODUCT URL...')
if pakelog == "y":
response = webhook.execute()
browser.get(producturl)
newtime()
print("[",timestampStr,"]""[INFO :] CHECKING IF PRODUCT VARIANT EXISTS...")
webhook = DiscordWebhook(url=logs, content='[INFO :] CHECKING IF PRODUCT VARIANT EXISTS...')
hasvariant = 1
if pakelog == "y":
response = webhook.execute()
try:
element = WebDriverWait(browser, 1).until(
EC.presence_of_element_located((By.XPATH, "//*[@class='product-variation']"))
)
except TimeoutException:
newtime()
print("[",timestampStr,"]""[INFO :] NO PRODUCT VARIANT FOUND, CONTINUING SNIPING PROCESS...")
webhook = DiscordWebhook(url=logs, content='[INFO :] NO PRODUCT VARIANT FOUND, CONTINUING SNIPING PROCESS...')
hasvariant = 0
if pakelog == "y":
response = webhook.execute()
if hasvariant == 1:
productvariant = browser.find_elements_by_xpath("//*[@class='product-variation']")
listVarian = [data.text for data in productvariant]
for (varian, i) in zip(listVarian, range(0, len(listVarian)+1)):
print('['+str(i)+']. '+varian)
newtime()
print("[",timestampStr,"]""[INFO :] VARIANT PRODUK DITEMUKAN, SILAHKAN KETIK NOMOR LIST VARIANT")
webhook = DiscordWebhook(url=logs, content='[INFO :] VARIANT PRODUK DITEMUKAN, SILAHKAN KETIK NOMOR LIST VARIANT DI CONSOLE')
if pakelog == "y":
response = webhook.execute()
inputvariant = int(input("Masukkan nomor variant product >"))
while True:
try:
element = WebDriverWait(browser, 1).until(EC.presence_of_element_located((By.XPATH, "//*[text()='beli sekarang']")))
belisekarang = browser.find_element_by_xpath("//*[text()='beli sekarang']")
newtime()
print("[",timestampStr,"]""[INFO :] ORDER BUTTON FOUND, ATTEMPTING TO SUBMIT...")
webhook = DiscordWebhook(url=logs, content='[INFO :] ORDER BUTTON FOUND, ATTEMPTING TO SUBMIT...')
if pakelog == "y":
response = webhook.execute()
break
except NoSuchElementException:
newtime()
print("[",timestampStr,"]""[INFO :] ORDER BUTTON NOT FOUND, REFRESHING THE PAGE...")
webhook = DiscordWebhook(url=logs, content='[INFO :] ORDER BUTTON NOT FOUND, REFRESHING THE PAGE...')
if pakelog == "y":
response = webhook.execute()
browser.refresh()
continue
except TimeoutException:
newtime()
print("[",timestampStr,"]""[INFO :] ORDER BUTTON NOT FOUND, REFRESHING THE PAGE...")
webhook = DiscordWebhook(url=logs, content='[INFO :] ORDER BUTTON NOT FOUND, REFRESHING THE PAGE...')
if pakelog == "y":
response = webhook.execute()
browser.refresh()
continue
print(belisekarang.is_enabled())
btnclass = belisekarang.get_attribute("class")
print(btnclass)
while True:
if 'disabled' in btnclass:
newtime()
webhook = DiscordWebhook(url=logs, content='[INFO :] ORDER BUTTON DISABLED, REFRESHING THE PAGE...')
if pakelog == "y":
response = webhook.execute()
print("[",timestampStr,"]""[INFO :] ORDER BUTTON DISABLED!, REFRESHING THE PAGE...")
browser.refresh()
element = WebDriverWait(browser, 10).until(EC.presence_of_element_located((By.XPATH, "/html/body/div[1]/div/div[2]/div[2]/div[2]/div[2]/div[3]/div/div[5]/div/div/button[2]")))
belisekarang = browser.find_element_by_xpath("/html/body/div[1]/div/div[2]/div[2]/div[2]/div[2]/div[3]/div/div[5]/div/div/button[2]")
btnclass = belisekarang.get_attribute("class")
else:
times_flashSale = flash_sale.split(":")
hI = int(times_flashSale[0])
mI = int(times_flashSale[1])
# WAIT STORE
while True:
x = datetime.datetime.now()
h = int(x.strftime("%H"))
m = int(x.strftime("%M"))
s = int(x.strftime("%S"))
rate_hourse = hI - h
rate_minuite = mI - m
rate_second = 0 - s
hourse_second = rate_hourse*3600
minute_second = rate_minuite*60
limit = hourse_second + minute_second + rate_second
refresh = limit % 2
# sleep(0.1)
os.system('cls')
print("[",timestampStr,"]""[INFO :] "+str(limit)+ " second")
print()
if limit <= 0:
print("finished!!")
break
elif refresh == 0:
browser.refresh()
sleep(1)
if hasvariant == 1:
try:
browser.implicitly_wait(10)
productvariant = browser.find_elements_by_xpath("//*[@class='product-variation']")
productvariant[inputvariant].click()
browser.implicitly_wait(10)
belisekarang.click()
except:
while True:
                        print('[ ERROR ] Variant not available')
                        browser.implicitly_wait(10)
                        productvariant = browser.find_elements_by_xpath("//*[@class='product-variation']")
                        inputvariant = int(input("Enter the product variant number > "))
try:
productvariant[inputvariant].click()
break
except:
pass
finally:
browser.implicitly_wait(10)
belisekarang.click()
newtime()
webhook = DiscordWebhook(url=logs, content='[INFO :] ORDER BUTTON ENABLED, ATTEMPTING TO PUT ITEM IN CART...')
if pakelog == "y":
response = webhook.execute()
print("[",timestampStr,"]""[INFO :] ORDER BUTTON ENABLED!, ATTEMPTING TO PUT ITEM IN CART...")
break
checkout(browser, pembayaran, logs, pakelog, flash_sale)
def checkout(browser, pembayaran, logs, pakelog, flash_sale):
newtime()
print("[",timestampStr,"]""[INFO :] SUCCESSFULLY PUT ITEM INTO CART!")
webhook = DiscordWebhook(url=logs, content='[INFO :] SUCCESSFULLY PUT ITEM INTO CART!')
if pakelog == "y":
response = webhook.execute()
try:
element = WebDriverWait(browser, 10).until(
EC.presence_of_element_located((By.XPATH, "//*[text()='checkout']"))
)
finally:
checkout = browser.find_element_by_xpath("//*[text()='checkout']")
# START CHECKOUT PR
newtime()
print("[",timestampStr,"]""[INFO :] ATTEMPTING TO CHECKOUT ITEM.")
browser.execute_script("arguments[0].click();", checkout)
# checkout.click()
webhook = DiscordWebhook(url=logs, content='[INFO :] ATTEMPTING TO CHECKOUT ITEM.')
if pakelog == "y":
response = webhook.execute()
browser.implicitly_wait(10)
ubahOngkir = browser.find_element_by_xpath('//*[@class="_26DEZ8"]')
browser.execute_script("arguments[0].click();", ubahOngkir)
sleep(2)
browser.execute_script("arguments[0].click();", browser.find_element_by_xpath('//*[text()="Pengiriman setiap saat"]'))
sleep(2)
browser.execute_script("arguments[0].click();", browser.find_element_by_xpath('//*[@class="stardust-button stardust-button--primary -T3OGq"]'))
bankmethod = ""
try:
browser.implicitly_wait(10)
bankmethod = browser.find_element_by_xpath("//*[text()='Transfer Bank']")
except:
print("not found!!")
# ShopeePay
if pembayaran == 1:
try:
element = WebDriverWait(browser, 10).until(
EC.presence_of_element_located((By.XPATH, "//*[text()='ShopeePay']"))
)
shopeepay = browser.find_element_by_xpath("//*[text()='ShopeePay']")
browser.execute_script("arguments[0].click();", shopeepay)
except:
            print('Your balance is insufficient!!\n')
browser.implicitly_wait(20)
browser.find_element_by_xpath('//*[@id="pay-button"]').click()
# //*[@class="digit-holder"]
# Bank BCA (Cek Otomatis)
elif pembayaran == 2:
browser.execute_script("arguments[0].click();", bankmethod)
sleep(2)
newtime()
print("[",timestampStr,"]""[INFO :] SELECTING BANK AS PAYMENT METHOD.")
try:
element = WebDriverWait(browser, 10).until(
EC.presence_of_element_located((By.XPATH, "//*[text()='Bank BCA (Dicek Otomatis)']"))
)
finally:
bankbca = browser.find_element_by_xpath("//*[text()='Bank BCA (Dicek Otomatis)']")
newtime()
print("[",timestampStr,"]""[INFO :] SELECTING BANK BCA AS THE BANK.")
webhook = DiscordWebhook(url=logs, content='[INFO :] SELECTING BANK BCA AS THE BANK.')
if pakelog == "y":
response = webhook.execute()
bankbca.click()
# Bank Mandiri (Cek Otomatis)
elif pembayaran == 3:
browser.execute_script("arguments[0].click();", bankmethod)
sleep(2)
newtime()
print("[",timestampStr,"]""[INFO :] SELECTING BANK AS PAYMENT METHOD.")
try:
element = WebDriverWait(browser, 10).until(
EC.presence_of_element_located((By.XPATH, "//*[text='Bank Mandiri & Bank Lainnya (Dicek Otomatis)']"))
)
finally:
            mandiri1 = browser.find_element_by_xpath("//*[text()='Bank Mandiri & Bank Lainnya (Dicek Otomatis)']")
newtime()
print("[",timestampStr,"]""[INFO :] SELECTING BANK MANDIRI AS THE BANK.")
webhook = DiscordWebhook(url=logs, content='[INFO :] SELECTING BANK MANDIRI AS THE BANK.')
if pakelog == "y":
response = webhook.execute()
mandiri1.click()
# Bank BNI (Cek Otomatis)
elif pembayaran == 4:
browser.execute_script("arguments[0].click();", bankmethod)
sleep(2)
newtime()
print("[",timestampStr,"]""[INFO :] SELECTING BANK AS PAYMENT METHOD.")
try:
element = WebDriverWait(browser, 10).until(
EC.presence_of_element_located((By.XPATH, "//*[text='Bank BNI (Dicek Otomatis)']"))
)
finally:
            bankbni = browser.find_element_by_xpath("//*[text()='Bank BNI (Dicek Otomatis)']")
newtime()
print("[",timestampStr,"]""[INFO :] SELECTING BANK BNI AS THE BANK.")
webhook = DiscordWebhook(url=logs, content='[INFO :] SELECTING BANK BNI AS THE BANK.')
if pakelog == "y":
response = webhook.execute()
bankbni.click()
# Bank BRI (Cek Otomatis)
elif pembayaran == 5:
browser.execute_script("arguments[0].click();", bankmethod)
sleep(2)
newtime()
print("[",timestampStr,"]""[INFO :] SELECTING BANK AS PAYMENT METHOD.")
try:
element = WebDriverWait(browser, 10).until(
EC.presence_of_element_located((By.XPATH, "//*[text='Bank BRI (Dicek Otomatis)']"))
)
finally:
            bankbri = browser.find_element_by_xpath("//*[text()='Bank BRI (Dicek Otomatis)']")
newtime()
print("[",timestampStr,"]""[INFO :] SELECTING BANK BRI AS THE BANK.")
webhook = DiscordWebhook(url=logs, content='[INFO :] SELECTING BANK BRI AS THE BANK.')
if pakelog == "y":
response = webhook.execute()
bankbri.click()
# Bank Syariah Mandiri (Cek Otomatis)
elif pembayaran == 6:
browser.execute_script("arguments[0].click();", bankmethod)
sleep(2)
newtime()
print("[",timestampStr,"]""[INFO :] SELECTING BANK AS PAYMENT METHOD.")
try:
element = WebDriverWait(browser, 10).until(
EC.presence_of_element_located((By.XPATH, "//*[text='Bank Syariah Indonesia (BSI) (Dicek Otomatis)']"))
)
finally:
mandirisyariah = browser.find_element_by_xpath("//*[text='Bank Syariah Indonesia (BSI) (Dicek Otomatis)']")
newtime()
print("[",timestampStr,"]""[INFO :] SELECTING MANDIRI SYARI'AH AS THE BANK.")
webhook = DiscordWebhook(url=logs, content='[INFO :] SELECTING MANDIRI SYARIAH AS THE BANK.')
if pakelog == "y":
response = webhook.execute()
mandirisyariah.click()
# Bank Permata (Dicek Otomatis)
elif pembayaran == 7:
browser.execute_script("arguments[0].click();", bankmethod)
sleep(2)
newtime()
print("[",timestampStr,"]""[INFO :] SELECTING BANK AS PAYMENT METHOD.")
try:
element = WebDriverWait(browser, 10).until(
EC.presence_of_element_located((By.XPATH, "//*[text='Bank Permata (Dicek Otomatis)']"))
)
finally:
mandirisyariah = browser.find_element_by_xpath("//*[text='Bank Permata (Dicek Otomatis)']")
newtime()
print("[",timestampStr,"]""[INFO :] SELECTING MANDIRI SYARI'AH AS THE BANK.")
webhook = DiscordWebhook(url=logs, content='[INFO :] SELECTING MANDIRI SYARIAH AS THE BANK.')
if pakelog == "y":
response = webhook.execute()
mandirisyariah.click()
else:
print("Invalid payment method specified!, please try again later.")
print("Sniping failed!")
# BUAT ORDER
try:
element = WebDriverWait(browser, 10).until(
EC.presence_of_element_located((By.CLASS_NAME, "stardust-button"))
)
finally:
newtime()
print("[",timestampStr,"]""[INFO :] CREATING YOUR ORDER.")
webhook = DiscordWebhook(url=logs, content='[INFO :] CREATING YOUR ORDER.')
if pakelog == "y":
response = webhook.execute()
makeorder = browser.find_element_by_class_name("stardust-button")
browser.execute_script("arguments[0].click();", makeorder)
# makeorder.click()
# END EVENT
newtime()
print("[",timestampStr,"]""[INFO :] SUCCESSFULLY ATTEMPTED TO SNIPE PRODUCT!")
webhook = DiscordWebhook(url=logs, content='[INFO :] SUCCESSFULLY ATTEMPTED TO SNIPE PRODUCT!')
if pakelog == "y":
response = webhook.execute()
newtime()
print("[",timestampStr,"]""[INFO :] PLEASE CHECK YOUR ORDER LIST ON UNPAID!.")
webhook = DiscordWebhook(url=logs, content='[INFO :] PLEASE CHECK YOUR ORDER LIST ON UNPAID!')
if pakelog == "y":
response = webhook.execute()
print("Your request product has been sniped at",strftime("%Y-%m-%d %H:%M:%S", gmtime()))
sleep(2)
webhook = DiscordWebhook(url=logs, content='Your request product has been successfully sniped!')
if pakelog == "y":
response = webhook.execute()
print("We will close our script by 10 seconds.")
webhook = DiscordWebhook(url=logs, content='The console will close at few seconds.')
if pakelog == "y":
response = webhook.execute()
sleep(10)
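# A possible table-driven refactor (sketch only, not wired into the flow above):
# the repeated bank branches could share one helper. The label strings come from
# the XPaths above; the helper name select_bank is hypothetical.
BANK_LABELS = {
    2: 'Bank BCA (Dicek Otomatis)',
    3: 'Bank Mandiri & Bank Lainnya (Dicek Otomatis)',
    4: 'Bank BNI (Dicek Otomatis)',
    5: 'Bank BRI (Dicek Otomatis)',
    6: 'Bank Syariah Indonesia (BSI) (Dicek Otomatis)',
    7: 'Bank Permata (Dicek Otomatis)',
}

def select_bank(browser, pembayaran):
    label = BANK_LABELS[pembayaran]
    xpath = "//*[text()='{}']".format(label)
    WebDriverWait(browser, 10).until(
        EC.presence_of_element_located((By.XPATH, xpath)))
    browser.find_element_by_xpath(xpath).click()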
if __name__ == '__main__':
#try:
scope = ["https://spreadsheets.google.com/feeds",'https://www.googleapis.com/auth/spreadsheets',"https://www.googleapis.com/auth/drive.file","https://www.googleapis.com/auth/drive"]
creds = ServiceAccountCredentials.from_json_keyfile_name("data/config-cd7413190612.json", scope)
client = gspread.authorize(creds)
sheet = client.open("config").sheet1
datas = sheet.get_all_records()
user = open(r"data/config.json", "r")
dataJsons = json.load(user)
emailConfig = dataJsons['lisensi']['email']
passwordConfig = dataJsons['lisensi']['pwd']
userAgentConfig = dataJsons['lisensi']['user-agent']
for data in datas:
email = data['Email']
password = data['Password']
userAgent = data['User Agent']
status = data['Status']
if email == emailConfig and password == passwordConfig and userAgent == userAgentConfig and status == 'Active':
print('Login config success!!')
main()
elif email == emailConfig and password == passwordConfig and userAgent == userAgentConfig and status != 'Active':
print('Login Failed!! Your Config non-active')
sleep(3)
#except:
#print('Login Failed!! error connection!!')
#sleep(3) |
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.mime.base import MIMEBase
from email import encoders
import smtplib
import os
import ssl
import sys
username = os.getenv('OUTLOOKUSER') if os.getenv('OUTLOOKUSER') else sys.exit('Missing outlook user variable')
password = os.getenv('OUTLOOKPASS') if os.getenv('OUTLOOKPASS') else sys.exit('Missing outlook password variable')
sender_email = input("Sender Email: ")
receiver_email = input("Receiver Email: ")
msg = MIMEMultipart("alternative")
msg['From'] = sender_email
msg['To'] = receiver_email
msg['Subject'] = "Delivery Slot Available"
text = "INSERT DATE AND TIME HERE"
html = """\
<html>
<body>
<p> <strong> {} </strong> </p>
</body>
</html>
""".format("INSERT DATE AND TIME HERE")
part1 = MIMEText(text, "plain")
part2 = MIMEText(html, "html")
msg.attach(part1)
msg.attach(part2)
context = ssl.create_default_context()
mailServer = smtplib.SMTP('smtp-mail.outlook.com', 587)
mailServer.ehlo()
mailServer.starttls(context=context)
mailServer.ehlo()
mailServer.login(username, password)
mailServer.sendmail(sender_email, receiver_email, msg.as_string())
mailServer.quit() |
"""
Written by sourabh agrawal
In this program i am implementing double linked list using python3 and concepts of classes
user can perform
->insertion at beginning
->insertion at end
->insertion at any given position
->deletion from beginning
->deletion from end
->deletion after any given no
->printing the linked list
"""
#!/usr/bin/env python3
class linkedlist:
head = None
def __init__(self, val):
self.value = val
self.next = None
self.prev = None
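        # Note: this class doubles as both the node and the list handle; the
        # instance created in main() (with value 0) is only the handle whose
        # head attribute points at the real first node.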
def insertionatfront(self, no): # front insertion
node = linkedlist(no)
if self.head is None:
self.head = node
else:
temp = self.head
node.next = temp
temp.prev = node
self.head = node
del temp
def insertionback(self, no): # insertion at the end
if self.head is None:
self.insertionatfront(no)
else:
node = linkedlist(no)
temp = self.head
while temp.next is not None:
temp = temp.next
temp.next = node
node.prev = temp
del temp
def insertion(self, no):  # insertion after any given value
    if self.head is None:
        self.insertionatfront(no)
    else:
        pos = int(input("Enter the no after which you want to insert this number"))
        node = linkedlist(no)
        temp = self.head
        while temp is not None and temp.value != pos:
            temp = temp.next
        if temp is None:
            print("Node %d not found" % pos)
            return
        node.next = temp.next
        if temp.next is not None:
            temp.next.prev = node
        node.prev = temp
        temp.next = node
        del temp
def deletionfront(self):  # deletion from beginning
    if self.head is None:
        print("list is already empty")
    else:
        temp = self.head
        self.head = temp.next
        if self.head is not None:
            self.head.prev = None
        print("Node %d is deleted" % temp.value)
        del temp
def deletionend(self):  # deletion from the end
    if self.head is None:
        print("list is already empty")
    else:
        temp = self.head
        while temp.next is not None:
            temp = temp.next
        if temp.prev is None:
            self.head = None
        else:
            temp.prev.next = None
        print("Node %d is deleted" % temp.value)
        del temp
def deletion(self):  # deletion of any given value
    if self.head is None:
        print("list is already empty")
    else:
        no = int(input("Enter the no which you want to delete"))
        temp = self.head
        while temp is not None and temp.value != no:
            temp = temp.next
        if temp is None:
            print("Node %d not found" % no)
            return
        if temp.prev is not None:
            temp.prev.next = temp.next
        else:
            self.head = temp.next
        if temp.next is not None:
            temp.next.prev = temp.prev
        print("Node %d is deleted" % temp.value)
        del temp
def view(self): # printing the linked list
temp = self.head
print()
while temp is not None:
print(temp.value, end=" ")
temp = temp.next
print("")
del temp
def main():
print("Choose from the menu")
n = 1
l = linkedlist(0)
while n:
print("\nPress 1 for inserting at the beginning")
print("Press 2 for inserting at the last")
print("Press 3 for inserting at the position")
print("Press 4 for delete from the beginning")
print("Press 5 for delete from the end")
print("Press 6 for delete from a position")
print("Press 7 to view the linked list")
print("press 0 for exit")
print("press any other no to perform the operations again")
choice = int(input("\nEnter your choice now\t"))
if choice == 1:
    no = int(input("Enter the no to insert at the beginning"))
    l.insertionatfront(no)
elif choice == 2:
    no = int(input("Enter the no to insert at the end"))
    l.insertionback(no)
elif choice == 3:
    no = int(input("Enter the no to insert"))
    l.insertion(no)
elif choice == 4:
    l.deletionfront()
elif choice == 5:
    l.deletionend()
elif choice == 6:
    l.deletion()
elif choice == 7:
    l.view()
elif choice == 0:
break
if __name__ == '__main__':
main()
|
"""
Lissajous curve sketcher (using Matplotlib.pyplot).
This script plots a Lissajous figure and provides a graphical user
interface to allow the user to vary the parameters in the Lissajous
parametric equations.
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider
# Create the figure and a set of axes for the plot
fig, ax = plt.subplots()
plt.subplots_adjust(left=0.25, bottom=0.25)
# Initialise the array for values of t
t = np.linspace(0, 2 * np.pi, 1000)
# Define the functions x(t) and y(t)
def x(omegaX, t):
"""Calculate the Lissajous x coordinate."""
return np.sin(omegaX * t)
def y(omegaY, phi, t):
"""Calculate the Lissajous y coordinate."""
return np.sin(omegaY * t + phi)
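# Note: the figure is a closed curve when omegaX/omegaY is rational; e.g.
# omegaX=3, omegaY=2 with a phase shift of pi/2 gives the classic 3:2 figure.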
# Initial plot
graph, = plt.plot(x(1, t), y(1, 0, t))
# Lay out the plot and the sliders used to modify the plot
axOmegaX = plt.axes([0.25, 0.15, 0.65, 0.03])
axOmegaY = plt.axes([0.25, 0.1, 0.65, 0.03])
axDelta = plt.axes([0.25, 0.05, 0.65, 0.03])
axNum = plt.axes([0.25, 0, 0.65, 0.03])
# Create the sliders
sOmegaX = Slider(axOmegaX, 'OmegaX', 1, 30.0,
valinit=1, valstep=0.1)
sOmegaY = Slider(axOmegaY, 'OmegaY', 1, 30.0,
valinit=1, valstep=0.1)
sDelta = Slider(axDelta, 'Phase shift', 0, 2 * np.pi,
valinit=0, valstep=np.pi / 12)
sNum = Slider(axNum, 'Number of cycles', 0.5, 10,
valinit=1, valstep=0.5)
# Define the update functions
# which are called every time the user uses one of the sliders
def update(val):
"""Update the values to be plotted and replot figure."""
# Get the values from the sliders
omegaX = sOmegaX.val
omegaY = sOmegaY.val
phi = sDelta.val
# Update the data used for plotting
graph.set_data(x(omegaX, t), y(omegaY, phi, t))
# Re-plot the data
fig.canvas.draw_idle()
def updateT(val):
"""Update the values to be plotted and replot figure."""
# Make sure we update the t we defined earlier
global t
# Get the values from the sliders
omegaX = sOmegaX.val
omegaY = sOmegaY.val
phi = sDelta.val
num = sNum.val
# Update t
t = np.linspace(0, 2 * num * np.pi, int(1000 * num))
# Update the data used for plotting
graph.set_data(x(omegaX, t), y(omegaY, phi, t))
# Re-plot the data
fig.canvas.draw_idle()
# Associate the update functions with the sliders
sOmegaX.on_changed(update)
sOmegaY.on_changed(update)
sDelta.on_changed(update)
sNum.on_changed(updateT)
# Display everything
plt.show()
|
import networkx as nx
import numpy as np
import pandas as pd
import scipy
from scipy import sparse
import pickle
import sklearn as sk
from sklearn.cluster import KMeans
from sklearn.manifold import TSNE
import sys
import os
import graphwave as gw
from characteristic_functions import *
FB15K237_dir = '/home/haoyu/downloads/FB15K-237/processed'
NELL995_dir = '/home/haoyu/downloads/NELL-995/processed'
WN18RR_dir = '/home/haoyu/downloads/WN18-RR/processed'
def read_embedding(file_dir):
return np.load(file_dir)
def read_entity2id_inverse(file_dir):
id2entity = {}
with open(file_dir, 'rb') as f:
entity2id = pickle.load(f)
for k, v in entity2id.items():
id2entity[v] = k
return id2entity
def normalize(chi):
return chi / np.linalg.norm(chi, axis=1, keepdims=True)
def get_k_neighbour(chi, node, k, id2entity):
    # cosine similarity after L2-normalising the embeddings;
    # argpartition returns the top-k indices in no particular order
    chi = normalize(chi)
    sim = np.reshape(chi[node].dot(chi.T), -1)
    sim_nodes = np.argpartition(sim, -k)[-k:]
    return [id2entity[n] for n in sim_nodes]
def visualize(chi_file, entity2id_file, node):
chi = read_embedding(chi_file)
id2entity = read_entity2id_inverse(entity2id_file)
print(id2entity[node])
print(get_k_neighbour(chi, node, 10, id2entity))
visualize(os.path.join(FB15K237_dir, 'train.npz.chi.npy'), os.path.join(FB15K237_dir, 'entity2id.pkl'), 0) |
import sys
import os
from datetime import datetime
tags = ['add one url','remove one']
appLogs = []
epoch = datetime.utcfromtimestamp(0)
class DownloadAction:
url = ""
action_add = epoch
action_remove = epoch
def parse(self, l):
if tags[0] in l:
return self.addOne(l)
elif tags[1] in l:
return self.remove(l)
else :
return False
def addOne(self, l):
tag = tags[0]
if len(self.url) > 0:
return False
self.url = self.stringBetween(l, tag, ']')
self.action_add = timeFromLine(l)
return True
def remove(self, l):
tag = tags[1]
b = 'url.'
e = ' opt:'
url = self.stringBetween(l, b, e)
if len(self.url) > 0:
if self.url in url:
if self.action_remove == epoch:
self.action_remove = timeFromLine(l)
return True
return False
def stringBetween(self, l, b, e):
i = l.find(b)
if i < 0:
return None
i += len(b)
j = len(l)
if e != None:
j = l.find(e, i)
if j < 0:
return None
return l[i:j].strip('[').strip(']').strip()
class UrlRunTime:
start = epoch
complete = epoch
traceId = ""
urltext = ""
succeed = False
class AppRun:
appRunDate = epoch
actionList = []
def timeDiff(dt1, dt2):
return (dt1 - dt2).total_seconds()
def urlFromLine(l):
for x in tags:
if x in l:
return x
return ''
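# timeFromLine parses a "YYYY-MM-DD HH:MM:SS.fff" prefix; the fractional part
# is left-padded with zeros so a logged ".5" is read by %f as 5 ms, not 500 ms.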
def timeFromLine(lw):
ls = lw.split()
if len(ls) <= 2:
return epoch
ts = ls[0] + ' ' + ls[1]
a = ts.split('.')
if len(a) > 1:
if len(a[1]) == 1:
ts = a[0]+'.00'+a[1]
elif len(a[1]) == 2:
ts = a[0]+'.0'+a[1]
if len(ts) > 24:
return datetime.utcfromtimestamp(100000)
t = datetime.strptime(ts, '%Y-%m-%d %H:%M:%S.%f')
return t
def findUrl(id, list):
for x in list:
if x.traceId == id:
return x
return None
def actionDataFromLine(appRun, l):
for action in appRun.actionList:
if action.parse(l):
return
action = DownloadAction()
if action.parse(l):
appRun.actionList.append(action)
else :
print("lost:" + l)
return
def appRunTimeFromLine(l):
if 'app started' in l:
return datetime.utcfromtimestamp(1)
return epoch
def loadLog(path):
print("loading " + path)
file = open(path, 'r')
lines = file.readlines()
appRun = AppRun()
for l in lines:
t = appRunTimeFromLine(l)
if t != epoch:
run = AppRun()
run.appRunDate = t
appLogs.append(run)
run.actionList = []
appRun = run
elif appRun.appRunDate != epoch:
for x in tags:
if x in l:
actionDataFromLine(appRun, l)
break
print("app run times:" + str(len(appLogs)))
resultPath = path + "_image.csv"
f = open(resultPath, "w")
f.write("apprun, Url,add,remove,Duration\n")
index = 0
for log in appLogs:
index = index + 1
print("download times:" + str(len(log.actionList)))
for r in log.actionList:
f.write(str(index)+"," + r.url + "," + str(r.action_add) + "," + str(r.action_remove) +"," + str(timeDiff(r.action_remove, r.action_add)) + os.linesep )
f.close()
print("output: " + resultPath)
if __name__ == "__main__":
if len(sys.argv) == 2:
loadLog(os.path.abspath(sys.argv[1]))
else :
print("python analysis.py logfile_path") |
#lists
#all the operation of list add remove etc
N = int(input())
ls=[]
def insert1(pos, num):
ls.insert(pos, num)
def remove1(num):
ls.remove(num)
def append1(num):
ls.append(num)
def sort1():
ls.sort()
def pop1():
ls.pop()
def reverse1():
ls.reverse()
def print1():
print(ls)
while N>0:
x=input()
x=x.split()
if x[0]=="insert":
insert1(int(x[1]), int(x[2]))
if x[0]=="print":
print1()
if x[0]=="remove":
remove1(int(x[1]))
if x[0]=="sort":
sort1()
if x[0]=="pop":
pop1()
if x[0]=="reverse":
reverse1()
if x[0]=="append":
append1(int(x[1]))
N=N-1 |
#!/usr/bin/env python
#Hisar FRC
#copyright: Terobero
#Hisar School
import sys
import time
import pygame, serial
import RPi.GPIO as GPIO
from pygame.locals import *
import random
pygame.init()
sys.path.insert(0,"/home/pi/Desktop/HisArcade/pins")
import gamePins
gamePins.gameSetup()
scoreboard = gamePins.getScores("FRC")
screen=pygame.display.set_mode((1024,718))#,pygame.FULLSCREEN)
pygame.display.set_caption("Hisar FRC!")
white = [255,255,255]
#Creating 4 boxes and Background.
back = pygame.Surface((1024,718))
pygame.font.init()
fontSmall = pygame.font.Font("Fonts/ARCADECLASSIC.TTF",30)
background = back.convert()
background.fill((0,0,0))
red, yellow, green, blue=(235,53,47),(235,230,45),(0,185,10),(73,170,235)
directUp = fontSmall.render("up", True,white)
directDown = fontSmall.render("down", True,white)
directLeft = fontSmall.render("left", True,white)
directRight = fontSmall.render("right", True,white)
directExit = fontSmall.render("exit", True,white)
directPress = fontSmall.render("press", True, white)
FPS = 5
#images
robot = pygame.image.load("FRC/robot.png")
cube = pygame.image.load("FRC/cube.png")
small = pygame.image.load("FRC/small.png")
big = pygame.image.load("FRC/big.png")
#clock and font objects
clock = pygame.time.Clock()
all_fonts = pygame.font.get_fonts()
font = pygame.font.Font("Fonts/ARCADECLASSIC.TTF",40)
#fonts and texts
font1 = pygame.font.Font("Fonts/ARCADECLASSIC.TTF",60)
font2 = pygame.font.Font("Fonts/ARCADECLASSIC.TTF",30)
font3 = pygame.font.Font("Fonts/ka1.ttf", 60)
text1 = font3.render("Citadel", True,white)
text3 = font2.render("by TEROBERO", True,white)
text4 = font.render("GAME OVER", True,white)
text5 = font.render("YOU WIN", True,white)
def bg(): #draws the background
screen.blit(background,(0,0))
text2 = font.render("Score " + str(score), True,white)
screen.blit(background,(0,0))
pygame.draw.rect(screen,white,Rect((150,150),(640,480)),2) #150 - 150 to 790 - 630
screen.blit(text1,(300,30))
screen.blit(text2,(825,150))
screen.blit(text3,(500,680))
screen.blit(directDown,(900,340))
screen.blit(directUp,(900,420))
screen.blit(directRight,(900,500))
screen.blit(directLeft,(900,580))
pygame.draw.polygon(screen,(225,240,229),[[825,335],[865,335],[845,370]],0)
pygame.draw.polygon(screen,(225,240,229),[[825,450],[865,450],[845,415]],0)
pygame.draw.polygon(screen,(225,240,229),[[825,495],[825,540],[865,518]],0)
pygame.draw.polygon(screen,(225,240,229),[[865,575],[865,620],[825,598]],0)
pygame.draw.circle(screen, (red), (845,680),20,0)
screen.blit(directExit,(890,660))
def gameOver():
text2 = font.render("Score " + str(score), True, white)
screen.blit(background,(0,0))
screen.blit(text1,(280.,100.))
pygame.draw.circle(screen, (red), (470,450),20,0)
screen.blit(directExit,(520,445))
screen.blit(text2,(450.,350.))
screen.blit(text3,(440.,670.))
score = 0
level = 1 # 1 = maze, 2 = take & up/down, 3 = throw, 4 = climb
grid = [[0, 0, 0, 0, 0, 0, 3, 0, 0, 0], #1 = robot, 2 = switch/scale, 3 = cube
[0, 0, 2, 0, 2, 2, 0, 2, 0, 0],
[0, 3, 2, 0, 2, 2, 0, 2, 0, 0],
[0, 0, 2, 0, 2, 2, 0, 2, 0, 0],
[0, 0, 2, 0, 2, 2, 0, 2, 0, 3],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0]]
x = 0 # 0-11
y = 5 # 0-7
_x = 150
_y = 630
grabbed = False
while True:
if not GPIO.input(gamePins.red):
    exec(open('launchGPIO.py').read())
bg()
pygame.mouse.set_visible(False)
if level == 1:
for event in pygame.event.get():
if event.type == KEYDOWN:
if event.key == K_q:
pygame.quit()
exit()
if not GPIO.input(gamePins.up):
if y - 1 >= 0:
if grid[y-1][x] != 2:
grid[y][x] = 0
if grid[y-1][x] == 3:
level = 2
score += 50
grid[y-1][x] = 1
y = y - 1
elif not GPIO.input(gamePins.down):
if y + 1 <= 5:
if grid[y+1][x] != 2:
grid[y][x] = 0
if grid[y+1][x] == 3:
level = 2
score += 50
grid[y+1][x] = 1
y = y + 1
elif not GPIO.input(gamePins.left):
if x - 1 >= 0:
if grid[y][x-1] != 2:
grid[y][x] = 0
if grid[y][x-1] == 3:
level = 2
score += 50
grid[y][x-1] = 1
x = x - 1
elif not GPIO.input(gamePins.right):
if x + 1 <= 9:
if grid[y][x+1] != 2:
grid[y][x] = 0
if grid[y][x+1] == 3:
level = 2
score += 50
grid[y][x+1] = 1
x = x + 1
sizey = 80
sizex = 64
screen.blit(robot, (150+sizex*x, 150+sizey*y)) #robot
screen.blit(small, (150+sizex*2, 150+sizey*1)) #switchs & scale
screen.blit(small, (150+sizex*7, 150+sizey*1))
screen.blit(big, (150+sizex*4, 150+sizey*1))
for _x in range(0,10):
for _y in range(0,6):
if grid[_y][_x] == 3:
screen.blit(cube, (150+sizex*_x,150+sizey*_y)) #cube
if level == 2:
for event in pygame.event.get():
if event.type == KEYDOWN:
if event.key == K_q:
exit()
#150 - 150 to 790 - 630
#robot 150 - 400 for x, 150 to 630 for y
pygame.draw.rect(screen, (255,0,0), Rect((150,150), (250,480))) #robot
pygame.draw.rect(screen, (255,255,255), Rect((_x, _y), (300, 50))) #kol
if grabbed:
pygame.draw.rect(screen, (255,255,0), Rect((640 - _x, 530 - _y), (100, 100))) #kup
else:
pygame.draw.rect(screen, (255,255,0), Rect((640, 530), (100, 100))) #kup
if not GPIO.input(gamePins.green) and not grabbed:
if _x >= 640 and _y >= 530:
grabbed = True
elif not GPIO.input(gamePins.up):
if _y >= 100:
_y -= 5
elif not GPIO.input(gamePins.down):
if _y <= 570:
_y += 5
elif not GPIO.input(gamePins.left):
if _x >= 160:
_x -= 5
elif not GPIO.input(gamePins.right):
if _x <= 480:
_x += 5
if grabbed and _y <= 200:
level = 3
score += 50
if level == 3:
'''
if gameEnd:
gameOver()
if not GPIO.input(gamePins.red):
clearGrid()
GameEnd=True
time.sleep(1)
execfile("launchGPIO.py")
'''
pygame.display.update()
clock.tick(FPS)
|
import threading
import time

doExit = 0

class newThread(threading.Thread):
    def __init__(self, threadID, name, counter):
        threading.Thread.__init__(self)
        self.threadID = threadID
        self.name = name
        self.counter = counter
    def run(self):
        print("Starting " + self.name)
        print_time(self.name, self.counter, 5)
        print("Exiting " + self.name)

def print_time(threadName, delay, counter):
    while counter:
        if doExit:
            return  # returning from run() ends the thread
        time.sleep(delay)
        print("%s: %s" % (threadName, time.ctime(time.time())))
        counter -= 1

# Create new threads
thread1 = newThread(1, "Thread01", 1)
thread2 = newThread(2, "Thread02", 2)
# Start new threads (start(), not run(), so each gets its own thread)
thread1.start()
thread2.start()
while thread2.is_alive():
    if not thread1.is_alive():
        doExit = 1
    pass
print("Exiting Main Thread")
|
import pytest
from programmers_42578 import solution
@pytest.mark.parametrize("clothes,expected",
[
[[["yellow_hat", "headgear"], ["blue_sunglasses", "eyewear"], ["green_turban", "headgear"]], 5],
[[["crow_mask", "face"], ["blue_sunglasses", "face"], ["smoky_makeup", "face"]], 3]
])
def test_solution_default_condition(clothes, expected):
assert solution(clothes) == expected |
#!/usr/bin/python3
from flask import Flask, request, render_template
from model import Model
import settings
app = Flask(__name__)
model = Model.from_pickles(settings.MODEL_FILE, settings.VECTORIZER_FILE)
@app.route('/', methods=['POST', 'GET'])
def index(text='', prediction_message=''):
if request.method == "POST":
text = request.form["text"]
prediction_message = model.get_santa_answer(text)
return render_template('index.html', text=text, prediction_message=prediction_message)
if __name__ == '__main__':
app.run(host='0.0.0.0', threaded=True, port=80)
|
import re
import pandas as pd
import boto3
import numpy as np
import psycopg2
import string
"""extract specified features from corpus of text documents"""
class FeatureExtraction(object):
def __init__(self, data):
"""
INPUT:
- data = Path to data file as JSON string
ATTRIBUTES:
- Data = pandas dataframe converted from json string with document column
- chart_note = a medical document as a string
- lookup_dx = retrieve problem list with ICD codes from chart_note
- lookup_visit_date = retrieve visit date of patient from chart_note
- lookup_age = retrieve age of patient from chart_note
- lookup_sex = retrieve gender of patient from chart_note
- lookup_race = retrieve race of patient from chart_note
"""
self.chart_note = None
self.data = data
self.features = pd.DataFrame()
def feature_dataframe(self, ID, DD, text):
"""returns dataframe with extracted features from corpus"""
self.features['id'] = self.data[ID]
self.features['doc_id'] = self.data[DD]
self.features['dt'] = self.data[text].apply(self.lookup_visit_date)
self.features['dx'] = self.data[text].apply(self.lookup_dx)
self.features['age'] = self.data[text].apply(self.lookup_age)
self.features['sex'] = self.data[text].apply(self.lookup_sex)
self.features['race'] = self.data[text].apply(self.lookup_race)
def clean_and_decode(self, read_column, write_column):
    """decodes the bytes in read_column and writes the strings to write_column"""
    for i in range(len(self.data)):
        self.data[write_column][i] = self.data[read_column][i].decode("utf-8")
def lookup_dx(self, chart_note):
"""returns list of diagnosis from a chart_note string"""
d = re.findall("Diagnosis:.(.*?)\n\n", chart_note, flags=re.S | re.I)
x = re.findall("medical history:.(.*?)\n\n", chart_note, flags=re.S | re.I)
if len(d) >= 1:
diags = [x.replace(',', '') for x in d]
diags = [x.replace('\n', ',').strip() for x in diags]
diags = [x.replace('\t', '') for x in diags]
diags = [x.replace(' ', '') for x in diags]
diags = diags[0].split(",")
return [x.lower().strip(' ') for x in diags]
elif len(x) >= 1:
d = re.findall("history:.(.*?)\n\n", chart_note, flags=re.S | re.I)
diags = [x.replace(',', '') for x in d]
diags = [x.replace('\n', ',').strip() for x in diags]
diags = [x.replace('\t', '') for x in diags]
diags = [x.replace(' ', '') for x in diags]
diags = diags[0].split(",")
return [x.lower().strip(' ') for x in diags]
else:
return None
def lookup_visit_date(self, chart_note):
'''returns visit date as timestamp from chart_note'''
dt = re.findall("date:.(.*?)\n", chart_note, flags=re.I)
if len(dt) >= 1:
return dt[0].strip()
def lookup_age(self, chart_note):
'''returns age as a string from a chart_note string'''
age = re.findall("age:.(.*?)\n", chart_note, flags=re.I)
if len(age) >= 1:
return age[0].strip()
def lookup_sex(self, chart_note):
'''returns sex as a string from a chart_note string'''
sex = re.findall("sex:.(.*?)\n", chart_note, flags=re.I)
if len(sex) >= 1:
return sex[0].strip()
def lookup_race(self, chart_note):
'''returns race as a string from a chart_note string'''
race = re.findall("race:.(.*?)\n", chart_note, flags=re.I)
if len(race) >= 1:
return race[0].strip()
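# A minimal synthetic chart note for the smoke test below; the column names
# ('id', 'doc_id', 'note') are illustrative assumptions, not the real schema.
data = pd.DataFrame({'id': [1], 'doc_id': [101], 'note': [
    "Date: 01/02/2015\nAge: 54\nSex: M\nRace: white\n"
    "Diagnosis:\nhypertension,\ndiabetes\n\n"]})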
if __name__ == '__main__':
    Extraction = FeatureExtraction(data)
    Extraction.feature_dataframe('id', 'doc_id', 'note')
    print(Extraction.features)
|
import threading as th
def hello(name):
while True:
print('Hello {}'.format(name))
def main():
th.Thread(target=hello, args=('Alice',)).start()
th.Thread(target=hello, args=('Bob',)).start()
main() |
#!/usr/bin/env python
# Red King Simulation Sonification
# Copyright (C) 2016 Foam Kernow
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import math
import numpy as np
import iso226
import copy
import techno
import strain
import time
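# pitch() below is the standard MIDI-to-frequency conversion: note 69 is
# A4 = 440 Hz, with 12 semitones per octave.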
def pitch(note):
return math.pow(2,(note-69)/12.0)*440
class blip:
def __init__(self,bands,bar_length):
self.level = [0 for i in range(0,bands)]
self.blips = []
self.events = []
self.bar_length = int(bar_length*44100)
self.pos = 0
def init(self):
self.blips = []
self.events = []
self.pos = 0
def update(self,level):
self.blips=strain.find_centres(level)
def render(self,out,mode):
if len(self.blips)>0:
step = self.bar_length/len(self.blips)
for i,b in enumerate(self.blips):
# midi note to frequency
p = pitch(b[0]+69)*4
# make an event for this note
self.events.append({'pos':i*step,
'freq':p,
'tec':techno.techno(0.3+b[1]*0.5,0.4),
'vol':iso226.iso226(90,p)})
if mode=="TECHNO": env = 150
else: env = 50
print(self.blips)
for i in range(0,self.bar_length):
if self.pos<len(out):
for e in self.events:
if mode=="TECHNO":
s = 0.016*e['tec'].generate(self.pos/44100.0*e['freq'])*e['vol']
else:
s = 0.008*math.sin(self.pos/44100.0*e['freq'])*e['vol']
if i>e['pos'] and i<=e['pos']+env:
env_lev = 1-(e['pos']+env-i)/float(env)
out[self.pos] += s*env_lev
if i>e['pos']+env:
out[self.pos] += s
e['vol']*=0.9995
self.pos+=1
if i%50==0: time.sleep(0.3)
# remove old events
new_events=[]
for e in self.events:
if e['vol']>0.001:
new_events.append(e)
self.events = new_events
#print(str(len(self.events))+" events...")
|
Emp=[]
def findConnections(name1, name2):
if len(Emp)==0:
connect=[name1,name2]
Emp.append(connect)
else:
found=1
for i in Emp:
if name1 in i :
i.append(name2)
found=1
return
elif name2 in i:
i.append(name1)
found=1
return
else:
found=0
if found==0:
connect=[name1,name2]
Emp.append(connect)
def Connections():
for i in Emp:
print(i)
def queryConnections(name1, name2):
for i in Emp:
if (name1 in i) and (name2 in i):
print("Yes")
return
print("No")
n_entries=int(input("Enter number of employees to be filled"))
for i in range(n_entries):
print("enter entries")
entries=list(input().split(' '))
findConnections(entries[0],entries[1])
entries.clear()
Connections()
print(len(Emp))
n_queries=int(input("Enter number of employees to be queried"))
for i in range(n_queries):
print("Enter queries")
entries=list(input().split(' '))
queryConnections(entries[0],entries[1])
|
"""
The set [1,2,3,…,n] contains a total of n! unique permutations.
By listing and labeling all of the permutations in order,
We get the following sequence (ie, for n = 3):
"123"
"132"
"213"
"231"
"312"
"321"
Given n and k, return the kth permutation sequence.
Note: Given n will be between 1 and 9 inclusive.
"""
class Solution(object):
def getPermutation(self, n, k):
"""
:type n: int
:type k: int
:rtype: str
"""
facts = [1]
for i in range(1, n):
facts.append(i * facts[-1])
# def getPermutationRecur(unused, i):
# l = len(unused)
# if l == 0:
# return ""
# first_digit = i // facts[l - 1]
# return unused[first_digit] + getPermutationRecur(unused[:first_digit] + unused[first_digit + 1:], i % facts[l - 1])
# return getPermutationRecur([str(v + 1) for v in range(n)], k - 1)
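        # Iterative factorial-number-system decoding: with m digits remaining,
        # each choice of leading digit covers (m-1)! permutations, so the next
        # digit is unused[i // (m-1)!] and i reduces to i % (m-1)!.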
ans = ""
unused = [str(v + 1) for v in range(n)]
i = k - 1
while n > 0:
n -= 1
ans += unused[i // facts[n]]
unused.remove(ans[-1])
i %= facts[n]
return ans
ans = Solution()
for i in range(1, 7):
print(ans.getPermutation(3, i))
for i in range(1, 25):
print(ans.getPermutation(9, i))
|
import hdfs
import pymongo
import json
import os
import time
# Start mongodb first:
# sudo mongod --dbpath=/Users/h2p/Documents/Project/data/db
client = hdfs.Client('http://*:50070', root='/')
print('Connecting to hdfs')
# client = hdfs.Client('http://*:50070', root='/')
# client = hdfs.Client('http://*:50070', root='/')
print('Connecting to mongodb')
# myClient = pymongo.MongoClient(host='*', port=20000)
myClient = pymongo.MongoClient(host='127.0.0.1', port=27017)
mydb = myClient['CloudComputing']
mycol = mydb['UserInfo']
print('Reading already-transferred Mongo ids')
Mongo_json_OK = []
with open('Mongo_json_OK.txt', 'r', encoding='utf-8') as f:
mongoId = f.readline().strip()
while mongoId:
Mongo_json_OK.append(mongoId)
mongoId = f.readline().strip()
print('Reading Mongo data')
count = len(Mongo_json_OK)
for item in mycol.find():
item['_id'] = str(item['_id'])
if item['_id'] not in Mongo_json_OK:
filePath = './json/'+item['_id']+'.json'
with open(filePath, 'w', encoding='utf-8') as f:
json.dump(item, f, ensure_ascii=False)
print('Uploading file %s to hdfs' % item['_id'])
client.upload('/input/', filePath, overwrite=True)
os.remove(filePath)
Mongo_json_OK.append(item['_id'])
with open('Mongo_json_OK.txt', 'a', encoding='utf-8') as f:
f.write(item['_id']+'\n')
count += 1
print('%d : %s' % (count, item['_id']))
time.sleep(1)
myClient.close() |
import numpy as np
class Optimizer:
def __init__(self, name="Base optimizer"):
self.name = name
def initialize(self, params):
pass
def apply(self, params, grads, step_i=None):
pass
class SGD(Optimizer):
def __init__(self, lr=0.001, momentum=0.0, nesterov=False, bias_correction=False):
super().__init__(name="Momentum")
self.lr = lr
self.momentum = momentum
self.nesterov = nesterov
self.bias_correction = bias_correction
# Params
self.V = {}
def initialize(self, params):
for k in params.keys():
self.V[k] = np.zeros_like(params[k])
def apply(self, params, grads, step_i=None):
for k in params.keys():
# Exclude no trainable params
if k not in grads:
continue
# Momentum
v_prev = self.V[k]
self.V[k] = self.momentum * self.V[k] + (1 - self.momentum) * grads[k]
            # Bias correction (uncommon here; vanilla SGD does not need it).
            # Debias the running average by 1/(1 - momentum**t), as in Adam,
            # on a local copy so the stored state is not compounded.
            v_k = self.V[k]
            if self.bias_correction and self.momentum != 0.0:
                v_k = v_k / (1.0 - self.momentum ** step_i)
            # Compute update
            if not self.nesterov:
                new_v = v_k
            else:
                # Nesterov lookahead on the (debiased) average
                new_v = -self.momentum * v_prev + (1 + self.momentum) * v_k
# Step
params[k] -= self.lr * new_v
class RMSProp(Optimizer):
def __init__(self, lr=0.001, rho=0.9, epsilon=10e-8):
super().__init__(name="RMSProp")
self.lr = lr
self.rho = rho
self.epsilon = epsilon
# Params
self.S = {}
def initialize(self, params):
for k in params.keys():
self.S[k] = np.zeros_like(params[k])
def apply(self, params, grads, step_i=None):
for k in params.keys():
# Exclude no trainable params
if k not in grads:
continue
# Momentum
self.S[k] = self.rho * self.S[k] + (1.0 - self.rho) * grads[k]**2
# Step
new_v = grads[k]/(np.sqrt(self.S[k]+self.epsilon))
params[k] -= self.lr * new_v
class Adam(Optimizer):
def __init__(self, lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=10e-8, bias_correction=False):
super().__init__(name="Adam")
self.lr = lr
self.beta_1 = beta_1
self.beta_2 = beta_2
self.epsilon = epsilon
self.bias_correction = bias_correction
# Params
self.V = {}
self.S = {}
def initialize(self, params):
for k in params.keys():
self.V[k] = np.zeros_like(params[k])
self.S[k] = np.zeros_like(params[k])
def apply(self, params, grads, step_i=None):
for k in params.keys():
# Exclude no trainable params
if k not in grads:
continue
# Momentum
self.V[k] = self.beta_1 * self.V[k] + (1.0 - self.beta_1) * grads[k]
self.S[k] = self.beta_2 * self.S[k] + (1.0 - self.beta_2) * grads[k] ** 2
            # Bias correction: debias first and second moments separately
            if self.bias_correction:
                vk_corrected = self.V[k] / (1.0 - self.beta_1 ** step_i)
                sk_corrected = self.S[k] / (1.0 - self.beta_2 ** step_i)
            else:
                vk_corrected, sk_corrected = self.V[k], self.S[k]
# Step
new_v = vk_corrected / (np.sqrt(sk_corrected + self.epsilon))
params[k] -= self.lr * new_v
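# A minimal usage sketch (assumed workflow: params and grads are dicts of
# numpy arrays, step_i counts from 1): minimise f(w) = ||w||^2 with Adam.
if __name__ == "__main__":
    params = {"w": np.array([1.0, -2.0, 3.0])}
    opt = Adam(lr=0.1, bias_correction=True)
    opt.initialize(params)
    for step in range(1, 201):
        grads = {"w": 2.0 * params["w"]}  # gradient of the quadratic
        opt.apply(params, grads, step_i=step)
    print(params["w"])  # close to the minimiser at the origin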
|
import os
import tkinter
from tkinter import filedialog
cur_path=os.getcwd()
root = tkinter.Tk()
root.withdraw()
files = filedialog.askopenfilenames(parent=root,initialdir =cur_path,title = "Choose files to be renamed")
print(f'{len(files)} files selected')
print()
outpath = filedialog.askdirectory(parent=root,initialdir=cur_path,title='Please select a directory')
prefix= input("Enter prefix : ")
for i,file in enumerate(files):
os.rename(file,os.path.join(outpath,prefix+str(i+1)+".png"))
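# Note: os.rename fails when the destination is on a different filesystem;
# shutil.move would be the safer choice if outpath can be on another drive.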
print(f'{len(files)} files renamed')
print()
|
import logging
import csv
import os
from elastic.management.loaders.mapping import MappingProperties
from elastic.management.loaders.loader import Loader
import json
from data_pipeline.helper.gene import Gene
logger = logging.getLogger(__name__)
class GenePathways(Gene):
''' GenePathways class defines functions for building pathway_genesets index type within gene index
The pathway_genesets index type is currently built by parsing the following:
1. Refer section [MSIGDB] in download.ini for source files
'''
@classmethod
def gene_pathway_parse(cls, download_files, stage_output_file, section, config=None):
''' Function to delegate parsing of gene pathway files based on the file formats eg: gmt - genematrix '''
cls._genematrix(download_files, stage_output_file, section, config)
@classmethod
def _genematrix(cls, download_files, stage_output_file, section, config=None):
'''Function to delegate parsing of pathway files based on the source eg: kegg, reactome, go'''
abs_path_staging_dir = os.path.dirname(stage_output_file)
source = None
is_public = section['is_public'] == 1
for file in download_files:
stage_output_file = abs_path_staging_dir + '/' + os.path.basename(file) + '.json'
source = cls._get_pathway_source(file)
cls._process_pathway(file, stage_output_file, section, source, is_public, config)
@classmethod
def _get_pathway_source(cls, file):
'''Function to check for the pathway source in file name eg: kegg, reactome, go'''
if 'kegg' in file:
source = 'kegg'
elif 'reactome' in file:
source = 'reactome'
elif 'biocarta' in file:
source = 'biocarta'
elif 'all' in file:
source = 'GO'
else:
source = 'unknown'
return(source)
@classmethod
def _process_pathway(cls, download_file, stage_output_file, section, source, is_public, config=None):
'''Function to parse the pathway input files eg: kegg, reactome, go
INPUT file format:
Pathway name \t Pathway url \t List of entrez ids
REACTOME_RNA_POL_I_TRANSCRIPTION_TERMINATION
http://www.broadinstitute.org/gsea/msigdb/cards/REACTOME_RNA_POL_I_TRANSCRIPTION_TERMINATION1022
2068 2071 25885 284119 2965 2966 2967 2968 4331
The entrez ids are converted to ensembl ids and logs are written to track the conversion rates (LESS/MORE/EQUAL)
'''
json_target_file_path = stage_output_file.replace(".out", ".json")
json_target_file = open(json_target_file_path, mode='w', encoding='utf-8')
json_target_file.write('{"docs":[\n')
count = 0
with open(download_file, encoding='utf-8') as tmp_row_count_file:
    row_count = sum(1 for row in tmp_row_count_file)
logger.debug('Number of lines in the file ' + str(row_count))
load_mapping = True
gene_sets = []
with open(download_file, encoding='utf-8') as csvfile:
reader = csv.reader(csvfile, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
gene_sets.extend(row[2:])
ens_look_up = Gene._entrez_ensembl_lookup(gene_sets, section, config)
with open(download_file, encoding='utf-8') as csvfile:
reader = csv.reader(csvfile, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
path_object = dict()
pathway_name = row[0]
pathway_url = row[1]
gene_sets = row[2:]
converted_genesets = [ens_look_up[entrez] for entrez in gene_sets if entrez in ens_look_up]
path_object["pathway_name"] = pathway_name
path_object["pathway_url"] = pathway_url
path_object["gene_sets"] = converted_genesets
path_object["source"] = source
path_object["is_public"] = is_public
json_target_file.write(json.dumps(path_object))
count += 1
if row_count == count:
json_target_file.write('\n')
else:
json_target_file.write(',\n')
json_target_file.write('\n]}')
logger.debug("No. genes to load "+str(count))
logger.debug("Json written to " + json_target_file_path)
logger.debug("Load mappings")
if load_mapping:
status = cls._load_pathway_mappings(section)
print(status)
@classmethod
def _load_pathway_mappings(cls, section):
'''Function to load the elastic mappings'''
idx = section['index']
idx_type = section['index_type']
pathway_mapping = MappingProperties(idx_type)
pathway_mapping.add_property("pathway_name", "string")
pathway_mapping.add_property("pathway_url", "string")
pathway_mapping.add_property("gene_sets", "string")
pathway_mapping.add_property("source", "string")
pathway_mapping.add_property("is_public", "string")
load = Loader()
options = {"indexName": idx, "shards": 1}
status = load.mapping(pathway_mapping, idx_type, **options)
return status
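# A hypothetical invocation sketch (the file name and section keys below are
# assumptions based on the docstrings above, not a real configuration):
#   section = {'index': 'gene', 'index_type': 'pathway_genesets', 'is_public': 1}
#   GenePathways.gene_pathway_parse(['c2.cp.kegg.entrez.gmt'],
#                                   '/tmp/staging/pathways.out', section)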
|
from flask_wtf import FlaskForm
from wtforms import StringField, IntegerField, PasswordField, DateField
from wtforms.validators import DataRequired
class ArticleForm(FlaskForm):
title = StringField("Title", validators=[DataRequired()])
date = DateField("Date", format='%m/%d/%Y', validators=[DataRequired()])
author = StringField("Author", validators=[DataRequired()])
image = StringField("Image", validators=[DataRequired()])
caption = StringField("Caption", validators=[DataRequired()])
location = StringField("Location", validators=[DataRequired()])
article = StringField("Article", validators=[DataRequired()])
category = StringField("Category", validators=[DataRequired()])
scope = StringField("Scope", validators=[DataRequired()])
name = StringField("Name", validators=[DataRequired()])
password = PasswordField("Password", validators=[DataRequired()])
class EditArticleForm(FlaskForm):
id = IntegerField("Id", validators=[DataRequired()])
title = StringField("Title")
date = DateField("Date", format='%m/%d/%Y')
author = StringField("Author")
image = StringField("Image")
caption = StringField("Caption")
location = StringField("Location")
article = StringField("Article")
category = StringField("Category")
scope = StringField("Scope")
name = StringField("Name", validators=[DataRequired()])
password = PasswordField("Password", validators=[DataRequired()])
class RemoveArticleForm(FlaskForm):
id = IntegerField("Id", validators=[DataRequired()])
name = StringField("Name", validators=[DataRequired()])
password = PasswordField("Password", validators=[DataRequired()])
|
from flask import Flask,render_template,request,redirect
import pickle
import pandas as pd
import numpy as np
with open("Laura/Assets/Model/locations.txt", "r") as f:
locations = f.read()
locations = locations.strip()[1:len(locations)-1]
locations = locations.split(',')
location_data = [i.strip() for i in locations]
# The location names are stored wrapped in quotes, so strip the surrounding "'" from each name
locations = [i[1:len(i)-1] for i in location_data if i != "'"]
ans = 0
def predict_price(location,sqft,bath,bhk):
    # Note: the dataframe and the pickled model are re-loaded on every request;
    # loading them once at module level would be cheaper.
    X = pd.read_csv("Laura/Assets/Model/Database.csv",sep=',')
    pickle_in = open("Laura/Assets/Model/Laura_Best_Model.pickle","rb")
    Laura = pickle.load(pickle_in)
    try:
        loc_index = np.where(X.columns==location)[0][0]
    except IndexError:
        # unknown location: leave loc_index at 0 so no one-hot flag is set
        loc_index = 0
x = np.zeros(len(X.columns))
x[0] = sqft
x[1] = bath
x[2] = bhk
if loc_index>0:
x[loc_index] = 1
ans = Laura.predict([x])[0]
return ans
app = Flask(__name__)
@app.route('/',methods=['GET','POST'])
def index():
if request.method == 'POST':
name = request.form.get('Name')
email = request.form.get('Email')
return redirect('/'+name)
else:
return render_template('index.html')
@app.route('/<string:name>',methods=['GET','POST'])
def predict(name):
if request.method == 'POST':
Location = request.form['location']
print(Location)
Sqft = float(request.form['sqft'])
bed = int(request.form['bed'])
bath = int(request.form['bath'])
predicted_price = predict_price(location=Location, sqft=Sqft, bath=bath, bhk=bed)
return redirect('/'+name+'/results='+ str(round(predicted_price,3)))
return render_template('predict.html',name=name,locations = locations)
@app.route('/<string:name>/results=<string:ans>')
def show_result(name,ans):
return render_template('result.html',ans = float(ans))
if __name__=='__main__':
app.run(debug=True) |
import errno
import gi
import glob
import io
import logging
import os
import re
import time
import v4l2
import sdnotify
import signal
import sys
import traceback
from fcntl import ioctl
from .config import *
from .streamer import *
from .advertise import StreamAdvert
from .janus import JanusInterface
gi.require_version('Gst', '1.0')
from gi.repository import GLib,Gst
Gst.init(None)
### Main visiond App Class
class visiondApp():
def __init__(self, config):
self.config = config
self.logger = logging.getLogger('visiond.' + __name__)
self.stream = None
self.zeroconf = None
self.janus = None
self._should_shutdown = False
self.notify = sdnotify.SystemdNotifier()
signal.signal(signal.SIGINT, self.signal_handler)
signal.signal(signal.SIGTERM, self.signal_handler)
def signal_handler(self, sig, frame):
self.shutdown()
def run(self):
self.logger.info("Starting maverick-visiond")
if 'debug' in self.config.args and self.config.args.debug:
Gst.debug_set_active(True)
Gst.debug_set_default_threshold(self.config.args.debug)
if 'retry' not in self.config.args or not self.config.args.retry:
self.retry = 30
else:
self.retry = float(self.config.args.retry)
# Start the zeroconf thread
if self.config.args.zeroconf:
self.zeroconf = StreamAdvert(self.config)
self.zeroconf.start()
else:
self.zeroconf = None
self.janus = JanusInterface(self.config, self.zeroconf)
self.janus.start()
# Start the pipeline. Trap any errors and wait for 30sec before trying again.
while not self._should_shutdown:
try:
if 'pipeline_override' in self.config.args and self.config.args.pipeline_override:
self.logger.info("pipeline_override set, constructing manual pipeline")
self.manualconstruct()
else:
self.logger.info("pipeline_override is not set, auto-constructing pipeline")
self.autoconstruct()
except ValueError as e:
    self.logger.critical("Error constructing pipeline: {}, retrying in {} sec".format(repr(e), self.retry))
    #self.logger.info("Notifying systemd of startup failure")
    #self.notify.notify("ERRNO=1")
    #self.notify.notify("STATUS=Error constructing pipeline: {}".format(repr(e)))
    time.sleep(self.retry)
    continue
# Inform systemd that start is complete
self.logger.info("Notifying systemd of startup completion")
self.notify.notify("READY=1")
self.notify.notify("STATUS=Pipeline Initialisation Complete")
sys.exit(0)
def manualconstruct(self):
if 'pipeline_override' not in self.config.args or not self.config.args.pipeline_override:
    self.logger.critical('manualconstruct() called but no pipeline_override config argument specified')
    sys.exit(1)
self.logger.info("Manual Pipeline Construction")
self.logger.info("Creating pipeline from config: " + self.config.args.pipeline_override)
try:
# Create the pipeline from config override
self.pipeline = Gst.parse_launch(self.config.args.pipeline_override)
# Set pipeline to playing
self.pipeline.set_state(Gst.State.PLAYING)
except Exception as e:
raise ValueError('Error constructing manual pipeline specified: {}'.format(repr(e)))
# Inform systemd that start is complete
self.logger.info("Notifying systemd of startup completion")
self.notify.notify("READY=1")
self.notify.notify("STATUS=Manual Pipeline Initialisation Complete")
while True:
time.sleep(5)
def autoconstruct(self):
# If camera device set in config use it, otherwise autodetect
cameradev = None
devicepaths = glob.glob("/dev/video*")
if self.config.args.camera_device:
self.logger.debug('camera_device specified: {}'.format(self.config.args.camera_device))
cameradev = self.config.args.camera_device
else:
# device not set, carry on and try to autodetect
for devicepath in sorted(devicepaths):
if not cameradev and self.check_input(devicepath):
cameradev = devicepath
self.logger.info('v4l2 device '+devicepath+' is a camera, autoselecting')
elif not cameradev:
self.logger.debug('v4l2 device '+devicepath+' is not a camera, ignoring')
if not cameradev:
raise ValueError('Error detecting camera video device')
# Check the camera has a valid input
try:
self.vd = io.TextIOWrapper(open(cameradev, "r+b", buffering=0))
cp = v4l2.v4l2_capability()
except Exception as e:
raise ValueError("Camera not specified in config, or camera not valid: {}".format(repr(e)))
if not self.check_input():
raise ValueError('Specified camera not valid')
# Log info
self.camera_info()
# Try and autodetect Jetson/Tegra CSI connection
if self.driver == 'tegra-video':
self.logger.info('Nvidia Jetson/Tegra CSI connection detected, switching to nvarguscamerasrc')
self.input = "nvarguscamerasrc"
elif 'input' not in self.config.args or not self.config.args.input:
self.input = "v4l2src"
else:
self.input = self.config.args.input
# Try and autodetect MFC device
self.mfcdev = None
for devicepath in devicepaths:
dp = io.TextIOWrapper(open(devicepath, "r+b", buffering=0))
ioctl(dp, v4l2.VIDIOC_QUERYCAP, cp)
if cp.card.decode() == "s5p-mfc-enc":
self.mfcdev = dp
self.logger.info(f'MFC Hardware encoder detected, autoselecting {devicepath}')
# If format set in config use it, otherwise autodetect
streamtype = None
if self.config.args.format:
streamtype = self.config.args.format
else:
if self.input == "nvarguscamerasrc":
self.logger.info('Nvidia Jetson/Tegra input detected, forcing Tegra stream format')
streamtype = 'tegra'
elif re.search("C920", self.card):
self.logger.info("Logitech C920 detected, forcing H264 passthrough")
streamtype = 'h264'
# format not set, carry on and try to autodetect
elif self.check_format('yuv'):
self.logger.info('Camera YUV stream available, using yuv stream')
streamtype = 'yuv'
# Otherwise, check for an mjpeg->h264 encoder pipeline.
elif self.check_format('mjpeg'):
self.logger.info('Camera MJPEG stream available, using mjpeg stream')
streamtype = 'mjpeg'
# Lastly look for a h264 stream
elif self.check_format('h264'):
self.logger.info('Camera H264 stream available, using H264 stream')
streamtype = 'h264'
if not streamtype:
raise ValueError('Error detecting camera video format')
# If encoder set in config use it, otherwise set to h264
encoder = None
if self.config.args.encoder:
encoder = self.config.args.encoder
if not encoder:
encoder = "h264"
self.logger.debug("Using encoder: {}".format(encoder))
# If raspberry camera detected set pixelformat to I420, otherwise set to YUY2 by default
pixelformat = "YUY2"
ioctl(self.vd, v4l2.VIDIOC_QUERYCAP, cp)
if cp.driver.decode() == "bm2835 mmal":
self.logger.info("Raspberry Pi Camera detected, setting pixel format to I420")
pixelformat = "I420"
# If raw pixelformat set in config override the defaults
if 'pixelformat' in self.config.args and self.config.args.pixelformat:
pixelformat = self.config.args.pixelformat
self.logger.debug("Using pixelformat: {}".format(pixelformat))
# Create and start the stream
try:
self.logger.info("Creating stream object - device: {}, stream: {}, pixelformat: {}, encoder: {}, input: {}".format(cameradev, streamtype, pixelformat, encoder, self.input))
Streamer(self.config, streamtype, pixelformat, encoder, self.input, cameradev)
if self.zeroconf:
# Update the stream advertisement with the new info
self.zeroconf.update({"stream":"replace_with_stream_info"})
except Exception as e:
if self.zeroconf:
self.zeroconf.update({"stream":""})
raise ValueError('Error creating {} stream: {}'.format(streamtype, repr(e)))
# Inform systemd that start is complete
self.logger.info("Notifying systemd of startup completion")
self.notify.notify("READY=1")
self.notify.notify("STATUS=Automatic Pipeline Initialisation Complete")
while not self._should_shutdown:
time.sleep(1)
def camera_info(self):
# Log capability info
cp = v4l2.v4l2_capability()
ioctl(self.vd, v4l2.VIDIOC_QUERYCAP, cp)
self.logger.debug("driver: " + cp.driver.decode())
self.logger.debug("card: " + cp.card.decode())
self.driver = cp.driver.decode()
self.card = cp.card.decode()
# Log controls available
queryctrl = v4l2.v4l2_queryctrl(v4l2.V4L2_CID_BASE)
while queryctrl.id < v4l2.V4L2_CID_LASTP1:
try:
ioctl(self.vd, v4l2.VIDIOC_QUERYCTRL, queryctrl)
except IOError as e:
# this predefined control is not supported by this device
assert e.errno == errno.EINVAL
queryctrl.id += 1
continue
self.logger.debug("Camera control: " + queryctrl.name.decode())
queryctrl = v4l2.v4l2_queryctrl(queryctrl.id + 1)
queryctrl.id = v4l2.V4L2_CID_PRIVATE_BASE
while True:
try:
ioctl(self.vd, v4l2.VIDIOC_QUERYCTRL, queryctrl)
except IOError as e:
# no more custom controls available on this device
assert e.errno == errno.EINVAL
break
self.logger.debug("Camera control: " + queryctrl.name.decode())
queryctrl = v4l2.v4l2_queryctrl(queryctrl.id + 1)
# Log formats available
capture = v4l2.v4l2_fmtdesc()
capture.index = 0
capture.type = v4l2.V4L2_BUF_TYPE_VIDEO_CAPTURE
try:
while (ioctl(self.vd, v4l2.VIDIOC_ENUM_FMT, capture) >= 0):
self.logger.debug("Camera format: " + capture.description.decode())
capture.index += 1
except:
pass
def check_input(self, vd=None, index=0):
if vd is None:
vd = self.vd
else:
vd = io.TextIOWrapper(open(vd, "r+b", buffering=0))
input = v4l2.v4l2_input(index)
try:
ioctl(vd, v4l2.VIDIOC_ENUMINPUT, input)
self.logger.debug('V4l2 device input: ' + input.name.decode() + ':' + str(input.type))
if input.type != 2:
return False # If input type is not camera (2) then return false
return True
except Exception as e:
self.logger.debug("Error checking input: {}".format(repr(e)))
return False
def check_format(self, format):
capture = v4l2.v4l2_fmtdesc()
capture.index = 0
capture.type = v4l2.V4L2_BUF_TYPE_VIDEO_CAPTURE
available = False
try:
while (ioctl(self.vd, v4l2.VIDIOC_ENUM_FMT, capture) >= 0):
self.logger.debug("Checking format: {} : {}".format(format, capture.description.decode()))
if format.lower() == "h264":
if re.search('h264', capture.description.decode().lower()) or re.search('h.264', capture.description.decode().lower()):
available = True
elif format.lower() == "mjpeg":
if re.search('jpeg', capture.description.decode().lower()):
available = True
elif format.lower() == "yuv" or format.lower() == "raw":
if re.search('^yu', capture.description.decode().lower()):
available = True
else:
if re.search(format.lower(), capture.description.decode().lower()):
available = True
capture.index += 1
except:
pass
return available
def shutdown(self):
self._should_shutdown = True
self.logger.info("Shutting down visiond")
if self.stream:
if self.stream.webrtc:
self.stream.webrtc.shutdown()
if self.stream.webrtc_signal_server:
self.stream.webrtc_signal_server.shutdown()
self.stream.webrtc_signal_server.join()
self.stream.stop()
if self.janus:
self.janus.shutdown()
self.janus.join()
if self.zeroconf:
self.zeroconf.shutdown()
self.zeroconf.join()
|
from game.game import Game
HOST = '127.0.0.1'
PORT = 32198
def main():
game = Game(HOST, PORT)
game.run()
if __name__ == '__main__':
main()
|
from sqlalchemy import func
from flask_appbuilder import Model
from flask_appbuilder.models.mixins import AuditMixin, FileColumn, ImageColumn
from flask_appbuilder.models.decorators import renders
from sqlalchemy import (Column, Integer, String, ForeignKey,
Sequence, Float, Text, BigInteger, Date,
DateTime, Time, Boolean, CheckConstraint,
UniqueConstraint, Table)
from sqlalchemy.orm import relationship, query, defer, deferred
from sqlalchemy_utils import aggregated
from .mixins import *
# from sqlalchemy_mixins import ActiveRecordMixin
#from ../../pjwide/mixins import *
from flask_appbuilder.filemanager import get_file_original_name, ImageManager
"""
You can use the extra Flask-AppBuilder fields and Mixin's
AuditMixin will add automatic timestamp of created and modified by who
"""
# class BaseModel(ActiveRecordMixin, Model):
# __abstract__ = True
# pass
class Gender(RefTypeMixin, Model):
__tablename__ = 'gender'
class CaseStatus(RefTypeMixin, Model):
__tablename__ = 'case_status'
class CourtLevel(RefTypeMixin, Model):
__tablename__ = 'court_level'
class CaseType(RefTypeMixin, Model):
__tablename__ = 'case_type'
class CaseCategory(RefTypeMixin, Model):
__tablename__ = 'case_category'
class HearingType(RefTypeMixin, Model):
__tablename__ = 'hearing_type'
class EventType(RefTypeMixin, Model):
__tablename__ = 'event_type'
##### Reference Tables #####
# 10 Regions/County
class Region(RefTypeMixin, Model):
__tablename__ = 'region'
capital = Column(String(30))
districts = relationship('District', back_populates = 'region')
# Subcounty/216 Districts in Ghana
class District(RefTypeMixin, Model):
__tablename__ = 'district'
region_fk = Column(Integer, ForeignKey('region.id'))
region = relationship(Region, back_populates = 'districts')
capital = Column(String(30))
towns = relationship('Town', backref='district')
courts = relationship('Court', back_populates = 'district')
police_stations = relationship('PoliceStation', back_populates = 'district')
prisons = relationship('Prison', back_populates = 'district')
# class Subcounty(RefTypeMixin, PlaceMixin, AuditMixin, Model):
# __tablename__ = 'subcounty'
# id = Column(Integer, autoincrement=True, primary_key=True)
# county_fk = Column(Integer, ForeignKey('county.id'))
# #county = relationship(County)
# wards = relationship('Ward', backref = 'subcounty')
#
# class Ward( PlaceMixin, AuditMixin, Model): #RefTypeMixin,
# __tablename__ = 'ward'
# id = Column(Integer, autoincrement=True, primary_key=True)
# name = Column(String(30))
# subcounty_fk = Column(Integer, ForeignKey('subcounty.id'))
# #subcounty = relationship(Subcounty)
# constituency_fk = Column(Integer, ForeignKey('constituency.id'))
class Town(RefTypeMixin, Model):
__tablename__ = 'town'
district_fk = Column(Integer, ForeignKey('district.id'))
#district = relationship(District)
urban_status = Column(String(30))
local_authority = Column(String(50))
rank = Column(Integer)
class Constituency(RefTypeMixin, Model):
__tablename__ = 'constituency'
region_fk = Column(Integer, ForeignKey('region.id'))
region = relationship(Region)
district_name = Column(String(30))
#region = Column(String(40))
#wards = relationship('Ward', backref='constituency')
# @aggregated('wards', Column(Integer))
# def ward_count(self):
# return func.count('1')
#ward_count = Column(Integer)
###### Monitored Entities [Schools, Courts, Shops, Whatever] #####
# Modify These to Suit
class Court(RefTypeMixin, ContactMixin, PlaceMixin, Model):
__tablename__ = 'court'
id = Column(Integer, Sequence('court_id_seq'), primary_key=True)
registrar = Column(String(30), nullable=True)
district_fk = Column(Integer, ForeignKey('district.id'))
district = relationship("District", back_populates="courts")
class PoliceStation(RefTypeMixin, ContactMixin, PlaceMixin, Model):
__tablename__ = 'police_station'
id = Column(Integer, Sequence('police_id_seq'), primary_key=True)
district_fk = Column(Integer, ForeignKey('district.id'))
district = relationship("District", back_populates="police_stations")
officer_commanding = Column(String(40))
cell_count = Column(Integer)
class Prison(RefTypeMixin, PlaceMixin, ContactMixin, Model):
__tablename__ = 'prison'
district_fk = Column(Integer, ForeignKey('district.id'))
district = relationship("District", back_populates="prisons")
holding_capacity = Column(Integer)
personcase = Table('percase', Model.metadata,
#Column('id', Integer, ForeignKey('tag.id')),
Column('person_id', Integer, ForeignKey('person.id')),
Column('case_id', Integer, ForeignKey('case.id'))
)
#person case
class Person(PersonMixin, ContactMixin, ParentageMixin, AuditMixin, Model):
__tablename__ = 'person'
discriminator = Column('type', String(50))
__mapper_args__ = {'polymorphic_on': discriminator}
class Plaintiff(Person):
__mapper_args__ = {'polymorphic_identity': 'plaintiff'}
class Defendant(Person):
__mapper_args__ = {'polymorphic_identity': 'defendant'}
class PoliceStaff(Person):
__mapper_args__ = {'polymorphic_identity': 'policeman'}
class PrisonStaff(Person):
__mapper_args__ = {'polymorphic_identity': 'warden'}
class Prosecutor(Person):
__mapper_args__ = {'polymorphic_identity': 'prosecutor'}
class Registrar(Person):
__mapper_args__ = {'polymorphic_identity': 'registrar'}
class Judge(Person):
__mapper_args__ = {'polymorphic_identity': 'judge'}
class Magistrate(Person):
__mapper_args__ = {'polymorphic_identity': 'magistrate'}
class Detective(Person):
__mapper_args__ = {'polymorphic_identity': 'detective'}
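# --- Added sketch: how the discriminator column above behaves. All the
# subclasses share the single `person` table; SQLAlchemy stamps the `type`
# column with each class's polymorphic_identity on insert and instantiates
# the matching subclass on load. A minimal, self-contained illustration in
# plain SQLAlchemy (demo names are hypothetical and independent of the
# models in this module):
if __name__ == '__main__':
    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session, declarative_base
    DemoBase = declarative_base()
    class DemoPerson(DemoBase):
        __tablename__ = 'demo_person'
        id = Column(Integer, primary_key=True)
        discriminator = Column('type', String(50))
        __mapper_args__ = {'polymorphic_on': discriminator}
    class DemoJudge(DemoPerson):
        __mapper_args__ = {'polymorphic_identity': 'judge'}
    engine = create_engine('sqlite://')
    DemoBase.metadata.create_all(engine)
    with Session(engine) as session:
        session.add(DemoJudge())
        session.commit()
        loaded = session.query(DemoPerson).one()
        print(type(loaded).__name__, loaded.discriminator)  # DemoJudge judge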
# class CaseHearings(Model):
# __tablename__ = 'case_hearings'
#
# hearing_type_fk = Column(Integer, ForeignKey('hearing_type.id'))
# hearing_type = relationship('Hearing')
#
# case_fk = Column(Integer, ForeignKey('case.id'))
# cases = relationship('Case')
#
# reason = Column(String(200))
# hearing_date = Column(Date, nullable=False, default=datetime.today)
# 'date_reported', 'ob_number', 'case_type', 'case_category'
class Case(AuditMixin, Model):
__tablename__ = 'case'
id = Column(Integer, autoincrement=True, primary_key=True)
open_date = Column(DateTime, default=datetime.now, nullable=False)
station_id = Column(Integer, ForeignKey('police_station.id'))
station = relationship("PoliceStation")
ob_number = Column(Integer, autoincrement=True, unique=True)
report = Column(Text)
    casetype_id = Column(Integer, ForeignKey('case_type.id'), nullable=False)
    case_type = relationship("CaseType")
    case_category_id = Column(Integer, ForeignKey('case_category.id'), nullable=True)
    case_category = relationship("CaseCategory")
status_id = Column(Integer, ForeignKey('case_status.id'))
status = relationship(CaseStatus)
#people = relationship('Person', secondary=personcase, backref='case')
# complaint_no,
    reporting_officer = Column(String(80))
    investigating_officer = Column(String(80))
    investigation_outcomes = Column(Text)
    investigation_status = Column(String(80))
    evidence_collected = Column(Text)
    evidence_pictures = Column(Text)
    offender_identification = Column(Text)
    offenders_arrested = Column(Text)
    arrest_location = Column(Text)
    arresting_officer = Column(Text)
    arrest_narrative = Column(Text)
    warrant_date = Column(Date)
    warrant_details = Column(Text)
    probable_cause = Column(Text)
    document_list = Column(Text)
    document_count = Column(Integer)
    documents = Column(Text)
    charge_date = Column(Date)
    charge_description = Column(Text)
    court_id = Column(Integer, ForeignKey('court.id'))
    charge_court = relationship(Court)
    first_hearing_date = Column(Date)
    # hearings = relationship('HearingType',
    #                         secondary=case_hearings,
    #                         back_populates='cases')
    hearing_dates = Column(Text)
    court_outcome = Column(Text)
    case_duration = Column(Integer)
    offender_picture = Column(Text)
    sentence_date = Column(Date)
    sentence = Column(Text)
    case_closed = Column(Boolean, default=False)
##### Admin Tables #####
class ContactForm(RefTypeMixin, AuditMixin, Model):
__tablename__ = 'contact_form'
message = Column(Text)
class Complaint(AuditMixin, Model):
__tablename__ = 'complaint'
id = Column(Integer, autoincrement=True, primary_key=True)
report_date = Column(DateTime)
report_time = Column(DateTime)
event_date = Column(Date)
event_place = Column(String(80))
complainant = Column(String(80))
comp_phone = Column(String(80))
comp_email = Column(String(40))
comp_address = Column(Text)
comp_age = Column(Integer)
comp_dob = Column(Date)
comp_is_minor = Column(Boolean)
comp_gender_fk = Column(Integer, ForeignKey('gender.id'))
comp_gender = relationship(Gender)
casetype_id = Column(Integer, ForeignKey('case_type.id'), nullable=False)
case_type = relationship("CaseType")
case_category_id = Column(Integer, ForeignKey('case_category.id'), nullable=True)
case_category = relationship("CaseCategory")
complainant_role = Column(Text)
complaint = Column(Text, default='')
complaint_language = Column(String(80))
observations = Column(Text)
injuries = Column(Text)
loss = Column(Text)
damage = Column(Text)
theft = Column(Text)
narcotics = Column(Boolean)
fraud = Column(Text)
domestic_abuse = Column(Boolean)
complainant_is_victim = Column(Boolean)
victim_name = Column(String(80))
victim_phone = Column(String(80))
victim_email = Column(String(80))
victim_address = Column(String(80))
victim_age = Column(Integer)
victim_dob = Column(Date)
victim_gender = Column(Boolean)
victim_pwd = Column(Boolean)
victim_religion = Column(String(80))
victim_ethnicity = Column(String(80))
offender_count = Column(Integer)
offenders_known_to_victim = Column(Boolean)
offender_known_to_complainant = Column(Boolean)
offender_description = Column(Text)
police_interpretation = Column(Text)
is_a_crime = Column(Boolean)
is_a_case = Column(Boolean)
case_number = Column(String(80))
closed = Column(Boolean, default=False)
# To Build
# Case-History
# Notifications
# Documents/ Scans/ Dockets
# Lawyer Registry
# DPP/State Counsel Registry (Teams & Team Leaders)
# User Profiles/Preferences
# Filing Fees
##### Features #####
# Contact Form
# Wizards
# Wizard Session save
|
from itertools import chain
import string
# Digits 0-9 followed by A-Z: the 36 symbols of base 36
alphabet = {i: str(c) for i, c in enumerate(chain(range(10), string.ascii_uppercase))}
inv_alphabet = {val: key for key, val in alphabet.items()}
def to_base_36(r):
    def helper(r):
        if not r:
            return
        q, r = divmod(r, 36)
        yield from helper(q)
        yield alphabet[r]
    # joining no digits means r was 0, so fall back to '0'
    return ''.join(helper(r)) or '0'
def to_int(r):
    return sum(inv_alphabet[x] * 36 ** i for i, x in enumerate(reversed(str(r))))
# Convert integer to base36
print(to_base_36(11168434984638296100531098218969554919276774))
# Convert word to integer
print(to_int("ANTIDISESTABLISHMENTARIANISM"))
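# --- Added sanity check: encoding and decoding should round-trip for any
# non-negative integer. ---
for check in (0, 1, 35, 36, 12345, 36 ** 5 - 1):
    assert to_int(to_base_36(check)) == check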
|
"""
The MIT License (MIT)
Copyright (c) 2016 Intel Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import pytest
import uuid
from sqlalchemy import and_
from sqlalchemy import create_engine
from sqlalchemy_utils import create_database, drop_database, database_exists
import metadb.api.dbimport as dbimport
import metadb.models as models
from unittest import TestCase
import vcf
sampleN = 'sampleN'
sampleT = 'sampleT'
test_header = ["#CHROM", "POS", "ID", "REF",
"ALT", "QUAL", "FILTER", "INFO", "FORMAT"]
test_data = [
"1",
"10177",
"rs367896724",
"A",
"AC",
"100",
"PASS",
"AC=1;AF=0.425319;AN=6;NS=2504;DP=103152;EAS_AF=0.3363;AMR_AF=0.3602;AFR_AF=0.4909;EUR_AF=0.4056;SAS_AF=0.4949;AA=|||unknown(NO_COVERAGE);VT=INDEL",
"GT",
"1|0",
"0|0"]
class TestDBImportLevel0(TestCase):
"""
Tests all MetaDB Parent Object registration:
Level 0 of the MetaDB relationship hierarchy
"""
@classmethod
def setUpClass(self):
self.DBURI = "postgresql+psycopg2://@:5432/dbimport"
if database_exists(self.DBURI):
drop_database(self.DBURI)
create_database(self.DBURI)
engine = create_engine(self.DBURI)
models.bind_engine(engine)
self.assembly = "testAssembly"
self.workspace = "/test/dbimport/workspace"
def test_registerReferenceSet(self):
rguid = str(uuid.uuid4())
r2guid = str(uuid.uuid4())
fguid = rguid + '-longer'
with dbimport.DBImport(self.DBURI).getSession() as session:
# register new
result = session.registerReferenceSet(rguid, self.assembly)
assert result.assembly_id == self.assembly
assert result.guid == rguid
# registering an already registered referenceset, by assembly
reg_result = session.registerReferenceSet(r2guid, self.assembly)
assert reg_result.assembly_id == self.assembly
assert reg_result.guid == rguid
            # registering an already registered referenceset, by guid
            reg_result = session.registerReferenceSet(rguid, 'hg19')
assert reg_result.assembly_id == self.assembly
assert reg_result.guid == rguid
# verify only one reference set exists
num_rs = session.session.query(models.ReferenceSet).count()
assert num_rs == 1
# negative test
with pytest.raises(ValueError) as exec_info:
session.registerReferenceSet(fguid, "negAssembly")
assert "DataError" in str(exec_info.value)
def test_registerWorkspace(self):
wguid = str(uuid.uuid4())
fguid = wguid + "-longer"
with dbimport.DBImport(self.DBURI).getSession() as session:
# new with path consistency
result = session.registerWorkspace(wguid, self.workspace + "/")
assert result.name == self.workspace
assert result.name[-1] != "/"
# registered
reg_result = session.registerWorkspace(wguid, self.workspace)
assert reg_result.name == self.workspace
assert reg_result.guid == wguid
# negative
with pytest.raises(ValueError) as exec_info:
neg_result = session.registerWorkspace(
fguid, "negative/workspace")
assert "DataError" in str(exec_info.value)
def test_registerIndividual(self):
iguid = str(uuid.uuid4())
i2guid = str(uuid.uuid4())
fguid = iguid + "-longer"
with dbimport.DBImport(self.DBURI).getSession() as session:
# new
result = session.registerIndividual(iguid, name="testIndividual")
assert result.name == "testIndividual"
assert result.guid == iguid
# not new, by name
reg_result = session.registerIndividual(
i2guid, name="testIndividual")
assert reg_result.name == "testIndividual"
assert reg_result.guid == iguid
            # not new, by guid
result = session.registerIndividual(iguid, name="DO76")
assert result.name == "testIndividual"
assert result.guid == iguid
# negative
with pytest.raises(ValueError) as exec_info:
not_none = session.registerIndividual(fguid, name="None")
assert 'DataError' in str(exec_info.value)
with pytest.raises(ValueError) as exec_info:
neg_result = session.registerIndividual(
str(uuid.uuid4()), name=None)
assert 'IntegrityError' in str(exec_info.value)
@classmethod
def tearDownClass(self):
drop_database(self.DBURI)
class TestDBImportLevel1(TestCase):
"""
Tests all MetaDB Child Object Registration
Level 1 of the MetaDB relationship hierarchy
"""
@pytest.fixture(autouse=True)
def set_tmpdir(self, tmpdir):
self.tmpdir = tmpdir
@classmethod
def setUpClass(self):
self.DBURI = "postgresql+psycopg2://@:5432/dbimport"
if database_exists(self.DBURI):
drop_database(self.DBURI)
create_database(self.DBURI)
engine = create_engine(self.DBURI)
models.bind_engine(engine)
self.references = {"1": 249250621, "2": 243199373, "3": 198022430}
self.array = "test"
# referenceset, workspace, and individual registration previously
# tested
with dbimport.DBImport(self.DBURI).getSession() as session:
self.referenceset = session.registerReferenceSet(
str(uuid.uuid4()), "testAssembly", references=self.references)
self.workspace = session.registerWorkspace(
str(uuid.uuid4()), "/test/dbimport/workspace")
self.individual = session.registerIndividual(
str(uuid.uuid4()), name="testIndividual")
def test_sortReferences(self):
# get a pyvcf contig dict
with open("test/data/header.vcf", "r") as f:
header = f.read()
vcfile = self.tmpdir.join("test1.vcf")
test1_header = list(test_header)
test1_header.append(sampleN)
test1_header.append(sampleT)
with open(str(vcfile), 'w') as inVCF:
inVCF.write("{0}".format(header))
inVCF.write("{0}\n".format("\t".join(test1_header)))
inVCF.write("{0}\n".format("\t".join(test_data)))
with open(str(vcfile), 'r') as f:
r = vcf.Reader(f)
self.contigs = r.contigs
result1 = dbimport.sortReferences(
{"1": 249250621, "2": 243199373, "3": 198022430, "MT": 1000})
assert result1.get('MT', None) is None
assert result1.get('M', None) is not None
result2 = dbimport.sortReferences(self.contigs)
assert result2 == self.contigs
def test_registerReference(self):
rguid = str(uuid.uuid4())
r2guid = str(uuid.uuid4())
mguid = str(uuid.uuid4())
fguid = rguid + "-longer"
with dbimport.DBImport(self.DBURI).getSession() as session:
            # register with references
result = session.registerReferenceSet(
self.referenceset.guid,
self.referenceset.assembly_id,
references=self.references)
assert result.assembly_id == self.referenceset.assembly_id
assert result.guid == self.referenceset.guid
# validate all references were registered
refs = session.session.query(
models.Reference).filter(
models.Reference.reference_set_id == result.id,
models.Reference.name.in_(
self.references.keys())).all()
assert len(refs) == len(self.references)
# register a single reference
result2 = session.registerReference(
r2guid, self.referenceset.id, "4", 191154276)
assert result2.name == "4"
assert result2.length == 191154276
# validate MT -> M
resultM = session.registerReference(
mguid, self.referenceset.id, "MT", 16571)
assert resultM.name == "M"
assert resultM.guid == mguid
# validate return of registered reference given reference set id
# and reference name
reg_result = session.registerReference(
str(uuid.uuid4()), self.referenceset.id, "M", 16000)
assert reg_result.name == "M"
assert reg_result.guid == mguid
# negative
with pytest.raises(ValueError) as exec_info:
neg_result = session.registerReference(
fguid, self.referenceset.id, "5", 180915260)
assert "DataError" in str(exec_info.value)
def test_registerReferenceOffset(self):
with dbimport.DBImport(self.DBURI).getSession() as session:
# get chr4 and validate offset
# separate test (since sqlalchemy won't update until previous test
# finishes)
result = session.registerReference(
str(uuid.uuid4()), self.referenceset.id, "4", 191154276)
assert result.tiledb_column_offset == 759519666
def test_registerDBArray(self):
aguid = str(uuid.uuid4())
fguid = aguid + "-longer"
with dbimport.DBImport(self.DBURI).getSession() as session:
# new array
result = session.registerDBArray(
aguid, self.referenceset.id, self.workspace.id, self.array)
assert result.name == self.array
assert result.guid == aguid
# registered array
reg_result = session.registerDBArray(
str(uuid.uuid4()), self.referenceset.id, self.workspace.id, self.array)
assert reg_result.name == self.array
assert reg_result.guid == aguid
# negative
with pytest.raises(ValueError) as exec_info:
neg_result = session.registerDBArray(
fguid, self.referenceset.id, self.workspace.id, "negative")
assert "DataError" in str(exec_info.value)
def test_registerSample(self):
sguid = str(uuid.uuid4())
s2guid = str(uuid.uuid4())
fguid = sguid + "-longer"
figuid = str(uuid.uuid4()) + "-longer"
with dbimport.DBImport(self.DBURI).getSession() as session:
# new sample
result = session.registerSample(
sguid, self.individual.guid, name="testSample")
assert result.guid == sguid
assert result.name == "testSample"
# registered, get by individual id and name
reg_result = session.registerSample(
s2guid, self.individual.guid, name="testSample")
assert reg_result.guid == sguid
assert reg_result.name == "testSample"
# registered, get by guid
reg2_result = session.registerSample(
sguid, self.individual.guid, name="alreadyreg")
assert reg2_result.guid == sguid
assert reg2_result.name == "testSample"
# negative
with pytest.raises(ValueError) as exec_info:
neg_result = session.registerSample(
fguid, self.individual.guid, name="negative")
assert "DataError" in str(exec_info.value)
# negative individual guid
with pytest.raises(ValueError) as exec_info:
negI_result = session.registerSample(
sguid, figuid, name="negativeIndividual")
assert "Invalid Individual Id" in str(exec_info.value)
def test_registerVariantSet(self):
vguid = str(uuid.uuid4())
fguid = vguid + "-longer"
with dbimport.DBImport(self.DBURI).getSession() as session:
# new
result = session.registerVariantSet(
vguid, self.referenceset.id, "Dataset")
assert result.guid == vguid
assert result.reference_set_id == 1
# registered, return by guid
reg_result = session.registerVariantSet(
vguid, self.referenceset.id, "AlreadyReg")
assert reg_result.guid == vguid
assert reg_result.dataset_id == "Dataset"
# negative
with pytest.raises(ValueError) as exec_info:
result = session.registerVariantSet(
fguid, self.referenceset.id, "negative")
assert "DataError" in str(exec_info.value)
# negative referenceset
with pytest.raises(ValueError) as exec_info:
result = session.registerVariantSet(vguid, -1, "negative_rs")
assert "must be registered" in str(exec_info.value)
@classmethod
def tearDownClass(self):
drop_database(self.DBURI)
class TestDBImportLevel2(TestCase):
"""
Test Registration of CallSet - dependent on most other models
"""
@classmethod
def setUpClass(self):
self.DBURI = "postgresql+psycopg2://@:5432/dbimport"
if database_exists(self.DBURI):
drop_database(self.DBURI)
create_database(self.DBURI)
engine = create_engine(self.DBURI)
models.bind_engine(engine)
# all these function have been previously tested
with dbimport.DBImport(self.DBURI).getSession() as session:
self.referenceset = session.registerReferenceSet(
str(uuid.uuid4()), "testAssembly")
self.workspace = session.registerWorkspace(
str(uuid.uuid4()), "/test/dbimport/workspace")
self.array = session.registerDBArray(
str(uuid.uuid4()), self.referenceset.id, self.workspace.id, "test")
self.array2 = session.registerDBArray(
str(uuid.uuid4()), self.referenceset.id, self.workspace.id, "test2")
self.variantset = session.registerVariantSet(
str(uuid.uuid4()), self.referenceset.id, "Dataset")
self.variantset2 = session.registerVariantSet(
str(uuid.uuid4()), self.referenceset.id, "Dataset2")
self.variantset3 = session.registerVariantSet(
str(uuid.uuid4()), self.referenceset.id, "Dataset3")
self.variantset4 = session.registerVariantSet(
str(uuid.uuid4()), self.referenceset.id, "Dataset4")
self.individual = session.registerIndividual(
str(uuid.uuid4()), name="testIndividual")
self.source = session.registerSample(
str(uuid.uuid4()), self.individual.guid, name="source")
self.target = session.registerSample(
str(uuid.uuid4()), self.individual.guid, name="target")
def test_registerCallSet(self):
cguid = str(uuid.uuid4())
c2guid = str(uuid.uuid4())
fguid = cguid + "-longer"
with dbimport.DBImport(self.DBURI).getSession() as session:
# no variant set
with pytest.raises(ValueError) as exec_info:
result = session.registerCallSet(
cguid,
self.source.guid,
self.target.guid,
self.workspace.name,
self.array.name,
name="CallSet1")
assert "requires association" in str(exec_info)
# register new, validate addition of that variant set
result = session.registerCallSet(
cguid,
self.source.guid,
self.target.guid,
self.workspace.name,
self.array.name,
name="CallSet1",
variant_set_ids=[
self.variantset.id])
assert result.variant_sets[0].id == self.variantset.id
assert result.guid == cguid
assert result.name == "CallSet1"
# add a variant set to callset, validation of no duplication of
# variant set addition
result_vs = session.registerCallSet(
cguid,
self.source.guid,
self.target.guid,
self.workspace.name,
self.array.name,
name="CallSet1",
variant_set_ids=[
self.variantset.id])
assert result_vs.variant_sets[0].id == self.variantset.id
assert len(result_vs.variant_sets) == 1
assert result_vs.guid == cguid
            # attempting to add an unregistered variant set should fail
with pytest.raises(ValueError) as exec_info:
result_vs3 = session.registerCallSet(
cguid,
self.source.guid,
self.target.guid,
self.workspace.name,
self.array.name,
name="CallSet1",
variant_set_ids=[5])
assert "VariantSet must be registered" in str(exec_info.value)
# already registered, return based on (name, source sample, target
# sample)
reg_result = session.registerCallSet(
c2guid,
self.source.guid,
self.target.guid,
self.workspace.name,
self.array.name,
name="CallSet1")
assert reg_result.guid == cguid
assert reg_result.name == "CallSet1"
# already registered, return based on guid
reg2_result = session.registerCallSet(
cguid,
self.source.guid,
self.target.guid,
self.workspace.name,
self.array.name,
name="CallSetRegistered")
assert reg2_result.guid == cguid
assert reg2_result.name == "CallSet1"
# validate workspace remove ending "/"
reg_ws_result = session.registerCallSet(
cguid,
self.source.guid,
self.target.guid,
self.workspace.name + "/",
self.array.name,
name="CallSet1")
assert reg_ws_result.guid == cguid
assert reg_ws_result.name == "CallSet1"
# check db array reg error
with pytest.raises(ValueError) as exec_info:
reg_a_result = session.registerCallSet(
cguid,
self.source.guid,
self.target.guid,
self.workspace.name,
"notregistered",
name="CallSet1")
assert "DBArray needs to exist" in str(exec_info.value)
# register callset to a new array
reg_a2_result = session.registerCallSet(
cguid,
self.source.guid,
self.target.guid,
self.workspace.name,
self.array2.name,
name="CallSet1")
assert reg_a2_result.guid == cguid
assert reg_a2_result.name == "CallSet1"
# validate callset registration to that array
            ca = session.session.query(models.CallSetToDBArrayAssociation).filter(
                models.CallSetToDBArrayAssociation.db_array_id == self.array2.id).all()
assert len(ca) == 1
assert ca[0].callset_id == reg_a2_result.id
# negative callset registration
with pytest.raises(ValueError) as exec_info:
neg_result = session.registerCallSet(
fguid,
self.source.guid,
self.target.guid,
self.workspace.name,
self.array.name,
name="negative",
variant_set_ids=[
self.variantset.id])
assert "DataError" in str(exec_info.value)
# negative callset registration - invalid sample_guid
with pytest.raises(ValueError) as exec_info:
                negs_result = session.registerCallSet(
                    str(uuid.uuid4()), str(uuid.uuid4()), str(uuid.uuid4()),
                    self.workspace.name, self.array.name,
                    name="negative", variant_set_ids=[self.variantset.id])
assert "Issue retrieving Sample info" in str(exec_info.value)
# test update variant set list
vsl_result = session.updateVariantSetList(
[1, 2, 3], callset=result)
assert [x.id for x in vsl_result.variant_sets] == [
self.variantset.id, self.variantset2.id, self.variantset3.id]
# test update variant sets through registerCallSet
c_result = session.registerCallSet(
cguid,
self.source.guid,
self.target.guid,
self.workspace.name,
self.array.name,
name="CallSet1",
variant_set_ids=[
self.variantset3.id,
self.variantset4.id])
assert self.variantset4.id == c_result.variant_sets[-1].id
@classmethod
def tearDownClass(self):
drop_database(self.DBURI)
|
'''
Lab 18: Peaks and Valleys
Define the following functions:
peaks - Returns the indices of peaks. A peak has a lower number on both the left and the right.
valleys - Returns the indices of 'valleys'. A valley is a number with a higher number on both the left and the right.
peaks_and_valleys - uses the above two functions to compile a single list of the peaks and valleys in order of appearance in the original data.
output:
>>> data = [1, 2, 3, 4, 5, 6, 7, 6, 5, 4, 5, 6, 7, 8, 9, 8, 7, 6, 7, 8, 9]
>>> peaks(data)
[6, 14]
>>> valleys(data)
[9, 17]
>>> peaks_and_valleys(data)
[6, 9, 14, 17]
'''
data = [1, 2, 3, 4, 5, 6, 7, 6, 5, 4, 5, 6, 7, 8, 9, 8, 7, 6, 7, 8, 9]
def find_peaks(data):
    ls_peak = []
    for n in range(2, len(data)):
        # A peak is strictly higher than both of its neighbours
        if data[n-2] < data[n-1] and data[n-1] > data[n]:
            ls_peak.append(n-1)
    return ls_peak
def find_valleys(data):
    ls_valley = []
    for n in range(2, len(data)):
        # A valley is strictly lower than both of its neighbours
        if data[n-2] > data[n-1] and data[n-1] < data[n]:
            ls_valley.append(n-1)
    return ls_valley
def peak_valley_combine(ls_peak, ls_valley):
    # Merge and sort so indices appear in order of appearance in the data
    return sorted(ls_peak + ls_valley)
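# --- Added sanity check: the helpers should reproduce the expected output
# quoted in the lab description above. ---
assert find_peaks(data) == [6, 14]
assert find_valleys(data) == [9, 17]
assert peak_valley_combine(find_peaks(data), find_valleys(data)) == [6, 9, 14, 17]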
def main():
print("Welcome to peak-and-valley app!\n")
print(f"Peaks: {find_peaks(data)}")
print(f"Valleys: {find_valleys(data)}")
    combine = peak_valley_combine(find_peaks(data), find_valleys(data))
    print(f"Peaks and Valleys: {combine}")
if __name__ == '__main__':
    main() |
import numpy as np
import cv2
# This function adds 1 to the areas passed in the list of boxes to heatmap.
def add_heat(heatmap, bbox_list):
# Iterate through list of bboxes
for box in bbox_list:
# Add += 1 for all pixels inside each bbox
heatmap[box[0][1]:box[1][1], box[0][0]:box[1][0]] += 1
return heatmap
# Function to apply a threshold below which the value will be set to 0.
def apply_threshold(heatmap, threshold):
# Zero out pixels below the threshold
heatmap[heatmap <= threshold] = 0
return heatmap
# Function to plot the boxes containing cars obtained with the label function.
def draw_labeled_bboxes(img, labels):
# Iterate through all detected cars
for car_number in range(1, labels[1]+1):
# Find pixels with each car_number label value
nonzero = (labels[0] == car_number).nonzero()
# Identify x and y values of those pixels
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
# Define a bounding box based on min/max x and y
centroidx = (np.max(nonzerox) + np.min(nonzerox))//2
centroidy = (np.max(nonzeroy) + np.min(nonzeroy))//2
size = 60
bbox = (centroidx-size, centroidy-size), (centroidx+size, centroidy+size)
# bbox = ((np.min(nonzerox), np.min(nonzeroy)), (np.max(nonzerox), np.max(nonzeroy)))
# Draw the box on the image
cv2.rectangle(img, bbox[0], bbox[1], (0,0,255), 6)
# Return the image
return img
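# --- Added usage sketch: how the three helpers above are typically chained.
# `label` is scipy.ndimage.label (the "label function" mentioned above); the
# blank image and the two overlapping detection windows are hypothetical
# inputs. ---
if __name__ == '__main__':
    from scipy.ndimage import label
    img = np.zeros((480, 640, 3), dtype=np.uint8)
    hot_windows = [((100, 100), (200, 200)), ((150, 150), (250, 250))]
    # Accumulate detections, then drop areas seen only once (likely noise)
    heat = add_heat(np.zeros(img.shape[:2], dtype=np.float32), hot_windows)
    heat = apply_threshold(heat, 1)
    labels = label(heat)  # (labeled array, number of blobs)
    annotated = draw_labeled_bboxes(img.copy(), labels)
    print('blobs found:', labels[1])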
|
class Locator:
#Login_Page
click_Sign = '//*[@id="header"]/div[2]/div/div/nav/div[1]/a'
Email_addrees = '//*[@id="email_create"]'
Create_an_account = '//*[@id="SubmitCreate"]/span'
Mesenger01 = '//div[@id="create_account_error"]//li'
    # Register an account
Title = '//*[@id="id_gender1"]'
First_name = '//*[@id="customer_firstname"]'
Last_name = '//*[@id="customer_lastname"]'
Email = '//*[@id="email"]'
Password = '//*[@id="passwd"]'
Days = '//*[@id="days"]'
Click_days = '//*[@id="days"]/option[5]'
Months = '//*[@id="months"]'
Click_Months = '//*[@id="months"]/option[3]'
Year = '//*[@id="years"]'
Click_year = '//*[@id="years"]/option[25]'
compaly = '//*[@id="company"]'
adrres = '//*[@id="address1"]'
adrres02 = '//*[@id="address2"]'
City = '//*[@id="city"]'
State = '//*[@id="id_state"]'
State01 = '//*[@id="id_state"]/option[3]'
zipcode = '//*[@id="postcode"]'
Additional = '//*[@id="other"]'
Home_phone = '//*[@id="phone"]'
Mobile_phone = '//*[@id="phone_mobile"]'
Register = '//*[@id="submitAccount"]/span'
My_account = '//*[@id="columns"]/div[1]/span[2]'
#Homepage
Newsletter = '//*[@id="newsletter-input"]'
Enter_email = '//*[@id="newsletter_block_left"]/div/form/div/button'
contact = '//*[@id="contact-link"]/a'
newlet = '//*[@id="columns"]/p'
#test04
Subjec_Heading = '//*[@id="id_contact"]'
Subjec_Heading01 ='//*[@id="id_contact"]/option[2]'
Home_email_add = '//*[@id="email"]'
Order_reference = '//*[@id="id_order"]'
Attach = '//*[@id="fileUpload"]'
Send = '//*[@id="submitMessage"]'
Mensenger = '//*[@id="message"]'
th = '//*[@id="center_column"]/p'
    # Search locators
Search = '//*[@id="search_query_top"]'
text_ser = '//div[@class="ac_results"]/ul/li'
But_ton_sea = '/html/body/div/div[1]/header/div[3]/div/div/div[2]/form/button'
li_sp = '//ul[@class="product_list grid row"]//a[@class="product-name"]'
so_sp = '//h1//span[2][@class="heading-counter"]'
gia_sp = '//div[@class = "product-image-container"]//*[@class = "price product-price"]'
Mensenger_sear = '//div[@class = "center_column col-xs-12 col-sm-9"]/p'
    # Purchase locators
Click_sp = '//*[@id="homefeatured"]/li[1]'
Add_to_card = '//p[@class = "buttons_bottom_block no-print"]/button'
Continue_shopping = '//div//span[@class = "continue btn btn-default button exclusive-medium"]'
text_sl_sp = '//input[@class="text"]'
Button_check_out = '//a[@class = "btn btn-default button button-medium"]'
Button_check_out_summary = '//p//a[@title = "Proceed to checkout"]'
id_email = '//*[@id="email"]'
pass_word = '//*[@id="passwd"]'
By_sign = '//*[@id="SubmitLogin"]'
Button_address_checkout ='//*[@id="center_column"]/form/p/button'
Button_i_agree = '//div[@class = "checker hover"]'
Button_shiping_checkout = '//*[@id="form"]/p/button'
Mesenger_by = '//div[@class = "fancybox-wrap fancybox-desktop fancybox-type-html fancybox-opened"]'
    # Add to cart
Add_to_card_01 = '//*[@id="homefeatured"]/li[1]/div/div[2]/div[2]/a[1]'
Continue_01 ='//*[@id="layer_cart"]/div[1]/div[2]/div[4]/span'
Add_to_card_02 = '//ul[@class = "product_list grid row homefeatured tab-pane active"]//a[@class= "button ajax_add_to_cart_button btn btn-default"]'
Continue_02 = '//span[@class = "continue btn btn-default button exclusive-medium"]'
Button_cart = '//*[@id="header"]/div[3]/div/div/div[3]/div/a'
input_sl ='//td//input[@class = "cart_quantity_input form-control grey"]'
Button_clear_sp = '//td[@class = "cart_delete text-center"]'
Button_check_out_by = '//*[@id="center_column"]/p[2]/a[1]'
Button_4 = '//*[@id="uniform-cgv"]'
Button_check_out_shipping = '//button[@class = "button btn btn-default standard-checkout button-medium"]'
Get_text_sp = '//td[@class ="cart_total"]//span[@class = "price"]'
Get_text_sum_sp ='//*[@id="total_product"]'
# get_20 = ' //div[@class = "content_price"]//span[@class = "price-percent-reduction"]'
Click_img = '//ul[@class = "product_list grid row homefeatured tab-pane active"]//img[@class= "replace-2x img-responsive"]'
Img2 = '//img[@id = "bigpic"]'
img_im = '//*[@id="product"]/div[2]/div/div[1]'
Img_003 = '//*[@id="bigpic"]'
Text_img_1 = '//div[@class = "fancybox-title fancybox-title-float-wrap"]'
text_img_2 = '/html/body/div/div[2]/div/div[3]/div/div/div/div[3]/h1'
Quantity: str = '//input[@class = "text"]'
But_toncheckkk = '//a[@class = "btn btn-default button button-medium"]'
Null_quantity = '//p[@class = "fancybox-error"]'
click_pay_py = '//a[@class = "cheque"]'
But_ton_confirn = '//button[@class = "button btn btn-default button-medium"]'
check_merseger = '//p[@class = "alert alert-success"]'
cloed_mull = '//a[@class = "fancybox-item fancybox-close"]'
But_ton_add_to = '//button[@class = "exclusive"]'
Button_twitter = '//button[@class = "btn btn-default btn-twitter"]'
Cm_sign = '//div[@class = "header_user_info"]'
Button_go_home = '//ul[@class = "footer_links clearfix"]//a[@class = "btn btn-default button button-small"]'
But_ton_cmt = '//a[@id = "new_comment_tab_btn"]'
Cmt_title = '//input[@id = "comment_title"]'
Cmt_cmt = '//textarea[@id = "content"]'
Button_send = '//button[@id = "submitNewMessage"]'
Mer_rv = '//div[@class="fancybox-inner"]'
Send_to_fr = '//*[@id="send_friend_button"]'
input_name = '//input[@id = "friend_name"]'
input_email = '//input[@id = "friend_email"]'
Button_send_email = '//button[@id = "sendEmail"]'
mer_sento_fr = '//div[@class = "fancybox-inner"]'
input_id_tw = '//input[@name = "session[username_or_email]"]'
input_password = '//input[@name = "session[password]"]'
click_buton_tw = '//*[@id="layers"]/div[2]/div/div/div/div/div/div[2]/div[2]/div/div[2]/div[1]' |
from keras.layers import Conv2D, MaxPooling2D, Dense, Flatten, Dropout, Activation, BatchNormalization
from keras.models import Sequential
def create_model():
model = Sequential()
model.add(Conv2D(filters=16, kernel_size=3, input_shape=(150, 150, 3)))
model.add(BatchNormalization())
model.add(Activation('relu'))
model.add(Conv2D(filters=16, kernel_size=3))
model.add(MaxPooling2D(pool_size=2))
model.add(BatchNormalization())
model.add(Activation('relu'))
model.add(Conv2D(filters=32, kernel_size=3))
model.add(BatchNormalization())
model.add(Activation('relu'))
model.add(Conv2D(filters=32, kernel_size=3))
model.add(BatchNormalization())
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=2))
model.add(Conv2D(filters=64, kernel_size=3))
model.add(BatchNormalization())
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=2))
model.add(Flatten())
model.add(Dense(512, activation='relu'))
model.add(Dropout(0.3))
model.add(Dense(512, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(136))
    # Summarize the model
    model.summary()
    return model
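# --- Added usage sketch: compiling the network for training. The 136-unit
# output layer suggests 68 (x, y) facial keypoints (an assumption), hence a
# mean-squared-error regression loss. ---
if __name__ == '__main__':
    model = create_model()
    model.compile(optimizer='adam', loss='mse')
|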
# Function that checks whether a number is prime
from math import sqrt  # alternatively: import math and use math.sqrt
def AsalKontrol(n):
    # Returns True if the given value is prime, False otherwise.
    if n < 2:
        return False  # 0 and 1 are not prime
    bolen = 2
    kok = sqrt(n)
    while bolen <= kok:
        if n % bolen == 0:  # check the remainder
            return False  # divides evenly, so not prime
        bolen += 1  # move on to the next divisor
    return True  # no divisor found up to sqrt(n), so the number is prime
#print(AsalKontrol(5)) |
startTime = [9,8,7,6,5,4,3,2,1]
endTime = [10,10,10,10,10,10,10,10,10]
queryTime = 5
def busyStudent(startTime, endTime, queryTime):
    count = 0
    for i, j in zip(startTime, endTime):
        if i <= queryTime and j >= queryTime:
            count += 1
    return count
print(busyStudent(startTime, endTime, queryTime)) |
import numpy as np
import joblib  # sklearn.externals.joblib was removed in scikit-learn 0.23; use joblib directly
import time
#constants
isolated_models_dir='/home/kharesp/learned_models'
models_dir='/home/kharesp/learned_models/600_runs'
deadline=1000
threshold=100
class StaticPlacement:
def __init__(self,max_topics,max_brokers):
self.max_topics=max_topics
self.max_brokers=max_brokers
self.load_isolated_topic_model()
self.load_models()
def load_models(self):
self.scalers={}
self.models={}
for topic in range(2,self.max_topics+1,1):
self.scalers[topic]=joblib.load('%s/%d_colocation_scaler.pkl'%(models_dir,topic))
self.models[topic]=joblib.load('%s/%d_colocation.pkl'%(models_dir,topic))
def load_isolated_topic_model(self):
self.isolated_topic_scaler=joblib.load('%s/isolated_topic_scaler.pkl'%(isolated_models_dir))
self.isolated_topic_poly=joblib.load('%s/isolated_topic_poly.pkl'%(isolated_models_dir))
self.isolated_topic_model=joblib.load('%s/isolated_topic_model.pkl'%(isolated_models_dir))
def combinations(self,placement,topic):
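        # (Added note) For each broker column, drop `topic` into the first
        # open (NaN) slot to form one candidate placement; once a completely
        # empty broker has been tried, stop, since any further empty brokers
        # would yield symmetric candidates.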
result=[]
for b in range(self.max_brokers):
current_placement=placement.copy()
open_positions= np.argwhere(np.isnan(current_placement[:,b]))
if np.size(open_positions,0)==0:
continue
if np.size(open_positions,0)==self.max_topics:
current_placement[0,b]=topic
result.append(current_placement.copy())
break
current_placement[open_positions[0],b]=topic
result.append(current_placement.copy())
return result
def k_colocation_feasibility(self,existing_topics):
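        # (Added note) Uses the learned co-location model for this group size
        # to predict each topic's latency from its own period/rate and the
        # aggregate load of its co-located topics; the grouping is feasible
        # only if every predicted latency stays below deadline - threshold.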
if len(existing_topics)==1:
return True
scaler=self.scalers[len(existing_topics)]
model=self.models[len(existing_topics)]
for current_topic in existing_topics:
curr_name,curr_interval,curr_rate= current_topic.split(':')
f_p= int(curr_interval)
f_r= int(curr_rate)
bkg_load=0
bkg_sum_rate=0
bkg_sum_processing=0
for background_topic in existing_topics:
bkg_name,bkg_interval,bkg_rate= background_topic.split(':')
if (curr_name == bkg_name):
continue
bkg_load+=int(bkg_interval) * int(bkg_rate)/1000.0
bkg_sum_rate+=int(bkg_rate)
bkg_sum_processing+=int(bkg_interval)
X=[[f_p,f_r,bkg_load,bkg_sum_processing,bkg_sum_rate]]
predicted_latency=np.exp(model.predict(scaler.transform(X)))
if (predicted_latency[0] > (deadline - threshold)):
return False
return True
def check_feasibility(self,placement):
for b in range(self.max_brokers):
topics_on_broker=placement[:,b]
#filter out nan
topics_on_broker=topics_on_broker[~np.isnan(topics_on_broker)]
topic_list=['t%d:%d:%d'%(t,self.topic_intervals['t%d'%(t)],
self.topic_rates['t%d'%(t)]) for t in topics_on_broker]
if len(topic_list)>0:
if not self.k_colocation_feasibility(topic_list):
return False
return True
def number_of_brokers(self,placement):
number_of_brokers=0
for b in range(self.max_brokers):
topics_on_broker=placement[:,b]
#filter out nan
topics_on_broker=topics_on_broker[~np.isnan(topics_on_broker)]
if len(topics_on_broker) > 0:
number_of_brokers+=1
return number_of_brokers
def latencies(self,existing_topics):
l=[]
if len(existing_topics)==1:
name,interval,rate= existing_topics[0].split(':')
X=[[int(interval),int(rate)]]
scaled_features= self.isolated_topic_scaler.transform(X)
polynomial_features= self.isolated_topic_poly.transform(scaled_features)
predicted_latency=np.exp(self.isolated_topic_model.predict(polynomial_features))[0][0]
l.append(predicted_latency)
else:
scaler=self.scalers[len(existing_topics)]
model=self.models[len(existing_topics)]
for current_topic in existing_topics:
curr_name,curr_interval,curr_rate= current_topic.split(':')
f_p= int(curr_interval)
f_r= int(curr_rate)
bkg_load=0
bkg_sum_rate=0
bkg_sum_processing=0
for background_topic in existing_topics:
bkg_name,bkg_interval,bkg_rate= background_topic.split(':')
if (curr_name == bkg_name):
continue
bkg_load+=int(bkg_interval) * int(bkg_rate)/1000.0
bkg_sum_rate+=int(bkg_rate)
bkg_sum_processing+=int(bkg_interval)
X=[[f_p,f_r,bkg_load,bkg_sum_processing,bkg_sum_rate]]
predicted_latency=np.exp(model.predict(scaler.transform(X)))
l.append(predicted_latency)
while len(l) < self.max_topics:
l.append(0)
return l
def latency_matrix(self,placement):
l= np.zeros((self.max_topics,self.max_brokers))
for b in range(self.max_brokers):
topics_on_broker=placement[:,b]
#filter out nan
topics_on_broker= topics_on_broker[~np.isnan(topics_on_broker)]
if len(topics_on_broker)>0:
topic_list= ['t%d:%d:%d'%(t,self.topic_intervals['t%d'%(t)],
self.topic_rates['t%d'%(t)]) for t in topics_on_broker]
latency_list= self.latencies(topic_list)
l[:,b]=latency_list
return l
def average_latency(self,placement):
return np.mean(self.latency_matrix(placement))
def predictions(self,placement):
topic_predictions={}
lm= self.latency_matrix(placement)
for b in range(self.max_brokers):
for t in range(self.max_topics):
topic= placement[t,b]
latency= lm[t,b]
if not np.isnan(topic):
topic_predictions['t%d'%(topic)]=latency
predictions_str=''
for i in range(len(topic_predictions)):
predictions_str+='t%d:%f,'%(i+1,topic_predictions['t%d'%(i+1)])
return predictions_str.rstrip(',')
def placement_string(self,placement):
placement_str=''
for b in range(self.max_brokers):
topics_on_broker= placement[:,b]
#filter out nan
topics_on_broker= topics_on_broker[~np.isnan(topics_on_broker)]
if (len(topics_on_broker)>0):
topic_names_on_broker=['t%d'%(v) for v in topics_on_broker]
placement_str+='b%d:%s;'%(b+1,','.join(topic_names_on_broker))
return placement_str.rstrip(';')
def place(self,topic_list):
self.topic_intervals={}
self.topic_rates={}
for tdesc in topic_list:
name,interval,rate= tdesc.split(':')
self.topic_intervals[name]=int(interval)
self.topic_rates[name]=int(rate)
self.placement=np.full((self.max_topics,self.max_brokers),np.nan)
for t in [int(desc.split(':')[0][1:]) for desc in topic_list]:
alternatives=self.combinations(self.placement,t)
print('\nPossible alternatives are:')
for alt in alternatives:
print(alt)
feasible_alternatives= [a for a in alternatives if self.check_feasibility(a)]
#get placement with minimum number of brokers
number_of_brokers=[self.number_of_brokers(p) for p in feasible_alternatives]
self.placement=feasible_alternatives[number_of_brokers.index(min(number_of_brokers))]
print('Chosen alternative:')
print(self.placement)
#get placement with minimum overall average latency
#placement_latencies=[self.average_latency(p) for p in feasible_alternatives]
#self.placement=feasible_alternatives[placement_latencies.index(min(placement_latencies))]
return self.placement
if __name__=="__main__":
tests=1
n=10
s= StaticPlacement(6,5)
with open('/home/kharesp/static_placement/requests/5_below_threshold/n_%d'%(n),'r') as f:
#open('/home/kharesp/static_placement/placement/5_below_threshold/varying_n/mpc/n_%d'%(n),'w') as placement_f,\
#open('/home/kharesp/static_placement/results/5_below_threshold/varying_n/mpc/n_%d'%(n),'w') as results_f,\
#open('/home/kharesp/static_placement/placement/5_below_threshold/varying_n/mpc/prediction_%d'%(n),'w') as predictions_f:
for idx,line in enumerate(f):
if (idx+1) > tests:
break
start_time_milli= int(round(time.time()*1000))
placement= s.place(line.rstrip().split(','))
end_time_milli= int(round(time.time()*1000))
print(placement)
#time_to_find_placement= end_time_milli - start_time_milli
#placement_str= s.placement_string(placement)
#number_of_brokers= s.number_of_brokers(placement)
#predictions_str= s.predictions(placement)
#placement_f.write(placement_str+'\n')
#predictions_f.write(predictions_str+'\n')
#results_f.write('%d,%d\n'%(number_of_brokers,time_to_find_placement))
|
from foodAlertsAPI import (
foodAlertsAPI,
Alert,
Problem,
ProductDetails,
RelatedMedia,
BatchDescription,
Allergen,
Business,
PathogenRisk,
)
from datetime import date
from backports.datetime_fromisoformat import MonkeyPatch
MonkeyPatch.patch_fromisoformat()
f = foodAlertsAPI()
# getAlerts()
def testGetAlertsReturnsAlertList():
alerts = f.getAlerts(10)
assert all(isinstance(a, Alert) for a in alerts)
def testGetAlertsLimitWorks():
alertsSize5 = f.getAlerts(5)
assert len(alertsSize5) == 5
alertsSize1 = f.getAlerts(1)
assert len(alertsSize1) == 1
def testGetAlertsTypeFiltersWork():
alertsAA = f.getAlerts(10, filters={"type": "AA"})
assert all(alert.type() == "AA" for alert in alertsAA)
alertsFAFA = f.getAlerts(10, filters={"type": "FAFA"})
assert all(alert.type() == "FAFA" for alert in alertsFAFA)
alertsPRIN = f.getAlerts(10, filters={"type": "PRIN"})
assert all(alert.type() == "PRIN" for alert in alertsPRIN)
def testGetAlertsSortByWorks():
alerts = f.getAlerts(10, sortBy="created")
assert all(
date.fromisoformat(alerts[i].created())
<= date.fromisoformat(alerts[i + 1].created())
for i in range(len(alerts) - 1)
)
# searchAlerts()
def testSearchAlertsReturnsAlertList():
alerts = f.searchAlerts("milk", limit=5)
assert all(isinstance(a, Alert) for a in alerts)
def testSearchAlertsLimitWorks():
alertsSize5 = f.searchAlerts("milk", limit=5)
assert len(alertsSize5) == 5
alertsSize1 = f.searchAlerts("milk", limit=1)
assert len(alertsSize1) == 1
def testSearchAlertsTypeFiltersWork():
alertsAA = f.searchAlerts("milk", limit=5, filters={"type": "AA"})
assert all(alert.type() == "AA" for alert in alertsAA)
alertsFAFA = f.searchAlerts("eggs", limit=5, filters={"type": "FAFA"})
assert all(alert.type() == "FAFA" for alert in alertsFAFA)
alertsPRIN = f.searchAlerts("meat", limit=5, filters={"type": "PRIN"})
assert all(alert.type() == "PRIN" for alert in alertsPRIN)
def testSearchAlertsSortByWorks():
alerts = f.searchAlerts("milk", limit=5, sortBy="created")
assert all(
date.fromisoformat(alerts[i].created())
<= date.fromisoformat(alerts[i + 1].created())
for i in range(len(alerts) - 1)
)
# getAlert()
def testGetAlertReturnsAlertObject():
alert = f.getAlert("FSA-AA-01-2018")
assert isinstance(alert, Alert)
# Classes
def testAlertObjectsHaveRequiredPropsInDefaultView():
alert = f.getAlerts(1)[0]
# any alert should have these fields in default view
assert alert.id() != None and isinstance(alert.id(), str)
assert alert.title() != None and isinstance(alert.title(), str)
assert alert.created() != None and isinstance(alert.created(), str)
assert alert.modified() != None and isinstance(alert.modified(), str)
assert alert.notation() != None and isinstance(alert.notation(), str)
assert alert.problem() != None and all(
isinstance(a, Problem) for a in alert.problem()
)
assert alert.productDetails() != None and all(
isinstance(a, ProductDetails) for a in alert.productDetails()
)
    assert alert.status() != None and isinstance(alert.status(), str)
    assert alert.type() != None and isinstance(alert.type(), str)
def testAlertObjectsHaveRequiredPropsInFullView():
alert = f.getAlerts(1, detailed=True)[0]
# any alert should have these fields in full view
assert alert.id() != None and isinstance(alert.id(), str)
assert alert.title() != None and isinstance(alert.title(), str)
assert alert.shortTitle() != None and isinstance(alert.shortTitle(), str)
assert alert.description() != None and isinstance(alert.description(), str)
assert alert.created() != None and isinstance(alert.created(), str)
assert alert.modified() != None and isinstance(alert.modified(), str)
assert alert.notation() != None and isinstance(alert.notation(), str)
assert alert.problem() != None and all(
isinstance(a, Problem) for a in alert.problem()
)
assert alert.productDetails() != None and all(
isinstance(a, ProductDetails) for a in alert.productDetails()
)
assert alert.status() != None and isinstance(alert.status(), str)
assert alert.type() != None and isinstance(alert.type(), str)
def testAlertObjectFromGetAlertHasRequiredProps():
alert = f.getAlert("FSA-AA-01-2019")
assert alert.id() != None and isinstance(alert.id(), str)
assert alert.title() != None and isinstance(alert.title(), str)
assert alert.shortTitle() != None and isinstance(alert.shortTitle(), str)
assert alert.description() != None and isinstance(alert.description(), str)
assert alert.created() != None and isinstance(alert.created(), str)
assert alert.modified() != None and isinstance(alert.modified(), str)
assert alert.notation() != None and isinstance(alert.notation(), str)
assert alert.problem() != None and all(
isinstance(a, Problem) for a in alert.problem()
)
assert alert.productDetails() != None and all(
isinstance(a, ProductDetails) for a in alert.productDetails()
)
assert alert.status() != None and isinstance(alert.status(), str)
assert alert.type() != None and isinstance(alert.type(), str)
# ---------- this section checks that the same Alert fetched from different endpoints is parsed correctly ---------- #
def testAlertObjectActionTaken():
alert1 = f.getAlert("FSA-AA-01-2018")
assert alert1.actionTaken() != None
assert isinstance(alert1.actionTaken(), str)
alert2 = f.getAlerts(1, detailed=True, filters={"notation": "FSA-AA-01-2018"})[0]
assert alert2.actionTaken() != None
assert isinstance(alert2.actionTaken(), str)
def testAlertObjectConsumerAdvice():
alert1 = f.getAlert("FSA-AA-01-2018")
assert alert1.consumerAdvice() != None
assert isinstance(alert1.consumerAdvice(), str)
alert2 = f.getAlerts(1, detailed=True, filters={"notation": "FSA-AA-01-2018"})[0]
assert alert2.consumerAdvice() != None
assert isinstance(alert2.consumerAdvice(), str)
def testAlertObjectSMSText():
alert1 = f.getAlert("FSA-AA-01-2018")
assert alert1.SMStext() != None
assert isinstance(alert1.SMStext(), str)
alert2 = f.getAlerts(1, detailed=True, filters={"notation": "FSA-AA-01-2018"})[0]
assert alert2.SMStext() != None
assert isinstance(alert2.SMStext(), str)
def testAlertObjectTwitterText():
alert1 = f.getAlert("FSA-AA-01-2018")
assert alert1.twitterText() != None
assert isinstance(alert1.twitterText(), str)
alert2 = f.getAlerts(1, detailed=True, filters={"notation": "FSA-AA-01-2018"})[0]
assert alert2.twitterText() != None
assert isinstance(alert2.twitterText(), str)
def testAlertObjectAlertURL():
alert1 = f.getAlert("FSA-AA-01-2018")
assert alert1.alertURL() != None
assert isinstance(alert1.alertURL(), str)
alert2 = f.getAlerts(1, detailed=True, filters={"notation": "FSA-AA-01-2018"})[0]
assert alert2.alertURL() != None
assert isinstance(alert2.alertURL(), str)
def testAlertObjectShortURL():
alert1 = f.getAlert("FSA-AA-01-2018")
assert alert1.shortURL() != None
assert isinstance(alert1.shortURL(), str)
alert2 = f.getAlerts(1, detailed=True, filters={"notation": "FSA-AA-01-2018"})[0]
assert alert2.shortURL() != None
assert isinstance(alert2.shortURL(), str)
def testAlertObjectRelatedMedia():
"""getAlert() and getAlerts(detailed=True) return different types for relatedMedia. This test checks
whether the parsing is correct and gives the same result for each case
"""
# this alert is known to have a relatedMedia property
alert1 = f.getAlert("FSA-AA-01-2019")
assert alert1.relatedMedia() != None
assert all(isinstance(a, RelatedMedia) for a in alert1.relatedMedia())
assert all(isinstance(m.id(), str) for m in alert1.relatedMedia())
alert2 = f.getAlerts(1, detailed=True, filters={"notation": "FSA-AA-01-2019"})[0]
assert alert2.relatedMedia() != None
assert all(isinstance(a, RelatedMedia) for a in alert2.relatedMedia())
assert all(isinstance(m.id(), str) for m in alert2.relatedMedia())
def testAlertObjectProblem():
# this alert is known to have the problem property
alert1 = f.getAlert("FSA-AA-01-2019")
assert alert1.problem() != None
assert all(isinstance(p, Problem) for p in alert1.problem())
assert all(isinstance(p.riskStatement(), str) for p in alert1.problem())
alert2 = f.getAlerts(1, detailed=True, filters={"notation": "FSA-AA-01-2019"})[0]
assert alert2.problem() != None
assert all(isinstance(p, Problem) for p in alert2.problem())
assert all(isinstance(p.riskStatement(), str) for p in alert2.problem())
def testAlertObjectProblemAllergen():
alert1 = f.getAlert("FSA-AA-01-2019")
for p in alert1.problem():
for a in p.allergen():
assert isinstance(a, Allergen)
assert isinstance(a.label(), str)
assert isinstance(a.notation(), str)
assert isinstance(a.riskStatement(), str)
alert2 = f.getAlerts(1, detailed=True, filters={"notation": "FSA-AA-01-2019"})[0]
for p in alert2.problem():
for a in p.allergen():
assert isinstance(a, Allergen)
assert isinstance(a.label(), str)
assert isinstance(a.notation(), str)
assert isinstance(a.riskStatement(), str)
def testAlertObjectProblemAllergenLabels():
alert = f.getAlert("FSA-AA-01-2019")
assert all(isinstance(a, str) for a in alert.allergenLabels())
def testAlertObjectProblemPathogenRisk():
alert = f.getAlert("FSA-PRIN-42-2019")
for p in alert.problem():
assert isinstance(p.pathogenRisk(), PathogenRisk)
assert isinstance(p.pathogenRisk().label(), str)
assert isinstance(p.pathogenRisk().notation(), str)
assert isinstance(p.pathogenRisk().riskStatement(), str)
def testAlertObjectProductDetails():
# this alert is known to have the productDetails property
alert1 = f.getAlert("FSA-AA-01-2019")
assert alert1.productDetails() != None
assert all(isinstance(p, ProductDetails) for p in alert1.productDetails())
assert all(isinstance(p.productName(), str) for p in alert1.productDetails())
for p in alert1.productDetails():
for b in p.batchDescription():
assert isinstance(b, BatchDescription)
alert2 = f.getAlerts(1, detailed=True, filters={"notation": "FSA-AA-01-2019"})[0]
assert alert2.productDetails() != None
assert all(isinstance(p, ProductDetails) for p in alert2.productDetails())
assert all(isinstance(p.productName(), str) for p in alert2.productDetails())
for p in alert2.productDetails():
for b in p.batchDescription():
assert isinstance(b, BatchDescription)
def testAlertObjectReportingBusiness():
alert1 = f.getAlert("FSA-PRIN-23-2018")
assert alert1.reportingBusiness() != None
assert isinstance(alert1.reportingBusiness(), str)
alert2 = f.getAlerts(1, detailed=True, filters={"notation": "FSA-PRIN-23-2018"})[0]
assert alert2.reportingBusiness() != None
assert isinstance(alert2.reportingBusiness(), str)
def testAlertObjectOtherBusiness():
alert1 = f.getAlert("FSA-PRIN-23-2018")
assert alert1.otherBusiness() != None
for a in alert1.otherBusiness():
assert isinstance(a, Business)
assert isinstance(a.commonName(), str)
alert2 = f.getAlerts(1, detailed=True, filters={"notation": "FSA-PRIN-23-2018"})[0]
assert alert2.otherBusiness() != None
for a in alert2.otherBusiness():
assert isinstance(a, Business)
assert isinstance(a.commonName(), str)
def testAlertObjectPreviousAlert():
alert1 = f.getAlert("FSA-AA-10-2019-update-1")
assert alert1.previousAlert() != None
assert isinstance(alert1.previousAlert(), str)
alert2 = f.getAlerts(
1, detailed=True, filters={"notation": "FSA-AA-10-2019-update-1"}
)[0]
assert alert2.previousAlert() != None
assert isinstance(alert2.previousAlert(), str)
|
#!/usr/bin/python3
def safe_print_list(my_list=[], x=0):
num = 0
try:
for n in range(0, x):
print("{}".format(my_list[n]), end='')
num += 1
except IndexError:
pass
finally:
print('')
return num
|
'''
Created on 2020-04-07 16:21:24
Last modified on 2020-09-30 11:35:23
@author: L. F. Pereira (lfpereira@fe.up.pt)
Main goal
---------
Show that the square root of a matrix is working properly.
Notes
-----
- scipy could be used in these scripts, but it is not available inside Abaqus.
'''
# imports
# third-party
import numpy as np
from f3dasm.misc.linalg import symmetricize_vector
from f3dasm.misc.linalg import sqrtm
# initialization
a = [1.831, 0.731, 0.985]
b = [2.031, 0.162, 2.021, 1.245, 0, 2.561]
# computations
# get matrices
A = symmetricize_vector(a)
B = symmetricize_vector(b)
# get matrices square roots
A_sqrt = sqrtm(A)
B_sqrt = sqrtm(B)
# print results
print('2D:')
print('A:', A)
print('sqrtm(A):', A_sqrt)
print('verification:', A - np.matmul(A_sqrt, A_sqrt))
print('3D:')
print('B:', B)
print('sqrtm(B):', B_sqrt)
print('verification:', B - np.matmul(B_sqrt, B_sqrt))
|
def extra_long_factorials(in_num):
    """
    Calculate the factorial of a given integer recursively.
    Works up to about in_num = 998; beyond that Python raises
    "RecursionError: maximum recursion depth exceeded in comparison".
    :param in_num: a non-negative integer
    :return: factorial of in_num
    """
    if in_num == 1 or in_num == 0:
        return 1
    return extra_long_factorials(in_num - 1) * in_num
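# --- Added sketch: an iterative variant sidesteps the recursion limit noted
# in the docstring; Python ints are arbitrary precision, so only the call
# depth limits the recursive version, not the size of the result. ---
def extra_long_factorials_iter(in_num):
    result = 1
    for k in range(2, in_num + 1):
        result *= k
    return result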
if __name__ == '__main__':
n = int(input())
result = extra_long_factorials(n)
print(result)
|
""" stažení více souborů najednou:
import wget
soubory = ["https://kodim.cz/czechitas/progr2-python/python-pro-data-1/agregace-a-spojovani/assets/u202.csv",
"https://kodim.cz/czechitas/progr2-python/python-pro-data-1/agregace-a-spojovani/assets/u203.csv",
"https://kodim.cz/czechitas/progr2-python/python-pro-data-1/agregace-a-spojovani/assets/u302.csv"]
for soubor in soubory:
wget.download(soubor)
"""
import pandas
u202 = pandas.read_csv("u202.csv")
# dát true tam, kde chybí hodnota
chybi_true = u202["znamka"].isnull()
# print(chybi_true)
# dat false tam, kde chybí hodnota
chybi_false = u202["znamka"].notnull()
# vytisknout hodnoty, které jsou prázdné
# print(u202[u202["znamka"].isnull()])
# vrátí datový set očištěn od chybějících dat
# print(u202[u202["znamka"].dropna()])
# odstraní všechny sloupce, které obsahují chybějící data
# print(u202[u202["znamka"].dropna(axis=1)])
# nahradí všechna chybějící data a hodnoty hodnotou x
# print(u202[u202["znamka"].fillna(x)])
# jak tabulky spojit:
# nejprve každou tabulku uložíme do DataFrame s tím, že vyhodíme studenty, kteří na maturitu nedorazili
u202 = pandas.read_csv('u202.csv').dropna()
u203 = pandas.read_csv('u203.csv').dropna()
u302 = pandas.read_csv('u302.csv').dropna()
# funkce concat - pozor - rozbije index
maturita1 = pandas.concat([u202, u203, u302])
# když chceme index přepočítat, ale zase nevíme, kdo maturoval v jaké místnosti
maturita2 = pandas.concat([u202, u203, u302], ignore_index=True)
# uložíme si proto do původních tří tabulek nový sloupeček, kdo byl v jaké místnosti
u202["mistnost"] = "u202"
u203["mistnost"] = "u203"
u302["mistnost"] = "u302"
maturita = pandas.concat([u202, u203, u302], ignore_index=True)
# takhle už mám pěknou vyčištěnou tabulku, takže si ji uložím do csv, index ukládat nebudeme, ten si necháme vyrobit automaticky
maturita.to_csv("maturita.csv", index=False)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.AccessReturnQrcodeResult import AccessReturnQrcodeResult
class KoubeiSalesKbassetStuffQrcodereturnSyncResponse(AlipayResponse):
def __init__(self):
super(KoubeiSalesKbassetStuffQrcodereturnSyncResponse, self).__init__()
self._return_qrcode_results = None
@property
def return_qrcode_results(self):
return self._return_qrcode_results
@return_qrcode_results.setter
def return_qrcode_results(self, value):
if isinstance(value, list):
self._return_qrcode_results = list()
for i in value:
if isinstance(i, AccessReturnQrcodeResult):
self._return_qrcode_results.append(i)
else:
self._return_qrcode_results.append(AccessReturnQrcodeResult.from_alipay_dict(i))
def parse_response_content(self, response_content):
response = super(KoubeiSalesKbassetStuffQrcodereturnSyncResponse, self).parse_response_content(response_content)
if 'return_qrcode_results' in response:
self.return_qrcode_results = response['return_qrcode_results']
|
import cv2
import numpy as np
videoF = cv2.VideoCapture('video_roi.mp4')
video = []
# Read every frame; checking ret also covers a failed first read
while videoF.isOpened():
    ret, frame = videoF.read()
    if not ret:
        break
    video.append(frame)
videoF.release()
video = np.array(video)
videoF = []
show = True
while show:
for frame0 in video:
cv2.imshow('Frame',frame0)
key = cv2.waitKey(25) & 0xFF
if key == ord('q'):
show = False
break
elif key == ord(' '):
frame = frame0.copy()
# K-Means
Z = frame0.reshape((-1,3))
Z = np.float32(Z)
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0) #Max iter = 10, epsilon=1.0
K = 40
ret,label,center=cv2.kmeans(Z,K,None,criteria,10,cv2.KMEANS_RANDOM_CENTERS)
center = np.uint8(center)
res = center[label.flatten()]
res2 = res.reshape((frame0.shape))
                # Segmentation
img = cv2.cvtColor(res2, cv2.COLOR_BGR2HSV)
low = (100,110,30)
high = (170,255,180)
mask1 = cv2.inRange(img, low, high)
full_mask = mask1 #+ mask2
full_mask = np.uint8(full_mask)
blur = cv2.medianBlur(full_mask,13)
contours, hierarchy = cv2.findContours(blur.copy(), cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_NONE)
valid_cntrs = []
for cntr in contours:
x,y,w,h = cv2.boundingRect(cntr)
                area = cv2.contourArea(cntr)
                if 200 <= area <= 8000 and 0.5 < h / w < 1.8:
valid_cntrs.append(cntr)
frame = cv2.rectangle(frame,(x,y),(x+w,y+h),(0,255,0),2)
cv2.imshow('Frame',frame)
# cv2.imshow('Mask',full_mask)
# cv2.imshow('Mask-Blur',blur)
# cv2.imshow('K-Means',res2)
# lo_square = np.full((100, 100, 3), low, dtype=np.uint8)
# do_square = np.full((100, 100, 3), high, dtype=np.uint8)
# cv2.imshow('Low',cv2.cvtColor(lo_square, cv2.COLOR_HSV2RGB))
# cv2.imshow('High',cv2.cvtColor(do_square, cv2.COLOR_HSV2RGB))
cv2.waitKey(0)
# cv2.destroyWindow('Mask')
# cv2.destroyWindow('Mask-Blur')
# cv2.destroyWindow('K-Means')
# cv2.destroyWindow('Low')
# cv2.destroyWindow('High')
continue
cv2.destroyAllWindows()
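# Playback controls implemented above: 'q' quits, and the space bar freezes the
# current frame, runs K-Means quantization plus HSV masking on it, draws the
# bounding boxes that pass the area/aspect filters, and waits for any key.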
|
#!/usr/bin/python3
import socket
def receiver(ip, port):
    re_ip = ip
    re_port = port  # fixed on our side
    # create a UDP socket
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # bind the ip and port
    s.bind((re_ip, re_port))
    # receive one datagram (up to 1000 bytes); recvfrom returns (data, addr)
    data = s.recvfrom(1000)
    data = data[0]
    return data
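# Usage sketch (address and port below are assumed example values):
# payload = receiver('0.0.0.0', 5005)   # blocks until one datagram arrives
# print(payload)
# A matching test sender:
# s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# s.sendto(b'hello', ('127.0.0.1', 5005))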
|
"""
Copyright (c) 2016-2020 Keith Sterling http://www.keithsterling.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import yaml
from programy.utils.logging.ylogger import YLogger
from programy.utils.classes.loader import ClassLoader
from programy.storage.entities.store import Store
from programy.storage.stores.nosql.mongo.store.mongostore import MongoStore
from programy.storage.entities.services import ServicesStore
from programy.storage.stores.nosql.mongo.dao.service import Service
from programy.services.config import ServiceConfiguration
from programy.utils.console.console import outputLog
class MongoServiceStore(MongoStore, ServicesStore):
    SERVICES = 'services'
def __init__(self, storage_engine):
MongoStore.__init__(self, storage_engine)
ServicesStore.__init__(self)
def collection_name(self):
        return MongoServiceStore.SERVICES
def _get_entity(self, service_data):
name = service_data.get('name')
category = service_data.get('category')
service_class = service_data.get('service_class')
default_response = service_data.get('default_response')
default_srai = service_data.get('default_srai')
default_aiml = service_data.get('default_aiml')
load_default_aiml = service_data.get('load_default_aiml', True)
type = 'generic'
rest_timeout = None
rest_retries = None
rest_api = None
rest_apikey = None
if 'rest' in service_data:
type = 'rest'
rest_data = service_data.get('rest', None)
if rest_data is not None:
rest_timeout = rest_data.get('timeout')
rest_retries = rest_data.get('retries')
rest_api = rest_data.get('api')
rest_apikey = rest_data.get('apikey')
        return Service(type=type, name=name, category=category, service_class=service_class,
                       default_response=default_response, default_srai=default_srai,
                       default_aiml=default_aiml, load_default_aiml=load_default_aiml,
                       rest_timeout=rest_timeout, rest_retries=rest_retries,
                       rest_api=rest_api, rest_apikey=rest_apikey)
def load(self, collector, name=None):
YLogger.info(self, "Loading %s services from Mongo", self.collection_name())
services = self.get_all_services()
for service in services:
try:
configuration = ServiceConfiguration.from_mongo(service)
collector.add_service(service['name'], ClassLoader.instantiate_class(service['service_class'])(configuration))
except Exception as excep:
YLogger.exception(self, "Failed pre-instantiating Service [%s]", excep, service['service_class'])
def get_all_services(self):
collection = self.collection()
return collection.find()
def upload_from_file(self, filename, fileformat=Store.TEXT_FORMAT, commit=True, verbose=False):
YLogger.info(self, "Uploading %s to Mongo from file [%s]", self.collection_name(), filename)
try:
if self._load_services_from_file(filename, verbose) is True:
return 1, 1
except Exception as excep:
YLogger.exception(self, "Error loading file [%s]", excep, filename)
return 0, 0
def _load_services_from_file(self, filename, verbose):
result = False
        with open(filename, "r") as file:
yaml_data = yaml.load(file, Loader=yaml.FullLoader)
service_data = yaml_data['service']
service = self._get_entity(service_data)
result = self.add_document(service)
if result is True:
if verbose is True:
outputLog(self, "[%s] = [%s]" % (service_data['name'], service_data['service_class']))
return result
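# Illustrative YAML shape accepted by _load_services_from_file (keys taken from
# _get_entity above; the values are made-up examples):
# service:
#   name: weather
#   category: utility
#   service_class: my.package.WeatherService
#   rest:
#     timeout: 10
#     retries: 3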
|
#!/usr/bin/env python
'''
Copyright (c) 2020 RIKEN
All Rights Reserved
See file LICENSE for details.
'''
import os, sys, datetime, multiprocessing
from os.path import abspath,dirname,realpath,join
import log,traceback
# http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def which(program):
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
elif 'PATH' in os.environ:
for path in os.environ['PATH'].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
log.logger.debug('%s found: %s' % (program, exe_file))
return exe_file
return None
def check(args, argv, base):
log.logger.debug('started')
try:
log.logger.debug('command line:\n'+ ' '.join(argv))
# check python version
        version = sys.version_info
        # compare (major, minor) as a tuple so that e.g. Python 4.x also passes
        if version[:2] >= (3, 7):
            log.logger.debug('Python version=%d.%d.%d' % (version[0], version[1], version[2]))
        else:
            log.logger.error('Please use Python 3.7 or later. Your Python is version %d.%d.' % (version[0], version[1]))
            exit(1)
# check cpu num
cpu_num=multiprocessing.cpu_count()
if args.p > cpu_num:
            log.logger.error('Too many threads requested. Please specify a number no greater than your CPU core count. You specified = %d, cpu cores = %d.' % (args.p, cpu_num))
exit(1)
# check PATH
for i in ['samtools', 'bcftools', 'bamCoverage', 'gatk']:
if which(i) is None:
log.logger.error('%s not found in $PATH. Please check %s is installed and added to PATH.' % (i, i))
exit(1)
if args.bwa is True:
if which('bwa') is None:
log.logger.error('bwa not found in $PATH. Please check bwa is installed and added to PATH.')
exit(1)
else:
if which('hisat2') is None:
log.logger.error('hisat2 not found in $PATH. Please check hisat2 is installed and added to PATH.')
exit(1)
if args.denovo is True:
if which('metaspades.py') is None:
log.logger.error('metaspades.py not found in $PATH. Please check metaspades.py is installed and added to PATH.')
exit(1)
# check prerequisite modules
import gzip
import matplotlib
import pysam
# for singularity
if args.singularity is True:
if args.vref is None:
args.vref='/usr/local/bin/integrated_HHV6_recon/lib/hhv6.fa'
if args.vrefindex is None:
args.vrefindex='/usr/local/bin/integrated_HHV6_recon/lib/hisat2_index/hhv6'
if args.picard is None:
args.picard='/usr/local/bin/picard.jar'
else:
if args.vref is None:
args.vref=os.path.join(base, 'lib/hhv6.fa')
if args.vrefindex is None:
args.vrefindex=os.path.join(base, 'lib/hisat2_index/hhv6')
# check file paths
if args.picard is None:
log.logger.error('Please specify path to picard.jar with `-picard` flag.')
exit(1)
else:
if os.path.exists(args.picard) is False:
log.logger.error('%s not found. Please check %s is installed.' % (args.picard, args.picard))
exit(1)
if args.bwa is True:
if os.path.exists(args.vrefindex +'.bwt') is False:
log.logger.error('bwa index (%s) was not found.' % args.vrefindex)
exit(1)
else:
if os.path.exists(args.vrefindex +'.1.ht2') is False:
log.logger.error('hisat2 index (%s) was not found.' % args.vrefindex)
exit(1)
if args.c is not None:
if args.fa is None:
log.logger.error('Reference genome was not specified.')
exit(1)
elif os.path.exists(args.fa) is False:
log.logger.error('Reference genome (%s) was not found.' % args.fa)
exit(1)
if args.alignmentin is False and args.fastqin is False and args.ONT_bamin is False:
log.logger.error('Please specify either -alignmentin or -fastqin or -ONT_bamin.')
exit(1)
elif (args.alignmentin is True and args.fastqin is True) or (args.alignmentin is True and args.ONT_bamin is True) or (args.ONT_bamin is True and args.fastqin is True):
log.logger.error('Please specify either -alignmentin or -fastqin or -ONT_bamin.')
exit(1)
elif args.alignmentin is True:
if args.c is not None:
if os.path.exists(args.c) is False:
log.logger.error('CRAM file (%s) was not found.' % args.c)
exit(1)
elif args.b is not None:
if os.path.exists(args.b) is False:
log.logger.error('BAM file (%s) was not found.' % args.b)
exit(1)
else:
log.logger.error('Please specify BAM or CRAM file (-b or -c option).')
exit(1)
elif args.fastqin is True:
if args.fq1 is None:
log.logger.error('Please specify unmapped file.')
exit(1)
elif os.path.exists(args.fq1) is False:
log.logger.error('Unmapped file (%s) was not found.' % args.fq1)
exit(1)
if args.single is False:
if args.fq2 is None:
log.logger.error('Please specify unmapped file.')
exit(1)
elif os.path.exists(args.fq2) is False:
log.logger.error('Unmapped file (%s) was not found.' % args.fq2)
exit(1)
if args.all_discordant is True:
log.logger.info('"-all_discordant" option is only available when "-alignmentin" option was specified. Will ignore this and proceed anyway.')
elif args.ONT_bamin is True:
import pysam
if args.ONT_bam is None:
log.logger.error('Please specify BAM file.')
exit(1)
if os.path.exists(args.ONT_bam) is False:
log.logger.error('BAM file (%s) was not found.' % args.ONT_bam)
exit(1)
if os.path.exists(args.ONT_bam + '.bai') is False:
log.logger.warning('BAM index was not found. Making...')
                pysam.index('-@', str(args.p), args.ONT_bam)  # samtools options passed as separate tokens
except SystemExit:
log.logger.debug('\n'+ traceback.format_exc())
exit(1)
except:
log.logger.error('\n'+ traceback.format_exc())
exit(1)
def check_quick_check(args, argv, base):
log.logger.debug('started')
try:
log.logger.debug('command line:\n'+ ' '.join(argv))
# check python version
        version = sys.version_info
        # compare (major, minor) as a tuple so that e.g. Python 4.x also passes
        if version[:2] >= (3, 7):
            log.logger.debug('Python version=%d.%d.%d' % (version[0], version[1], version[2]))
        else:
            log.logger.error('Please use Python 3.7 or later. Your Python is version %d.%d.' % (version[0], version[1]))
            exit(1)
# check cpu num
cpu_num=multiprocessing.cpu_count()
if args.p > cpu_num:
            log.logger.error('Too many threads requested. Please specify a number no greater than your CPU core count. You specified = %d, cpu cores = %d.' % (args.p, cpu_num))
exit(1)
# check PATH
for i in ['samtools', 'hisat2']:
if which(i) is None:
log.logger.error('%s not found in $PATH. Please check %s is installed and added to PATH.' % (i, i))
exit(1)
# check prerequisite modules
import gzip
import pysam
# for singularity
if args.singularity is True:
args.vref='/usr/local/bin/integrated_HHV6_recon/lib/hhv6.fa'
args.vrefindex='/usr/local/bin/integrated_HHV6_recon/lib/hisat2_index/hhv6'
else:
args.vref=os.path.join(base, 'lib/hhv6.fa')
args.vrefindex=os.path.join(base, 'lib/hisat2_index/hhv6')
# check file paths
if os.path.exists(args.vrefindex +'.1.ht2') is False:
log.logger.error('hisat2 index (%s) was not found.' % args.vrefindex)
exit(1)
if args.c is not None or args.cl is not None:
if args.fa is None:
log.logger.error('Reference genome was not specified.')
exit(1)
elif os.path.exists(args.fa) is False:
log.logger.error('Reference genome (%s) was not found.' % args.fa)
exit(1)
infiles=[]
for infile in [args.b, args.c, args.bl, args.cl]:
if infile is not None:
infiles.append(infile)
if len(infiles) >= 2:
log.logger.error('Too many input files. Please select one: %s' % str(infiles))
exit(1)
elif len(infiles) == 0:
log.logger.error('Please specify BAM or CRAM file to be analyzed.')
exit(1)
if args.b is not None:
if os.path.exists(args.b) is False:
log.logger.error('Input file (%s) was not found.' % args.b)
exit(1)
elif args.c is not None:
if os.path.exists(args.c) is False:
log.logger.error('Input file (%s) was not found.' % args.c)
exit(1)
elif args.bl is not None:
if os.path.exists(args.bl) is False:
log.logger.error('Input file (%s) was not found.' % args.bl)
exit(1)
elif args.cl is not None:
if os.path.exists(args.cl) is False:
log.logger.error('Input file (%s) was not found.' % args.cl)
exit(1)
except SystemExit:
log.logger.debug('\n'+ traceback.format_exc())
exit(1)
except:
log.logger.error('\n'+ traceback.format_exc())
exit(1)
|
import numpy as np
from enum import IntEnum
from operator import add
from gym_minigrid.roomgrid import RoomGrid, WorldObj, fill_coords, point_in_circle, point_in_rect, COLORS, spaces
from gym_minigrid.register import register
from gym_minigrid.minigrid import OBJECT_TO_IDX, COLOR_TO_IDX
class Reward(WorldObj):
def __init__(self):
super().__init__('reward', 'green')
self.reward = 0
self.steps = 0
self.item_type = 'reward'
def update(self, reward):
if reward != 0:
event = [('reward', str(int(reward)))]
else:
event = list()
self.reward = reward
return event
def render(self, img):
fill_coords(img, point_in_rect(0, 1, 0, 1), self.reward * COLORS[self.color])
def encode(self):
return OBJECT_TO_IDX[self.type], COLOR_TO_IDX[self.color], self.reward
class Demon(WorldObj):
def __init__(self, name, env, color: str = 'grey', movement_type: str = 'random'):
super().__init__('demon', color)
self.name = name
self.env = env
self.movement_type = movement_type
self.dir = 0
def render(self, img):
fill_coords(img, point_in_circle(0.5, 0.5, 0.31), COLORS[self.color])
def can_contain(self):
return False
def toggle(self, env, pos):
return False
def can_pickup(self):
return False
def can_pickup_content(self):
return False
def move(self):
old_pos = self.cur_pos
if self.movement_type == 'random':
self._move_random()
elif self.movement_type == 'vertical':
self._move_vertical()
else:
raise ValueError('Movement type not recognized ({})'.format(self.movement_type))
if np.any(old_pos != self.cur_pos):
return [(self.name, 'move')]
else:
return list()
def _move_random(self):
old_pos = self.cur_pos
top = tuple(map(add, old_pos, (-1, -1)))
try:
self.env.place_obj(self, top=top, size=(3, 3), max_tries=100)
self.env.grid.set(*old_pos, None)
except:
pass
def _move_vertical(self):
old_pos = self.cur_pos
if self.dir == 0:
new_pos = old_pos[0], old_pos[1] + 1
if self.env.grid.get(*new_pos) is None and np.any(new_pos != self.env.agent_pos):
self.env.grid.set(*new_pos, self)
self.env.grid.set(*old_pos, None)
self.init_pos = new_pos
self.cur_pos = new_pos
else:
self.dir = 1
elif self.dir == 1:
new_pos = old_pos[0], old_pos[1] - 1
if self.env.grid.get(*new_pos) is None and np.any(new_pos != self.env.agent_pos):
self.env.grid.set(*new_pos, self)
self.env.grid.set(*old_pos, None)
self.init_pos = new_pos
self.cur_pos = new_pos
else:
self.dir = 0
class Item(WorldObj):
def __init__(self, name, item_type, color, ons, enables, items, enabled, on_delay, enable_delay):
super().__init__(item_type, color)
self.name = name
self.item_type = item_type
self.color = color
self.on_delay = on_delay
self.enable_delay = enable_delay
self.ons = ons
self.enables = enables
self.items = items
self.enabled = enabled
self.steps = 0
self.enable_steps = 0
self.is_on = False
def enable(self):
if self.enable_steps > 0 or self.enabled:
return
self.enable_steps = self.enable_delay + 1
def turn_on(self):
if self.steps > 0 or not self.enabled or self.is_on:
return
self.steps = self.on_delay
def update(self):
events = list()
if self.steps == 1:
self.is_on = not self.is_on
events.append((self.name, 'on' if self.is_on else 'off'))
for item_name in self.enables:
self.items[item_name].enable()
for item_name in self.ons:
self.items[item_name].turn_on()
self.steps = max(0, self.steps - 1)
if self.enable_steps == 1:
self.enabled = not self.enabled
events.append((self.name, 'enabled' if self.enabled else 'disabled'))
self.enable_steps = max(0, self.enable_steps - 1)
return events
def __str__(self):
return 'name: {}, color: {}, on: {}, enabled: {}'.format(self.name, self.color, self.is_on, self.enabled)
class Button(Item):
def __init__(self, name, color, ons, enables, items, enabled, on_delay, enable_delay):
super().__init__(name, 'button', color, ons, enables, items, enabled, on_delay, enable_delay)
def render(self, img):
if not self.enabled:
fill_coords(img, point_in_rect(0, 1, 0, 1), COLORS[self.color])
fill_coords(img, point_in_rect(0.25, 0.75, 0.25, 0.75), COLORS['grey'])
elif self.is_on:
fill_coords(img, point_in_rect(0, 1, 0, 1), COLORS[self.color])
fill_coords(img, point_in_rect(0.25, 0.75, 0.25, 0.75), COLORS['white'])
else:
fill_coords(img, point_in_rect(0, 1, 0, 1), COLORS[self.color])
def toggle(self, env, pos):
self.turn_on()
return True
def encode(self):
if self.enabled:
code = 1 if not self.is_on else 2
else:
code = 0
return OBJECT_TO_IDX[self.type], COLOR_TO_IDX[self.color], code
class Light(Item):
def __init__(self, name, color, ons, enables, items, enabled, on_delay, enable_delay):
super().__init__(name, 'light', color, ons, enables, items, enabled, on_delay, enable_delay)
def render(self, img):
if self.is_on:
fill_coords(img, point_in_circle(0.5, 0.5, 0.51), COLORS[self.color])
fill_coords(img, point_in_circle(0.48, 0.48, 0.31), COLORS['white'])
else:
fill_coords(img, point_in_circle(0.5, 0.5, 0.51), COLORS[self.color])
def encode(self):
return OBJECT_TO_IDX[self.type], COLOR_TO_IDX[self.color], self.is_on
class LightRoom(RoomGrid):
def __init__(self, config, mode, on_delay=1, enable_delay=1, num_demons=0, demon_movement='random', seed=None,
place_random=False, max_steps=None):
self.place_random = place_random
self.enable_delay = enable_delay
self.on_delay = on_delay
self.config = config
self.num_demons = num_demons
self.demon_movement = demon_movement
self.mode = mode
self.items = None
self.demons = None
self.room_size = 6
self.num_cols = 1
self.num_rows = 1
self.max_steps = max_steps or 2 * self.room_size ** 2
class Actions(IntEnum):
left = 0
right = 1
forward = 2
toggle = 3
super().__init__(
num_rows=self.num_rows,
num_cols=self.num_cols,
room_size=self.room_size,
max_steps=self.max_steps,
seed=seed,
)
# Action enumeration for this environment
self.actions = Actions
# Actions are discrete integer values
self.action_space = spaces.Discrete(len(self.actions))
def _gen_grid(self, width, height):
super()._gen_grid(width, height)
def allow_press(env, pos):
positions = [(pos[0] + 1, pos[1]), (pos[0] - 1, pos[1]), (pos[0], pos[1] + 1), (pos[0], pos[1] - 1)]
busy_spots = np.sum([int(env.grid.get(x, y) is not None) for x, y in positions])
return busy_spots > 2
self.wait_step = False
self.episode_events = list()
self.items = dict()
self.demons = list()
reward = Reward()
self.grid.set(0, 0, reward)
self.items['reward'] = reward
lights = 0
buttons = 0
for i, (name, (cls, color, ons, enables)) in enumerate(self.config.items()):
enabled = np.all([name not in item[3] for item in self.config.values()])
item = cls(name, color, ons, enables, self.items, enabled, self.on_delay if cls is Light else 1, self.enable_delay)
self.items[name] = item
if self.place_random:
self.place_obj(item, reject_fn=allow_press)
else:
if cls is Light:
self.place_obj(item, (1, lights + 1), (1, 1))
lights += 1
else:
self.place_obj(item, (4, buttons + 1), (1, 1))
buttons += 1
for i in range(self.num_demons):
demon = Demon('demon_{}'.format(i + 1), self, movement_type=self.demon_movement)
self.demons.append(demon)
self.place_obj(demon)
self.place_agent(0, 0)
self.mission = 'do nothing'
def explain_button_enabled(self, event):
# B-1 OR L-1
# Due to button on or light on
explanation_1 = -self.enable_delay - self.on_delay
explanation_2 = -self.enable_delay
return [(explanation_1, explanation_2)]
def explain_light_on(self, event):
if self.mode == '1A':
light_number = int(event[0][-1]) - 1
return [tuple(-self.on_delay * (i + 1) for i in range(light_number))]
# Light is caused by pressing button.
return [(-self.on_delay,)]
def explain_reward(self):
if self.mode == 'Indep':
            # find_last returns a 1-tuple like (-k,); take its single element
            last_light_1 = self.find_last(('light_1', 'on'))[0]
            last_light_2 = self.find_last(('light_2', 'on'))[0]
            last_light_3 = self.find_last(('light_3', 'on'))[0]
return [
(last_light_1, last_light_1 - self.on_delay),
(last_light_2, last_light_2 - self.on_delay),
(last_light_3, last_light_3 - self.on_delay)
]
return [(-1,)]
def find_last(self, event):
if [event] not in self.episode_events[:-1]:
return list()
return (-1 * self.episode_events[:-1][::-1].index([event]) - 1,)
def get_relations(self):
# each phenomenon is explained as [[x OR y OR...] AND [x OR y OR ...] AND ...]
phenomenon = self.episode_events[-1]
if not phenomenon or phenomenon[0] in [('agent', 'move'), ('button_1', 'on')]:
last_move = self.find_last(('agent', 'move'))
relations = [(0,)]
if last_move:
relations.append(last_move)
# TODO: if button already pressed, nothing happens... add link to (button_x on) if agent in front of button_x and toggles
elif phenomenon[0] in [('button_2', 'on'), ('button_3', 'on')]:
relations = [(0,)]
last_move = self.find_last(('agent', 'move'))
if last_move:
relations.append(last_move)
if self.mode == 'Chain':
button_enabled = self.find_last(('button_{}'.format(phenomenon[0][0][-1]), 'enabled'))
relations.append(button_enabled)
elif phenomenon[0] in [('light_1', 'on'), ('light_2', 'on'), ('light_3', 'on')]:
relations = self.explain_light_on(phenomenon[0])
elif phenomenon[0] == ('reward', '1'):
relations = self.explain_reward()
elif phenomenon[0] in [('button_2', 'enabled'), ('button_3', 'enabled')]:
relations = self.explain_button_enabled(phenomenon[0])
else:
raise ValueError('Not recognized event {}'.format(phenomenon))
return relations
def step(self, action):
prev_agent_state = (self.agent_dir, self.agent_pos[0], self.agent_pos[1])
# Freeze agent when a change in the obs will happen
if np.any([item.steps == 1 or getattr(item, 'enable_steps', 0) == 1 for item in self.items.values()]):
done = self.step_count >= self.max_steps
info = dict(step=self.step_count, events=list())
reward = 0
elif self.wait_step:
info = dict(step=self.step_count, events=list())
reward = 1.
done = True
else:
obs, reward, done, info = super().step(action)
info['events'] = list()
for item in self.items.values():
if not isinstance(item, Reward):
info['events'].extend(item.update())
for demon in self.demons:
info['events'].extend(demon.move())
if self.agent_dir != prev_agent_state[0]:
info['events'].append(('agent', 'move'))
if self.agent_pos[0] != prev_agent_state[1] or self.agent_pos[1] != prev_agent_state[2]:
info['events'].append(('agent', 'move'))
if np.all([light.is_on for light in self.items.values() if light.item_type == 'light']):
self.wait_step = True
# reward = 1.
# done = True
# Needs to be updated after reward is computed!
for item in self.items.values():
if isinstance(item, Reward):
info['events'].extend(item.update(reward))
self.episode_events.append(info['events'])
info['relations'] = self.get_relations()
to_index = [('nothing', 'nothing'), ('agent', 'move'), ('button_1', 'on'), ('button_2', 'on'), ('button_3', 'on'), ('button_2', 'enabled'), ('button_3', 'enabled'), ('light_1', 'on'), ('light_2', 'on'), ('light_3', 'on'), ('reward', '1')]
info['event_index'] = to_index.index(info['events'][0]) if info['events'] else 0
obs = self.gen_obs()
return obs, reward, done, info
CONFIG_CHAIN = {
'light_1': (Light, 'yellow', [], ['button_2']),
'light_2': (Light, 'orange', [], ['button_3']),
'light_3': (Light, 'green', [], []),
'button_1': (Button, 'yellow', ['light_1'], []),
'button_2': (Button, 'orange', ['light_2'], []),
'button_3': (Button, 'green', ['light_3'], [])
}
CONFIG_1A = {
'light_1': (Light, 'yellow', ['light_2'], []),
'light_2': (Light, 'orange', ['light_3'], []),
'light_3': (Light, 'green', [], []),
'button_1': (Button, 'yellow', ['light_1'], []),
'button_2': (Button, 'orange', [], []),
'button_3': (Button, 'green', [], [])
}
CONFIG_INDEP = {
'light_1': (Light, 'yellow', [], []),
'light_2': (Light, 'orange', [], []),
'light_3': (Light, 'green', [], []),
'button_1': (Button, 'yellow', ['light_1'], []),
'button_2': (Button, 'orange', ['light_2'], []),
'button_3': (Button, 'green', ['light_3'], [])
}
CONFIG = {'Chain': CONFIG_CHAIN, '1A': CONFIG_1A, 'Indep': CONFIG_INDEP}
class LightRoomEnv(LightRoom):
def __init__(self, mode, **kwargs):
super().__init__(config=CONFIG[mode], mode=mode, **kwargs)
class LightEnableRoomDelayChainD1VEnv(LightRoom):
def __init__(self, seed=None):
super().__init__(CONFIG_CHAIN, mode='Chain', on_delay=7, num_demons=1, demon_movement='vertical', seed=seed)
class LightEnableRoomDelay1AD1VEnv(LightRoom):
def __init__(self, seed=None):
super().__init__(CONFIG_1A, mode='1A', on_delay=7, num_demons=1, demon_movement='vertical', seed=seed)
class LightEnableRoomDelayIndepD1VEnv(LightRoom):
def __init__(self, seed=None):
super().__init__(CONFIG_INDEP, mode='Indep', on_delay=7, num_demons=1, demon_movement='vertical', seed=seed)
class LightEnableRoomDelayChainEnv(LightRoom):
def __init__(self, seed=None):
super().__init__(CONFIG_CHAIN, mode='Chain', on_delay=7, num_demons=0, seed=seed)
class LightEnableRoomDelay1AEnv(LightRoom):
def __init__(self, seed=None):
super().__init__(CONFIG_1A, mode='1A', on_delay=7, num_demons=0, seed=seed)
class LightEnableRoomDelayIndepEnv(LightRoom):
def __init__(self, seed=None):
super().__init__(CONFIG_INDEP, mode='Indep', on_delay=7, num_demons=0, seed=seed)
register(id='MiniGrid-LightRoomEnv-v0', entry_point='gym_minigrid.envs:LightRoomEnv')
register(id='MiniGrid-LightEnableDelayChainRoom-v0', entry_point='gym_minigrid.envs:LightEnableRoomDelayChainEnv')
register(id='MiniGrid-LightEnableDelayChainD1VRoom-v0', entry_point='gym_minigrid.envs:LightEnableRoomDelayChainD1VEnv')
register(id='MiniGrid-LightEnableDelay1ARoom-v0', entry_point='gym_minigrid.envs:LightEnableRoomDelay1AEnv')
register(id='MiniGrid-LightEnableDelay1AD1VRoom-v0', entry_point='gym_minigrid.envs:LightEnableRoomDelay1AD1VEnv')
register(id='MiniGrid-LightEnableDelayIndepRoom-v0', entry_point='gym_minigrid.envs:LightEnableRoomDelayIndepEnv')
register(id='MiniGrid-LightEnableDelayIndepD1VRoom-v0', entry_point='gym_minigrid.envs:LightEnableRoomDelayIndepD1VEnv')
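# Hedged usage sketch (assumes gym is importable alongside gym_minigrid; the
# id is one of those registered above, and actions come from the Actions enum):
# import gym
# env = gym.make('MiniGrid-LightEnableDelayChainRoom-v0')
# obs = env.reset()
# obs, reward, done, info = env.step(env.actions.toggle)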
|
from datetime import datetime
import numpy as np #for numerical computations like log,exp,sqrt etc
import pandas as pd #for reading & storing data, pre-processing
import matplotlib.pylab as plt #for visualization
#for making sure matplotlib plots are generated in Jupyter notebook itself
from statsmodels.tsa.stattools import adfuller
import pmdarima as pm
df = pd.read_csv('laundry.csv')
df = df.set_index('timestamp')  # set_index returns a new DataFrame; keep it
print(df)
df.describe()
plt.plot(df.weight)
plt.show()
df_weight_mean = df.weight.rolling(window = 20).mean() # Use rolling to see moving average
df_weight_mean.plot() # plotting
plt.show()
# Perform Augmented Dickey–Fuller test:
print('Results of Dickey Fuller Test:')
dftest = adfuller(df.weight, autolag='AIC')
dfoutput = pd.Series(dftest[0:4], index=['Test Statistic', 'p-value', '#Lags Used', 'Number of Observations Used'])
for key, value in dftest[4].items():
dfoutput['Critical Value (%s)' % key] = value
print(dfoutput)
# if the p-value is less than 0.05, the series is stationary
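# e.g. a programmatic version of that rule (0.05 is the conventional threshold):
# is_stationary = dfoutput['p-value'] < 0.05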
model = pm.auto_arima(df.weight, start_p=1, start_q=1, # auto ARIMA function to find the best fit for p, d and q
test='adf', # use adftest to find optimal 'd'
max_p=10, max_q=10, # maximum p and q
m=1, # frequency of series
d=None, # let model determine 'd'
                      seasonal=True,       # with m=1, no seasonal terms are actually fitted
start_P=1,
D=0,
trace=True,
error_action='ignore',
suppress_warnings=True,
stepwise=True)
print(model.summary())
model.plot_diagnostics(figsize=(7,5))
plt.show()
n_periods = 100
fc, confint = model.predict(n_periods=n_periods, return_conf_int=True)
index_of_fc = np.arange(len(df.weight), len(df.weight)+n_periods)
# make series for plotting purpose
fc_series = pd.Series(fc, index=index_of_fc)
lower_series = pd.Series(confint[:, 0], index=index_of_fc)
upper_series = pd.Series(confint[:, 1], index=index_of_fc)
# Plot
plt.plot(df.weight)
plt.plot(fc_series, color='darkgreen')
plt.fill_between(lower_series.index,
lower_series,
upper_series,
color='k', alpha=.15)
plt.title("Final Forecast of WWW Usage")
plt.show() |
import os
import pytest
import pandas as pd
from shclassify import (load_observations, load_model,
DATA_DIR, generate_fake_observations, calculate_prob,
choose_class_from_probs)
from shclassify.core import MODEL_FILES
def test_load_observations_raises_if_bad_path():
with pytest.raises(OSError):
load_observations('badpath')
def test_load_observations(path_to_observations_file):
df = load_observations(path_to_observations_file)
assert type(df) is pd.DataFrame
@pytest.mark.parametrize('model_filename', MODEL_FILES)
def test_load_model(model_filename):
model_path = os.path.join(DATA_DIR, model_filename)
df = load_model(model_path)
assert type(df) is pd.DataFrame
def test_generate_data():
fake = generate_fake_observations(2)
assert type(fake) is pd.DataFrame
assert fake.shape[1] == len(set(fake.columns.values))
assert '(Intercept)' not in list(fake.columns.values)
@pytest.mark.parametrize('model_filename', MODEL_FILES)
def test_calculate_prob(model_filename):
obs = generate_fake_observations(1000)
model_path = os.path.join(DATA_DIR, model_filename)
model = load_model(model_path)
probs = calculate_prob(obs, model)
assert type(probs) is pd.DataFrame
# result has shape of N_OBS, N_CLASSES
assert probs.shape == (obs.shape[0],model.shape[1])
@pytest.mark.xfail(reason='Thin wrapper around binary and multinomial choice')
def test_choose_class_from_probs():
assert False
def test_calculate_prob_raises_if_var_missing_from_observations():
# test is crucial - otherwise calculate_prob will return NaN for all obs
assert False
|
import immlib
import pefile
import os
import traceback
from collections import namedtuple
ExportedEntry = namedtuple("ExportedEntry", ["name", "address"])
class TargetDLL:
def __init__(self, dll):
self.filename = os.path.basename(dll.lower())
if not self.filename.endswith("dll"):
self.filename = self.filename + ".dll"
self.exporteds = []
self._resolve_exports()
def _resolve_exports(self):
dbg = immlib.Debugger()
mod = dbg.getModule(self.filename)
if not mod:
raise Exception("{} is not loaded".format(self.filename))
path = mod.getPath()
pe = pefile.PE(path)
dll_name = os.path.splitext(os.path.basename(path))[0]
for e in pe.DIRECTORY_ENTRY_EXPORT.symbols:
name = "{}.{}".format(dll_name, e.name)
addr = dbg.getAddress(name)
if addr == -1:
dbg.log("failed to get address of {}".format(name))
continue
self.exporteds.append(ExportedEntry(name, addr))
class DLLHook(immlib.LogBpHook):
def __init__(self, exp):
immlib.LogBpHook.__init__(self)
self.__exp = exp
self.__dbg = immlib.Debugger()
def hook(self):
self.add(self.__exp.name, self.__exp.address)
        self.__dbg.setComment(self.__exp.address, self.__exp.name)
self.__dbg.log(
"hooked 0x{:08x} {}".format(self.__exp.address, self.__exp.name),
address = self.__exp.address)
def run(self, regs):
eip = regs["EIP"]
self.__dbg.log("{0:08x} {1}".format(eip, self.__exp.name), address = eip)
def usage(dbg):
dbg.log("!hookexports <target dll> (hook exported functions from <target dll>)")
def main(args):
dbg = immlib.Debugger()
dll = args[0]
try:
target_dll = TargetDLL(dll)
for exp in target_dll.exporteds:
hooker = DLLHook(exp)
hooker.hook()
except:
for line in traceback.format_exc().split("\n"):
dbg.log(line)
return "NG!"
return ""
|
import json
SECRETS_FILE = 'secrets.json'
# Creating an instance of the Bittrex class with our secrets.json file
with open(SECRETS_FILE) as secrets_file:
    secrets = json.load(secrets_file)
# no explicit close needed: the with-block closes the file
# Setting up Twilio for SMS alerts
account_sid = secrets['twilio_key']
auth_token = secrets['twilio_secret']
tg_bot_token = secrets.get('tg_bot_token', '')
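# Sketch of how these values are typically consumed (assumes the twilio
# package is installed; not part of this file):
# from twilio.rest import Client
# client = Client(account_sid, auth_token)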
|
# Generated by Django 3.1.7 on 2021-05-30 19:09
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('WebApp', '0006_remove_empdetails_designation'),
]
operations = [
migrations.RenameField(
model_name='project',
old_name='project_name',
new_name='project',
),
migrations.RenameField(
model_name='project',
old_name='teamname',
new_name='team_name',
),
]
|
import timeit
def t1():
li = []
for i in range(10000):
li.append(i)
def t2():
li = []
for i in range(10000):
        li = li + [i]
def t3():
li = [i for i in range(10000)]
def t4():
li = list(range(10000))
def t5():
li = []
for i in range(10000):
li.insert(0, i)
time1 = timeit.Timer('t1()', 'from __main__ import t1')
time2 = timeit.Timer('t2()', 'from __main__ import t2')
time3 = timeit.Timer('t3()', 'from __main__ import t3')
time4 = timeit.Timer('t4()', 'from __main__ import t4')
time5 = timeit.Timer('t5()', 'from __main__ import t5')
print('append:%s' % time1.timeit(number=100)) # 0.127835
print('[]+[]:%s' % time2.timeit(number=100)) # 19.7184115
print('list comprehension:%s' % time3.timeit(number=100)) # 0.04835290000000114
print('list():%s' % time4.timeit(number=100)) # 0.026356299999999777
print('insert:%s' % time5.timeit(number=100)) # 2.6034982000000007
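# Why the gap: list.append is amortized O(1); "li = li + [i]" copies the whole
# list every iteration (O(n) per step, O(n^2) overall); insert(0, i) shifts
# every existing element right, which is also O(n) per call.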
|
class Solution:
def simplifyPath(self, path: str) -> str:
stack = []
for portion in path.split('/'):
if portion == '..':
if stack:
stack.pop()
elif portion and portion != '.':
stack.append(portion)
return '/' + '/'.join(stack) |
from xml.sax.handler import ContentHandler
from dateutil.parser import parse as parse_datetime
from cve_search.lib.Toolkit import toStringFormattedCPE
class CVEHandler(ContentHandler):
def __init__(self):
self.cves = []
self.inCVSSElem = 0
self.inSUMMElem = 0
self.inDTElem = 0
self.inPUBElem = 0
self.inAccessvElem = 0
self.inAccesscElem = 0
self.inAccessaElem = 0
self.inCVSSgenElem = 0
self.inImpactiElem = 0
self.inImpactcElem = 0
self.inImpactaElem = 0
def startElement(self, name, attrs):
if name == 'entry':
self.cves.append({'id': attrs.get('id'), 'references': [], 'vulnerable_configuration': [], 'vulnerable_configuration_cpe_2_2':[]})
self.ref = attrs.get('id')
elif name == 'cpe-lang:fact-ref':
self.cves[-1]['vulnerable_configuration'].append(toStringFormattedCPE(attrs.get('name')))
self.cves[-1]['vulnerable_configuration_cpe_2_2'].append(attrs.get('name'))
elif name == 'cvss:score':
self.inCVSSElem = 1
self.CVSS = ""
elif name == 'cvss:access-vector':
self.inAccessvElem = 1
self.accessv = ""
elif name == 'cvss:access-complexity':
self.inAccesscElem = 1
self.accessc = ""
elif name == 'cvss:authentication':
self.inAccessaElem = 1
self.accessa = ""
elif name == 'cvss:confidentiality-impact':
self.inImpactcElem = 1
self.impactc = ""
elif name == 'cvss:integrity-impact':
self.inImpactiElem = 1
self.impacti = ""
elif name == 'cvss:availability-impact':
self.inImpactaElem = 1
self.impacta = ""
elif name == 'cvss:generated-on-datetime':
self.inCVSSgenElem = 1
self.cvssgen = ""
elif name == 'vuln:summary':
self.inSUMMElem = 1
self.SUMM = ""
elif name == 'vuln:published-datetime':
self.inDTElem = 1
self.DT = ""
elif name == 'vuln:last-modified-datetime':
self.inPUBElem = 1
self.PUB = ""
elif name == 'vuln:reference':
self.cves[-1]['references'].append(attrs.get('href'))
elif name == 'vuln:cwe':
self.cves[-1]['cwe'] = attrs.get('id')
def characters(self, ch):
if self.inCVSSElem:
self.CVSS += ch
if self.inSUMMElem:
self.SUMM += ch
if self.inDTElem:
self.DT += ch
if self.inPUBElem:
self.PUB += ch
if self.inAccessvElem:
self.accessv += ch
if self.inAccesscElem:
self.accessc += ch
if self.inAccessaElem:
self.accessa += ch
if self.inCVSSgenElem:
self.cvssgen += ch
if self.inImpactiElem:
self.impacti += ch
if self.inImpactcElem:
self.impactc += ch
if self.inImpactaElem:
self.impacta += ch
def endElement(self, name):
if name == 'cvss:score':
self.inCVSSElem = 0
self.cves[-1]['cvss'] = self.CVSS
if name == 'cvss:access-vector':
self.inAccessvElem = 0
if 'access' not in self.cves[-1]:
self.cves[-1]['access'] = {}
self.cves[-1]['access']['vector'] = self.accessv
if name == 'cvss:access-complexity':
self.inAccesscElem = 0
if 'access' not in self.cves[-1]:
self.cves[-1]['access'] = {}
self.cves[-1]['access']['complexity'] = self.accessc
if name == 'cvss:authentication':
self.inAccessaElem = 0
if 'access' not in self.cves[-1]:
self.cves[-1]['access'] = {}
self.cves[-1]['access']['authentication'] = self.accessa
if name == 'cvss:confidentiality-impact':
self.inImpactcElem = 0
if 'impact' not in self.cves[-1]:
self.cves[-1]['impact'] = {}
self.cves[-1]['impact']['confidentiality'] = self.impactc
if name == 'cvss:integrity-impact':
self.inImpactiElem = 0
if 'impact' not in self.cves[-1]:
self.cves[-1]['impact'] = {}
self.cves[-1]['impact']['integrity'] = self.impacti
if name == 'cvss:availability-impact':
self.inImpactaElem = 0
if 'impact' not in self.cves[-1]:
self.cves[-1]['impact'] = {}
self.cves[-1]['impact']['availability'] = self.impacta
if name == 'cvss:generated-on-datetime':
self.inCVSSgenElem = 0
self.cves[-1]['cvss-time'] = parse_datetime(self.cvssgen, ignoretz=True)
if name == 'vuln:summary':
self.inSUMMElem = 0
self.cves[-1]['summary'] = self.SUMM
if name == 'vuln:published-datetime':
self.inDTElem = 0
self.cves[-1]['Published'] = parse_datetime(self.DT, ignoretz=True)
if name == 'vuln:last-modified-datetime':
self.inPUBElem = 0
self.cves[-1]['Modified'] = parse_datetime(self.PUB, ignoretz=True)
|
from django.contrib import admin
from .models import MainMenu, ChildMenu, InterFaceManageClassification,\
InterFaceManageModule, InterFaceSet, InterFaceCase, InterfaceCaseSet, \
    InterFaceCaseData, RelevanceCaseSet, ExecutePlan
@admin.register(MainMenu)
class MainMenuAdmin(admin.ModelAdmin):
    list_display = ('id', 'title', 'icon', 'href', 'spread')  # fields shown in the admin changelist
search_fields = ('title',)
@admin.register(ChildMenu)
class ChildMenuAdmin(admin.ModelAdmin):
list_display = ('id', 'classification', 'title', 'icon', 'href', 'spread')
search_fields = ('title',)
@admin.register(InterFaceManageClassification)
class InterFaceManageClassificationAdmin(admin.ModelAdmin):
list_display = ("classification",)
@admin.register(InterFaceManageModule)
class InterFaceManageModuleAdmin(admin.ModelAdmin):
list_display = ("parent", "description", "puisne_module", "create_data")
@admin.register(InterFaceSet)
class InterFaceSetAdmin(admin.ModelAdmin):
list_display = (
"interface_name",
"tcp",
"ip",
"url",
"method",
"headers",
"params",
"body",
"belong_module",
"preprocessor")
@admin.register(InterFaceCase)
class InterFaceCaseAdmin(admin.ModelAdmin):
list_display = ("description", "interface_case_name", "create_data")
@admin.register(InterfaceCaseSet)
class InterFaceSetCaseAdmin(admin.ModelAdmin):
list_display = ("interface_case_set_name",)
@admin.register(InterFaceCaseData)
class InterFaceCaseDataAdmin(admin.ModelAdmin):
list_display = ("parent", "interface_name", "description")
@admin.register(RelevanceCaseSet)
class RelevanceCaseSetAdmin(admin.ModelAdmin):
list_display = ("parent", "relevance_id", "interface_case_name", "description")
@admin.register(ExecutePlan)
class ExecutePlanAdmin(admin.ModelAdmin):
list_display = ("plan_name", "description", "ploy", "notification", "start_time", "end_time")
|
import sys, os, subprocess
import ROOT
from ROOT import TString, TFile, TTree
from threading import Thread
#dirsToCheck = [f for f in os.listdir(".") if os.path.isdir(f)]
dirsIgnored = ["ttZctrl"]
dirsToCheck = ["SigRegion","JESUpSigRegion","JESDownSigRegion","ttWctrl","JESUpttWctrl","JESDownttWctrl"]
#dirsToCheck = ["SigRegion","JESUpSigRegion"]
ignorefiles = ["TTH","H"]
ListOfCats={"SubCat2l":{0:"inclusive", 1:"ee_neg",2:"ee_pos",3:"em_bl_neg",4:"em_bl_pos",5:"em_bt_neg",6:"em_bt_pos",7:"mm_bl_neg",8:"mm_bl_pos",9:"mm_bt_neg",10:"mm_bt_pos"}}
if not os.path.exists("Output"):
os.popen("mkdir Output")
for key, value in ListOfCats.items():  # items() works on Python 2 and 3
n_values = len(value)
print ("SubCat is " + key + " with subcatgories of " +str(n_values-1) )
for i in range(n_values):
#for i in range(6,10):
# i+1 stands for channel, i==SubCat2l, in ttH 2lss
for dirToCheck in dirsToCheck:
if dirToCheck in dirsIgnored: continue
if not os.path.exists("Output/"+key+"/"+value[i]+"/"+dirToCheck):
os.popen("mkdir -p Output/"+key+"/"+value[i]+"/"+dirToCheck)
inputfiles = [f for f in os.listdir("./"+dirToCheck) if "root" in f]
#inputfiles = ["TTH_hmm"]
for inputfile in inputfiles:
process = inputfile.split(("_"+dirToCheck))[0]
if process in ignorefiles: continue
if process == "data" or process =="Fakes" or process== "Flips":
command_run = "root -l -b -q runReadingNoMVA.C'"+'("'+process+'","'+dirToCheck+'","Output/'+key+"/"+value[i]+"/"+dirToCheck+'",true,'+str(i)+',"'+key+'"'+")'"
                    print(command_run)
os.system(command_run)
else:
command_run = "root -l -b -q runReadingNoMVA.C'"+'("'+process+'","'+dirToCheck+'","Output/'+key+"/"+value[i]+"/"+dirToCheck+'",false,'+str(i)+',"'+key+'"'+")'"
#command_run = "root -l -b -q runReadingNoMVA.C'"+'("'+process+'","","Output/","false",'+str(i+1)+")'"
                    print(command_run)
os.system(command_run)
#root -l runReadingNoMVA.C'("TTH","2LSS/","output/")'
|
from itertools import islice, count
from math import sqrt
#islice allows for lazy slicing
#range requires bounds but count doesn't
#count() provides an open ended version of range
def is_prime(x):
if x < 2:
return False
for i in range(2, int(sqrt(x) + 1)):
if x % i == 0:
return False
return True
test = islice((x for x in count() if is_prime(x)), 1000)
print(len(list(test)))
items = [1,2,3,4,5]
def lazy_slice(items):
for item in items:
yield item
print(list(islice(lazy_slice(items),5)))
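# islice also accepts (start, stop, step), mirroring slice notation; a small
# illustrative check:
# print(list(islice(count(10), 0, 10, 2)))  # [10, 12, 14, 16, 18]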
print(any([False, False, False, True, False]))
print(all([True, True, False]))
for x in list(x for x in range(1,100) if is_prime(x)):
print(x)
print(any(is_prime(x) for x in range(1,100))) |
#!/usr/bin/env python3
from PIL import Image
from keras.callbacks import ModelCheckpoint
from keras.layers import BatchNormalization, Conv2D, Dense, Dropout, Flatten
from keras.models import Sequential
from sklearn.model_selection import train_test_split
import argparse
import glob
import numpy as np
import os
import random
import sys
SIZE = 512
def all_transpositions(im):
yield im
yield im.transpose(Image.FLIP_LEFT_RIGHT)
yield im.transpose(Image.FLIP_TOP_BOTTOM)
yield im.transpose(Image.ROTATE_180)
def random_patch(im, delta, patch):
i = random.randrange(0, SIZE, delta)
j = random.randrange(0, SIZE, patch)
return im.crop((i, j, i + delta, j + patch))
def generate_patches(im, delta, patch):
total = 0
for i in range(0, SIZE, 2 * patch):
for j in range(0, SIZE, patch):
p = im.crop((i + patch - delta, j, i + patch + delta, j + patch))
for image in all_transpositions(p):
yield image, 1
total += 1
for i in range(0, SIZE, patch):
for j in range(0, SIZE, 2 * patch):
p = im.crop((i, j + patch - delta, i + patch, j + patch + delta))
for image in all_transpositions(p.transpose(Image.ROTATE_90)):
yield image, 1
total += 1
for _ in range(total):
a = random_patch(im, delta, patch)
b = random_patch(im, delta, patch)
image = Image.new('L', (2 * delta, patch))
image.paste(a, (0, 0))
image.paste(b, (delta, 0))
yield image, 0
def generate_data(data_dir, delta, patch):
xs, ys = [], []
for path in glob.glob(os.path.join(data_dir, '*.png')):
im = Image.open(path).convert('L')
assert im.height == SIZE
assert im.width == SIZE
for x, y in generate_patches(im, delta, patch):
if random.random() < 0.25:
xs.append(np.asarray(x) / 255)
ys.append(y)
xs, ys = np.array(xs), np.array(ys)
xs = np.expand_dims(xs, axis=-1)
return xs, ys
def create_model(delta, patch, drop_rate):
input_shape = (patch, 2 * delta, 1)
model = Sequential()
model.add(Conv2D(input_shape=input_shape,
filters=64,
kernel_size=(5, 5),
activation='relu'))
model.add(BatchNormalization())
model.add(Dropout(drop_rate))
    model.add(Conv2D(filters=64,
                     kernel_size=(5, 5),
                     activation='relu'))
model.add(BatchNormalization())
model.add(Dropout(drop_rate))
model.add(Flatten())
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy',
optimizer='adam',
metrics=['binary_accuracy'])
return model
def go(data_dir, model_path, delta, patch, drop_rate):
Xs, ys = generate_data(data_dir, delta, patch)
Xs_train, Xs_test, ys_train, ys_test = train_test_split(Xs, ys,
test_size=0.2,
random_state=42)
model = create_model(delta, patch, drop_rate)
callbacks = [
ModelCheckpoint(model_path,
monitor='val_binary_accuracy',
save_best_only=True,
mode='max',
verbose=1)
]
model.fit(Xs_train, ys_train,
batch_size=64,
epochs=10,
validation_data=(Xs_test, ys_test),
callbacks=callbacks,
shuffle=True,
verbose=1)
if __name__ == '__main__':
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--patch',
type=int,
default=64,
help='Size of the image patch')
parser.add_argument('--delta',
type=int,
default=8,
help='Size of the image delta')
parser.add_argument('--data',
type=str,
default='.',
help='Path to the directory with training images')
parser.add_argument('--model',
type=str,
default='model.h5',
help='Path for the model to save')
parser.add_argument('--drop-rate',
type=float,
default=0.25,
help='Drop rate for the data')
args = parser.parse_args()
go(data_dir=args.data,
model_path=args.model,
delta=args.delta,
patch=args.patch,
drop_rate=args.drop_rate)
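# Example invocation (the script name and paths below are assumed):
#   python train_patches.py --data ./pages --model model.h5 --patch 64 --delta 8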
|
import warnings
import requests
def raise_connection_error(*args, **kwargs):
requests.get('https://jibber.ish', timeout=0.01, *args, **kwargs)
def decorate_methods(decorator, *args, **kwargs):
def decorate(cls):
for attr in cls.__dict__:
if callable(getattr(cls, attr)):
setattr(cls, attr, decorator(getattr(cls, attr), *args, **kwargs))
return cls
return decorate
def catch_errors_raise_warnings(f, ignored_errors): # pragma: no cover
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except ignored_errors:
            warnings.warn('Unreachable API from {}'.format(f.__name__), Warning)
    return wrapper
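# Hedged usage sketch (the class and method names below are illustrative):
# @decorate_methods(catch_errors_raise_warnings,
#                   ignored_errors=(requests.ConnectionError,))
# class TestLiveAPI:
#     def test_ping(self):
#         raise_connection_error()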
|
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 13 21:12:02 2017
@author: 高多奇
"""
import pylab as pl
def simulate(dv, v0=4.0, t_max=100, dt=0.0001):
    """Euler-integrate dV/dt = dv(V); returns the time and velocity lists."""
    t, v = 0, v0
    xs, ys = [0], [0]
    while t <= t_max:
        t = t + dt
        v = v + dv(v) * dt
        xs.append(t)
        ys.append(v)
    return xs, ys
# the four runs below keep the coefficients of the original update rules
x, y = simulate(lambda V: 400 / (70 * V) - 2 * 0.33 * 0.00001 * V / 105 - 0.5 * 1.29 * 0.33 * V * V / 70)
pl.plot(x, y, 'r--', label='with air resistance', linewidth=2)
x, y = simulate(lambda V: 400 / (70 * V) - 2 * 0.33 * 0.00001 * V / 105)
pl.plot(x, y, 'm--', label='without -B2*V^2 term', linewidth=2)
x, y = simulate(lambda V: 400 / (70 * V))
pl.plot(x, y, 'b:', label='without air resistance', linewidth=2)
x, y = simulate(lambda V: 400 / (70 * V) - 0.165 * 0.001 * V / 105 - 0.5 * 1000 * 0.165 * V * V / 70)
pl.plot(x, y, 'g--', label='with water resistance', linewidth=2)
pl.title('velocity')
pl.xlabel('T/s')
pl.ylabel('V m/s')
pl.xlim(0, 100)
pl.ylim(0, 40)
pl.legend(loc='best')
pl.show()
|
#!/usr/bin/env python
import sys
# Log format:
# - request date, time, and time zone
# - request line from the client
# - HTTP status code returned to the client
# - size (in bytes) of the returned object
def sanitize(log):
output = []
for i in log.split():
i = i.strip('[').strip('"').strip(']').rstrip('\n')
output.append(i)
return tuple(output)
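# Expected shape of each line in ./data (illustrative, Common Log Format-like):
#   [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326
# which sanitize() turns into the 7-tuple:
#   ('10/Oct/2000:13:55:36', '-0700', 'GET', '/apache_pb.gif', 'HTTP/1.0', '200', '2326')
# so index 2 is the method, 3 the resource, 5 the status, and 6 the byte count.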
def main():
    if '-h' in sys.argv:
        print("{0} <no arguments>".format(sys.argv[0]))
        print("(Looks for ./data)")
        sys.exit(0)
    def byte_count(resource, sanitized_logs):
        return sum(int(b[6]) for b in sanitized_logs if b[3] == resource)
    logs = open('data', 'r').readlines()
    slogs = [sanitize(log) for log in logs]
    two_hundreds = [l for l in slogs if l[5].startswith('2')]
    gets = [l for l in two_hundreds if l[2] == 'GET']
    # count requests so we can show the most requested items first
    resources = {}
    for l in gets:
        if l[3] not in resources:
            resources[l[3]] = 1
        else:
            resources[l[3]] += 1
    for key, value in sorted(resources.items(), key=lambda kv: (kv[1], kv[0]), reverse=True):
        print("%s: %s" % (key, byte_count(key, gets)))
main()
|
import os
import uuid
# if you don't override the secret key, one will be chosen for you
SECRET_KEY = uuid.uuid4().hex
DATABASE_URL = 'postgresql://{db_user}:{db_password}@{db_host}:5432/{db_name}'.format(
    db_user=os.environ.get('DB_USER'),
    db_password=os.environ.get('DB_PASSWORD'),
    db_host=os.environ.get('DB_HOST'),
    db_name=os.environ.get('DB_NAME'))
SQLALCHEMY_DATABASE_URI = DATABASE_URL
SQLALCHEMY_TRACK_MODIFICATIONS = False
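# Expected environment (illustrative values):
#   export DB_USER=app DB_PASSWORD=secret DB_HOST=localhost DB_NAME=appdb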
|
def chkprimes(n):
    # return True when n is prime, False otherwise
    if n < 2:
        return False
    for i in range(2, int(n ** 0.5) + 1):
        if n % i == 0:
            return False
    return True
def primes(n):
    # all primes smaller than n
    return [i for i in range(2, n) if chkprimes(i)]
def primepartition(a):
    # True when a is a sum of two primes: walk the sorted prime list
    # from both ends (classic two-pointer scan)
    n = primes(a)
    first, last = 0, len(n) - 1
    while first <= last:
        total = n[first] + n[last]
        if total == a:
            return True
        if total < a:
            first += 1
        else:
            last -= 1
    return False
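# Illustrative checks:
# primepartition(10) -> True  (3 + 7)
# primepartition(11) -> False (no two primes below 11 sum to 11)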
|
stopwordslist=['','\'',
"""can't""",
"""let's""",
"""they've""",
"""he's""",
"""she's""",
"""i'm""",
"""i'd""",
"""you'd""",
"""you've""",
"""i'll""",
"""i've""",
"""you're""",
"""you'll""",
"""he'll""",
"""he'd""",
"""she'd""",
"""it's""",
"""we're""",
"""they're""",
"""that's""",
"""it'll""",
"""we'll""",
"""they'll""",
"""that'll""",
"""we'd""",
"""isn't""",
"""aren't""",
"""wasn't""",
"""haven't""",
"""won't""",
"""wouldn't""",
"""shouldn't""",
"""couldn't""",
"""don't""",
"""doesn't""",
"""didn't""",
"""mustn't""",
"""hasn't""",
"""weren't""",
"""who'd""",
"""who's""",
"""what's""",
"""why'd""",
"""how'll""",
"""how's""",
'yeah',
'going',
'thing',
'good',
'hey',
'all',
'mug',
'biatch',
'results',
'four',
'penis',
'edu',
'go',
'causes',
'poorly',
'rd',
'certainly',
'biol',
'ty',
'itll',
'vs',
'ts',
'to',
'does',
'present',
'th',
'under',
'sorry',
'sent',
'jerk',
'outside',
'very',
'knob',
'none',
'every',
'yourselves',
'coon',
'did',
'forth',
'try',
'p',
'nigger',
'havent',
'thereupon',
'noted',
'says',
'past',
'likely',
'invention',
'further',
'feck',
'even',
'index',
'what',
'sub',
'giving',
'section',
'brief',
'whatll',
'above',
'sup',
'new',
'seemed',
'ever',
'whose',
'youd',
'respectively',
'mr',
'here',
'let',
'slut',
'others',
'hers',
'along',
'quite',
'thatve',
'suggest',
'obtained',
'ref',
'my',
'k',
'wherever',
'resulting',
'arent',
'usually',
'whereupon',
'makes',
'thats',
'hither',
'via',
'followed',
'merely',
'bloody',
'put',
'ninety',
'vols',
'viz',
'ord',
'readily',
'everybody',
'use',
'from',
'would',
'contains',
'two',
'next',
'few',
'therefore',
'taken',
'themselves',
'thru',
'until',
'more',
'knows',
'becomes',
'hereby',
'it',
'everywhere',
'particular',
'known',
'must',
'me',
'mg',
'balls',
'wouldnt',
'f',
'this',
'ml',
'oh',
'anywhere',
'nine',
'can',
'theirs',
'following',
'didnt',
'give',
'wank',
'near',
'states',
'weve',
'something',
'want',
'arise',
'boner',
'dyke',
'information',
'needs',
'end',
'rather',
'means',
'how',
'instead',
'fudge',
'shouldnt',
'okay',
'tried',
'may',
'stop',
'after',
'eighty',
'different',
'hereupon',
'ff',
'date',
'such',
'a',
'thered',
'whenever',
'maybe',
'q',
'ones',
'so',
'specifying',
'keeps',
'six',
'indeed',
'over',
'mainly',
'soon',
'isnt',
'through',
'looks',
'hell',
'still',
'its',
'refs',
'before',
'thank',
'thence',
'selves',
'inward',
'fix',
'actually',
'meantime',
'willing',
'thanx',
'pussy',
'ours',
'might',
'poop',
'then',
'them',
'someone',
'affected',
'thereby',
'auth',
'they',
'not',
'now',
'prick',
'nor',
'nos',
'wont',
'several',
'hereafter',
'always',
'whither',
'l',
'fag',
'sufficiently',
'muff',
'each',
'found',
'went',
'mean',
'everyone',
'significantly',
'doing',
'ed',
'eg',
'related',
'tip',
'owing',
'ex',
'substantially',
'et',
'beyond',
'out',
'rt',
'shown',
'furthermore',
'since',
'research',
'looking',
're',
'bitch',
'got',
'cause',
'shows',
'ass',
'state',
'million',
'little',
'promptly',
'que',
'besides',
'ask',
'anyhow',
'beginning',
'anal',
'g',
'could',
'tries',
'keep',
'fellatio',
'w',
'ltd',
'hence',
'turd',
'onto',
'think',
'first',
'already',
'dont',
'omitted',
'thereafter',
'thereof',
'yourself',
'twat',
'done',
'approximately',
'another',
'miss',
'awfully',
'given',
'necessarily',
'similarly',
'least',
'name',
'anyone',
'their',
'vagina',
'too',
'hundred',
'really',
'gives',
'anus',
'shell',
'mostly',
'that',
'nobody',
'took',
'immediate',
'part',
'nigga',
'somewhat',
'butt',
'off',
'believe',
'herself',
'than',
'specify',
'begins',
'b',
'unfortunately',
'showed',
'accordance',
'gotten',
'see',
'youve',
'nevertheless',
'r',
'were',
'toward',
'anyways',
'and',
'youre',
'ran',
'well',
'beforehand',
'dildo',
'spunk',
'say',
'unlikely',
'have',
'need',
'seen',
'seem',
'apparently',
'any',
'relatively',
'bastard',
'zero',
'latter',
'able',
'aside',
'predominantly',
'also',
'take',
'which',
'begin',
'added',
'unless',
'shall',
'who',
'most',
'eight',
'amongst',
'significant',
'nothing',
'why',
'kg',
'especially',
'noone',
'later',
'm',
'ballsack',
'km',
'mrs',
'heres',
'regards',
'normally',
'came',
'saying',
'jizz',
'particularly',
'show',
'anyway',
'ending',
'queer',
'fifth',
'one',
'specifically',
'fellate',
'dick',
'behind',
'should',
'only',
'announce',
'itd',
'do',
'his',
'goes',
'get',
'overall',
'truly',
'cannot',
'hid',
'nearly',
'words',
'werent',
'during',
'him',
'blowjob',
'blow',
'job',
'regarding',
'qv',
'h',
'twice',
'she',
'contain',
'x',
'where',
'sex',
'bollock',
'namely',
'sec',
'are',
'omg',
'throug',
'said',
'away',
'please',
'tosser',
'ups',
'enough',
'various',
'between',
'affecting',
'probably',
'neither',
'buttplug',
'youll',
'across',
'piss',
'available',
'we',
'never',
'recently',
'useful',
'importance',
'however',
'felching',
'wtf',
'come',
'both',
'c',
'z',
'last',
'wasnt',
'thou',
'many',
'ill',
'whereafter',
'according',
'against',
'etc',
's',
'became',
'wholl',
'com',
'll',
'comes',
'otherwise',
'among',
'liked',
'co',
'afterwards',
'seems',
'ca',
'whatever',
'alone',
'non',
'moreover',
'throughout',
'pp',
'due',
'been',
'quickly',
'whom',
'much',
'cunt',
'ah',
'whod',
'hardly',
'wants',
'adopted',
'latterly',
'thousand',
'else',
'knobend',
'former',
'those',
'fudgepacker',
'myself',
'theyve',
'look',
'unlike',
'these',
'Goddamn',
'nd',
'thereto',
'value',
'n',
'will',
'while',
'cock',
'taking',
'theres',
'ive',
'seven',
'thatll',
'almost',
'is',
'thus',
'herein',
'cant',
'itself',
'im',
'in',
'somebody',
'ie',
'id',
'whore',
'if',
'containing',
'anymore',
'perhaps',
'saw',
'make',
'same',
'wherein',
'beside',
'potentially',
'widely',
'gets',
'howbeit',
'used',
'pube',
'somewhere',
'keys',
'upon',
'effect',
'uses',
'therell',
'wheres',
'recent',
'arse',
'kept',
'whereby',
'largely',
'i',
'whole',
'nonetheless',
'thoughh',
'anybody',
'obviously',
'without',
'y',
'the',
'yours',
'lest',
'world',
'just',
'less',
'being',
'downwards',
'therere',
'obtain',
'thanks',
'using',
'regardless',
'yes',
'yet',
'unto',
'wed',
'had',
'except',
'sometimes',
'lets',
'seeming',
'has',
'adj',
'ought',
'gave',
'scrotum',
'around',
'possible',
'usefully',
'possibly',
'thereve',
'five',
'know',
'immediately',
'boob',
'like',
'abst',
'necessary',
'd',
'follows',
'theyre',
't',
'become',
'smegma',
'page',
'towards',
'therein',
'shed',
'because',
'old',
'often',
've',
'successfully',
'some',
'back',
'self',
'sure',
'bugger',
'shes',
'specified',
'home',
'ourselves',
'happens',
'vol',
'for',
'affects',
'though',
'per',
'everything',
'asking',
'provides',
'tends',
'either',
'be',
'run',
'lmfao',
'lmao',
'nowhere',
'although',
'crap',
'by',
'on',
'about',
'ok',
'anything',
'getting',
'of',
'v',
'o',
'whomever',
'whence',
'plus',
'act',
'slightly',
'or',
'seeing',
'own',
'whats',
'formerly',
'previously',
'somethan',
'into',
'within',
'www',
'down',
'doesnt',
'primarily',
'theyd',
'couldnt',
'whos',
'your',
'fuck',
'her',
'hes',
'aren',
'there',
'lol',
'pages',
'hed',
'accordingly',
'homo',
'way',
'resulted',
'damn',
'was',
'himself',
'elsewhere',
'becoming',
'but',
'somehow',
'hi',
'et-al',
'bum',
'don',
'line',
'trying',
'with',
'he',
'usefulness',
'made',
'whether',
'wish',
'j',
'up',
'us',
'tell',
'placed',
'below',
'un',
'whim',
'whoever',
'similar',
'strongly',
'gone',
'proud',
'certain',
'am',
'labia',
'an',
'meanwhile',
'as',
'sometime',
'right',
'at',
'our',
'shit',
'inc',
'again',
'hasnt',
'theyll',
'no',
'tit',
'na',
'whereas',
'when',
'lately',
'til',
'bollok',
'other',
'clitoris',
'you',
'nay',
'showns',
'briefly',
'beginnings',
'welcome',
'flange',
'important',
'e',
'together',
'goddamn',
'motherfucking',
'motherfucker',
'u',
'far',
'having',
'once']
|
import argparse
from tgif import (
agent,
friday,
)
argparser = argparse.ArgumentParser()
argparser.add_argument("-l", "--level", type=int, required=True)
def main():
args = argparser.parse_args()
result = friday.start(args.level, agent.console())
print(result)
if __name__ == "__main__":
main()
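# Usage sketch (illustrative, inferred only from the imports above): run as
#   python main.py --level 2
# argparse enforces the required integer flag, e.g.
#   argparser.parse_args(["--level", "2"]).level == 2
# friday.start() is assumed to drive the game with a console agent and
# return a printable result.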
|
from feature_extraction.feature_abstract import FeatureExtraction
import pandas as pd
import numpy as np
class Classic(FeatureExtraction):
sec_in_day = (60*60*24)
sec1 = pd.to_timedelta("1s")
def applyParams(self, params):
self.normalized = params.get('normalized', False)
self.per_sensor = params.get('per_sensor', False)
return super().applyParams(params)
def precompute(self, datasetdscr, windows):
self.datasetdscr = datasetdscr
self.scount = sum(1 for x in datasetdscr.sensor_id_map)
self.max_windowsize = max([len(w) for w in windows])
if self.per_sensor:
            self.len_per_event = 1 + self.scount  # time slot plus one-hot sensor block
else:
self.len_per_event = 2
def featureExtract(self, win):
window = win['window']
        # one flat slot group of len_per_event values per event in the window
        f = np.zeros(self.max_windowsize*self.len_per_event)
for j in range(0, min(self.max_windowsize, window.shape[0])):
sid = self.datasetdscr.sensor_id_map_inverse[window.iat[j, 0]]
timval = window.iat[j, 1]
timval = timval.hour*60*60+timval.minute*60+timval.second
if self.normalized:
timval = timval/(24*3600)
f[j*self.len_per_event] = timval
if self.per_sensor:
f[j*self.len_per_event+sid+1] = 1
else:
f[j*self.len_per_event+1] = sid
return f
#########################
class Sequence(FeatureExtraction):
sec_in_day = (60*60*24)
sec1 = pd.to_timedelta("1s")
def applyParams(self, params):
self.normalized = params.get('normalized', False)
self.per_sensor = params.get('per_sensor', False)
return super().applyParams(params)
def precompute(self, datasetdscr, windows):
self.datasetdscr = datasetdscr
self.scount = sum(1 for x in datasetdscr.sensor_id_map)
self.max_windowsize = max([len(w) for w in windows])
if self.per_sensor:
self.len_per_event = 1 + self.scount
else:
self.len_per_event = 2
self.shape = (self.max_windowsize, self.len_per_event)
def featureExtract(self, win):
window = win['window']
f = np.zeros(self.shape)
for j in range(0, min(self.max_windowsize, window.shape[0])):
sid = self.datasetdscr.sensor_id_map_inverse[window.iat[j, 0]]
timval = window.iat[j, 1]
timval = timval.hour*60*60+timval.minute*60+timval.second
if self.normalized:
timval = timval/(24*3600)
f[j, 0] = timval
if self.per_sensor:
f[j, sid+1] = 1
else:
f[j, 1] = sid
return f
def featureExtract2(self, win, idx):
window = win
f = np.zeros(self.shape)
for j in range(0, min(self.max_windowsize, len(idx))):
sid = self.datasetdscr.sensor_id_map_inverse[window[idx[j], 0]]
timval = window[idx[j], 1]
timval = timval.hour*60*60+timval.minute*60+timval.second
if self.normalized:
timval = timval/(24*3600)
f[j, 0] = timval
if self.per_sensor:
f[j, sid+1] = 1
else:
f[j, 1] = sid
return f
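# --- Illustrative sketch (not part of the module): the two encodings above,
# re-implemented standalone under assumed semantics. Classic flattens
# [time, sensor] slot groups into one vector; Sequence keeps one row per
# event. The helper name and toy event format below are hypothetical.
def _encode_events(events, max_windowsize, scount, per_sensor=False, flat=False):
    # events: list of (seconds_since_midnight, sensor_index) tuples
    len_per_event = 1 + scount if per_sensor else 2
    f = np.zeros((max_windowsize, len_per_event))
    for j, (sec, sid) in enumerate(events[:max_windowsize]):
        f[j, 0] = sec/(24*3600)       # normalized time of day
        if per_sensor:
            f[j, 1+sid] = 1           # one-hot sensor block
        else:
            f[j, 1] = sid             # raw sensor index
    return f.ravel() if flat else f   # flat -> Classic-style, else Sequence-style
if __name__ == '__main__':
    print(_encode_events([(28800, 0), (28805, 1)], max_windowsize=3, scount=2))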
|
# Generated by Django 2.0 on 2020-05-21 00:32
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('orders', '0010_capitalinjection'),
]
operations = [
migrations.DeleteModel(
name='Extrusion',
),
migrations.RemoveField(
model_name='order',
name='purchase_order',
),
migrations.AddField(
model_name='order',
name='die_number',
field=models.TextField(blank=True, null=True),
),
]
|
# Copyright 2016 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from bisect import bisect
from bson.codec_options import CodecOptions
from pymodm.connection import _get_db, DEFAULT_CONNECTION_ALIAS
from pymodm.errors import InvalidModel
from pymodm.fields import EmbeddedDocumentField, EmbeddedDocumentListField
# Attributes that can be user-specified in MongoOptions.
DEFAULT_NAMES = (
'connection_alias', 'collection_name', 'codec_options', 'final',
'cascade', 'read_preference', 'read_concern', 'write_concern',
'indexes', 'collation', 'ignore_unknown_fields')
class MongoOptions(object):
"""Base class for metadata stored in Model classes."""
def __init__(self, meta=None):
self.meta = meta
self.connection_alias = DEFAULT_CONNECTION_ALIAS
self.collection_name = None
self.fields_dict = {}
self.fields_attname_dict = {}
self.fields_ordered = []
self.implicit_id = False
self.delete_rules = {}
self.final = False
self.cascade = False
self.pk = None
self.codec_options = None
self.object_name = None
self.model = None
self.read_preference = None
self.read_concern = None
self.write_concern = None
self.indexes = []
self.collation = None
self.ignore_unknown_fields = False
self._auto_dereference = True
self._indexes_created = False
@property
def collection(self):
coll = _get_db(self.connection_alias).get_collection(
self.collection_name,
read_preference=self.read_preference,
read_concern=self.read_concern,
write_concern=self.write_concern,
codec_options=self.codec_options)
if self.indexes and not self._indexes_created:
coll.create_indexes(self.indexes)
self._indexes_created = True
return coll
@property
def auto_dereference(self):
return self._auto_dereference
@auto_dereference.setter
def auto_dereference(self, auto_dereference):
"""Turn automatic dereferencing on or off."""
for field in self.get_fields():
if isinstance(field, (EmbeddedDocumentField,
EmbeddedDocumentListField)):
embedded_options = field.related_model._mongometa
embedded_options.auto_dereference = auto_dereference
self._auto_dereference = auto_dereference
def get_field(self, field_name):
"""Retrieve a Field instance with the given MongoDB name."""
return self.fields_dict.get(field_name)
def get_field_from_attname(self, attname):
"""Retrieve a Fields instance with the given attribute name."""
return self.fields_attname_dict.get(attname)
def add_field(self, field_inst):
"""Add or replace a given Field."""
try:
orig_field = self.get_field(field_inst.mongo_name)
except Exception:
# FieldDoesNotExist, etc. may be raised by subclasses.
orig_field = None
if orig_field is None:
try:
orig_field = self.get_field_from_attname(field_inst.attname)
except Exception:
pass
if orig_field:
if field_inst.attname != orig_field.attname:
                raise InvalidModel('%r cannot have the same mongo_name as '
                                   'existing field %r' % (field_inst.attname,
                                                          orig_field.attname))
# Remove the field as it may have a different MongoDB name.
del self.fields_dict[orig_field.mongo_name]
self.fields_ordered.remove(orig_field)
self.fields_dict[field_inst.mongo_name] = field_inst
self.fields_attname_dict[field_inst.attname] = field_inst
index = bisect(self.fields_ordered, field_inst)
self.fields_ordered.insert(index, field_inst)
        # Set the primary key if we don't have one yet, or if it is implicit.
        if field_inst.primary_key and (self.pk is None or self.implicit_id):
self.pk = field_inst
def get_fields(self, include_parents=True, include_hidden=False):
"""Get a list of all fields on the Model."""
return self.fields_ordered
def contribute_to_class(self, cls, name):
"""Callback executed when added to a Model class definition."""
self.model = cls
# Name used to look up this class with get_document().
self.object_name = '%s.%s' % (cls.__module__, cls.__name__)
setattr(cls, name, self)
# Metadata was defined by user.
if self.meta:
for attr in DEFAULT_NAMES:
if attr in self.meta.__dict__:
setattr(self, attr, getattr(self.meta, attr))
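# --- Illustrative sketch (not part of this module): user code typically
# drives the options above through a model's inner Meta class. The model,
# field names and connection URI below are hypothetical; only the public
# pymodm API (connect, MongoModel, fields) is assumed.
if __name__ == '__main__':
    from pymodm import MongoModel, connect, fields
    # Register a connection under an alias; the client connects lazily.
    connect('mongodb://localhost:27017/exampledb', alias='example-conn')
    class User(MongoModel):
        email = fields.EmailField(primary_key=True)
        name = fields.CharField()
        class Meta:
            # Only attributes listed in DEFAULT_NAMES above are copied onto
            # the model's MongoOptions by contribute_to_class().
            connection_alias = 'example-conn'
            collection_name = 'users'
            ignore_unknown_fields = True
    print(User._mongometa.collection_name)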
|
from flask import Blueprint, current_app, make_response
from flask_restful import Api, Resource
from eleanor.utils.api_utils import json_response
from eleanor.utils.rate_limits import ratelimit
from eleanor.celery import tasks
from eleanor.db import db
from eleanor.utils.redis import ping, set_key
from eleanor.utils.healthcheck import HealthCheck
from eleanor.db.models.products import ProductModel
import json
echo_api = Blueprint('echo_api', __name__)
api = Api(echo_api, catch_all_404s=True)
class Echo(Resource):
method_decorators = [
json_response,
# ratelimit(limit=5, per=60)
]
def get(self):
current_app.logger.info("Calling Echo")
return {
"Status": "Up and running..."
}
def db_master_check():
db.session.using_bind("master").query(ProductModel).all()
return True, "db master ok"
def db_slave_check():
db.session.using_bind("slave").query(ProductModel).all()
return True, "db slave ok"
def redis_check():
ping()
return True, "redis ok"
def task_check():
test_task = tasks.add.delay(4, 4)
test_task.get(timeout=4)
return True, "tasks ok"
health = HealthCheck(
checkers=[
db_master_check,
db_slave_check,
redis_check,
task_check
]
)
class HealthCheckResource(Resource):
    # Distinct name so it does not shadow the HealthCheck helper imported
    # from eleanor.utils.healthcheck (used to build `health` above).
method_decorators = [
# ratelimit(limit=10, per=60)
]
def get(self):
message, status = health.check()
check_force = [
mess for mess in message['results']
if mess["checker"] == 'db_slave_check' and
'OperationalError' in str(mess["output"])
]
if check_force:
set_key('MASTER', 'FORCE')
current_app.logger.debug("set FORCE MASTER")
else:
set_key('MASTER', 'NO FORCE')
current_app.logger.debug("NO set FORCE MASTER")
headers = [('Retry-After', '30')]
return make_response(json.dumps(message), status, headers)
api.add_resource(Echo, '/echo')
api.add_resource(HealthCheckResource, '/health')
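# --- Illustrative wiring sketch (not part of this module; the factory name is
# hypothetical): registering the blueprint exposes GET /echo and GET /health.
def create_app():
    from flask import Flask
    app = Flask(__name__)
    app.register_blueprint(echo_api)
    return app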
|
from django.contrib import admin
from django.conf.urls import url, include
from rest_framework import routers
from durgaapi_with_restframework_APP import views
from rest_framework_swagger.views import get_swagger_view
#from django.urls import path
#____________________________________________________________________________________________________________________
schema_view = get_swagger_view(title="RAW API Documentation")
router = routers.DefaultRouter()
# NOTE: when registering a ModelViewSet that defines `queryset`, the
# `basename` argument is optional; the router derives it automatically.
#router.register('api', views.EmployeeCRUDCBV, basename='api')
# Routers are only applicable when we are using viewsets.
router.register('api', views.EmployeeCRUDCBV)
# Because we use a router, `include` must be imported
# (from django.conf.urls import include) and used in urlpatterns as shown below.
urlpatterns = [
url(r'^swaggerdoc/', schema_view),
url(r'^admin/', admin.site.urls),
url(r'', include(router.urls)),
]
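# For reference, a minimal ModelViewSet compatible with the registration above
# might look like this (illustrative sketch; the real EmployeeCRUDCBV lives in
# durgaapi_with_restframework_APP/views.py, and the model/serializer names are
# assumptions):
#
# from rest_framework import viewsets
# from .models import Employee
# from .serializers import EmployeeSerializer
#
# class EmployeeCRUDCBV(viewsets.ModelViewSet):
#     queryset = Employee.objects.all()
#     serializer_class = EmployeeSerializer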
|
#Plotting functions for Python
#Cetin Can Evirgen
#13.02.16
#Preamble
import numpy as np
import matplotlib.pyplot as plt
def plot_arr(freqs):
    #Stack a list of equal-length 1D arrays into a single 2D array
    N = len(freqs)
    rets = np.array([freqs[i] for i in np.arange(N)])
    return rets
#1D line plot
def line_1d(x,y,xl,yl,xrn='Default',yrn='Default',l_col = 'b',l_sty = '-',fg_height = 6,l_width=1.5,sv_fig=False,sv_lab='None',sv_format='pdf',sv_dir='Current',plt_title = 'None',plt_show=True):
    import os
golden = (1.+5.**0.5)/2.
fg_width = fg_height*golden
fig = plt.figure(figsize=[fg_width,fg_height])
ax = fig.add_subplot(111)
ax.plot(x,y,lw=l_width,ls=l_sty,color=l_col)
ax.set_xlabel(xl,fontsize=18)
ax.set_ylabel(yl,fontsize=18)
ax.tick_params(labelsize=16,length=5,width=1.5)
if xrn=='Default':
ax.set_xlim([np.amin(x),np.amax(x)])
else:
ax.set_xlim(xrn)
if yrn=='Default':
ax.set_ylim([np.amin(y),1.1*np.amax(y)])
else:
ax.set_ylim(yrn)
if plt_title!='None':
ax.set_title(plt_title,fontsize=22)
    if sv_fig==True:
        if sv_dir=='Current':
            sv_dir = os.getcwd()
        #Save into sv_dir without permanently changing the working directory
        plt.savefig(os.path.join(sv_dir,sv_lab+'.'+sv_format))
if plt_show==True:
plt.show()
#Partition sample space by one variable and find mean of other variable for each partition
def part_mean(x,y,bns,ret_freqs=False,nrm=True):
if x.ndim>1:
x = x.ravel()
if y.ndim>1:
y = y.ravel()
    if type(bns)==int:
        x_b = np.linspace(np.amin(x),np.amax(x),bns)
        y_b = np.linspace(np.amin(y),np.amax(y),bns)
else:
x_b,y_b = bns
freqs = np.histogram2d(x,y,bins=[x_b,y_b])[0].T
if nrm:
dx = np.ediff1d(x_b).mean(); dy = np.ediff1d(y_b).mean()
freqs /= float(np.sum(freqs*dx*dy))
xb = 0.5*(x_b[1:]+x_b[:-1]); yb = 0.5*(y_b[1:]+y_b[:-1])
Nx = len(xb); Ny = len(yb)
rets = np.zeros(Nx)
for i in np.arange(Nx):
t = np.sum(yb*freqs[:,i])
b = np.sum(freqs[:,i])
if b==0:
rets[i] = np.nan
else:
rets[i] = t/b
if ret_freqs:
return x_b,y_b,rets,freqs
else:
return [xb,rets]
#Partition sample space and compute error
#def part_error(x,y):
#
#2D histogram
def hist2d_data(x,y,bns,ret_bns=False,nrm=False):
if x.ndim>1:
x = x.ravel()
if y.ndim>1:
y = y.ravel()
    if type(bns)==int:
        x_b = np.linspace(np.amin(x),np.amax(x),bns)
        y_b = np.linspace(np.amin(y),np.amax(y),bns)
elif type(bns)==np.ndarray:
if bns.shape[0]!=2:
raise ValueError('Incorrect shape')
x_b,y_b = bns
if len(x_b)!=len(y_b):
raise ValueError('x and y do not have same dimensions.')
elif type(bns)==list:
        if len(bns)!=2:
            raise ValueError('Need a list containing two arrays; one for x and one for y')
x_b,y_b = bns
dx = np.ediff1d(x_b).mean(); dy = np.ediff1d(y_b).mean()
freqs = np.histogram2d(x,y,bins=[x_b,y_b])[0].T
if nrm:
freqs /= float(np.sum(freqs)*dx*dy)
if ret_bns:
return [x_b,y_b,freqs]
else:
return freqs
#1D multi-plot
def mline_1d(x,ys,xl,yl,leg_labs,xrn='Default',yrn='Default',fg_height = 6,l_width=1.5):
    #Initialise figure object
    golden = (1.+5.**0.5)/2.
    fg_width = fg_height*golden
    #One curve is drawn per row of ys
    N_plt = ys.shape[0]
fig = plt.figure(figsize=[fg_width,fg_height])
ax = fig.add_subplot(111)
for i in np.arange(N_plt):
ax.plot(x,ys[i],lw=l_width)
ax.set_xlabel(xl,fontsize=18)
ax.set_ylabel(yl,fontsize=18)
ax.tick_params(labelsize=16,length=5,width=1.5)
if xrn=='Default':
ax.set_xlim([np.amin(x),np.amax(x)])
else:
ax.set_xlim(xrn)
if yrn=='Default':
ax.set_ylim([np.amin(ys),1.1*np.amax(ys)])
else:
ax.set_ylim(yrn)
ax.legend(leg_labs,fontsize=16,loc=2)
#1D histogram as line plot
def hist_pdf(arr1,no_bins,xl,yl,rng='Default',dns=True,fg_height = 6,l_width=1.5,sv_fig=False,sv_lab='hist1d',sv_format='pdf',sv_dir='Current',plt_title = 'None',plt_show=True):
import os
golden = (1 + 5 ** 0.5) / 2
#Processing data
dims = arr1.ndim
if dims>1:
arr = arr1.ravel()
print(str(dims)+'D array flattened')
else:
arr=arr1
#Calculating histogram
if rng=='Default':
rng = [np.amin(arr),np.amax(arr)]
freqs,bns = np.histogram(arr,range=rng,bins=no_bins,density=dns)
bins = 0.5*(bns[1:]+bns[:-1])
fg_width = fg_height*golden
fig = plt.figure(figsize=[fg_width,fg_height])
ax = fig.add_subplot(111)
ax.plot(bins,freqs,linewidth=l_width)
ax.set_xlabel(xl,fontsize=18)
ax.set_ylabel(yl,fontsize=18)
ax.tick_params(labelsize=16,length=5,width=1.5)
if plt_title !='None':
ax.set_title(plt_title,fontsize=20)
    if sv_fig==True:
        if sv_dir == 'Current':
            sv_dir = os.getcwd()
        else:
            print('Save directory is '+sv_dir)
        #Save into sv_dir without permanently changing the working directory
        plt.savefig(os.path.join(sv_dir,sv_lab+'.'+sv_format))
if plt_show==True:
plt.show()
return bins,freqs
#Multiple PDF plots
def input_array(dset1,dset2,dset3):
if dset1.ndim>1:
arr1 = dset1.ravel()
else:
arr1 = dset1
N = len(arr1)
rets = np.zeros([3,N])
rets[0] = arr1
if dset2.ndim>1:
arr2 = dset2.ravel()
else:
arr2 = dset2
rets[1] = arr2
if dset3.ndim>1:
arr3 = dset3.ravel()
else:
arr3 = dset3
    rets[2] = arr3
return rets
def phase_hist_pdf(arr,no_bins,xl,yl,fltr,rng='Default',fg_height = 6,l_width=1.5,sv_fig=False,sv_lab='None',sv_format='pdf',sv_dir='Current',plt_title = 'None',plt_show=False):
import os
golden = (1 + 5 ** 0.5) / 2
#Processing data
c = arr[fltr[0]]
w = arr[fltr[1]]
h = arr[fltr[2]]
    cols = np.array(['b-','g-','m-'])
#Calculating histogram
if rng=='Default':
rng = [np.amin(arr),np.amax(arr)]
fg_width = fg_height*golden
fig = plt.figure(figsize=[fg_width,fg_height])
ax = fig.add_subplot(111)
freq_arr = np.zeros([3,no_bins])
    for i,phase in enumerate([c,w,h]):
        freqs,bns = np.histogram(phase,range=rng,bins=no_bins)
        freq_arr[i] = freqs
        bins = 0.5*(bns[1:]+bns[:-1])
        ax.plot(bins,freqs,cols[i],linewidth=l_width)
ax.set_xlabel(xl,fontsize=18)
ax.set_ylabel(yl,fontsize=18)
ax.tick_params(labelsize=16,length=5,width=1.5)
ax.legend(['Cold phase','Warm phase','Hot phase'],fontsize=16)
if plt_title !='None':
ax.set_title(plt_title,fontsize=20)
    if sv_fig==True:
        if sv_dir == 'Current':
            sv_dir = os.getcwd()
        else:
            print('Save directory is '+sv_dir)
        #Save into sv_dir without permanently changing the working directory
        plt.savefig(os.path.join(sv_dir,sv_lab+'.'+sv_format))
if plt_show==True:
plt.show()
else:
plt.close()
return bins,freq_arr
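#Illustrative demo (not part of the original module): exercises part_mean and
#hist_pdf on synthetic data; runs only when this file is executed directly.
if __name__ == '__main__':
    rng_state = np.random.RandomState(0)
    x = rng_state.uniform(0, 1, 10000)
    y = x**2 + rng_state.normal(0, 0.05, 10000)
    #Conditional mean of y in 20 bins of x; xb are the bin centres
    xb, ym = part_mean(x, y, 20)
    print(xb[:3], ym[:3])
    #1D density of y as a line plot (figure is created but not shown)
    bins, freqs = hist_pdf(y, 50, 'y', 'pdf', plt_show=False)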
|
species(
label = 'C#CC([CH2])C[C]=O(26509)',
structure = SMILES('C#CC([CH2])C[C]=O'),
E0 = (375.139,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1855,455,950,2175,525,750,770,3400,2100,2750,2850,1437.5,1250,1305,750,350,3000,3100,440,815,1455,1000,1380,1390,370,380,2900,435,216.448],'cm^-1')),
HinderedRotor(inertia=(0.273079,'amu*angstrom^2'), symmetry=1, barrier=(9.08227,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.00207502,'amu*angstrom^2'), symmetry=1, barrier=(9.08177,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(2.14545,'amu*angstrom^2'), symmetry=1, barrier=(71.3339,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(2.14561,'amu*angstrom^2'), symmetry=1, barrier=(71.3347,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.972278,0.0677876,-8.47319e-05,6.01089e-08,-1.69222e-11,45226.7,27.6291], Tmin=(100,'K'), Tmax=(955.13,'K')), NASAPolynomial(coeffs=[10.0953,0.0244636,-8.6559e-06,1.3992e-09,-8.69669e-14,43717.4,-14.7432], Tmin=(955.13,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(375.139,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CtCsCsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + group(Ct-CtCs) + group(Ct-CtH) + radical(CCCJ=O) + radical(Isobutyl)"""),
)
species(
label = 'CH2CO(28)',
structure = SMILES('C=C=O'),
E0 = (-60.8183,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,2120,512.5,787.5],'cm^-1')),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (42.0367,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3625.12,'J/mol'), sigma=(3.97,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=2.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.13241,0.0181319,-1.74093e-05,9.35336e-09,-2.01725e-12,-7148.09,13.3808], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[5.75871,0.00635124,-2.25955e-06,3.62322e-10,-2.15856e-14,-8085.33,-4.9649], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-60.8183,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(108.088,'J/(mol*K)'), label="""CH2CO""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
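# Illustrative helper (not part of the generated network): each NASAPolynomial
# block in this file stores seven coefficients a1..a7 over a temperature range,
# with Cp(T)/R = a1 + a2*T + a3*T^2 + a4*T^3 + a5*T^4; a6 and a7 carry the
# enthalpy and entropy offsets. A minimal sketch:
def nasa_cp(coeffs, T, R=8.314462618):
    # heat capacity in J/(mol*K) from the first five polynomial coefficients
    a1, a2, a3, a4, a5 = coeffs[:5]
    return R*(a1 + a2*T + a3*T**2 + a4*T**3 + a5*T**4)
# e.g. the low-T polynomial of CH2CO(28) above gives
# nasa_cp([2.13241,0.0181319,-1.74093e-05,9.35336e-09,-2.01725e-12], 298.15)
# which is roughly 52 J/(mol*K), the expected room-temperature Cp of ketene.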
species(
label = 'CH2CHCCH(26391)',
structure = SMILES('C#CC=C'),
E0 = (274.188,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,750,770,3400,2100,3010,987.5,1337.5,450,1655,2175,525],'cm^-1')),
HinderedRotor(inertia=(1.46338,'amu*angstrom^2'), symmetry=1, barrier=(33.6459,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (52.0746,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(2968.28,'J/mol'), sigma=(5.18,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.87083,0.0182042,1.06711e-05,-2.72492e-08,1.19478e-11,33023.8,11.2934], Tmin=(100,'K'), Tmax=(955.249,'K')), NASAPolynomial(coeffs=[8.52653,0.0108962,-3.56564e-06,6.31243e-10,-4.51891e-14,31196.2,-19.6435], Tmin=(955.249,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(274.188,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(178.761,'J/(mol*K)'), label="""CH2CHCCH""", comment="""Thermo library: DFT_QCI_thermo"""),
)
species(
label = '[CH]=C1CC1C[C]=O(27321)',
structure = SMILES('[CH]=C1CC1C[C]=O'),
E0 = (455.669,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.2053,0.0541402,-3.77902e-05,9.53052e-09,4.9045e-13,54911.3,25.0128], Tmin=(100,'K'), Tmax=(1061.23,'K')), NASAPolynomial(coeffs=[13.0973,0.0212294,-8.11054e-06,1.46355e-09,-1.01046e-13,51716.4,-36.2267], Tmin=(1061.23,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(455.669,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(299.321,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cds-CdsCsCs) + group(Cds-OdCsH) + group(Cds-CdsHH) + ring(Methylene_cyclopropane) + radical(CCCJ=O) + radical(Cds_P)"""),
)
species(
label = '[CH]=C1C(=O)CC1[CH2](27322)',
structure = SMILES('[CH]=C1C(=O)CC1[CH2]'),
E0 = (408.264,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.84705,0.0384977,-2.93917e-07,-2.1473e-08,9.29291e-12,49187.9,24.001], Tmin=(100,'K'), Tmax=(1068.48,'K')), NASAPolynomial(coeffs=[10.0005,0.0271221,-1.12054e-05,2.10732e-09,-1.48682e-13,46352.5,-20.9895], Tmin=(1068.48,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(408.264,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(303.478,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsHHH) + group(Cd-CdCs(CO)) + group(Cds-O2d(Cds-Cds)Cs) + group(Cds-CdsHH) + ring(Cyclobutane) + radical(Isobutyl) + radical(Cds_P)"""),
)
species(
label = 'H(3)',
structure = SMILES('[H]'),
E0 = (211.792,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (1.00794,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1205.6,'J/mol'), sigma=(2.05,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,25472.7,-0.459566], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,25472.7,-0.459566], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(211.792,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""H""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'C#CC(=C)C[C]=O(27323)',
structure = SMILES('C#CC(=C)C[C]=O'),
E0 = (290.95,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2175,525,1855,455,950,2950,3100,1380,975,1025,1650,750,770,3400,2100,350,440,435,1725,2750,2850,1437.5,1250,1305,750,350,380.077],'cm^-1')),
HinderedRotor(inertia=(0.196336,'amu*angstrom^2'), symmetry=1, barrier=(20.1041,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.196846,'amu*angstrom^2'), symmetry=1, barrier=(20.0968,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.195216,'amu*angstrom^2'), symmetry=1, barrier=(20.1124,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (93.1033,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.18618,0.0525907,-4.0254e-05,1.45815e-08,-2.07099e-12,35102.3,23.9086], Tmin=(100,'K'), Tmax=(1677.54,'K')), NASAPolynomial(coeffs=[16.8993,0.0151235,-6.75189e-06,1.26746e-09,-8.68153e-14,29830.4,-60.0269], Tmin=(1677.54,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(290.95,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(270.22,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-O2d)(Cds-Cds)HH) + group(Cds-CdsCtCs) + group(Cds-OdCsH) + group(Cds-CdsHH) + group(Ct-Ct(Cds-Cds)) + group(Ct-CtH) + radical(CCCJ=O)"""),
)
species(
label = 'C#CC([CH2])C=C=O(27324)',
structure = SMILES('C#CC([CH2])C=C=O'),
E0 = (345.225,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2120,512.5,787.5,3010,987.5,1337.5,450,1655,2175,525,750,770,3400,2100,3000,3100,440,815,1455,1000,1380,1390,370,380,2900,435,180],'cm^-1')),
HinderedRotor(inertia=(0.841168,'amu*angstrom^2'), symmetry=1, barrier=(19.3401,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(1.67658,'amu*angstrom^2'), symmetry=1, barrier=(38.5479,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.836938,'amu*angstrom^2'), symmetry=1, barrier=(19.2429,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (93.1033,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.618322,0.076913,-0.000109425,8.19571e-08,-2.39239e-11,41640.4,23.6804], Tmin=(100,'K'), Tmax=(914.959,'K')), NASAPolynomial(coeffs=[12.2639,0.0204682,-7.81816e-06,1.31374e-09,-8.33556e-14,39740.9,-30.2021], Tmin=(914.959,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(345.225,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(270.22,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCd(CCO)H) + group(Cs-CsHHH) + group(Cds-(Cdd-O2d)CsH) + group(Ct-CtCs) + group(Ct-CtH) + radical(CJC(C)C=C=O)"""),
)
species(
label = 'C=[C][O](173)',
structure = SMILES('[CH2][C]=O'),
E0 = (160.185,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,539.612,539.669],'cm^-1')),
HinderedRotor(inertia=(0.000578908,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (42.0367,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.39563,0.0101365,2.30741e-06,-8.97566e-09,3.68242e-12,19290.3,10.0703], Tmin=(100,'K'), Tmax=(1068.9,'K')), NASAPolynomial(coeffs=[6.35055,0.00638951,-2.69368e-06,5.4221e-10,-4.02476e-14,18240.9,-6.33602], Tmin=(1068.9,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(160.185,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(153.818,'J/(mol*K)'), comment="""Thermo library: Klippenstein_Glarborg2016 + radical(CsCJ=O) + radical(CJC=O)"""),
)
species(
label = 'C2H(33)',
structure = SMILES('[C]#C'),
E0 = (557.301,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([750,770,3400,2100],'cm^-1')),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (25.0293,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(1737.73,'J/mol'), sigma=(4.1,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=2.5, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.89868,0.0132988,-2.80733e-05,2.89485e-08,-1.07502e-11,67061.6,6.18548], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[3.6627,0.00382492,-1.36633e-06,2.13455e-10,-1.23217e-14,67168.4,3.92206], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(557.301,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(62.3585,'J/(mol*K)'), label="""C2H""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
species(
label = 'C=CC[C]=O(2390)',
structure = SMILES('C=CC[C]=O'),
E0 = (66.8219,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,1855,455,950,3010,987.5,1337.5,450,1655,2950,3100,1380,975,1025,1650,458.926],'cm^-1')),
HinderedRotor(inertia=(0.0997865,'amu*angstrom^2'), symmetry=1, barrier=(14.9157,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.099798,'amu*angstrom^2'), symmetry=1, barrier=(14.9167,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (69.0819,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3285.42,'J/mol'), sigma=(5.46087,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=513.18 K, Pc=45.78 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.51804,0.0238835,1.19491e-05,-2.85418e-08,1.09388e-11,8097.53,17.8098], Tmin=(100,'K'), Tmax=(1083.61,'K')), NASAPolynomial(coeffs=[9.78041,0.0178579,-8.47799e-06,1.72441e-09,-1.27255e-13,5303.46,-23.4402], Tmin=(1083.61,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(66.8219,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(224.491,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-O2d)(Cds-Cds)HH) + group(Cds-CdsCsH) + group(Cds-OdCsH) + group(Cds-CdsHH) + radical(CCCJ=O)"""),
)
species(
label = '[CH]=[C]C=C(4699)',
structure = SMILES('[CH]=C=C[CH2]'),
E0 = (451.584,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,540,610,2055,3000,3100,440,815,1455,1000,180,1024.85,1025.53,1026.61],'cm^-1')),
HinderedRotor(inertia=(0.00938781,'amu*angstrom^2'), symmetry=1, barrier=(7.01846,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (52.0746,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.76805,0.020302,8.75519e-06,-2.87666e-08,1.37354e-11,54363.7,13.5565], Tmin=(100,'K'), Tmax=(915.031,'K')), NASAPolynomial(coeffs=[9.46747,0.00887314,-1.78262e-06,2.38534e-10,-1.6263e-14,52390.1,-22.2544], Tmin=(915.031,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(451.584,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(228.648,'J/(mol*K)'), comment="""Thermo library: DFT_QCI_thermo + radical(C=C=CJ) + radical(Allyl_P)"""),
)
species(
label = 'C#C[C](C)C[C]=O(27325)',
structure = SMILES('C#C[C](C)C[C]=O'),
E0 = (305.178,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1855,455,950,2750,2800,2850,1350,1500,750,1050,1375,1000,2175,525,750,770,3400,2100,2750,2850,1437.5,1250,1305,750,350,360,370,350,180],'cm^-1')),
HinderedRotor(inertia=(0.481374,'amu*angstrom^2'), symmetry=1, barrier=(11.0677,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.20028,'amu*angstrom^2'), symmetry=1, barrier=(4.60484,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.199771,'amu*angstrom^2'), symmetry=1, barrier=(4.59313,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(2.74983,'amu*angstrom^2'), symmetry=1, barrier=(63.224,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.920108,0.071604,-9.85959e-05,7.69064e-08,-2.35114e-11,36811.6,25.4513], Tmin=(100,'K'), Tmax=(930.065,'K')), NASAPolynomial(coeffs=[8.88173,0.0270943,-1.025e-05,1.70947e-09,-1.07732e-13,35774.8,-9.99412], Tmin=(930.065,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(305.178,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CtCsCsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + group(Ct-CtCs) + group(Ct-CtH) + radical(Tert_Propargyl) + radical(CCCJ=O)"""),
)
species(
label = 'C#CC([CH2])[CH]C=O(27326)',
structure = SMILES('C#CC([CH2])C=C[O]'),
E0 = (334.483,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.0458433,0.0727493,-7.50059e-05,3.82979e-08,-7.3957e-12,40383.5,26.988], Tmin=(100,'K'), Tmax=(1429.16,'K')), NASAPolynomial(coeffs=[18.8441,0.0117163,-2.11059e-06,1.71804e-10,-5.28237e-15,35870.2,-67.4071], Tmin=(1429.16,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(334.483,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(Cs-(Cds-Cds)CtCsH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + group(Ct-CtCs) + group(Ct-CtH) + radical(Isobutyl) + radical(C=COJ)"""),
)
species(
label = 'C#CC(C)[CH][C]=O(27327)',
structure = SMILES('C#CC(C)[CH][C]=O'),
E0 = (337.586,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.15024,0.0593634,-5.34809e-05,2.35915e-08,-3.11457e-12,40707.9,24.9706], Tmin=(100,'K'), Tmax=(912.335,'K')), NASAPolynomial(coeffs=[11.8391,0.0219095,-7.37261e-06,1.2039e-09,-7.76811e-14,38366,-27.7624], Tmin=(912.335,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(337.586,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CtCsCsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + group(Ct-CtCs) + group(Ct-CtH) + radical(CCCJ=O) + radical(CCJCHO)"""),
)
species(
label = 'C#C[C]([CH2])CC=O(27328)',
structure = SMILES('[CH]=C=C([CH2])CC=O'),
E0 = (314.361,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.04755,0.0558089,-3.6233e-05,7.74334e-09,5.20804e-13,37922.7,24.7967], Tmin=(100,'K'), Tmax=(1187.95,'K')), NASAPolynomial(coeffs=[14.7838,0.0222159,-9.79985e-06,1.87941e-09,-1.32889e-13,33765.9,-47.5981], Tmin=(1187.95,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(314.361,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-O2d)(Cds-Cds)HH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-OdCsH) + group(Cds-CdsHH) + group(Cdd-CdsCds) + radical(Allyl_P) + radical(C=C=CJ)"""),
)
species(
label = '[C]#CC(C)C[C]=O(27329)',
structure = SMILES('[C]#CC(C)C[C]=O'),
E0 = (507.201,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2175,525,1855,455,950,1380,1390,370,380,2900,435,2750,2800,2850,1350,1500,750,1050,1375,1000,180,180],'cm^-1')),
HinderedRotor(inertia=(0.181692,'amu*angstrom^2'), symmetry=1, barrier=(4.17746,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.183461,'amu*angstrom^2'), symmetry=1, barrier=(4.21813,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.182313,'amu*angstrom^2'), symmetry=1, barrier=(4.19173,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(3.5054,'amu*angstrom^2'), symmetry=1, barrier=(80.596,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.02346,0.0690922,-9.28626e-05,7.20246e-08,-2.20983e-11,61105.9,26.4069], Tmin=(100,'K'), Tmax=(918.466,'K')), NASAPolynomial(coeffs=[8.50054,0.0276371,-1.06382e-05,1.80179e-09,-1.15092e-13,60107.5,-6.9881], Tmin=(918.466,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(507.201,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CtCsCsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + group(Ct-CtCs) + group(Ct-CtH) + radical(Acetyl) + radical(CCCJ=O)"""),
)
species(
label = '[C]#CC([CH2])CC=O(27330)',
structure = SMILES('[C]#CC([CH2])CC=O'),
E0 = (552.322,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2782.5,750,1395,475,1775,1000,2175,525,1380,1390,370,380,2900,435,3000,3100,440,815,1455,1000,180,1173.62],'cm^-1')),
HinderedRotor(inertia=(0.17135,'amu*angstrom^2'), symmetry=1, barrier=(3.93968,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.171069,'amu*angstrom^2'), symmetry=1, barrier=(3.93321,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.171154,'amu*angstrom^2'), symmetry=1, barrier=(3.93517,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(2.68135,'amu*angstrom^2'), symmetry=1, barrier=(61.6495,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.08149,0.0682028,-9.1685e-05,7.19862e-08,-2.21825e-11,66530.4,27.1611], Tmin=(100,'K'), Tmax=(940.7,'K')), NASAPolynomial(coeffs=[7.68371,0.0287738,-1.07064e-05,1.76515e-09,-1.10301e-13,65790.6,-1.61672], Tmin=(940.7,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(552.322,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CtCsCsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + group(Ct-CtCs) + group(Ct-CtH) + radical(Acetyl) + radical(Isobutyl)"""),
)
species(
label = 'O=[C]CC1[C]=CC1(27331)',
structure = SMILES('O=[C]CC1[C]=CC1'),
E0 = (418.582,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.61671,0.0440599,-1.43501e-05,-1.06594e-08,6.57939e-12,50436.8,24.7313], Tmin=(100,'K'), Tmax=(1044.25,'K')), NASAPolynomial(coeffs=[11.4231,0.0235167,-9.28967e-06,1.71835e-09,-1.20668e-13,47460.7,-27.4469], Tmin=(1044.25,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(418.582,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(299.321,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + group(Cds-OdCsH) + ring(Cyclobutene) + radical(CCCJ=O) + radical(cyclobutene-vinyl)"""),
)
species(
label = '[CH2]C1[C]=CC(=O)C1(27332)',
structure = SMILES('[CH2]C1[C]=CC(=O)C1'),
E0 = (344.227,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.3161,0.0269835,2.67148e-05,-4.64912e-08,1.76787e-11,41470.3,23.0305], Tmin=(100,'K'), Tmax=(1013,'K')), NASAPolynomial(coeffs=[8.15624,0.0282167,-1.10843e-05,2.05897e-09,-1.456e-13,39040.6,-11.3727], Tmin=(1013,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(344.227,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(303.478,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-O2d(Cds-Cds)Cs) + group(Cd-Cd(CO)H) + ring(Cyclopentane) + radical(Isobutyl) + radical(cyclopentene-vinyl)"""),
)
species(
label = 'C#CC(=C)CC=O(27333)',
structure = SMILES('C#CC(=C)CC=O'),
E0 = (130.989,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.3851,0.0483285,-2.14615e-05,-3.60221e-09,3.50041e-12,15856,22.3622], Tmin=(100,'K'), Tmax=(1189,'K')), NASAPolynomial(coeffs=[13.5897,0.0236811,-1.10707e-05,2.18003e-09,-1.56134e-13,11793.7,-43.5088], Tmin=(1189,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(130.989,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-O2d)(Cds-Cds)HH) + group(Cds-CdsCtCs) + group(Cds-OdCsH) + group(Cds-CdsHH) + group(Ct-Ct(Cds-Cds)) + group(Ct-CtH)"""),
)
species(
label = 'C#CC(C)C=C=O(27334)',
structure = SMILES('C#CC(C)C=C=O'),
E0 = (134.139,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.855247,0.066798,-7.1586e-05,4.13483e-08,-9.51042e-12,16248.5,22.4224], Tmin=(100,'K'), Tmax=(1061.61,'K')), NASAPolynomial(coeffs=[12.8236,0.0217025,-7.86779e-06,1.33448e-09,-8.74387e-14,13707.4,-36.0336], Tmin=(1061.61,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(134.139,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCd(CCO)H) + group(Cs-CsHHH) + group(Cds-(Cdd-O2d)CsH) + group(Ct-CtCs) + group(Ct-CtH)"""),
)
species(
label = 'C#C[CH]CC[C]=O(26508)',
structure = SMILES('C#C[CH]CC[C]=O'),
E0 = (325.393,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.967387,0.069153,-8.98087e-05,6.66782e-08,-1.96613e-11,39242.5,25.9838], Tmin=(100,'K'), Tmax=(927.407,'K')), NASAPolynomial(coeffs=[9.45022,0.0261121,-9.75565e-06,1.62859e-09,-1.03298e-13,37946.7,-12.8054], Tmin=(927.407,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(325.393,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CtCsHH) + group(Cds-OdCsH) + group(Ct-CtCs) + group(Ct-CtH) + radical(CCCJ=O) + radical(Sec_Propargyl)"""),
)
species(
label = 'C#CC[CH]C[C]=O(27335)',
structure = SMILES('C#CC[CH]C[C]=O'),
E0 = (379.084,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,750,770,3400,2100,2175,525,1855,455,950,3025,407.5,1350,352.5,371.899,4000],'cm^-1')),
HinderedRotor(inertia=(0.0951553,'amu*angstrom^2'), symmetry=1, barrier=(9.33769,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0951993,'amu*angstrom^2'), symmetry=1, barrier=(9.33839,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.950747,'amu*angstrom^2'), symmetry=1, barrier=(93.2641,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.950445,'amu*angstrom^2'), symmetry=1, barrier=(93.2558,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.22535,0.0623033,-6.95934e-05,4.49617e-08,-1.19009e-11,45692.1,27.7202], Tmin=(100,'K'), Tmax=(915.331,'K')), NASAPolynomial(coeffs=[9.49203,0.0261769,-1.03895e-05,1.84032e-09,-1.2306e-13,44178.8,-11.4302], Tmin=(915.331,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(379.084,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CtCsHH) + group(Cds-OdCsH) + group(Ct-CtCs) + group(Ct-CtH) + radical(CCCJ=O) + radical(CCJCC=O)"""),
)
species(
label = 'C#CC([CH2])C(=C)[O](26501)',
structure = SMILES('C#CC([CH2])C(=C)[O]'),
E0 = (325.059,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.647829,0.0714799,-7.85135e-05,4.17373e-08,-7.43084e-12,39218.5,25.1189], Tmin=(100,'K'), Tmax=(863.516,'K')), NASAPolynomial(coeffs=[14.6554,0.0180653,-5.6555e-06,8.73045e-10,-5.41924e-14,36371.6,-42.8808], Tmin=(863.516,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(325.059,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(Cs-(Cds-Cds)CtCsH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsHH) + group(Ct-CtCs) + group(Ct-CtH) + radical(C=C(C)OJ) + radical(Isobutyl)"""),
)
species(
label = 'C#CC1CC(=O)C1(26513)',
structure = SMILES('C#CC1CC(=O)C1'),
E0 = (121.27,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (94.1112,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.98566,0.0325712,2.25623e-05,-5.06821e-08,2.14139e-11,14668.5,20.5203], Tmin=(100,'K'), Tmax=(964.503,'K')), NASAPolynomial(coeffs=[11.0526,0.0236766,-8.25133e-06,1.47611e-09,-1.04463e-13,11584.2,-29.8168], Tmin=(964.503,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(121.27,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(303.478,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CtCsCsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cds-OdCsCs) + group(Ct-CtCs) + group(Ct-CtH) + ring(Cyclobutanone)"""),
)
species(
label = 'CO(12)',
structure = SMILES('[C-]#[O+]'),
E0 = (-119.219,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2084.51],'cm^-1')),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (28.0101,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(762.44,'J/mol'), sigma=(3.69,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""PrimaryTransportLibrary"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.5971,-0.00102424,2.83336e-06,-1.75825e-09,3.42587e-13,-14343.2,3.45822], Tmin=(100,'K'), Tmax=(1669.93,'K')), NASAPolynomial(coeffs=[2.92796,0.00181931,-8.35308e-07,1.51269e-10,-9.88872e-15,-14292.7,6.51157], Tmin=(1669.93,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-119.219,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""CO""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'C#CC([CH2])[CH2](26629)',
structure = SMILES('C#CC([CH2])[CH2]'),
E0 = (526.841,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2175,525,750,770,3400,2100,1380,1390,370,380,2900,435,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100],'cm^-1')),
HinderedRotor(inertia=(0.00245333,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.00268778,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(1.19638,'amu*angstrom^2'), symmetry=1, barrier=(57.3712,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (66.1011,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.59865,0.0445704,-4.03451e-05,2.09026e-08,-4.1805e-12,63457.8,21.3678], Tmin=(100,'K'), Tmax=(1443.27,'K')), NASAPolynomial(coeffs=[9.29133,0.0160372,-3.19385e-06,2.79115e-10,-8.33799e-15,61988.6,-15.9649], Tmin=(1443.27,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(526.841,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(245.277,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CtCsCsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Ct-CtCs) + group(Ct-CtH) + radical(Isobutyl) + radical(Isobutyl)"""),
)
species(
label = 'CH2(19)',
structure = SMILES('[CH2]'),
E0 = (381.563,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1032.72,2936.3,3459],'cm^-1')),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.8328,0.000224446,4.68033e-06,-6.04743e-09,2.59009e-12,45920.8,1.40666], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[3.16229,0.00281798,-7.56235e-07,5.05446e-11,5.65236e-15,46099.1,4.77656], Tmin=(1000,'K'), Tmax=(3000,'K'))], Tmin=(200,'K'), Tmax=(3000,'K'), E0=(381.563,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
species(
label = 'C#C[CH]C[C]=O(27336)',
structure = SMILES('[CH]=C=CC[C]=O'),
E0 = (361.877,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,1855,455,950,3010,987.5,1337.5,450,1655,3120,650,792.5,1650,540,610,2055],'cm^-1')),
HinderedRotor(inertia=(0.942019,'amu*angstrom^2'), symmetry=1, barrier=(21.6589,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.942967,'amu*angstrom^2'), symmetry=1, barrier=(21.6807,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (80.0847,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.83042,0.0411588,-2.90855e-05,8.37138e-09,-5.06207e-13,43607.3,21.7665], Tmin=(100,'K'), Tmax=(1230.55,'K')), NASAPolynomial(coeffs=[12.1824,0.0151463,-6.68669e-06,1.2802e-09,-9.02369e-14,40481.4,-32.6727], Tmin=(1230.55,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(361.877,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(224.491,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-O2d)(Cds-Cds)HH) + group(Cds-CdsCsH) + group(Cds-OdCsH) + group(Cds-CdsHH) + group(Cdd-CdsCds) + radical(C=C=CJ) + radical(CCCJ=O)"""),
)
species(
label = '[C]=O(361)',
structure = SMILES('[C]=O'),
E0 = (439.086,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3054.48],'cm^-1')),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (28.0101,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.08916,0.00200416,-1.61661e-05,2.55058e-08,-1.16424e-11,52802.7,4.52505], Tmin=(100,'K'), Tmax=(856.11,'K')), NASAPolynomial(coeffs=[0.961625,0.00569045,-3.48044e-06,7.19202e-10,-5.08041e-14,53738.7,21.4663], Tmin=(856.11,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(439.086,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(83.1447,'J/(mol*K)'), comment="""Thermo library: Klippenstein_Glarborg2016 + radical(CdCdJ2_triplet)"""),
)
species(
label = 'N2',
structure = SMILES('N#N'),
E0 = (-8.69489,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (28.0135,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(810.913,'J/mol'), sigma=(3.621,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""PrimaryTransportLibrary"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.61263,-0.00100893,2.49898e-06,-1.43376e-09,2.58636e-13,-1051.1,2.6527], Tmin=(100,'K'), Tmax=(1817.04,'K')), NASAPolynomial(coeffs=[2.9759,0.00164141,-7.19722e-07,1.25378e-10,-7.91526e-15,-1025.84,5.53757], Tmin=(1817.04,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-8.69489,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""N2""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'Ne',
structure = SMILES('[Ne]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (20.1797,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1235.53,'J/mol'), sigma=(3.758e-10,'m'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with fixed Lennard Jones Parameters. This is the fallback method! Try improving transport databases!"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ne""", comment="""Thermo library: primaryThermoLibrary"""),
)
transitionState(
label = 'TS1',
E0 = (375.139,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS2',
E0 = (455.669,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS3',
E0 = (455.617,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS4',
E0 = (518.348,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS5',
E0 = (568.009,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS6',
E0 = (452.061,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS7',
E0 = (643.784,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS8',
E0 = (436.4,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS9',
E0 = (508.52,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS10',
E0 = (532.013,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS11',
E0 = (493.079,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS12',
E0 = (491.275,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS13',
E0 = (603.242,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS14',
E0 = (596.62,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS15',
E0 = (611.769,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS16',
E0 = (505.82,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS17',
E0 = (433.715,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS18',
E0 = (438.54,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS19',
E0 = (438.54,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS20',
E0 = (535.075,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS21',
E0 = (624.685,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS22',
E0 = (620.74,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS23',
E0 = (383.424,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS24',
E0 = (433.24,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS25',
E0 = (743.44,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS26',
E0 = (965.926,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
reaction(
label = 'reaction1',
reactants = ['C#CC([CH2])C[C]=O(26509)'],
products = ['CH2CO(28)', 'CH2CHCCH(26391)'],
transitionState = 'TS1',
kinetics = Arrhenius(A=(5e+12,'s^-1'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Exact match found for rate rule [RJJ]
Euclidian distance = 0
family: 1,4_Linear_birad_scission"""),
)
reaction(
label = 'reaction2',
reactants = ['C#CC([CH2])C[C]=O(26509)'],
products = ['[CH]=C1CC1C[C]=O(27321)'],
transitionState = 'TS2',
kinetics = Arrhenius(A=(1.881e+08,'s^-1'), n=1.062, Ea=(80.5299,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 18 used for R4_S_T;triplebond_intra_H;radadd_intra_cs2H
Exact match found for rate rule [R4_S_T;triplebond_intra_H;radadd_intra_cs2H]
Euclidian distance = 0
family: Intra_R_Add_Exocyclic
Ea raised from 78.7 to 80.5 kJ/mol to match endothermicity of reaction."""),
)
reaction(
label = 'reaction3',
reactants = ['C#CC([CH2])C[C]=O(26509)'],
products = ['[CH]=C1C(=O)CC1[CH2](27322)'],
transitionState = 'TS3',
kinetics = Arrhenius(A=(1.98674e+07,'s^-1'), n=1.31443, Ea=(80.4773,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5_SS;multiplebond_intra;radadd_intra] for rate rule [R5_SS_T;triplebond_intra_H;radadd_intra_CO]
Euclidian distance = 2.44948974278
family: Intra_R_Add_Exocyclic"""),
)
reaction(
label = 'reaction4',
reactants = ['H(3)', 'C#CC(=C)C[C]=O(27323)'],
products = ['C#CC([CH2])C[C]=O(26509)'],
transitionState = 'TS4',
kinetics = Arrhenius(A=(9.17e+07,'cm^3/(mol*s)'), n=1.64, Ea=(15.6063,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 2632 used for Cds-CtCs_Cds-HH;HJ
Exact match found for rate rule [Cds-CtCs_Cds-HH;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction5',
reactants = ['H(3)', 'C#CC([CH2])C=C=O(27324)'],
products = ['C#CC([CH2])C[C]=O(26509)'],
transitionState = 'TS5',
kinetics = Arrhenius(A=(3.82e-16,'cm^3/(molecule*s)'), n=1.61, Ea=(10.992,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Cds_Ck;HJ] for rate rule [Cds-CsH_Ck;HJ]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction6',
reactants = ['C=[C][O](173)', 'CH2CHCCH(26391)'],
products = ['C#CC([CH2])C[C]=O(26509)'],
transitionState = 'TS6',
kinetics = Arrhenius(A=(0.00294841,'m^3/(mol*s)'), n=2.48333, Ea=(17.6885,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Cds-CtH_Cds-HH;CJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction7',
reactants = ['C2H(33)', 'C=CC[C]=O(2390)'],
products = ['C#CC([CH2])C[C]=O(26509)'],
transitionState = 'TS7',
kinetics = Arrhenius(A=(0.00168615,'m^3/(mol*s)'), n=2.52599, Ea=(19.6608,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Cds-CsH_Cds-HH;CJ] for rate rule [Cds-CsH_Cds-HH;CtJ_Ct]
Euclidian distance = 2.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction8',
reactants = ['CH2CO(28)', '[CH]=[C]C=C(4699)'],
products = ['C#CC([CH2])C[C]=O(26509)'],
transitionState = 'TS8',
kinetics = Arrhenius(A=(0.284303,'m^3/(mol*s)'), n=1.93802, Ea=(45.6341,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""Estimated using an average for rate rule [Cds-HH_Ck;CJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction9',
reactants = ['C#C[C](C)C[C]=O(27325)'],
products = ['C#CC([CH2])C[C]=O(26509)'],
transitionState = 'TS9',
kinetics = Arrhenius(A=(2.307e+09,'s^-1'), n=1.31, Ea=(203.342,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 163 used for R2H_S;C_rad_out_OneDe/Cs;Cs_H_out_2H
Exact match found for rate rule [R2H_S;C_rad_out_OneDe/Cs;Cs_H_out_2H]
Euclidian distance = 0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction10',
reactants = ['C#CC([CH2])C[C]=O(26509)'],
products = ['C#CC([CH2])[CH]C=O(27326)'],
transitionState = 'TS10',
kinetics = Arrhenius(A=(791180,'s^-1'), n=2.19286, Ea=(156.873,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R2H_S;Y_rad_out;Cs_H_out_H/NonDeC] for rate rule [R2H_S;CO_rad_out;Cs_H_out_H/NonDeC]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction11',
reactants = ['C#CC([CH2])C[C]=O(26509)'],
products = ['C#CC(C)[CH][C]=O(27327)'],
transitionState = 'TS11',
kinetics = Arrhenius(A=(166690,'s^-1'), n=2.17519, Ea=(117.939,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3H_SS_Cs;C_rad_out_2H;XH_out]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction12',
reactants = ['C#CC([CH2])C[C]=O(26509)'],
products = ['C#C[C]([CH2])CC=O(27328)'],
transitionState = 'TS12',
kinetics = Arrhenius(A=(285601,'s^-1'), n=2.01653, Ea=(116.136,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R3H_SS_Cs;Y_rad_out;XH_out] for rate rule [R3H_SS_Cs;CO_rad_out;XH_out]
Euclidian distance = 1.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction13',
reactants = ['[C]#CC(C)C[C]=O(27329)'],
products = ['C#CC([CH2])C[C]=O(26509)'],
transitionState = 'TS13',
kinetics = Arrhenius(A=(1.39293e+07,'s^-1'), n=1.32074, Ea=(96.0416,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4H_RSS;Y_rad_out;Cs_H_out_2H] for rate rule [R4H_TSS;Ct_rad_out;Cs_H_out_2H]
Euclidian distance = 1.41421356237
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction14',
reactants = ['[C]#CC([CH2])CC=O(27330)'],
products = ['C#CC([CH2])C[C]=O(26509)'],
transitionState = 'TS14',
kinetics = Arrhenius(A=(380071,'s^-1'), n=1.62386, Ea=(44.2978,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5H_RSSR;Y_rad_out;XH_out] for rate rule [R5H_TSSS;Ct_rad_out;CO_H_out]
Euclidian distance = 2.44948974278
family: intra_H_migration"""),
)
reaction(
label = 'reaction15',
reactants = ['C=[C][O](173)', '[CH]=[C]C=C(4699)'],
products = ['C#CC([CH2])C[C]=O(26509)'],
transitionState = 'TS15',
kinetics = Arrhenius(A=(7.46075e+06,'m^3/(mol*s)'), n=0.027223, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;Y_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -14.4 to 0 kJ/mol."""),
)
reaction(
label = 'reaction16',
reactants = ['C#CC([CH2])C[C]=O(26509)'],
products = ['O=[C]CC1[C]=CC1(27331)'],
transitionState = 'TS16',
kinetics = Arrhenius(A=(3.27074e+08,'s^-1'), n=0.924088, Ea=(130.68,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4_S;multiplebond_intra;radadd_intra_cs2H] for rate rule [R4_S_T;triplebond_intra_H;radadd_intra_cs2H]
Euclidian distance = 2.2360679775
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction17',
reactants = ['C#CC([CH2])C[C]=O(26509)'],
products = ['[CH2]C1[C]=CC(=O)C1(27332)'],
transitionState = 'TS17',
kinetics = Arrhenius(A=(3.47e+11,'s^-1'), n=0.15, Ea=(58.576,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5_SS_T;triplebond_intra_H;radadd_intra] for rate rule [R5_SS_T;triplebond_intra_H;radadd_intra_CO]
Euclidian distance = 1.0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction18',
reactants = ['C#CC([CH2])C[C]=O(26509)'],
products = ['C#CC(=C)CC=O(27333)'],
transitionState = 'TS18',
kinetics = Arrhenius(A=(7.437e+08,'s^-1'), n=1.045, Ea=(63.4002,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3radExo;Y_rad;XH_Rrad]
Euclidian distance = 0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction19',
reactants = ['C#CC([CH2])C[C]=O(26509)'],
products = ['C#CC(C)C=C=O(27334)'],
transitionState = 'TS19',
kinetics = Arrhenius(A=(1.4874e+09,'s^-1'), n=1.045, Ea=(63.4002,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3radExo;Y_rad;XH_Rrad]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction20',
reactants = ['C#CC([CH2])C[C]=O(26509)'],
products = ['C#C[CH]CC[C]=O(26508)'],
transitionState = 'TS20',
kinetics = Arrhenius(A=(6.55606e+10,'s^-1'), n=0.64, Ea=(159.935,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [cCs(-HC)CJ;CsJ;C] for rate rule [cCs(-HC)CJ;CsJ-HH;C]
Euclidian distance = 1.0
family: 1,2_shiftC"""),
)
reaction(
label = 'reaction21',
reactants = ['C#CC[CH]C[C]=O(27335)'],
products = ['C#CC([CH2])C[C]=O(26509)'],
transitionState = 'TS21',
kinetics = Arrhenius(A=(3.53e+06,'s^-1'), n=1.73, Ea=(245.601,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [cCs(-HH)CJ;CsJ;C] for rate rule [cCs(-HH)CJ;CsJ-CsH;C]
Euclidian distance = 1.0
family: 1,2_shiftC"""),
)
reaction(
label = 'reaction22',
reactants = ['C#CC([CH2])C[C]=O(26509)'],
products = ['C#CC([CH2])C(=C)[O](26501)'],
transitionState = 'TS22',
kinetics = Arrhenius(A=(3.53e+06,'s^-1'), n=1.73, Ea=(245.601,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [cCs(-HH)CJ;CJ;C]
Euclidian distance = 0
family: 1,2_shiftC"""),
)
reaction(
label = 'reaction23',
reactants = ['C#CC([CH2])C[C]=O(26509)'],
products = ['C#CC1CC(=O)C1(26513)'],
transitionState = 'TS23',
kinetics = Arrhenius(A=(1.62e+12,'s^-1'), n=-0.305, Ea=(8.28432,'kJ/mol'), T0=(1,'K'), Tmin=(600,'K'), Tmax=(2000,'K'), comment="""Estimated using an average for rate rule [R4_SSS;C_rad_out_2H;Ypri_rad_out]
Euclidian distance = 0
family: Birad_recombination"""),
)
reaction(
label = 'reaction24',
reactants = ['CO(12)', 'C#CC([CH2])[CH2](26629)'],
products = ['C#CC([CH2])C[C]=O(26509)'],
transitionState = 'TS24',
kinetics = Arrhenius(A=(2461.18,'m^3/(mol*s)'), n=1.0523, Ea=(25.6182,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [COm;C_rad/H2/Cs]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: R_Addition_COm"""),
)
reaction(
label = 'reaction25',
reactants = ['CH2(19)', 'C#C[CH]C[C]=O(27336)'],
products = ['C#CC([CH2])C[C]=O(26509)'],
transitionState = 'TS25',
kinetics = Arrhenius(A=(1.06732e+06,'m^3/(mol*s)'), n=0.472793, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Y_rad;Birad] for rate rule [C_rad/H/OneDeC;Birad]
Euclidian distance = 4.0
family: Birad_R_Recombination
Ea raised from -3.5 to 0 kJ/mol."""),
)
reaction(
label = 'reaction26',
reactants = ['[C]=O(361)', 'C#CC([CH2])[CH2](26629)'],
products = ['C#CC([CH2])C[C]=O(26509)'],
transitionState = 'TS26',
kinetics = Arrhenius(A=(2.13464e+06,'m^3/(mol*s)'), n=0.472793, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Y_rad;Birad] for rate rule [C_rad/H2/Cs;Birad]
Euclidian distance = 3.0
Multiplied by reaction path degeneracy 2.0
family: Birad_R_Recombination
Ea raised from -3.5 to 0 kJ/mol."""),
)
network(
label = '4477',
isomers = [
'C#CC([CH2])C[C]=O(26509)',
],
reactants = [
('CH2CO(28)', 'CH2CHCCH(26391)'),
],
bathGas = {
'N2': 0.5,
'Ne': 0.5,
},
)
pressureDependence(
label = '4477',
Tmin = (300,'K'),
Tmax = (2000,'K'),
Tcount = 8,
Tlist = ([302.47,323.145,369.86,455.987,609.649,885.262,1353.64,1896.74],'K'),
Pmin = (0.01,'bar'),
Pmax = (100,'bar'),
Pcount = 5,
Plist = ([0.0125282,0.0667467,1,14.982,79.8202],'bar'),
maximumGrainSize = (0.5,'kcal/mol'),
minimumGrainCount = 250,
method = 'modified strong collision',
interpolationModel = ('Chebyshev', 6, 4),
activeKRotor = True,
activeJRotor = True,
rmgmode = True,
)
|
from math import *
import numpy as np
class Surface(object):
    """Container for per-surface, per-energy-group quantities in a diffusion solver."""
    def __init__(self, num_groups):
        self.current = np.zeros(num_groups)  # net current across the surface, per group
        self.DDif = np.zeros(num_groups)     # diffusion coefficient term, per group
        self.DTilde = np.zeros(num_groups)   # coupling coefficient D-tilde (CMFD-style)
        self.DHat = np.zeros(num_groups)     # correction coefficient D-hat (CMFD-style)
        self.boundary = 'reflective'         # boundary condition for this surface
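# Minimal usage sketch (an assumption, not from the original file): construct
# a two-group surface and inspect its zero-initialized fields.
if __name__ == '__main__':
    surf = Surface(num_groups=2)
    print(surf.boundary)  # 'reflective'
    print(surf.current)   # [0. 0.]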
|
"""
Function to teardown the openshift-logging
"""
import logging
from ocs_ci.ocs import constants, ocp
from ocs_ci.ocs.resources.pvc import get_all_pvc_objs, delete_pvcs
from ocs_ci.ocs.resources.pod import get_all_pods
from ocs_ci.ocs.exceptions import UnexpectedBehaviour, CommandFailed
from ocs_ci.utility.retry import retry
from ocs_ci.helpers.helpers import (
fetch_used_size,
default_ceph_block_pool,
verify_volume_deleted_in_backend,
)
logger = logging.getLogger(__name__)
@retry(UnexpectedBehaviour, 5, 30, 2)
def check_pod_vanished(pod_names):
"""
    A function to check that all the given pods have vanished from the namespace
"""
pod_list_current = get_all_pods(namespace=constants.OPENSHIFT_LOGGING_NAMESPACE)
pod_names_current = [pod.name for pod in pod_list_current]
for pod in pod_names:
if pod in pod_names_current:
raise UnexpectedBehaviour
def delete_logging_namespaces(force=False):
"""
Deleting namespaces
1. Openshift-operators-redhat
2. Openshift-logging
"""
openshift_logging_namespace = ocp.OCP(
kind=constants.NAMESPACES, resource_name=constants.OPENSHIFT_LOGGING_NAMESPACE
)
openshift_operators_redhat_namespace = ocp.OCP(
kind=constants.NAMESPACES,
resource_name=constants.OPENSHIFT_OPERATORS_REDHAT_NAMESPACE,
)
try:
openshift_operators_redhat_namespace.delete(
resource_name=constants.OPENSHIFT_OPERATORS_REDHAT_NAMESPACE,
force=force,
wait=True,
)
logger.info("The project openshift-operators-redhat got deleted successfully")
except CommandFailed as e:
logger.info("Namespace not found" f"Error message {e}")
try:
openshift_logging_namespace.delete(
resource_name=constants.OPENSHIFT_LOGGING_NAMESPACE,
force=force,
wait=True,
)
logger.info("The namespace openshift-logging got deleted successfully")
except CommandFailed as e:
logger.info("Namespace not found" f"Error message {e}")
def uninstall_cluster_logging():
"""
Function to uninstall cluster-logging from the cluster
Deletes the project "openshift-logging" and "openshift-operators-redhat"
"""
# Validating the pods before deleting the instance
pod_list = get_all_pods(namespace=constants.OPENSHIFT_LOGGING_NAMESPACE)
for pod in pod_list:
logger.info(f"Pods running in the openshift-logging namespace {pod.name}")
# Excluding cluster-logging-operator from pod_list and getting pod names
pod_names_list = [
pod.name
for pod in pod_list
if not pod.name.startswith("cluster-logging-operator")
]
pvc_objs = get_all_pvc_objs(namespace=constants.OPENSHIFT_LOGGING_NAMESPACE)
# Fetch image uuid associated with PVCs to be deleted
pvc_uuid_map = {}
for pvc_obj in pvc_objs:
pvc_uuid_map[pvc_obj.name] = pvc_obj.image_uuid
# Checking for used space
cbp_name = default_ceph_block_pool()
used_space_before_deletion = fetch_used_size(cbp_name)
logger.info(
f"Used space before deletion of cluster logging {used_space_before_deletion}"
)
# Deleting the clusterlogging instance
clusterlogging_obj = ocp.OCP(
kind=constants.CLUSTER_LOGGING, namespace=constants.OPENSHIFT_LOGGING_NAMESPACE
)
try:
clusterlogging_obj.delete(resource_name="instance", wait=True)
logger.info("Instance got deleted successfully")
check_pod_vanished(pod_names_list)
except CommandFailed as error:
delete_logging_namespaces(force=True)
raise error
    # Collect the backing PV objects before deleting the PVCs, so that every
    # PV can be waited on afterwards (previously only the last PV was tracked)
    pv_objs = [pvc_obj.backed_pv_obj for pvc_obj in pvc_objs]
    assert delete_pvcs(pvc_objs=pvc_objs), "PVCs deletion failed"
    for pvc_obj, pv_obj in zip(pvc_objs, pv_objs):
        pvc_obj.ocp.wait_for_delete(resource_name=pvc_obj.name, timeout=300)
        pv_obj.ocp.wait_for_delete(resource_name=pv_obj.name, timeout=300)
    logger.info("Verified: PVCs are deleted.")
    logger.info("Verified: PVs are deleted.")
for pvc_name, uuid in pvc_uuid_map.items():
rbd = verify_volume_deleted_in_backend(
interface=constants.CEPHBLOCKPOOL, image_uuid=uuid, pool_name=cbp_name
)
assert rbd, f"Volume associated with PVC {pvc_name} still exists " f"in backend"
# Checking for used space after PVC deletion
used_space_after_deletion = fetch_used_size(cbp_name)
logger.info(
f"Used space after deletion of cluster logging {used_space_after_deletion}"
)
    if used_space_after_deletion < used_space_before_deletion:
        logger.info("Expected: space was reclaimed after deleting the PVCs")
    else:
        logger.warning("Unexpected: no space was reclaimed after deleting the PVCs")
# Deleting the RBAC permission set
rbac_role = ocp.OCP(
kind=constants.ROLE, namespace=constants.OPENSHIFT_OPERATORS_REDHAT_NAMESPACE
)
rbac_role.delete(yaml_file=constants.EO_RBAC_YAML)
delete_logging_namespaces()
|
"""
Challenge #2
Write a function that takes an integer 'minutes' and converts it to seconds.
Examples:
- convert(5) -> 300
- convert(3) -> 180
- convert(2) -> 120
"""
def convert(minutes):
return minutes * 60
print(convert(5)) #300
print(convert(3)) #180
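# Covering the docstring's third example as well:
print(convert(2)) #120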
|
#web scrape into csv from yahoo for Weekly Projection
import os, ssl
if (not os.environ.get('PYTHONHTTPSVERIFY', '') and getattr(ssl, '_create_unverified_context', None)):
ssl._create_default_https_context = ssl._create_unverified_context
import requests,csv
import pandas as pd
from bs4 import BeautifulSoup
url_base = 'https://football.fantasysports.yahoo.com/f1/1185/players?status=ALL&pos=O&cut_type=9&stat1=S_PW_10&myteam=0&sort=AR&sdir=1&count={}'
counts = [0,25]
player_data = []
names = []
position = []
byes = []
projected = []
for count in counts:
url = url_base.format(str(count))
#print(url)
df = pd.read_html(url,header=1)
playerTable = df[0]
    # Yahoo repeats the header row inside the table body; drop those rows
    playerTable = playerTable[playerTable.Forecast != "Forecast"]
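    # Hedged completion sketch (not in the original script): accumulate each
    # page, then write the combined table to CSV to match the stated goal of
    # scraping into a CSV. The output filename is an assumption.
    player_data.append(playerTable)
if player_data:
    pd.concat(player_data).to_csv('weekly_projection.csv', index=False)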
|
'''
Copyright (C) 2017-2023 Bryant Moscon - bmoscon@gmail.com
Please see the LICENSE file for the terms and conditions
associated with this software.
'''
import asyncio
from cryptofeed.defines import ASK, BID
from datetime import datetime as dt, timedelta
from decimal import Decimal
from cryptofeed.exchanges import Deribit
d = Deribit()
def teardown_module(module):
try:
loop = asyncio.get_running_loop()
except RuntimeError:
loop = asyncio.new_event_loop()
loop.run_until_complete(d.shutdown())
class TestDeribitRest:
def test_trade(self):
ret = []
for data in d.trades_sync('BTC-USD-PERP'):
ret.extend(data)
assert len(ret) > 1
def test_trades(self):
ret = []
start = dt.utcnow() - timedelta(days=5)
end = dt.utcnow() - timedelta(days=4, hours=18)
for data in d.trades_sync('BTC-USD-PERP', start=start, end=end):
ret.extend(data)
assert len(ret) > 0
assert ret[0]['symbol'] == 'BTC-USD-PERP'
assert isinstance(ret[0]['price'], Decimal)
assert isinstance(ret[0]['amount'], Decimal)
def test_l2_book(self):
ret = d.l2_book_sync('BTC-USD-PERP')
assert len(ret.book[BID]) > 0
assert len(ret.book[ASK]) > 0
|
import numpy as np
from utils.data_utils_kitti import wrap_angle
class OdometryBaseline():
def __init__(self, *args, **kwargs):
pass
def fit(self, *args, **kwargs):
pass
def predict(self, sess, batch, **kwargs):
seq_len = batch['s'].shape[1]
prediction = np.zeros_like(batch['s'])
state = batch['s'][:, 0, :]
# print('shape:', batch['s'].shape)
prediction[:, 0, :] = state
for i in range(1, seq_len):
action = batch['a'][:, i, :]
theta = state[:, 2:3]
sin_theta = np.sin(theta)
cos_theta = np.cos(theta)
new_x = state[:, 0:1] + (action[:, 0:1] * cos_theta + action[:, 1:2] * sin_theta)
new_y = state[:, 1:2] + (action[:, 0:1] * sin_theta - action[:, 1:2] * cos_theta)
new_theta = wrap_angle(state[:, 2:3] + action[:, 2:3])
# copy old and set new particles
state = np.concatenate([new_x, new_y, new_theta], axis=-1)
prediction[:, i, :] = state
return prediction
def predict_kitti(self, sess, batch, **kwargs):
seq_len = batch['s'].shape[1]
prediction = np.zeros_like(batch['s'])
state = batch['s'][:, 0, :]
# print('shape:', batch['s'].shape)
prediction[:, 0, :] = state
for i in range(1, seq_len):
            time = 0.103  # fixed time step between samples, in seconds
action = batch['a'][:, i, :]
            heading = wrap_angle(state[:, 2:3])  # wrap_angle returns the wrapped value
sin_heading = np.sin(heading)
cos_heading = np.cos(heading)
# ang_acc = (noisy_actions[:, :, 1:2] * noisy_actions[:, :, 2:3])/(noisy_actions[:, :, 0:1] ** 2)
acc_north = action[:, 0:1] * sin_heading + action[:, 1:2] * cos_heading
acc_east = - action[:, 1:2] * sin_heading + action[:, 0:1] * cos_heading
new_north = state[:, 0:1] + state[:, 3:4] * time
new_east = state[:, 1:2] + state[:, 4:5] * time
            new_theta = wrap_angle(state[:, 2:3] + state[:, 5:6] * time)
new_vn = state[:, 3:4] + acc_north * time
new_ve = state[:, 4:5] + acc_east * time
new_theta_dot = state[:, 5:6] + action[:, 2:3] * time
state = np.concatenate([new_north, new_east, new_theta, new_vn, new_ve, new_theta_dot], axis=-1)
prediction[:, i, :] = state
return prediction
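# Minimal usage sketch (an assumption, not from the original file; it requires
# utils.data_utils_kitti to be importable): with all-zero actions the baseline
# simply propagates the initial state through the sequence.
if __name__ == '__main__':
    batch = {
        's': np.zeros((2, 10, 3)),  # (batch, time, [x, y, theta])
        'a': np.zeros((2, 10, 3)),  # (batch, time, odometry actions)
    }
    baseline = OdometryBaseline()
    pred = baseline.predict(None, batch)  # sess is unused by this baseline
    print(pred.shape)  # (2, 10, 3)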
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Sep 5 14:50:16 2021
@author: charlescollins
"""
import csv
import re
with open('street_suffix_abbreviations.csv', mode='r') as abbr_file:
reader = csv.reader(abbr_file)
all_street_suffixes = {rows[0]:rows[1] for rows in reader}
with open('secondary_address_abbreviations.csv', mode='r') as abbr_file:
reader = csv.reader(abbr_file)
secondary_address_abbreviations = {rows[0]:rows[1] for rows in reader}
with open('directional_abbreviations.csv', mode='r') as abbr_file:
reader = csv.reader(abbr_file)
directional_abbreviations = {rows[0]:rows[1] for rows in reader}
def standardize_directions(address_line):
"""Replaces common directional indicators with their abbreviation. Example: NORTH with N
args:
address_line (str): The address line (example: '1234 main st' )
"""
for direction_name, direction_abbreviation in directional_abbreviations.items():
address_line = re.sub(r'\b' + direction_name + r'\b', direction_abbreviation, address_line)
return address_line
def standardize_street_suffixes(address_line1):
    """Replaces common street suffixes with the standard US postal abbreviation. The suffix must
    appear at the end of the string.
    args:
        address_line1 (str): The address line1 (example: '1234 main st')
    """
for suffix_name, suffix_abbreviation in all_street_suffixes.items():
address_line1 = re.sub(r'\b' + suffix_name + r'$', suffix_abbreviation, address_line1)
return address_line1
def standardize_secondary_indicators(address_line2):
    """Replaces common secondary address indicators with the standard US postal abbreviation.
    Example: APARTMENT with APT.
    args:
        address_line2 (str): The address line2 (example: 'APARTMENT 2')
    """
for secondary_address_indicator, secondary_address_abbreviation in secondary_address_abbreviations.items():
address_line2 = re.sub(r'\b' + secondary_address_indicator + r'\b', secondary_address_abbreviation, address_line2)
return address_line2
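# Brief usage sketch (assumes the CSV files above contain the standard USPS
# mappings, e.g. NORTH -> N, STREET -> ST, APARTMENT -> APT):
if __name__ == '__main__':
    line1 = standardize_directions('1234 NORTH MAIN STREET')
    line1 = standardize_street_suffixes(line1)
    print(line1)  # expected: '1234 N MAIN ST'
    print(standardize_secondary_indicators('APARTMENT 2'))  # expected: 'APT 2'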
|