text stringlengths 38 1.54M |
|---|
import pandas as pd
import numpy as np
import math
class ErlangC:
    """Erlang-C (M/M/c) queueing model.

    Attributes:
        lmbd: mean arrival rate of customers into the system.
        s_bar: expected customer service time.
        n_agents: number of agents (servers).
        rho: offered load (mean number of busy servers).
        a: per-server utilization; the model is stable only for a < 1.
    """

    def __init__(
        self,
        lmbd: float,     # mean arrival rate of customers into the system
        s_bar: float,    # expected customer service time
        n_agents: int,   # number of agents
    ):
        self.lmbd = lmbd
        self.s_bar = s_bar
        self.n_agents = n_agents
        # Offered load and per-server utilization.
        self.rho = lmbd * s_bar
        self.a = self.rho / n_agents

    @property
    def p_0(self):
        """Probability that the system is empty (no customers at all)."""
        erlang_sum = np.sum([self.rho ** k / math.factorial(k) for k in range(self.n_agents)])
        queue_term = self.rho ** self.n_agents / math.factorial(self.n_agents) / (1 - self.a)
        return np.reciprocal(erlang_sum + queue_term)

    @property
    def p_waiting(self):
        """Erlang-C probability that an arriving customer must wait in queue."""
        # rho^n / (n-1)! / (n - rho) == (rho^n / n!) * n / (n - rho)
        wait_term = self.rho ** self.n_agents / math.factorial(self.n_agents - 1) / (self.n_agents - self.rho)
        total = np.sum([self.rho ** k / math.factorial(k) for k in range(self.n_agents)]) + wait_term
        return wait_term / total
if __name__ == '__main__':
    # Bug fix: the original call `ErlangC(lmbd=6, )` omitted the required
    # s_bar and n_agents arguments and raised TypeError when run.
    tst = ErlangC(lmbd=6, s_bar=0.5, n_agents=5)
|
import pytest
import sys, os
# Python 2/3 compatibility shim: the `Queue` module was renamed `queue` in
# Python 3. Bug fix: narrowed the original bare `except:` (which also caught
# KeyboardInterrupt/SystemExit) to ImportError.
try:
    import Queue
except ImportError:
    import queue as Queue
# Adds App, Config directory to sys path for testing
path = os.path.abspath(__file__)
app = path[0:path.find("test")] + "app"
config = path[0:path.find("test")] + "config"
sys.path.append(app)
sys.path.append(config)
from SubscriptionHandler import SubscriptionHandler
# Module-level fixtures shared by every test; populated in setup_module().
subscriptionHandler = None  # SubscriptionHandler instance under test
request = None  # canonical request payload reused (and mutated) by the tests

def setup_module(module):
    """Build the shared handler and request fixture, then seed subscriptions."""
    global subscriptionHandler
    global request
    # Two queues -- presumably inbound requests and outbound responses;
    # test_fileJob reads serviceResponseQueue. TODO confirm queue roles.
    subscriptionHandler = SubscriptionHandler(Queue.Queue(), Queue.Queue())
    request = {
        'messageInfo' : {
            'action': "WriteToFile",
            'responseType' : "text",
            'slackUserId' : "someId",
            'channel' : "someChannel",
            'response' : "Some Response"
        },
        'scheduleJob' : {
            'action' : None,
            'type' : None,
            'serviceName' : "serviceName",
            'serviceTag' : None,
            'frequency' : None,
            'interval' : None,
            'time' : None,
            'day' : None
        }
    }
    # Seed subscriptions; tags are "<slackUserId>_<serviceName>".
    # User 1UFJD2D gets three services, 2UFJD2D two, 3UFJD2D one.
    subscriptionHandler.addUserToSubscription("1UFJD2D_Intro Service")
    subscriptionHandler.addUserToSubscription("1UFJD2D_Another Service")
    subscriptionHandler.addUserToSubscription("1UFJD2D_Dumb Service")
    subscriptionHandler.addUserToSubscription("2UFJD2D_Intro Service")
    subscriptionHandler.addUserToSubscription("2UFJD2D_Crazy Service")
    subscriptionHandler.addUserToSubscription("3UFJD2D_Dumb Service")
def test_unscheduleJob():
    """Smoke test: unscheduleJob accepts the shared request fixture."""
    subscriptionHandler.unscheduleJob(request)
    #assert(schedule.jobs == 0)

# TODO: Decouple ServiceValidatore from Config file
def test_setUpServiceFunction():
    pass

def test_produceTag():
    """A tag is <slackUserId>_<serviceName>."""
    user = request['messageInfo']['slackUserId']
    service = request['scheduleJob']['serviceName']
    assert subscriptionHandler.produceTag(request) == user + "_" + service

def test_isIntraDay_happyPath():
    request['scheduleJob']['type'] = 'intra-day'
    assert subscriptionHandler.isIntraDay(request) == True

def test_isIntraDay_sadPath():
    request['scheduleJob']['type'] = 'intra-hour'
    assert subscriptionHandler.isIntraDay(request) == False

def test_isIntraMonth_happyPath():
    request['scheduleJob']['type'] = 'intra-month'
    assert subscriptionHandler.isIntraMonth(request) == True

def test_isIntraMonth_sadPath():
    request['scheduleJob']['type'] = 'intra-second'
    assert subscriptionHandler.isIntraMonth(request) == False

def test_isIntraYear_happyPath():
    request['scheduleJob']['type'] = 'intra-year'
    assert subscriptionHandler.isIntraYear(request) == True

def test_isIntraYear_sadPath():
    request['scheduleJob']['type'] = 'intra-ms'
    assert subscriptionHandler.isIntraYear(request) == False

def test_fileJob():
    """Filing a job enqueues exactly one entry on the response queue."""
    subscriptionHandler.fileJob(request['messageInfo'])
    assert subscriptionHandler.serviceResponseQueue.qsize() == 1

def test_subscriptionExists_Happy():
    assert subscriptionHandler.subscriptionExists('1UFJD2D_Intro Service') == True

def test_subscriptionExists_Sad():
    assert subscriptionHandler.subscriptionExists('13FSUJD_Intro Service') == False
def test_addUserToSubscription_Happy():
    """All three users seeded in setup_module() exist; user 1 has 3 services.

    Bug fix: the original assigned userId_2 twice ('2UFJD2D' was immediately
    overwritten with '3UFJD2D'), so user 2's presence was never asserted.
    """
    userId_1 = '1UFJD2D'
    userId_2 = '2UFJD2D'
    userId_3 = '3UFJD2D'
    expectedUserCount = 3
    expectedServiceCount = 3
    resultUserCount = len(subscriptionHandler.usersSubscriptions)
    resultServiceCount = len(subscriptionHandler.usersSubscriptions[userId_1])
    assert(resultUserCount == expectedUserCount)
    assert(userId_1 in subscriptionHandler.usersSubscriptions)
    assert(userId_2 in subscriptionHandler.usersSubscriptions)
    assert(userId_3 in subscriptionHandler.usersSubscriptions)
    assert(resultServiceCount == expectedServiceCount)

def test_addUserToSubscription_Sad():
    pass
def test_removeUserFromSubscription_Happy():
    """Removing one service per user keeps user count, shrinks service count.

    Bug fix: the original assigned tag1_0 twice, making the
    '1UFJD2D_Intro Service' value dead code -- only the 'Dumb Service' tags
    were ever removed. The dead assignment is dropped; the actual removals
    and expected counts are unchanged.
    """
    tag1_0 = '1UFJD2D_Dumb Service'
    tag2_0 = '2UFJD2D_Dumb Service'
    userId_1 = '1UFJD2D'
    subscriptionHandler.removeUserFromSubscription(tag1_0)
    subscriptionHandler.removeUserFromSubscription(tag2_0)
    expectedUserCount = 3
    expectedServiceCount = 2
    resultUserCount = len(subscriptionHandler.usersSubscriptions)
    resultServiceCount = len(subscriptionHandler.usersSubscriptions[userId_1])
    assert(resultUserCount == expectedUserCount)
    assert(resultServiceCount == expectedServiceCount)
    assert(userId_1 in subscriptionHandler.usersSubscriptions)

def test_removeUserFromSubscription_Sad():
    pass
def test_getUserIdsForServiceName():
    # NOTE(review): the name says getUserIdsForServiceName but the body
    # exercises getServicesListForUsersId -- consider renaming.
    # Also order-dependent: assumes test_removeUserFromSubscription_Happy
    # already removed 'Dumb Service' for this user.
    userId = '1UFJD2D'
    expected = ['Intro Service', 'Another Service']
    result = subscriptionHandler.getServicesListForUsersId(userId)
    assert(expected == result)

def getServicesListForUsersId():
    # NOTE(review): missing the test_ prefix, so pytest never collects
    # this placeholder.
    pass

# def test_unsceduleJob() :
#     expectedResult = subscriptionHandler.unsceduleJob()
#     assert(expectedResult == )
import glob
import xarray as xr
import numpy as np
import pandas as pd
from scipy import stats
RUTA = '/home/users/vg140344/datos/data/fogt/'
#lista = xr.open_mfdataset(RUTA + "correlations/seasonal*_SPoV_enso*.nc4", chunks=None)
#file = RUTA + "seasonal_correlations_SPoV_enso_polar.nc4"
#correlations = xr.open_dataset(file)
#seasons = np.unique(lista.seas.values)
#df = pd.DataFrame()
#for i in seasons:
# for ii in lista.data_vars:
# aux = np.percentile(lista[ii].values[lista[ii].seas == i], [5, 95])
# pvalue = stats.percentileofscore(lista[ii].values[lista.seas == i],
# correlations[ii].sel(**{'seas':i}).values)
# df = df.append(pd.DataFrame({'Field': ii, 'Percentile':[pvalue],
# '5th Percentile': [aux[0]], '95th Percentile': [aux[1]]},
# index=[i], columns=['Field', 'Percentile', '5th Percentile', '95th Percentile']))
#df = df.loc[['ASO', 'SON', 'OND', 'NDJ', 'DJF']]
#
#df.to_csv(RUTA + "percentile_seasonal_correlations_composites_SPoV_enso.csv")
#
# Build, per month and field, the 5th/95th percentile range of the composite
# correlation distribution plus the percentile rank of the observed
# correlation within that distribution, and export the table to CSV.
lista = xr.open_mfdataset(RUTA + "correlations/monthly*_SPoV_enso*.nc4", chunks=70000)
file = RUTA + "monthly_correlations_SPoV_enso_polar.nc4"
correlations = xr.open_dataset(file)
months = np.unique(lista.month.values)
rows = []
for i in months:
    for ii in lista.data_vars:
        # 5th/95th percentiles of the resampled correlations for this month.
        aux = np.percentile(lista[ii].values[lista[ii].month == i], [5, 95])
        # Percentile rank of the observed correlation in that distribution.
        pvalue = stats.percentileofscore(lista[ii].values[lista.month == i],
                                         correlations[ii].sel(**{'month': i}).values)
        rows.append(pd.DataFrame({'Field': ii, 'Percentile': [pvalue],
                                  '5th Percentile': [aux[0]], '95th Percentile': [aux[1]]},
                                 index=[i],
                                 columns=['Field', 'Percentile', '5th Percentile', '95th Percentile']))
# Bug fixes: DataFrame.append was deprecated in pandas 1.4 and removed in
# 2.0 -- collect rows and concat once; 'Ded' was a typo for 'Dec' and raised
# a KeyError when reindexing.
df = pd.concat(rows)
df = df.loc[['Aug', 'Sep', 'Oct', 'Nov', 'Dec', 'Jan', 'Feb']]
df.to_csv(RUTA + "percentile_monthly_correlations_composites_SPoV_enso.csv")
|
class ListNode:
    """Node of a singly linked list."""
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next


def partition(head, x):
    """Stable-partition a linked list around x.

    All nodes with val < x come first (in original relative order), followed
    by the remaining nodes (also in original order). Returns the new head.
    """
    # Collect the two partitions on separate dummy-headed chains.
    small_head = small = ListNode()
    big_head = big = ListNode()
    node = head
    while node:
        if node.val < x:
            small.next = node
            small = small.next
        else:
            big.next = node
            big = big.next
        node = node.next
    big.next = None              # terminate the >= x chain
    small.next = big_head.next   # splice: smalls then bigs
    return small_head.next
# Demo: partition [1, 4, 3, 2, 5, 2] around 3 -> 1, 2, 2, 4, 3, 5.
head = ListNode(1,ListNode(4,ListNode(3,ListNode(2,ListNode(5,ListNode(2))))))
x = 3
ans = partition(head,x)
print(ans.next.next.val)  # third node of the result: 2
import random
import string
import pygame
# "Matrix rain" demo: random lowercase letters fall down the window in
# FONT_PX-wide columns, fading via a translucent overlay each frame.
PANEL_width = 1000
PANEL_highly = 800
FONT_PX = 15  # column width in pixels; one falling "drop" per column
pygame.init()
winSur = pygame.display.set_mode((PANEL_width,PANEL_highly))
# NOTE(review): "msyh.tss" looks like a typo (msyh.ttf?); SysFont silently
# falls back to the default font when the name is unknown -- confirm intent.
font = pygame.font.SysFont("msyh.tss",25)
# Semi-transparent black surface; blitting it every frame fades old glyphs.
bg_suface = pygame.Surface((PANEL_width,PANEL_highly),flags = pygame.SRCALPHA)
pygame.Surface.convert(bg_suface)
bg_suface.fill(pygame.Color(0,0,0,28))
winSur.fill((0,0,0))
#texts = [font.render(str(i),True,(0,255,0)) for i in range(10)]
# Pre-render one green glyph per lowercase letter.
letter = string.ascii_lowercase
texts = [font.render(str(letter[i]),True,(0,225,0)) for i in range(26)]
column = int(PANEL_width / FONT_PX)
drops = [0 for i in range(column)]  # current row index of each column's drop
while True:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            exit()
        elif event.type == pygame.KEYDOWN:
            chang = pygame.key.get_pressed()
            if(chang[32]):  # space bar also quits
                exit()
    pygame.time.delay(30)  # ~33 FPS
    winSur.blit(bg_suface,(0,0))
    for i in range(len(drops)):
        text = random.choice(texts)
        winSur.blit(text,(i*FONT_PX,drops[i]*FONT_PX))
        drops[i] += 1
        # Restart the drop near the bottom, or at random (~5%) for variety.
        # NOTE(review): compares drops[i]*10 but draws at drops[i]*FONT_PX
        # (15), so drops reset before reaching the bottom edge -- confirm
        # whether FONT_PX was intended here.
        if drops[i]*10 > PANEL_highly or random.random() > 0.95:
            drops[i] = 0
    pygame.display.flip()
|
import sqlite3
def dict_factory(cursor, row):
    """sqlite3 row factory: map a result row to {column_name: value}."""
    column_names = (description[0] for description in cursor.description)
    return dict(zip(column_names, row))
# Shared module-level connection handle, lazily opened by connect().
# Bug fix: the original had a bare `global conn` statement at module scope
# (a no-op) and never bound `conn`, so every `if conn is None` check below
# raised NameError until connect() happened to be called first.
conn = None

def connect():
    """Open the SQLite database and install the dict row factory."""
    global conn
    # check_same_thread=False shares the handle across threads; callers are
    # still responsible for serializing access.
    conn = sqlite3.connect('./zf.db', check_same_thread=False)
    conn.row_factory = dict_factory
def query(sql, *args):
    """Run a SELECT and return all rows as dicts, or None on error.

    Errors are printed and swallowed (best-effort, as before); the cursor is
    now closed exactly once via finally (the original closed it in both the
    try and except paths).
    """
    if conn is None:
        connect()
    cursor = conn.cursor()
    values = None
    try:
        cursor.execute(sql, args)
        values = cursor.fetchall()
        conn.commit()
    except Exception as ex:
        print(ex)
    finally:
        cursor.close()
    return values

def query_one(sql, *args):
    """Run a SELECT and return the first row as a dict, or None."""
    if conn is None:
        connect()
    cursor = conn.cursor()
    value = None
    try:
        cursor.execute(sql, args)
        value = cursor.fetchone()
        conn.commit()
    except Exception as ex:
        print(ex)
    finally:
        cursor.close()
    return value

def query_count(sql, *args):
    """Run an aggregate SELECT and return the first column of the first row.

    Returns None when the query fails or yields no row (fetchone() -> None
    makes the .keys() access raise, which is caught like any other error).
    """
    if conn is None:
        connect()
    cursor = conn.cursor()
    value = None
    try:
        cursor.execute(sql, args)
        value = cursor.fetchone()
        conn.commit()
        keys = value.keys()
        return value[list(keys)[0]]
    except Exception as ex:
        print(ex)
    finally:
        cursor.close()
    return value

def execute(sql, *args):
    """Run a write statement; return the affected row count, or None on error."""
    if conn is None:
        connect()
    cursor = conn.cursor()
    try:
        cursor.execute(sql, args)
        rowcount = cursor.rowcount
        conn.commit()
        return rowcount
    except Exception as ex:
        print(ex)
        return None
    finally:
        cursor.close()

def close():
    """Close the shared connection if one was opened."""
    if conn is not None:
        conn.close()
|
from datetime import datetime
from flask_bcrypt import Bcrypt
from flask_sqlalchemy import SQLAlchemy
# Extension singletons, created unbound; `db` is attached to the Flask app
# later via connect_db().
bcrypt = Bcrypt()
db = SQLAlchemy()
class User(db.Model):
    """User Model.

    An account with a unique username, a bcrypt-hashed password, and a
    collection of saved articles.
    """
    __tablename__ = 'users'
    id = db.Column(
        db.Integer, primary_key=True)
    username = db.Column(
        db.Text, nullable=False, unique=True)
    # Stores the bcrypt hash, never the plaintext password.
    password = db.Column(
        db.Text, nullable=False)
    location = db.Column(
        db.String, nullable=False, default='US')
    # Deleting a user also deletes their saved articles (delete-orphan).
    articles = db.relationship(
        'Article', backref="user", cascade="all, delete-orphan")

    def __repr__(self):
        return f"<User #{self.id}: {self.username}>"

    def has_saved_article(self, article):
        """Check if the current user has passed article saved.

        `article` is matched against each saved Article's `path`.
        """
        found_saved_articles = [i for i in self.articles if article == i.path]
        return len(found_saved_articles) == 1

    @classmethod
    def signup(cls, username, password, location):
        """Sign up user.

        Hashes password and adds user to system.
        NOTE: only adds to the session -- the caller is expected to commit.
        """
        hashed_pwd = bcrypt.generate_password_hash(password).decode('UTF-8')
        user = User(
            username=username, password=hashed_pwd, location=location)
        db.session.add(user)
        return user

    @classmethod
    def authenticate(cls, username, password):
        """Authenticate user with `username` and `password`.

        Search for a user whose password hash matches passed password
        and returns that user object if found. Otherwise return False.
        """
        user = cls.query.filter_by(username=username).first()
        if user:
            is_auth = bcrypt.check_password_hash(user.password, password)
            if is_auth:
                return user
        return False
class Article(db.Model):
    """Article Model.

    Bug fixes:
    - the `path` column was declared twice; the duplicate is removed.
    - `default=datetime.utcnow()` called the function at import time, so
      every row would share the module-load timestamp; the callable
      `datetime.utcnow` is passed instead so it is evaluated per insert.
    """
    __tablename__ = 'articles'
    id = db.Column(
        db.Integer, primary_key=True)
    path = db.Column(
        db.String, nullable=False)
    url = db.Column(
        db.String, nullable=False)
    location = db.Column(
        db.String, nullable=False)
    title = db.Column(
        db.String, nullable=False)
    excerpt = db.Column(
        db.String, nullable=False)
    image = db.Column(db.String)  # optional
    published_date = db.Column(
        db.DateTime, nullable=False, default=datetime.utcnow)
    source = db.Column(
        db.String, nullable=False)
    saved_by = db.Column(
        db.Integer, db.ForeignKey('users.id', ondelete='CASCADE'), nullable=False)

    def serialize(self):
        """Serialize article to a dict."""
        return {
            'id': self.id,
            'path': self.path,
            'url': self.url,
            'location': self.location,
            'title': self.title,
            'excerpt': self.excerpt,
            'image': self.image,
            'source': self.source,
            'published_date': self.published_date,
            'saved_by': self.saved_by,
        }

    @classmethod
    def save_article(cls, path, url, location, title, excerpt, image, source, published_date, saved_by):
        """Add a saved article to the session (caller commits)."""
        article = Article(
            path=path, url=url, location=location, title=title, excerpt=excerpt,
            image=image, source=source, published_date=published_date, saved_by=saved_by)
        db.session.add(article)
        return article
def connect_db(app):
    """Connect this database to provided Flask app.

    Binds the module-level SQLAlchemy instance to `app` so models above can
    issue queries through it.
    """
    db.app = app
    db.init_app(app)
|
from datetime import datetime
from os import path
def writing_to_file(n):
    """Create (or overwrite) the file named n and write 'Done!' into it."""
    with open(n, 'w') as out_file:
        out_file.write('Done!')
# Task 1: print the word "privet" n times; n is passed as a parameter.
def greetings(n):
    """Print 'privet' n times (for-loop variant)."""
    if n == 0:
        return
    for _ in range(n):
        print('privet')

# Task 1 (second variant): while-loop.
def greetings2(n):
    """Print 'privet' n times (while-loop variant)."""
    if n == 0:
        return
    remaining = n
    while remaining > 0:
        print('privet')
        remaining -= 1

# Task 1 (third variant): recursion.
def rec_greetings(n):
    """Print 'privet' n times (recursive variant)."""
    if n == 0:
        return
    if n > 0:
        print('privet')
        rec_greetings(n - 1)

# Task 2: draw a rhombus of '+' signs, e.g. for n=5:
#   +
#  +++
# +++++
#  +++
#   +
# The rhombus size is given by the function parameter (odd n expected).
def draws(n):
    """Print a centered '+' rhombus whose widest row has n pluses."""
    symbol = '+'
    middle = (n - 1) / 2 + 1
    width = 1
    for row in range(n + 1):
        if row < middle - 1:
            print((symbol * width).center(n * 2))
            width += 2
        if row == middle:
            print((symbol * n).center(n * 2))
        if row > middle:
            width -= 2
            print((symbol * width).center(n * 2))
# Task 3: given a pool string of letters and a word template, decide whether
# the word can be assembled from the pool's letters; return True or False.
def word_search(set_of_letters, word):
    """Return True if `word` can be built from the letters in `set_of_letters`.

    Bug fix: the original removed ALL copies of a letter from `word` after a
    single membership test, so a word needing a letter twice (e.g. the two
    l's in 'hello') passed with only one copy in the pool. Counter
    subtraction respects multiplicity.
    """
    from collections import Counter
    # Counter difference keeps only positive deficits; empty -> buildable.
    return not (Counter(word) - Counter(set_of_letters))
# Task 4: take every 3rd character of the input string as a dictionary key
# (value None) and return the dictionary.
def create_dict(n):
    """Return {char: None} for every 3rd character of n."""
    return {ch: None for ch in n[2::3]}

# Task 4 (second variant): same result without an explicit loop --
# slice the string and let dict.fromkeys build the mapping.
def create_dict2(n):
    """Return dict.fromkeys of every 3rd character of n."""
    every_third = n[2::3]
    return dict.fromkeys(every_third)
# Task 5: function that returns the current time.
def times():
    """Return the current local datetime."""
    return datetime.now()

# Task 6: function that returns the current day of the week.
def day_of_week():
    """Return the weekday name, e.g. 'Monday'."""
    return datetime.today().strftime('%A')

# Task 7: return "go to sport" on Wednesday and Friday.
def remind():
    """Return a sport reminder on Wednesday/Friday, otherwise a rest message."""
    today = datetime.today().strftime('%A')
    if today in ('Wednesday', 'Friday'):
        return ('Go to sport')
    return ('Without sports today')

# Task 8: given any object, return the (magic) methods of its type.
def define_methods(n):
    """Return the attribute names (including dunder methods) of n."""
    return dir(n)

# Task 9: add 13 to the given number and return the result as a string.
def addition(n):
    """Return n + 13 as a string."""
    return str(n + 13)
# Task 10: cut the string down to 7 characters, or grow it to 7 by repeating
# its own characters, and return the result.
def trim_add(n):
    """Return a 7-character string derived from n.

    Bug fix: the original handled only len < 7 and len > 7 and fell through
    returning None for a string of exactly 7 characters. An empty input
    still yields '' (there is nothing to repeat).
    """
    if len(n) >= 7:
        return n[:7]
    return (n * 7)[:7]

# Task 11: glue a list of string and/or numeric elements into one string
# and return it.
def create_str(n):
    """Return the concatenation of all items of n as one string.

    Fixes: the original mutated the caller's list in place and converted
    only int items, so a float element crashed ''.join. str() handles every
    element type and the input list is left untouched.
    """
    return ''.join(str(item) for item in n)
# Task 12: given a string, a number, or a list (of strings and/or numbers),
# build a string from it using tasks 9/10/11 and create a file with that
# name containing the word "Done!". E.g. input 'wert' -> file 'wertwer';
# input 67 -> file '80'.
def create_file(n):
    """Write 'Done!' to a file whose name is derived from n (tasks 9-11)."""
    # Exact type() match (not isinstance) preserved: bool is not dispatched.
    name_builders = {int: addition, str: trim_add, list: create_str}
    builder = name_builders.get(type(n))
    if builder is not None:
        writing_to_file(builder(n))
# Task 13: create as many files as the parameter says, named
# new_file_{num}.txt where num is the file's number.
def creating_numbered_files(n):
    """Create (or truncate) empty files new_file_1.txt .. new_file_n.txt."""
    for num in range(1, n + 1):
        open(f'new_file_{num}.txt', 'w').close()

# Task 14: verify that new_file_1..n all exist; create any missing ones and
# report for each file whether it existed or was just created.
def check_create(n):
    """Ensure new_file_1.txt .. new_file_n.txt exist, creating missing ones."""
    for num in range(1, n + 1):
        file_name = f'new_file_{num}.txt'
        if path.isfile(file_name):
            print(file_name, 'File exists')
        else:
            open(file_name, 'w').close()
            print(file_name, 'Create new file')
# --- Manual smoke tests for the exercises above ---
greetings(25)
greetings2(25)
rec_greetings(25)
draws(7)
print(word_search('skdhlzx;hfkhsnkeluewritymeruopvacwqsnewfawhrevnetrvnoesrvpser', 'hello'))
print(create_dict2('fhsefisdvdsufbsdkbvjhsdfbvshdbv'))
print(create_dict('fhsefisdvdsufbsdkbvjhsdfbvshdbv'))
print(times())
print(day_of_week())
print(remind())
d = [1, 2, 3, 4]
print(define_methods(d))
number = 0
string_number = addition(number)
print(string_number, type(string_number))  # '13' <class 'str'>
string = 'wert'
new_string_seven = trim_add(string)
print(new_string_seven)  # 'wertwer'
lists = ['sdjvb', 4, 'sldj', 237]
new_str_lists = create_str(lists)
print(new_str_lists)
create_file(0)        # -> file named '13'
create_file('dima')   # -> file named 'dimadim'
create_file([26, 5, 1991, 'Dima'])
creating_numbered_files(5)
check_create(7)
|
# -*- coding:utf8 -*-
"""Tweet operate classes.
"""
import datetime
from twinkerer import utils
# Base URL used to build profile and tweet permalinks.
TWITTER_URL_BASE = 'https://twitter.com'
# reST snippet: tweets are embedded into generated docs as raw HTML.
# Placeholders are filled by Tweet._render().
TWEET_HTML_TEMPLATE = u'''
.. raw:: html
<div class="twinker">
<p class="twinker_header">{tweet_title}:</p>
<p class="twinker_body">{tweet_body}</p>
<p class="twinker_footer">at {tweet_date} / <a href="{tweet_url}" target="_blank">go to tweet</a></p>
</div>
'''
class _ConvertPattern(object):
class ConvertFailed(Exception):
pass
class RequiredNotFound(Exception):
pass
def __init__(self, base_name, converter=None, required=True):
self.base_name = base_name
self.converter = converter
self.required = required
def convert(self, base_dict):
if self.base_name in base_dict:
try:
if self.converter:
attr_value_ = self.converter(base_dict[self.base_name])
else:
attr_value_ = base_dict[self.base_name]
return attr_value_
except:
raise self.ConvertFailed('target is %s: %s' % (self.base_name, base_dict[self.base_name]))
elif self.required:
raise self.RequiredNotFound('target is %s' % (self.base_name,))
class Model(object):
def __init__(self, json):
for name_, pattern_ in self.__class__.__base__.__dict__.items():
if isinstance(pattern_, _ConvertPattern):
setattr(self, name_, pattern_.convert(json))
for name_, pattern_ in self.__class__.__dict__.items():
if isinstance(pattern_, _ConvertPattern):
setattr(self, name_, pattern_.convert(json))
class User(Model):
    """twitter user-account object based from twitter-api json
    """
    id = _ConvertPattern('id')
    name = _ConvertPattern('name')
    screen_name = _ConvertPattern('screen_name')
    # Avatar URLs are optional in the API payload.
    profile_image_url = _ConvertPattern('profile_image_url', required=False)
    profile_image_url_https = _ConvertPattern('profile_image_url_https', required=False)

    @property
    def url(self):
        """Public profile URL on twitter.com."""
        return u'{base}/{user}'.format(
            base=TWITTER_URL_BASE,
            user=self.screen_name,
        )


class Tweet(Model):
    """Tweet object based from twitter-api json
    """
    id = _ConvertPattern('id')
    # utils.strptime presumably parses twitter's created_at timestamp
    # format into a datetime -- TODO confirm against twinkerer.utils.
    created_at = _ConvertPattern('created_at', utils.strptime)
    text = _ConvertPattern('text')

    def __init__(self, json):
        super(Tweet, self).__init__(json)
        # Nested author object built from the payload's 'user' key.
        self.user = User(json['user'])

    @property
    def url(self):
        """Permalink to this tweet."""
        return u'{base}/{user}/statuses/{tweet_id}'.format(
            base=TWITTER_URL_BASE,
            user=self.user.screen_name,
            tweet_id=self.id,
        )

    def _render(self, title, template=None):
        """Fill TWEET_HTML_TEMPLATE (or a caller-supplied template)."""
        if template is None:
            template = TWEET_HTML_TEMPLATE
        return template.format(
            tweet_title=title,
            tweet_date=self.created_at.strftime('%Y-%m-%d %H:%M'),
            tweet_body=self.text.replace('\n', '<br />'),
            tweet_url=self.url,
        )

    def as_html(self, template=None):
        """Render this tweet as an embeddable HTML snippet."""
        return self._render('Tweet', template)


class ReTweet(Tweet):
    """ReTweet object based from twitter-api json
    """
    def as_html(self, template=None):
        """Render with a 'ReTweet' heading instead of 'Tweet'."""
        return self._render('ReTweet', template)
def parse_tweet(json):
    """Build a ReTweet for retweet payloads, otherwise a plain Tweet."""
    is_retweet = 'retweeted_status' in json
    return ReTweet(json['retweeted_status']) if is_retweet else Tweet(json)
|
# coding: utf-8
# flake8: noqa
from __future__ import absolute_import
# import models into model package
from swagger_server.models.all_info import AllInfo
from swagger_server.models.all_info_car import AllInfoCar
from swagger_server.models.charge_perc import ChargePerc
from swagger_server.models.charger import Charger
from swagger_server.models.dist_to_charger import DistToCharger
from swagger_server.models.on_route import OnRoute
|
# RMG-format species entry (label 11275): C8H17O biradical,
# SMILES [CH2]C(C)C([O])CCCC, E0 = 17.8534 kJ/mol, triplet (spinMultiplicity = 3).
# NOTE(review): all seven hindered rotors share identical inertia/barrier values
# (0.156089 amu*angstrom^2 / 3.5888 kJ/mol) -- presumably generator placeholders;
# confirm against the statmech job output.
species(
    label = '[CH2]C(C)C([O])CCCC(11275)',
    structure = SMILES('[CH2]C(C)C([O])CCCC'),
    E0 = (17.8534,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,1380,1383.33,1386.67,1390,370,373.333,376.667,380,2800,3000,430,440,3000,3100,440,815,1455,1000,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,200,800,1066.67,1333.33,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (128.212,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.03904,0.102995,-7.55748e-05,2.98901e-08,-4.91323e-12,2334.89,40.3393], Tmin=(100,'K'), Tmax=(1410.11,'K')), NASAPolynomial(coeffs=[17.4599,0.0505192,-1.97536e-05,3.49894e-09,-2.34274e-13,-2882.18,-55.2648], Tmin=(1410.11,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(17.8534,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(577.856,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CC(C)OJ) + radical(Isobutyl)"""),
)
# Propene (label C3H6(72)), SMILES C=CC, singlet; transport parameters from
# GRI-Mech and thermo from the DFT_QCI_thermo library (per embedded comments).
species(
    label = 'C3H6(72)',
    structure = SMILES('C=CC'),
    E0 = (5.9763,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,2750,2800,2850,1350,1500,750,1050,1375,1000,3010,987.5,1337.5,450,1655],'cm^-1')),
        HinderedRotor(inertia=(0.497558,'amu*angstrom^2'), symmetry=1, barrier=(11.4398,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (42.0797,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(2218.31,'J/mol'), sigma=(4.982,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.31912,0.00817959,3.34736e-05,-4.36194e-08,1.58213e-11,749.325,9.54025], Tmin=(100,'K'), Tmax=(983.754,'K')), NASAPolynomial(coeffs=[5.36755,0.0170743,-6.35108e-06,1.1662e-09,-8.2762e-14,-487.138,-4.54468], Tmin=(983.754,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(5.9763,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(203.705,'J/(mol*K)'), label="""C3H6""", comment="""Thermo library: DFT_QCI_thermo"""),
)
# Linear C5 aldehyde (label 1733), SMILES CCCCC=O, singlet, E0 = -250.884 kJ/mol;
# transport estimated via the Joback method (per embedded comment).
species(
    label = 'CCCCC=O(1733)',
    structure = SMILES('CCCCC=O'),
    E0 = (-250.884,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2800,2850,1350,1500,750,1050,1375,1000,2782.5,750,1395,475,1775,1000,198.883,199.064],'cm^-1')),
        HinderedRotor(inertia=(0.00426375,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.518882,'amu*angstrom^2'), symmetry=1, barrier=(14.5862,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.334975,'amu*angstrom^2'), symmetry=1, barrier=(9.40848,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.00310926,'amu*angstrom^2'), symmetry=1, barrier=(9.40929,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (86.1323,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(3428.95,'J/mol'), sigma=(5.98513,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=535.59 K, Pc=36.29 bar (from Joback method)"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.63555,0.0520732,-2.77334e-05,6.90636e-09,-6.84311e-13,-30090.2,22.3653], Tmin=(100,'K'), Tmax=(2214.2,'K')), NASAPolynomial(coeffs=[15.0687,0.0278058,-1.12935e-05,1.95647e-09,-1.25427e-13,-36038.9,-53.1197], Tmin=(2214.2,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-250.884,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(365.837,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsHHH) + group(Cds-OdCsH)"""),
)
# Atomic hydrogen (label H(3)); thermo from the BurkeH2O2 library, transport
# from GRI-Mech.
# NOTE(review): spinMultiplicity = 1 here, but the H-atom ground state is a
# doublet (2S+1 = 2) -- verify this value against the generator's output.
species(
    label = 'H(3)',
    structure = SMILES('[H]'),
    E0 = (211.792,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (1.00794,'amu'),
    collisionModel = TransportData(shapeIndex=0, epsilon=(1205.6,'J/mol'), sigma=(2.05,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,25472.7,-0.459566], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,25472.7,-0.459566], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(211.792,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""H""", comment="""Thermo library: BurkeH2O2"""),
)
# Alkoxy radical with vinylic branch (label 11291), SMILES C=C(C)C([O])CCCC,
# doublet, E0 = -69.3635 kJ/mol.
species(
    label = 'C=C(C)C([O])CCCC(11291)',
    structure = SMILES('C=C(C)C([O])CCCC'),
    E0 = (-69.3635,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2950,3100,1380,975,1025,1650,350,440,435,1725,1380,1390,370,380,2900,435,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,200,800,1066.67,1333.33,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (127.204,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.999231,0.0988722,-6.97287e-05,2.51573e-08,-3.69811e-12,-8153.62,38.6049], Tmin=(100,'K'), Tmax=(1577.57,'K')), NASAPolynomial(coeffs=[20.599,0.0441088,-1.7658e-05,3.15265e-09,-2.10993e-13,-14968.2,-75.4404], Tmin=(1577.57,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-69.3635,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(557.07,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-(Cds-Cds)CsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsHH) + radical(CC(C)OJ)"""),
)
# Ketone with a primary carbon radical (label 11292),
# SMILES [CH2]C(C)C(=O)CCCC, doublet, E0 = -150.201 kJ/mol.
species(
    label = '[CH2]C(C)C(=O)CCCC(11292)',
    structure = SMILES('[CH2]C(C)C(=O)CCCC'),
    E0 = (-150.201,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,375,552.5,462.5,1710,1380,1390,370,380,2900,435,3000,3100,440,815,1455,1000,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,200,800,1200,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (127.204,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.749491,0.112389,-0.000122737,8.72361e-08,-2.72356e-11,-17901.3,36.3282], Tmin=(100,'K'), Tmax=(759.626,'K')), NASAPolynomial(coeffs=[8.18665,0.0653332,-2.9819e-05,5.68856e-09,-3.97528e-13,-19258.9,-4.3267], Tmin=(759.626,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-150.201,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(552.912,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-OdCsCs) + radical(CJC(C)C=O)"""),
)
# C5 biradical (label 1728), SMILES CCCC[CH][O], triplet (two radical sites),
# E0 = 78.0912 kJ/mol.
species(
    label = 'CCCC[CH][O](1728)',
    structure = SMILES('CCCC[CH][O]'),
    E0 = (78.0912,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2800,2850,1350,1500,750,1050,1375,1000,3025,407.5,1350,352.5,284.58,286.601,291.211,2078.87],'cm^-1')),
        HinderedRotor(inertia=(0.00200967,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.10398,'amu*angstrom^2'), symmetry=1, barrier=(6.1119,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.103299,'amu*angstrom^2'), symmetry=1, barrier=(6.13902,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.0993848,'amu*angstrom^2'), symmetry=1, barrier=(6.07184,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (86.1323,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.20776,0.0668884,-7.57546e-05,6.29928e-08,-2.32085e-11,9487.51,24.9863], Tmin=(100,'K'), Tmax=(767.626,'K')), NASAPolynomial(coeffs=[4.00991,0.045565,-2.09523e-05,3.99079e-09,-2.77682e-13,9255.35,13.4985], Tmin=(767.626,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(78.0912,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(365.837,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + radical(CCOJ) + radical(CCsJOH)"""),
)
# Methyl radical (label CH3(17)), doublet; transport from GRI-Mech, thermo from
# the Klippenstein_Glarborg2016 library (per embedded comments).
species(
    label = 'CH3(17)',
    structure = SMILES('[CH3]'),
    E0 = (136.188,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([604.263,1333.71,1492.19,2836.77,2836.77,3806.92],'cm^-1')),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (15.0345,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.65718,0.0021266,5.45839e-06,-6.6181e-09,2.46571e-12,16422.7,1.67354], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.97812,0.00579785,-1.97558e-06,3.07298e-10,-1.79174e-14,16509.5,4.72248], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(136.188,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(83.1447,'J/(mol*K)'), label="""CH3""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
# C7 alkoxy radical with terminal alkene (label 10637),
# SMILES C=CC([O])CCCC, doublet, E0 = -30.3085 kJ/mol.
species(
    label = 'C=CC([O])CCCC(10637)',
    structure = SMILES('C=CC([O])CCCC'),
    E0 = (-30.3085,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2950,3100,1380,975,1025,1650,3010,987.5,1337.5,450,1655,1380,1390,370,380,2900,435,2750,2800,2850,1350,1500,750,1050,1375,1000,200,800,1066.67,1333.33,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (113.178,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.164877,0.0791405,-4.09554e-05,1.68689e-09,3.59834e-12,-3484.9,34.8438], Tmin=(100,'K'), Tmax=(1127.93,'K')), NASAPolynomial(coeffs=[16.399,0.0405505,-1.64334e-05,3.03198e-09,-2.10395e-13,-8503.3,-52.7435], Tmin=(1127.93,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-30.3085,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(486.397,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsOsH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(CC(C)OJ)"""),
)
# Triplet propene biradical (label C3H6(T)(143)), SMILES [CH2][CH]C,
# E0 = 284.865 kJ/mol; thermo from DFT_QCI_thermo (per embedded comment).
species(
    label = 'C3H6(T)(143)',
    structure = SMILES('[CH2][CH]C'),
    E0 = (284.865,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000],'cm^-1')),
        HinderedRotor(inertia=(0.238389,'amu*angstrom^2'), symmetry=1, barrier=(5.48103,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.00909639,'amu*angstrom^2'), symmetry=1, barrier=(22.1005,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (42.0797,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.93778,0.0190991,4.26842e-06,-1.44873e-08,5.74941e-12,34303.2,12.9695], Tmin=(100,'K'), Tmax=(1046.81,'K')), NASAPolynomial(coeffs=[5.93909,0.0171892,-6.69152e-06,1.21546e-09,-8.39795e-14,33151.2,-4.14888], Tmin=(1046.81,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(284.865,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(199.547,'J/(mol*K)'), label="""C3H6(T)""", comment="""Thermo library: DFT_QCI_thermo"""),
)
# n-Butyl radical (label butyl_1(82)), SMILES [CH2]CCC, doublet; transport from
# GRI-Mech, thermo from DFT_QCI_thermo (per embedded comments).
species(
    label = 'butyl_1(82)',
    structure = SMILES('[CH2]CCC'),
    E0 = (63.0573,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000,180],'cm^-1')),
        HinderedRotor(inertia=(0.0977402,'amu*angstrom^2'), symmetry=1, barrier=(2.24724,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.0976865,'amu*angstrom^2'), symmetry=1, barrier=(2.246,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.0977534,'amu*angstrom^2'), symmetry=1, barrier=(2.24754,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (57.1143,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(2968.28,'J/mol'), sigma=(5.176,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.25388,0.0316763,2.89994e-06,-1.98049e-08,8.20503e-12,7652.64,17.2725], Tmin=(100,'K'), Tmax=(1050.57,'K')), NASAPolynomial(coeffs=[7.59591,0.0260842,-1.01719e-05,1.85189e-09,-1.28169e-13,5716.37,-12.6366], Tmin=(1050.57,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(63.0573,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), label="""butyl_1""", comment="""Thermo library: DFT_QCI_thermo"""),
)
# C4 aldehyde with a primary radical (label iC4H7OJ_48(1671)),
# SMILES [CH2]C(C)C=O, doublet; thermo from CBS_QB3_1dHR (per embedded comment).
species(
    label = 'iC4H7OJ_48(1671)',
    structure = SMILES('[CH2]C(C)C=O'),
    E0 = (-23.7707,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2782.5,750,1395,475,1775,1000,1380,1390,370,380,2900,435,3000,3100,440,815,1455,1000,2750,2800,2850,1350,1500,750,1050,1375,1000],'cm^-1')),
        HinderedRotor(inertia=(0.271179,'amu*angstrom^2'), symmetry=1, barrier=(6.23493,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.270879,'amu*angstrom^2'), symmetry=1, barrier=(6.22803,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.271311,'amu*angstrom^2'), symmetry=1, barrier=(6.23798,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (71.0978,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.39266,0.04169,-1.61772e-05,-3.94586e-08,4.47807e-11,-2807.52,16.5948], Tmin=(100,'K'), Tmax=(488.014,'K')), NASAPolynomial(coeffs=[5.16192,0.0298914,-1.3414e-05,2.53281e-09,-1.75697e-13,-3207.59,3.89167], Tmin=(488.014,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-23.7707,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(270.22,'J/(mol*K)'), label="""iC4H7OJ_48""", comment="""Thermo library: CBS_QB3_1dHR"""),
)
# C8 species (label 11293), SMILES CCCCC([O])[C](C)C.
# NOTE(review): SMILES shows two radical sites but spinMultiplicity = 1;
# analogous biradical entries in this file use 3 -- verify against generator.
species(
    label = 'CCCCC([O])[C](C)C(11293)',
    structure = SMILES('CCCCC([O])[C](C)C'),
    E0 = (-34.6551,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (128.212,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.19953,0.0989856,-6.44623e-05,2.11707e-08,-2.81625e-12,-3967.97,38.6754], Tmin=(100,'K'), Tmax=(1736.99,'K')), NASAPolynomial(coeffs=[22.696,0.0439579,-1.69422e-05,2.93217e-09,-1.91223e-13,-12269.2,-89.8008], Tmin=(1736.99,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-34.6551,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(577.856,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CC(C)OJ) + radical(CCJ(C)CO)"""),
)
# C8 species (label 11294), SMILES [CH2]C(C)[C](O)CCCC.
# NOTE(review): two radical sites but spinMultiplicity = 1 -- verify.
species(
    label = '[CH2]C(C)[C](O)CCCC(11294)',
    structure = SMILES('[CH2]C(C)[C](O)CCCC'),
    E0 = (-35.8796,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (128.212,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.90057,0.110296,-9.94909e-05,5.2703e-08,-1.1907e-11,-4140.95,39.4658], Tmin=(100,'K'), Tmax=(1039.73,'K')), NASAPolynomial(coeffs=[12.6567,0.0581392,-2.42447e-05,4.45557e-09,-3.06022e-13,-6960.13,-26.4685], Tmin=(1039.73,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-35.8796,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(573.699,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(C2CsJOH) + radical(Isobutyl)"""),
)
# C8 species (label 11295), SMILES CCCC[C]([O])C(C)C.
# NOTE(review): two radical sites but spinMultiplicity = 1 -- verify.
species(
    label = 'CCCC[C]([O])C(C)C(11295)',
    structure = SMILES('CCCC[C]([O])C(C)C'),
    E0 = (-10.6011,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (128.212,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.686204,0.103953,-7.93088e-05,3.33214e-08,-5.98283e-12,-1107.37,36.0171], Tmin=(100,'K'), Tmax=(1268.04,'K')), NASAPolynomial(coeffs=[13.9649,0.0577359,-2.46366e-05,4.57749e-09,-3.15766e-13,-4822.98,-38.1453], Tmin=(1268.04,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-10.6011,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(577.856,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CC(C)OJ) + radical(C2CsJOH)"""),
)
# C8 species (label 11296), SMILES [CH2][C](C)C(O)CCCC.
# NOTE(review): two radical sites but spinMultiplicity = 1 -- verify.
species(
    label = '[CH2][C](C)C(O)CCCC(11296)',
    structure = SMILES('[CH2][C](C)C(O)CCCC'),
    E0 = (-59.9335,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (128.212,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.987228,0.100981,-7.2237e-05,2.77498e-08,-4.42908e-12,-7021.85,40.5426], Tmin=(100,'K'), Tmax=(1448.34,'K')), NASAPolynomial(coeffs=[17.5041,0.0499116,-1.93463e-05,3.40429e-09,-2.26766e-13,-12378.2,-55.5167], Tmin=(1448.34,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-59.9335,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(573.699,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(Isobutyl) + radical(CCJ(C)CO)"""),
)
# C8 biradical (label 11297), SMILES [CH2]C(C)C(O)[CH]CCC, triplet,
# E0 = -12.6054 kJ/mol; hindered rotors use the repeated placeholder-like
# inertia/barrier pair (0.156089 / 3.5888).
species(
    label = '[CH2]C(C)C(O)[CH]CCC(11297)',
    structure = SMILES('[CH2]C(C)C(O)[CH]CCC'),
    E0 = (-12.6054,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3615,1277.5,1000,1380,1383.33,1386.67,1390,370,373.333,376.667,380,2800,3000,430,440,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,3000,3100,440,815,1455,1000,200,800,1200,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (128.212,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.12597,0.103149,-7.75856e-05,3.15387e-08,-5.29084e-12,-1323.74,43.3417], Tmin=(100,'K'), Tmax=(1395.57,'K')), NASAPolynomial(coeffs=[18.1405,0.0479276,-1.82325e-05,3.18584e-09,-2.11792e-13,-6701.32,-56.0295], Tmin=(1395.57,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-12.6054,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(573.699,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CCJCO) + radical(Isobutyl)"""),
)
# C8 biradical (label 11298), SMILES [CH2]C([CH2])C(O)CCCC, triplet,
# E0 = -7.42509 kJ/mol.
species(
    label = '[CH2]C([CH2])C(O)CCCC(11298)',
    structure = SMILES('[CH2]C([CH2])C(O)CCCC'),
    E0 = (-7.42509,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2800,2850,1350,1500,750,1050,1375,1000,3615,1277.5,1000,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,1380,1383.33,1386.67,1390,370,373.333,376.667,380,2800,3000,430,440,200,800,1200,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (128.212,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.06419,0.107335,-8.97393e-05,4.27007e-08,-8.49258e-12,-707.354,41.7068], Tmin=(100,'K'), Tmax=(1186.34,'K')), NASAPolynomial(coeffs=[15.2917,0.0521875,-2.00114e-05,3.51692e-09,-2.35297e-13,-4588.09,-39.9957], Tmin=(1186.34,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-7.42509,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(573.699,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(Isobutyl) + radical(Isobutyl)"""),
)
# C8 species (label 11299), SMILES CCC[CH]C([O])C(C)C.
# NOTE(review): SMILES shows two radical sites but spinMultiplicity = 1;
# analogous biradical entries in this file use 3 -- verify against generator.
species(
    label = 'CCC[CH]C([O])C(C)C(11299)',
    structure = SMILES('CCC[CH]C([O])C(C)C'),
    E0 = (12.6731,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (128.212,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.29689,0.100757,-6.87773e-05,2.40068e-08,-3.39652e-12,1728.06,41.319], Tmin=(100,'K'), Tmax=(1647.37,'K')), NASAPolynomial(coeffs=[22.3377,0.0433695,-1.6523e-05,2.86011e-09,-1.8735e-13,-6058.87,-84.5019], Tmin=(1647.37,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(12.6731,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(577.856,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CCJCO) + radical(CC(C)OJ)"""),
)
# C8 biradical (label 11300), SMILES [CH2]C(C)C(O)C[CH]CC, triplet,
# E0 = -18.0491 kJ/mol.
species(
    label = '[CH2]C(C)C(O)C[CH]CC(11300)',
    structure = SMILES('[CH2]C(C)C(O)C[CH]CC'),
    E0 = (-18.0491,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3615,1277.5,1000,1380,1383.33,1386.67,1390,370,373.333,376.667,380,2800,3000,430,440,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,3000,3100,440,815,1455,1000,200,800,1200,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (128.212,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.416152,0.101087,-8.28262e-05,4.16552e-08,-9.32648e-12,-2015.13,40.8696], Tmin=(100,'K'), Tmax=(1022.68,'K')), NASAPolynomial(coeffs=[9.41838,0.0626216,-2.6407e-05,4.87645e-09,-3.35679e-13,-4026.64,-6.79688], Tmin=(1022.68,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-18.0491,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(573.699,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(Isobutyl) + radical(RCCJCC)"""),
)
# C8 species (label 11301), SMILES CC[CH]CC([O])C(C)C.
# NOTE(review): two radical sites but spinMultiplicity = 1 -- verify.
species(
    label = 'CC[CH]CC([O])C(C)C(11301)',
    structure = SMILES('CC[CH]CC([O])C(C)C'),
    E0 = (7.22935,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (128.212,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.158626,0.0943918,-6.19674e-05,2.1905e-08,-3.3731e-12,1015.97,37.2532], Tmin=(100,'K'), Tmax=(1405.94,'K')), NASAPolynomial(coeffs=[11.7433,0.0605301,-2.58403e-05,4.77428e-09,-3.26975e-13,-2330.71,-24.2218], Tmin=(1405.94,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(7.22935,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(577.856,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(RCCJCC) + radical(CC(C)OJ)"""),
)
# C8 species (label 11302), SMILES [CH2]C(C)C(O)CC[CH]C.
# NOTE(review): two radical sites but spinMultiplicity = 1 -- verify.
species(
    label = '[CH2]C(C)C(O)CC[CH]C(11302)',
    structure = SMILES('[CH2]C(C)C(O)CC[CH]C'),
    E0 = (-18.0611,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (128.212,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.527121,0.101584,-8.1486e-05,3.88426e-08,-8.09228e-12,-2010.98,41.4324], Tmin=(100,'K'), Tmax=(1102.55,'K')), NASAPolynomial(coeffs=[10.9741,0.0598581,-2.47181e-05,4.51722e-09,-3.09077e-13,-4547.11,-15.1773], Tmin=(1102.55,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-18.0611,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(573.699,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(Isobutyl) + radical(RCCJC)"""),
)
# C8 species (label 11303), SMILES [CH2]CCCC(O)C([CH2])C.
# NOTE(review): two radical sites but spinMultiplicity = 1 -- verify.
species(
    label = '[CH2]CCCC(O)C([CH2])C(11303)',
    structure = SMILES('[CH2]CCCC(O)C([CH2])C'),
    E0 = (-7.26105,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (128.212,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.94068,0.105156,-8.33622e-05,3.67101e-08,-6.78474e-12,-692.673,41.9608], Tmin=(100,'K'), Tmax=(1260.29,'K')), NASAPolynomial(coeffs=[15.5251,0.0528966,-2.11631e-05,3.80844e-09,-2.58193e-13,-4843.04,-41.2863], Tmin=(1260.29,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-7.26105,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(573.699,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(RCCJ) + radical(Isobutyl)"""),
)
species(
label = 'C[CH]CCC([O])C(C)C(11304)',
structure = SMILES('C[CH]CCC([O])C(C)C'),
E0 = (7.21732,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (128.212,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.382064,0.0960716,-6.41535e-05,2.29256e-08,-3.50765e-12,1025.31,38.2296], Tmin=(100,'K'), Tmax=(1444.56,'K')), NASAPolynomial(coeffs=[13.5952,0.0573683,-2.39647e-05,4.37828e-09,-2.9778e-13,-3012.87,-34.3434], Tmin=(1444.56,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(7.21732,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(577.856,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CC(C)OJ) + radical(RCCJC)"""),
)
species(
label = '[CH2]CCCC([O])C(C)C(11305)',
structure = SMILES('[CH2]CCCC([O])C(C)C'),
E0 = (18.0174,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,1380,1383.33,1386.67,1390,370,373.333,376.667,380,2800,3000,430,440,3000,3100,440,815,1455,1000,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,200,800,1066.67,1333.33,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (128.212,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.975988,0.101461,-7.11306e-05,2.59673e-08,-3.91473e-12,2352.27,39.4284], Tmin=(100,'K'), Tmax=(1522.53,'K')), NASAPolynomial(coeffs=[18.7592,0.049613,-2.00504e-05,3.60116e-09,-2.42234e-13,-3657.25,-64.0786], Tmin=(1522.53,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(18.0174,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(577.856,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(RCCJ) + radical(CC(C)OJ)"""),
)
species(
label = 'C=C(C)C(O)CCCC(11306)',
structure = SMILES('C=C(C)C(O)CCCC'),
E0 = (-299.724,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (128.212,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.07225,0.101902,-7.26685e-05,2.68516e-08,-4.06808e-12,-35858.3,39.0521], Tmin=(100,'K'), Tmax=(1527.46,'K')), NASAPolynomial(coeffs=[19.8349,0.0471525,-1.89035e-05,3.3858e-09,-2.27449e-13,-42245.3,-70.6693], Tmin=(1527.46,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-299.724,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(577.856,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-(Cds-Cds)CsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsHH)"""),
)
species(
label = 'CCCCC(=O)C(C)C(11307)',
structure = SMILES('CCCCC(=O)C(C)C'),
E0 = (-360.712,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (128.212,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.179577,0.0990798,-7.56182e-05,3.46248e-08,-7.27796e-12,-43239.2,33.235], Tmin=(100,'K'), Tmax=(1050.63,'K')), NASAPolynomial(coeffs=[8.19924,0.0671793,-3.00728e-05,5.72413e-09,-4.00891e-13,-44999.8,-7.60174], Tmin=(1050.63,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-360.712,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(577.856,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-OdCsCs)"""),
)
# Singlet methylene CH2(S) (spinMultiplicity = 1). Thermo taken from the
# Klippenstein_Glarborg2016 library (not group additivity); transport data
# from GRI-Mech, per the comment strings below.
species(
    label = 'CH2(S)(23)',
    structure = SMILES('[CH2]'),
    E0 = (419.862,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([1369.36,2789.41,2993.36],'cm^-1')),
    ],
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (14.0266,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.19195,-0.00230793,8.0509e-06,-6.60123e-09,1.95638e-12,50484.3,-0.754589], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.28556,0.00460255,-1.97412e-06,4.09548e-10,-3.34695e-14,50922.4,8.67684], Tmin=(1000,'K'), Tmax=(3000,'K'))], Tmin=(200,'K'), Tmax=(3000,'K'), E0=(419.862,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2(S)""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
# --- Additional C7/C8 radical and cyclic species of the network ---
# Triplet biradical with explicit modes; Lennard-Jones collision parameters
# estimated from Joback critical properties, per the comment string.
species(
    label = '[CH2]CC([O])CCCC(2132)',
    structure = SMILES('[CH2]CC([O])CCCC'),
    E0 = (47.492,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,1380,1390,370,380,2900,435,3000,3100,440,815,1455,1000,2750,2800,2850,1350,1500,750,1050,1375,1000,200,800,1066.67,1333.33,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (114.185,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(3939.29,'J/mol'), sigma=(7.11172,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=615.31 K, Pc=24.85 bar (from Joback method)"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.159069,0.0871361,-6.05738e-05,2.20349e-08,-3.34105e-12,5864.74,35.2178], Tmin=(100,'K'), Tmax=(1497.96,'K')), NASAPolynomial(coeffs=[15.5065,0.0453044,-1.86851e-05,3.39235e-09,-2.29726e-13,1171.46,-46.69], Tmin=(1497.96,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(47.492,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(507.183,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(RCCJ) + radical(CC(C)OJ)"""),
)
# Triplet biradical with explicit modes (6 hindered rotors).
species(
    label = '[CH2]C(C)C([O])CCC(11308)',
    structure = SMILES('[CH2]C(C)C([O])CCC'),
    E0 = (41.6336,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,1380,1383.33,1386.67,1390,370,373.333,376.667,380,2800,3000,430,440,3000,3100,440,815,1455,1000,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,200,800,1200,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (114.185,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.475798,0.0891137,-6.45544e-05,2.50979e-08,-4.02874e-12,5176.08,36.0779], Tmin=(100,'K'), Tmax=(1451.36,'K')), NASAPolynomial(coeffs=[16.5863,0.0420904,-1.59557e-05,2.77487e-09,-1.83593e-13,223.365,-52.5929], Tmin=(1451.36,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(41.6336,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(507.183,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CC(C)OJ) + radical(Isobutyl)"""),
)
# NOTE(review): two radical sites (RCCJC + CC(C)OJ) but spinMultiplicity = 1 —
# singlet biradical; confirm against the generating RMG job.
species(
    label = 'C[CH]CC([O])CCCC(11273)',
    structure = SMILES('C[CH]CC([O])CCCC'),
    E0 = (12.9116,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (128.212,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.202544,0.0963167,-6.6337e-05,2.53135e-08,-4.25978e-12,1700.47,38.5615], Tmin=(100,'K'), Tmax=(1297.9,'K')), NASAPolynomial(coeffs=[10.9108,0.0620666,-2.67538e-05,4.98158e-09,-3.43487e-13,-1184.34,-17.9518], Tmin=(1297.9,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(12.9116,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(577.856,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(RCCJC) + radical(CC(C)OJ)"""),
)
# Biradical: radical(CC(C)OJ) + radical(CCJCO) per the thermo group comment.
species(
    label = 'CC[CH]C([O])CCCC(11309)',
    structure = SMILES('CC[CH]C([O])CCCC'),
    E0 = (18.3674,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (128.212,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.999982,0.0999161,-6.82618e-05,2.40115e-08,-3.4661e-12,2397.04,41.2055], Tmin=(100,'K'), Tmax=(1591.59,'K')), NASAPolynomial(coeffs=[19.7935,0.0476576,-1.90105e-05,3.38159e-09,-2.25644e-13,-4221.9,-68.7746], Tmin=(1591.59,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(18.3674,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(577.856,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CC(C)OJ) + radical(CCJCO)"""),
)
# Closed-shell cyclic product: oxetane ring (ring(Oxetane) in thermo comment).
species(
    label = 'CCCCC1OCC1C(11277)',
    structure = SMILES('CCCCC1OCC1C'),
    E0 = (-236.905,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (128.212,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.244071,0.0763928,7.65e-06,-6.22059e-08,2.99362e-11,-28324.8,32.6092], Tmin=(100,'K'), Tmax=(937.843,'K')), NASAPolynomial(coeffs=[16.1317,0.0488521,-1.5962e-05,2.67565e-09,-1.80408e-13,-33256.8,-55.262], Tmin=(937.843,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-236.905,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(586.17,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(Cs-CsCsCsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + ring(Oxetane)"""),
)
# Triplet methylene CH2 (spinMultiplicity = 3; contrast CH2(S)(23) above).
# Thermo from the Klippenstein_Glarborg2016 library; transport from GRI-Mech.
species(
    label = 'CH2(19)',
    structure = SMILES('[CH2]'),
    E0 = (381.563,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([1032.72,2936.3,3459],'cm^-1')),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (14.0266,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.8328,0.000224446,4.68033e-06,-6.04743e-09,2.59009e-12,45920.8,1.40666], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[3.16229,0.00281798,-7.56235e-07,5.05446e-11,5.65236e-15,46099.1,4.77656], Tmin=(1000,'K'), Tmax=(3000,'K'))], Tmin=(200,'K'), Tmax=(3000,'K'), E0=(381.563,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
# C7 triplet biradical with explicit modes (6 hindered rotors).
species(
    label = 'C[CH]C([O])CCCC(10554)',
    structure = SMILES('C[CH]C([O])CCCC'),
    E0 = (42.1476,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,3025,407.5,1350,352.5,1380,1390,370,380,2900,435,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,200,800,1066.67,1333.33,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (114.185,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.470561,0.0863497,-5.80297e-05,1.99195e-08,-2.78225e-12,5240,37.0723], Tmin=(100,'K'), Tmax=(1656.62,'K')), NASAPolynomial(coeffs=[19.1809,0.0389002,-1.50661e-05,2.62984e-09,-1.73075e-13,-1270.99,-67.6543], Tmin=(1656.62,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(42.1476,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(507.183,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CCJCO) + radical(CC(C)OJ)"""),
)
# Atomic oxygen; thermo from the BurkeH2O2 library, transport from GRI-Mech.
# NOTE(review): spinMultiplicity = 1 is declared here, but ground-state atomic
# oxygen O(3P) is a triplet — confirm against the generating job.
species(
    label = 'O(4)',
    structure = SMILES('[O]'),
    E0 = (243.005,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (15.9994,'amu'),
    collisionModel = TransportData(shapeIndex=0, epsilon=(665.16,'J/mol'), sigma=(2.75,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,29226.7,5.11107], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,29226.7,5.11107], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(243.005,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""O""", comment="""Thermo library: BurkeH2O2"""),
)
# C8H16 triplet biradical (no oxygen) with explicit modes (7 hindered rotors).
species(
    label = '[CH2]C(C)[CH]CCCC(556)',
    structure = SMILES('[CH2]C(C)[CH]CCCC'),
    E0 = (154.505,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,3025,407.5,1350,352.5,1380,1390,370,380,2900,435,3000,3100,440,815,1455,1000,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,200,800,1200,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (112.213,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.142888,0.0865654,-5.26267e-05,1.65985e-08,-2.19713e-12,18734.8,37.5097], Tmin=(100,'K'), Tmax=(1663.79,'K')), NASAPolynomial(coeffs=[14.9003,0.0503995,-2.00212e-05,3.53372e-09,-2.34032e-13,13729.1,-42.7233], Tmin=(1663.79,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(154.505,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(552.912,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsCsH) + group(Cs-CsCsHH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(Isobutyl) + radical(Cs_S)"""),
)
# Inert species with explicit transport data — presumably the bath-gas
# colliders for the master-equation simulation (confirm against job setup).
species(
    label = 'N2',
    structure = SMILES('N#N'),
    E0 = (-8.69489,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (28.0135,'amu'),
    collisionModel = TransportData(shapeIndex=1, epsilon=(810.913,'J/mol'), sigma=(3.621,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""PrimaryTransportLibrary"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.61263,-0.00100893,2.49898e-06,-1.43376e-09,2.58636e-13,-1051.1,2.6527], Tmin=(100,'K'), Tmax=(1817.04,'K')), NASAPolynomial(coeffs=[2.9759,0.00164141,-7.19722e-07,1.25378e-10,-7.91526e-15,-1025.84,5.53757], Tmin=(1817.04,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-8.69489,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""N2""", comment="""Thermo library: BurkeH2O2"""),
)
# Neon collider; note sigma is given in metres here (3.758e-10 m), unlike the
# angstrom-valued sigmas elsewhere in this file.
species(
    label = 'Ne',
    structure = SMILES('[Ne]'),
    E0 = (-6.19738,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (20.1797,'amu'),
    collisionModel = TransportData(shapeIndex=0, epsilon=(1235.53,'J/mol'), sigma=(3.758e-10,'m'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with fixed Lennard Jones Parameters. This is the fallback method! Try improving transport databases!"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ne""", comment="""Thermo library: primaryThermoLibrary"""),
)
# --- Transition states TS1–TS30 ---
# Each entry carries only an energy E0 (same kJ/mol scale/keyword as the
# species E0 values), spin multiplicity, and optical isomer count — no
# vibrational modes are given here; the corresponding rate coefficients are
# supplied as Arrhenius fits in the reaction() entries that reference these
# labels via transitionState='TSn'.
transitionState(
    label = 'TS1',
    E0 = (17.8534,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS2',
    E0 = (142.429,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS3',
    E0 = (133.033,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS4',
    E0 = (103.728,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS5',
    E0 = (135.627,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS6',
    E0 = (90.068,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS7',
    E0 = (67.3194,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS8',
    E0 = (120.533,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS9',
    E0 = (183.524,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS10',
    E0 = (135.793,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS11',
    E0 = (93.134,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS12',
    E0 = (92.9566,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS13',
    E0 = (76.2549,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS14',
    E0 = (92.747,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS15',
    E0 = (66.0493,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS16',
    E0 = (64.2958,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS17',
    E0 = (43.585,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS18',
    E0 = (111.366,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS19',
    E0 = (60.5302,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS20',
    E0 = (81.1958,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS21',
    E0 = (362.956,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
# TS22 and TS23 share the same E0 (81.2535 kJ/mol) — distinct labels for
# (presumably) distinct channels of equal estimated barrier.
transitionState(
    label = 'TS22',
    E0 = (81.2535,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS23',
    E0 = (81.2535,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS24',
    E0 = (467.354,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS25',
    E0 = (461.496,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS26',
    E0 = (177.789,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS27',
    E0 = (212.619,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS28',
    E0 = (26.1377,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS29',
    E0 = (423.711,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS30',
    E0 = (397.51,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
# --- Network reactions 1–20 ---
# Each reaction() links reactant/product species labels through one of the
# transition states above and supplies a modified-Arrhenius rate expression.
# The kinetics comment strings record the RMG provenance: the rate-rule or
# training-reaction match, the Euclidian distance of the template match,
# any reaction-path-degeneracy multiplier, and the reaction family.
# "Ea raised from ... to 0 kJ/mol" notes indicate a negative fitted barrier
# clamped to zero by the estimator.
reaction(
    label = 'reaction1',
    reactants = ['[CH2]C(C)C([O])CCCC(11275)'],
    products = ['C3H6(72)', 'CCCCC=O(1733)'],
    transitionState = 'TS1',
    kinetics = Arrhenius(A=(5e+12,'s^-1'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Exact match found for rate rule [RJJ]
Euclidian distance = 0
family: 1,4_Linear_birad_scission"""),
)
reaction(
    label = 'reaction2',
    reactants = ['H(3)', 'C=C(C)C([O])CCCC(11291)'],
    products = ['[CH2]C(C)C([O])CCCC(11275)'],
    transitionState = 'TS2',
    kinetics = Arrhenius(A=(0.0051739,'m^3/(mol*s)'), n=2.82163, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""From training reaction 102 used for Cds-CsCs_Cds-HH;HJ
Exact match found for rate rule [Cds-CsCs_Cds-HH;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond
Ea raised from -4.8 to 0 kJ/mol."""),
)
reaction(
    label = 'reaction3',
    reactants = ['H(3)', '[CH2]C(C)C(=O)CCCC(11292)'],
    products = ['[CH2]C(C)C([O])CCCC(11275)'],
    transitionState = 'TS3',
    kinetics = Arrhenius(A=(0.0366254,'m^3/(mol*s)'), n=1.743, Ea=(71.4418,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [CO-CsCs_O;YJ] for rate rule [CO-CsCs_O;HJ]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
    label = 'reaction4',
    reactants = ['C3H6(72)', 'CCCC[CH][O](1728)'],
    products = ['[CH2]C(C)C([O])CCCC(11275)'],
    transitionState = 'TS4',
    kinetics = Arrhenius(A=(0.00168615,'m^3/(mol*s)'), n=2.52599, Ea=(19.6608,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Cds-CsH_Cds-HH;CJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
    label = 'reaction5',
    reactants = ['CH3(17)', 'C=CC([O])CCCC(10637)'],
    products = ['[CH2]C(C)C([O])CCCC(11275)'],
    transitionState = 'TS5',
    kinetics = Arrhenius(A=(10000,'cm^3/(mol*s)'), n=2.41, Ea=(29.7482,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 417 used for Cds-CsH_Cds-HH;CsJ-HHH
Exact match found for rate rule [Cds-CsH_Cds-HH;CsJ-HHH]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
    label = 'reaction6',
    reactants = ['C3H6(T)(143)', 'CCCCC=O(1733)'],
    products = ['[CH2]C(C)C([O])CCCC(11275)'],
    transitionState = 'TS6',
    kinetics = Arrhenius(A=(0.0201871,'m^3/(mol*s)'), n=2.2105, Ea=(56.0866,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [CO-CsH_O;YJ] for rate rule [CO-CsH_O;CJ]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
    label = 'reaction7',
    reactants = ['butyl_1(82)', 'iC4H7OJ_48(1671)'],
    products = ['[CH2]C(C)C([O])CCCC(11275)'],
    transitionState = 'TS7',
    kinetics = Arrhenius(A=(7.94e+10,'cm^3/(mol*s)'), n=0, Ea=(28.0328,'kJ/mol'), T0=(1,'K'), Tmin=(333,'K'), Tmax=(363,'K'), comment="""Estimated using template [CO_O;CsJ-CsHH] for rate rule [CO-CsH_O;CsJ-CsHH]
Euclidian distance = 2.0
family: R_Addition_MultipleBond"""),
)
# Reactions 8–20: intra_H_migration channels of the [CH2]C(C)C([O])CCCC well
# (ring sizes R2H through R7H per the rate-rule names in the comments).
reaction(
    label = 'reaction8',
    reactants = ['[CH2]C(C)C([O])CCCC(11275)'],
    products = ['CCCCC([O])[C](C)C(11293)'],
    transitionState = 'TS8',
    kinetics = Arrhenius(A=(5.265e-07,'s^-1'), n=5.639, Ea=(102.68,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 38 used for R2H_S;C_rad_out_2H;Cs_H_out_Cs2
Exact match found for rate rule [R2H_S;C_rad_out_2H;Cs_H_out_Cs2]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
    label = 'reaction9',
    reactants = ['[CH2]C(C)C([O])CCCC(11275)'],
    products = ['[CH2]C(C)[C](O)CCCC(11294)'],
    transitionState = 'TS9',
    kinetics = Arrhenius(A=(4.56178e+08,'s^-1'), n=1.25272, Ea=(165.67,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R2H_S;Y_rad_out;Cs_H_out_Cs2] for rate rule [R2H_S;O_rad_out;Cs_H_out_Cs2]
Euclidian distance = 1.0
family: intra_H_migration"""),
)
reaction(
    label = 'reaction10',
    reactants = ['[CH2]C(C)C([O])CCCC(11275)'],
    products = ['CCCC[C]([O])C(C)C(11295)'],
    transitionState = 'TS10',
    kinetics = Arrhenius(A=(83345.1,'s^-1'), n=2.17519, Ea=(117.939,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3H_SS_Cs;C_rad_out_2H;XH_out]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
    label = 'reaction11',
    reactants = ['[CH2]C(C)C([O])CCCC(11275)'],
    products = ['[CH2][C](C)C(O)CCCC(11296)'],
    transitionState = 'TS11',
    kinetics = Arrhenius(A=(111914,'s^-1'), n=2.27675, Ea=(75.2806,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3H_SS_Cs;O_rad_out;XH_out]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
    label = 'reaction12',
    reactants = ['[CH2]C(C)C(O)[CH]CCC(11297)'],
    products = ['[CH2]C(C)C([O])CCCC(11275)'],
    transitionState = 'TS12',
    kinetics = Arrhenius(A=(5.71,'s^-1'), n=3.021, Ea=(105.562,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""From training reaction 319 used for R3H_SS_Cs;C_rad_out_H/NonDeC;O_H_out
Exact match found for rate rule [R3H_SS_Cs;C_rad_out_H/NonDeC;O_H_out]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
    label = 'reaction13',
    reactants = ['[CH2]C([CH2])C(O)CCCC(11298)'],
    products = ['[CH2]C(C)C([O])CCCC(11275)'],
    transitionState = 'TS13',
    kinetics = Arrhenius(A=(1.72e-08,'s^-1'), n=5.55, Ea=(83.68,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""From training reaction 340 used for R4H_SSS;C_rad_out_2H;O_H_out
Exact match found for rate rule [R4H_SSS;C_rad_out_2H;O_H_out]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
    label = 'reaction14',
    reactants = ['[CH2]C(C)C([O])CCCC(11275)'],
    products = ['CCC[CH]C([O])C(C)C(11299)'],
    transitionState = 'TS14',
    kinetics = Arrhenius(A=(754000,'s^-1'), n=1.63, Ea=(74.8936,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 110 used for R4H_SSS;C_rad_out_2H;Cs_H_out_H/(NonDeC/Cs)
Exact match found for rate rule [R4H_SSS;C_rad_out_2H;Cs_H_out_H/(NonDeC/Cs)]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
    label = 'reaction15',
    reactants = ['[CH2]C(C)C(O)C[CH]CC(11300)'],
    products = ['[CH2]C(C)C([O])CCCC(11275)'],
    transitionState = 'TS15',
    kinetics = Arrhenius(A=(2960,'s^-1'), n=2.11, Ea=(84.0984,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""From training reaction 323 used for R4H_SSS;C_rad_out_H/NonDeC;O_H_out
Exact match found for rate rule [R4H_SSS;C_rad_out_H/NonDeC;O_H_out]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
    label = 'reaction16',
    reactants = ['[CH2]C(C)C([O])CCCC(11275)'],
    products = ['CC[CH]CC([O])C(C)C(11301)'],
    transitionState = 'TS16',
    kinetics = Arrhenius(A=(262000,'s^-1'), n=1.62, Ea=(46.4424,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5H_CCC;C_rad_out_2H;Cs_H_out_H/NonDeC] for rate rule [R5H_CCC;C_rad_out_2H;Cs_H_out_H/(NonDeC/Cs)]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
    label = 'reaction17',
    reactants = ['[CH2]C(C)C([O])CCCC(11275)'],
    products = ['[CH2]C(C)C(O)CC[CH]C(11302)'],
    transitionState = 'TS17',
    kinetics = Arrhenius(A=(8e+10,'s^-1'), n=0, Ea=(25.7316,'kJ/mol'), T0=(1,'K'), Tmin=(200,'K'), Tmax=(1000,'K'), comment="""From training reaction 307 used for R5H_CCC;O_rad_out;Cs_H_out_H/NonDeC
Exact match found for rate rule [R5H_CCC;O_rad_out;Cs_H_out_H/NonDeC]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
    label = 'reaction18',
    reactants = ['[CH2]C(C)C([O])CCCC(11275)'],
    products = ['[CH2]CCCC(O)C([CH2])C(11303)'],
    transitionState = 'TS18',
    kinetics = Arrhenius(A=(4.68e+09,'s^-1'), n=0, Ea=(93.5124,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 140 used for R6H_SSSSS;O_rad_out;Cs_H_out_2H
Exact match found for rate rule [R6H_SSSSS;O_rad_out;Cs_H_out_2H]
Euclidian distance = 0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
    label = 'reaction19',
    reactants = ['[CH2]C(C)C([O])CCCC(11275)'],
    products = ['C[CH]CCC([O])C(C)C(11304)'],
    transitionState = 'TS19',
    kinetics = Arrhenius(A=(25800,'s^-1'), n=1.67, Ea=(42.6768,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 114 used for R6H_SSSSS;C_rad_out_2H;Cs_H_out_H/NonDeC
Exact match found for rate rule [R6H_SSSSS;C_rad_out_2H;Cs_H_out_H/NonDeC]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
    label = 'reaction20',
    reactants = ['[CH2]CCCC([O])C(C)C(11305)'],
    products = ['[CH2]C(C)C([O])CCCC(11275)'],
    transitionState = 'TS20',
    kinetics = Arrhenius(A=(128.4,'s^-1'), n=2.1, Ea=(63.1784,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 115 used for R7H;C_rad_out_2H;Cs_H_out_2H
Exact match found for rate rule [R7H;C_rad_out_2H;Cs_H_out_2H]
Euclidian distance = 0
Multiplied by reaction path degeneracy 6.0
family: intra_H_migration"""),
)
label = 'reaction21',
reactants = ['C3H6(T)(143)', 'CCCC[CH][O](1728)'],
products = ['[CH2]C(C)C([O])CCCC(11275)'],
transitionState = 'TS21',
kinetics = Arrhenius(A=(7.46075e+06,'m^3/(mol*s)'), n=0.027223, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;Y_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -14.4 to 0 kJ/mol."""),
)
reaction(
label = 'reaction22',
reactants = ['[CH2]C(C)C([O])CCCC(11275)'],
products = ['C=C(C)C(O)CCCC(11306)'],
transitionState = 'TS22',
kinetics = Arrhenius(A=(7.437e+08,'s^-1'), n=1.045, Ea=(63.4002,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3radExo;Y_rad;XH_Rrad]
Euclidian distance = 0
family: Intra_Disproportionation"""),
)
# NOTE(review): continuation of the auto-generated RMG-Py reaction set
# (disproportionation, carbene insertion, 1,2-shift, biradical recombination).
# Machine-generated -- regenerate rather than hand-editing values.
reaction(
    label = 'reaction23',
    reactants = ['[CH2]C(C)C([O])CCCC(11275)'],
    products = ['CCCCC(=O)C(C)C(11307)'],
    transitionState = 'TS23',
    kinetics = Arrhenius(A=(7.437e+08,'s^-1'), n=1.045, Ea=(63.4002,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3radExo;Y_rad;XH_Rrad]
Euclidian distance = 0
family: Intra_Disproportionation"""),
)

reaction(
    label = 'reaction24',
    reactants = ['CH2(S)(23)', '[CH2]CC([O])CCCC(2132)'],
    products = ['[CH2]C(C)C([O])CCCC(11275)'],
    transitionState = 'TS24',
    kinetics = Arrhenius(A=(143764,'m^3/(mol*s)'), n=0.444, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [carbene;R_H]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: 1,2_Insertion_carbene
Ea raised from -5.1 to 0 kJ/mol."""),
)

reaction(
    label = 'reaction25',
    reactants = ['CH2(S)(23)', '[CH2]C(C)C([O])CCC(11308)'],
    products = ['[CH2]C(C)C([O])CCCC(11275)'],
    transitionState = 'TS25',
    kinetics = Arrhenius(A=(1.31021e+06,'m^3/(mol*s)'), n=0.189, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [carbene;C_pri] for rate rule [carbene;C_pri/NonDeC]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 3.0
family: 1,2_Insertion_carbene
Ea raised from -1.5 to 0 kJ/mol."""),
)

reaction(
    label = 'reaction26',
    reactants = ['[CH2]C(C)C([O])CCCC(11275)'],
    products = ['C[CH]CC([O])CCCC(11273)'],
    transitionState = 'TS26',
    kinetics = Arrhenius(A=(6.55606e+10,'s^-1'), n=0.64, Ea=(159.935,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [cCs(-HC)CJ;CsJ;C] for rate rule [cCs(-HC)CJ;CsJ-HH;C]
Euclidian distance = 1.0
family: 1,2_shiftC"""),
)

reaction(
    label = 'reaction27',
    reactants = ['[CH2]C(C)C([O])CCCC(11275)'],
    products = ['CC[CH]C([O])CCCC(11309)'],
    transitionState = 'TS27',
    kinetics = Arrhenius(A=(5.59192e+09,'s^-1'), n=1.025, Ea=(194.765,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [cCs(-HC)CJ;CsJ;CH3] for rate rule [cCs(-HC)CJ;CsJ-HH;CH3]
Euclidian distance = 1.0
family: 1,2_shiftC"""),
)

reaction(
    label = 'reaction28',
    reactants = ['[CH2]C(C)C([O])CCCC(11275)'],
    products = ['CCCCC1OCC1C(11277)'],
    transitionState = 'TS28',
    kinetics = Arrhenius(A=(1.62e+12,'s^-1'), n=-0.305, Ea=(8.28432,'kJ/mol'), T0=(1,'K'), Tmin=(600,'K'), Tmax=(2000,'K'), comment="""Estimated using template [R4_SSS;C_rad_out_2H;Ypri_rad_out] for rate rule [R4_SSS;C_rad_out_2H;Opri_rad]
Euclidian distance = 1.0
family: Birad_recombination"""),
)

reaction(
    label = 'reaction29',
    reactants = ['CH2(19)', 'C[CH]C([O])CCCC(10554)'],
    products = ['[CH2]C(C)C([O])CCCC(11275)'],
    transitionState = 'TS29',
    kinetics = Arrhenius(A=(1.06732e+06,'m^3/(mol*s)'), n=0.472793, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Y_rad;Birad] for rate rule [C_rad/H/NonDeC;Birad]
Euclidian distance = 3.0
family: Birad_R_Recombination
Ea raised from -3.5 to 0 kJ/mol."""),
)

reaction(
    label = 'reaction30',
    reactants = ['O(4)', '[CH2]C(C)[CH]CCCC(556)'],
    products = ['[CH2]C(C)C([O])CCCC(11275)'],
    transitionState = 'TS30',
    kinetics = Arrhenius(A=(2085.55,'m^3/(mol*s)'), n=1.09077, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""Estimated using template [Y_rad;O_birad] for rate rule [C_rad/H/NonDeC;O_birad]
Euclidian distance = 3.0
family: Birad_R_Recombination
Ea raised from -8.3 to 0 kJ/mol."""),
)
# Pressure-dependent network definition and the Arkane/RMG job settings used
# to compute k(T, P) for it. Auto-generated; temperatures/pressures below are
# the quadrature points chosen by RMG.
network(
    label = '2099',
    isomers = [
        '[CH2]C(C)C([O])CCCC(11275)',
    ],
    reactants = [
        ('C3H6(72)', 'CCCCC=O(1733)'),
    ],
    bathGas = {
        'N2': 0.5,
        'Ne': 0.5,
    },
)

pressureDependence(
    label = '2099',
    Tmin = (300,'K'),
    Tmax = (2000,'K'),
    Tcount = 8,
    Tlist = ([302.47,323.145,369.86,455.987,609.649,885.262,1353.64,1896.74],'K'),
    Pmin = (0.01,'bar'),
    Pmax = (100,'bar'),
    Pcount = 5,
    Plist = ([0.0125282,0.0667467,1,14.982,79.8202],'bar'),
    maximumGrainSize = (0.5,'kcal/mol'),
    minimumGrainCount = 250,
    method = 'modified strong collision',
    interpolationModel = ('Chebyshev', 6, 4),
    activeKRotor = True,
    activeJRotor = True,
    rmgmode = True,
)
|
#!/usr/bin/env python
"""
Remove duplicated string from unsorted linked list
"""
import sys
sys.path.append("../")
from llist import *
def rm_dups(head):
    """Remove duplicate values from an unsorted linked list.

    Walks the list once; when a node's value has already been seen, the
    earlier occurrence is removed via ``head.delete`` (llist's delete is
    assumed to drop the first node holding that value -- the test data
    expects the *last* occurrence to survive; TODO confirm against llist).

    Returns the (possibly new) head, or None for an empty list.
    Values must be hashable (they are kept in a set for O(1) lookups;
    the original used a list, making the whole pass O(n^2)).
    """
    if head is None:
        return None
    seen = set()
    node = head
    while node is not None:
        if node.data in seen:
            head = head.delete(node.data)
        else:
            seen.add(node.data)
        node = node.nxt
    return head
if __name__ == "__main__":
    # Self-test: each question list is converted to a linked list,
    # de-duplicated, converted back, and compared to the expected answer.
    # Expected results show that the LAST occurrence of a duplicate survives.
    questions = [[1, 2, 3, 2], [1, 2, 3, 4], [1], []]
    answers = [[1, 3, 2], [1, 2, 3, 4], [1], []]
    for i, q in enumerate(questions):
        q_ = lst_to_llst(q)
        answer = rm_dups(q_)
        answer = llst_to_lst(answer)
        if not answers[i] == answer:
            # Python 2 print statements (this script targets Python 2).
            print "[FAIL] q: %s, a: %s" % (q, answer)
            exit(1)
        print "[PASS] %s" % q
|
from tgboost import tgb
import pandas as pd
# --- data preparation --------------------------------------------------
# Load, shuffle, and split the training CSV: first 5000 shuffled rows become
# the validation set, the rest is used for training.
train = pd.read_csv('../../train.csv')
train = train.sample(frac=1.0, axis=0) # shuffle the data
train.fillna(-999, inplace=True)  # -999 acts as the missing-value sentinel
val = train.iloc[0:5000]
train = train.iloc[5000:]
train_y = train.label.values
train_X = train.drop('label', axis=1).values
val_y = val.label.values
val_X = val.drop('label', axis=1).values
del train, val  # free the DataFrames; only the numpy arrays are needed now
print train_X.shape, val_X.shape  # Python 2 print statement (script is py2)

# TGBoost hyper-parameters; names mirror the usual XGBoost-style settings.
params = {'loss': "logisticloss",
'eta': 0.3,
'max_depth': 5,
'num_boost_round': 500,
'scale_pos_weight': 1.0,
'subsample': 0.8,
'colsample': 0.8,
'min_sample_split': 50,
'min_child_weight': 1,
'reg_lambda': 10,
'gamma': 0.01,
'eval_metric': "error",
'early_stopping_rounds': 20,
'maximize': False}

# Train with early stopping on the held-out validation split.
tgb.fit(train_X, train_y, validation_data=(val_X, val_y), **params)
"""
TGBoost round 0, train-error is 0.238742857143, val-error is 0.2456, time cost 3.71701383591s
TGBoost round 1, train-error is 0.230285714286, val-error is 0.2338, time cost 3.69342088699s
TGBoost round 2, train-error is 0.2306, val-error is 0.2344, time cost 3.70841407776s
TGBoost round 3, train-error is 0.231542857143, val-error is 0.231, time cost 3.70805692673s
TGBoost round 4, train-error is 0.230828571429, val-error is 0.2314, time cost 3.72947621346s
TGBoost round 5, train-error is 0.229085714286, val-error is 0.2276, time cost 3.75900292397s
TGBoost round 6, train-error is 0.23, val-error is 0.2302, time cost 3.70230102539s
TGBoost round 7, train-error is 0.2288, val-error is 0.2292, time cost 3.73828411102s
TGBoost round 8, train-error is 0.228057142857, val-error is 0.229, time cost 3.75026297569s
TGBoost round 9, train-error is 0.2266, val-error is 0.2296, time cost 3.70752620697s
TGBoost round 10, train-error is 0.227, val-error is 0.2318, time cost 3.68288016319s
TGBoost round 11, train-error is 0.227657142857, val-error is 0.2322, time cost 3.71141815186s
TGBoost round 12, train-error is 0.2264, val-error is 0.231, time cost 3.69189381599s
TGBoost round 13, train-error is 0.225828571429, val-error is 0.2292, time cost 3.81694102287s
TGBoost round 14, train-error is 0.225628571429, val-error is 0.2292, time cost 3.71179795265s
TGBoost round 15, train-error is 0.2256, val-error is 0.2284, time cost 3.72830486298s
TGBoost round 16, train-error is 0.224228571429, val-error is 0.2276, time cost 3.77151203156s
TGBoost round 17, train-error is 0.224057142857, val-error is 0.2258, time cost 3.82595896721s
TGBoost round 18, train-error is 0.2234, val-error is 0.226, time cost 3.70994997025s
TGBoost round 19, train-error is 0.223, val-error is 0.2252, time cost 3.75701904297s
TGBoost round 20, train-error is 0.2206, val-error is 0.2218, time cost 3.68910098076s
TGBoost round 21, train-error is 0.220914285714, val-error is 0.223, time cost 3.7491979599s
TGBoost round 22, train-error is 0.217942857143, val-error is 0.2202, time cost 3.78888297081s
TGBoost round 23, train-error is 0.218028571429, val-error is 0.221, time cost 3.71157193184s
TGBoost round 24, train-error is 0.218285714286, val-error is 0.2198, time cost 3.73786687851s
TGBoost round 25, train-error is 0.218428571429, val-error is 0.2212, time cost 3.77874898911s
TGBoost round 26, train-error is 0.218257142857, val-error is 0.2198, time cost 3.73246216774s
TGBoost round 27, train-error is 0.217971428571, val-error is 0.2188, time cost 3.5935549736s
TGBoost round 28, train-error is 0.217257142857, val-error is 0.2182, time cost 3.63868999481s
TGBoost round 29, train-error is 0.2168, val-error is 0.2178, time cost 3.72400403023s
TGBoost round 30, train-error is 0.2164, val-error is 0.2172, time cost 3.71992492676s
TGBoost round 31, train-error is 0.216142857143, val-error is 0.217, time cost 3.69051504135s
TGBoost round 32, train-error is 0.216628571429, val-error is 0.217, time cost 3.7802131176s
TGBoost round 33, train-error is 0.2162, val-error is 0.2164, time cost 3.67879986763s
TGBoost round 34, train-error is 0.216285714286, val-error is 0.217, time cost 3.69256806374s
TGBoost round 35, train-error is 0.216114285714, val-error is 0.2166, time cost 3.66421985626s
TGBoost round 36, train-error is 0.215771428571, val-error is 0.2158, time cost 3.74409604073s
TGBoost round 37, train-error is 0.214542857143, val-error is 0.2164, time cost 3.69231891632s
TGBoost round 38, train-error is 0.214857142857, val-error is 0.2166, time cost 3.70529198647s
TGBoost round 39, train-error is 0.214257142857, val-error is 0.2138, time cost 3.76646089554s
TGBoost round 40, train-error is 0.213885714286, val-error is 0.2136, time cost 3.74382400513s
TGBoost round 41, train-error is 0.213542857143, val-error is 0.213, time cost 3.63991785049s
TGBoost round 42, train-error is 0.213428571429, val-error is 0.215, time cost 3.77041006088s
TGBoost round 43, train-error is 0.212685714286, val-error is 0.215, time cost 3.62445902824s
TGBoost round 44, train-error is 0.212685714286, val-error is 0.2144, time cost 3.71449494362s
TGBoost round 45, train-error is 0.212714285714, val-error is 0.2144, time cost 3.66005396843s
TGBoost round 46, train-error is 0.212371428571, val-error is 0.2144, time cost 3.66620707512s
TGBoost round 47, train-error is 0.212542857143, val-error is 0.2138, time cost 3.67622303963s
TGBoost round 48, train-error is 0.212142857143, val-error is 0.214, time cost 3.70943689346s
TGBoost round 49, train-error is 0.2122, val-error is 0.2136, time cost 3.61202192307s
TGBoost round 50, train-error is 0.211942857143, val-error is 0.2134, time cost 3.66828107834s
TGBoost round 51, train-error is 0.211714285714, val-error is 0.2136, time cost 3.65024113655s
TGBoost round 52, train-error is 0.211342857143, val-error is 0.2122, time cost 3.66291499138s
TGBoost round 53, train-error is 0.211114285714, val-error is 0.2124, time cost 3.73679614067s
TGBoost round 54, train-error is 0.211428571429, val-error is 0.2132, time cost 3.75595188141s
TGBoost round 55, train-error is 0.2114, val-error is 0.214, time cost 3.66404008865s
TGBoost round 56, train-error is 0.2112, val-error is 0.2136, time cost 3.78315210342s
TGBoost round 57, train-error is 0.210628571429, val-error is 0.2144, time cost 3.60629796982s
TGBoost round 58, train-error is 0.210771428571, val-error is 0.2138, time cost 3.68571186066s
TGBoost round 59, train-error is 0.210828571429, val-error is 0.2144, time cost 3.72975087166s
TGBoost round 60, train-error is 0.210657142857, val-error is 0.2142, time cost 3.66297101974s
TGBoost round 61, train-error is 0.210742857143, val-error is 0.2144, time cost 3.63383293152s
TGBoost round 62, train-error is 0.2104, val-error is 0.2146, time cost 3.67448306084s
TGBoost round 63, train-error is 0.2102, val-error is 0.2146, time cost 3.60910105705s
TGBoost round 64, train-error is 0.210228571429, val-error is 0.2142, time cost 3.67948198318s
TGBoost round 65, train-error is 0.210257142857, val-error is 0.2142, time cost 3.70058321953s
TGBoost round 66, train-error is 0.210085714286, val-error is 0.2146, time cost 3.71190094948s
TGBoost round 67, train-error is 0.210257142857, val-error is 0.2146, time cost 3.60603618622s
TGBoost round 68, train-error is 0.210257142857, val-error is 0.2146, time cost 3.68661594391s
TGBoost round 69, train-error is 0.2102, val-error is 0.2148, time cost 3.65223407745s
TGBoost round 70, train-error is 0.209657142857, val-error is 0.2146, time cost 3.61065888405s
TGBoost round 71, train-error is 0.209142857143, val-error is 0.2138, time cost 3.60764408112s
TGBoost round 72, train-error is 0.209342857143, val-error is 0.2132, time cost 3.63329005241s
TGBoost round 73, train-error is 0.2092, val-error is 0.2138, time cost 3.67511677742s
TGBoost round 74, train-error is 0.209142857143, val-error is 0.2136, time cost 3.65356612206s
TGBoost round 75, train-error is 0.209028571429, val-error is 0.2136, time cost 3.68754291534s
TGBoost round 76, train-error is 0.209, val-error is 0.2138, time cost 3.61171102524s
TGBoost round 77, train-error is 0.208857142857, val-error is 0.2138, time cost 3.67216515541s
TGBoost round 78, train-error is 0.208828571429, val-error is 0.2138, time cost 3.6556289196s
TGBoost round 79, train-error is 0.208914285714, val-error is 0.2128, time cost 3.60161304474s
TGBoost round 80, train-error is 0.209028571429, val-error is 0.2128, time cost 3.76077389717s
TGBoost round 81, train-error is 0.208857142857, val-error is 0.2128, time cost 3.67292404175s
TGBoost round 82, train-error is 0.208857142857, val-error is 0.2128, time cost 3.57409310341s
TGBoost round 83, train-error is 0.209028571429, val-error is 0.2124, time cost 3.7161090374s
TGBoost round 84, train-error is 0.209114285714, val-error is 0.2128, time cost 3.63063502312s
TGBoost round 85, train-error is 0.208685714286, val-error is 0.2128, time cost 3.67365813255s
TGBoost round 86, train-error is 0.208685714286, val-error is 0.213, time cost 3.58945894241s
TGBoost round 87, train-error is 0.208714285714, val-error is 0.2128, time cost 3.59758901596s
TGBoost round 88, train-error is 0.209028571429, val-error is 0.213, time cost 3.61627411842s
TGBoost round 89, train-error is 0.208742857143, val-error is 0.2134, time cost 3.62249994278s
TGBoost round 90, train-error is 0.208542857143, val-error is 0.2134, time cost 3.56504416466s
TGBoost round 91, train-error is 0.208485714286, val-error is 0.213, time cost 3.7109978199s
TGBoost round 92, train-error is 0.208457142857, val-error is 0.213, time cost 3.65219807625s
TGBoost round 93, train-error is 0.208371428571, val-error is 0.2128, time cost 3.6589820385s
TGBoost round 94, train-error is 0.208228571429, val-error is 0.2132, time cost 3.67305898666s
TGBoost round 95, train-error is 0.2082, val-error is 0.2132, time cost 3.66815090179s
TGBoost round 96, train-error is 0.2082, val-error is 0.2132, time cost 3.73147678375s
TGBoost round 97, train-error is 0.208171428571, val-error is 0.2132, time cost 3.63195705414s
TGBoost round 98, train-error is 0.208057142857, val-error is 0.2132, time cost 3.67149996758s
TGBoost round 99, train-error is 0.208285714286, val-error is 0.2132, time cost 3.66407799721s
TGBoost round 100, train-error is 0.208228571429, val-error is 0.2128, time cost 3.67459392548s
TGBoost round 101, train-error is 0.208171428571, val-error is 0.2128, time cost 3.6504199504
"""
|
def closest_diff(arr, k):
    """Smallest |candidate - k| where a candidate is any single element of
    arr or any prefix sum of arr sorted ascending or descending.

    Returns 0 immediately when k is an element of arr or equals sum(arr).

    Fixes two off-by-one bugs in the original script: the element scan used
    range(0, len(arr)-1) and skipped the last element, and the prefix-sum
    scan used range(1, len(arr)-1), excluding the full sum even though
    sum(arr) == k was already treated as a valid (exact) candidate.
    """
    if k in arr or sum(arr) == k:
        return 0
    best = None
    # The two sort orders yield different prefix sums; elements are the
    # same set either way, but scanning both keeps the logic uniform.
    for ordered in (sorted(arr, reverse=True), sorted(arr)):
        for value in ordered:
            diff = abs(value - k)
            best = diff if best is None else min(best, diff)
        running = 0
        for value in ordered:
            running += value
            best = min(best, abs(running - k))
    return best


if __name__ == "__main__":
    # Input format: a bracketed list like "[1,2,3]" followed by k on its
    # own line.  Strip the brackets, parse the ints, print the answer.
    raw = input()
    raw = raw[1:len(raw) - 1]
    arr = list(map(int, raw.split(",")))
    k = int(input())
    print(closest_diff(arr, k))
|
import discord
import asyncio
import datetime
import os
import re
import urllib
import sys, traceback
import requests, time
from discord.utils import get
# Global bot state. This file uses the pre-1.0 ("async", 0.16.x) discord.py
# API, where ids are strings and messages are sent via client.send_message.
client = discord.Client()
url=''  # NOTE(review): never assigned elsewhere in this file -- possibly dead
botOwner = "266640111897149440"  # Discord user id allowed to run admin commands
@client.event
async def on_ready():
    """Startup hook: log identity, set presence, start the anime poller,
    and post the self-role reaction message with its two emoji.

    NOTE(review): on_ready also fires on reconnects, so the welcome/role
    message may be posted again -- confirm this is intended.
    """
    print('Logged in as')
    print(client.user.name)
    print(client.user.id)
    print('------')
    # type=1 marks the presence as "streaming" in the 0.16 API.
    await client.change_presence(game=discord.Game(name="!help", type=1))
    client.loop.create_task(check_new_animes())
    Channel = client.get_channel('503638566405013505')
    Text= "Welcome to **Otaku World** Discord server! <:Hehe:503632223895945227>\nIf you watch **anime** react with <:TohruPoint:503633376524107776> to join **anime** group.\nIf you read **manga** react with <:NekoChen:503633274594394123> to join **manga** group."
    Moji = await client.send_message(Channel, Text)
    emoji1 = get(client.get_all_emojis(), name='TohruPoint')
    emoji2 = get(client.get_all_emojis(), name='NekoChen')
    await client.add_reaction(Moji, emoji=emoji1)
    await client.add_reaction(Moji, emoji=emoji2)
async def check_new_animes():
    """Poll animeslayer.com once a second and post an embed to the anime
    log channel whenever the newest entry changes.

    SECURITY(review): eval() on a remote HTTP response executes arbitrary
    code if the endpoint is compromised or spoofed -- this should be
    json.loads() (or at least ast.literal_eval()). Flagged, not changed.
    """
    lastAnime = eval(requests.get("http://animeslayer.com/Data/newSystem/NewGet.php?fu=1").content)[0]
    while(True):
        newAnime = eval(requests.get("http://animeslayer.com/Data/newSystem/NewGet.php?fu=1").content)[0]
        if newAnime == lastAnime:
            print("No new episode")
        else:
            print("New episode")
            # Re-fetch so the embed reflects the very latest entry; the
            # loop cursor is reset to newAnime below.
            lastAnime = eval(requests.get("http://animeslayer.com/Data/newSystem/NewGet.php?fu=1").content)[0]
            AnimeLogChannel = discord.Object(id=504382218961944607)
            embed=discord.Embed(color=0xffa500)
            embed.add_field(name="Title: " , value=str(lastAnime["Title"]), inline=True)
            embed.add_field(name="State: " , value=str(lastAnime["State"]), inline=True)
            embed.add_field(name="Year: " , value=str(lastAnime["Year"]), inline=True)
            embed.add_field(name="Rate: " , value=str(lastAnime["Rate"]), inline=True)
            embed.add_field(name="Gen: " , value=str(lastAnime["Gen"].replace("[","").replace('"',"").replace("]","").replace(",",", ")), inline=True)
            embed.add_field(name="Time: " , value=str(lastAnime["time"].replace(",- ", "")), inline=True)
            if lastAnime["Rating"] == "":
                embed.add_field(name="Rating: " , value="-", inline=True)
            else:
                embed.add_field(name="Rating: " , value=str(lastAnime["Rating"]), inline=True)
            embed.set_image(url=(str(lastAnime["img"])).replace("\\",""))
            try:
                await client.send_message(AnimeLogChannel, embed=embed)
            except discord.HTTPException:
                # Dump the embed for debugging, then let the error surface.
                print(embed.to_dict())
                raise
            # NOTE(review): overwrites the re-fetched value above -- the
            # comparison cursor intentionally tracks newAnime; confirm.
            lastAnime = newAnime
        await asyncio.sleep(1)
@client.event
async def on_message(message):
    """Command dispatcher: public !invite, owner-only admin commands, and
    auto-pinning of giveaway announcements.

    NOTE(review): the original file's indentation was lost; the nesting of
    the !lastAnime and AQ!character handlers under the owner check was
    reconstructed and should be confirmed against the deployed bot.
    """
    now = datetime.datetime.now()
    if message.channel.id == "504690754669510667":
        if message.content.startswith("!invite"):
            await client.send_message(message.channel, "Invite link: https://discord.gg/xDWGavx")
    if message.author.id == botOwner:
        if message.content.startswith("!clear"):
            # Bulk-delete the requested number of messages one by one
            # (0.16 API), then show a self-deleting summary embed.
            mm=message.content.split('!clear ')[1]
            nn=0
            async for m in client.logs_from(message.channel, limit=int(mm)):
                await client.delete_message(m)
                nn+=1
            embed=discord.Embed(title="<a:cleaning:521720398740717569> The chat is clean:", description=str(nn)+" messages deleted.", color=0xffa500)
            embed.set_footer(text=now.strftime("%Y/%m/%d %H:%M:%S"))
            n = await client.send_message(message.channel, embed=embed)
            await asyncio.sleep(2)
            await client.delete_message(n)
        if message.content.startswith("!refresh"):
            # Restart the bot process in place.
            await client.delete_message(message)
            await client.logout()
            os.system("python Rachel.py")
            exit()
        if message.content.startswith("!lastAnime"):
            # Same embed construction as check_new_animes().
            # SECURITY(review): eval() of a remote response -- see the
            # matching note on check_new_animes().
            lastAnime = eval(requests.get("http://animeslayer.com/Data/newSystem/NewGet.php?fu=1").content)[0]
            AnimeLogChannel = discord.Object(id=504382218961944607)
            embed=discord.Embed(color=0xffa500)
            embed.add_field(name="Title: " , value=str(lastAnime["Title"]), inline=True)
            embed.add_field(name="State: " , value=str(lastAnime["State"]), inline=True)
            embed.add_field(name="Year: " , value=str(lastAnime["Year"]), inline=True)
            embed.add_field(name="Rate: " , value=str(lastAnime["Rate"]), inline=True)
            embed.add_field(name="Gen: " , value=str(lastAnime["Gen"].replace("[","").replace('"',"").replace("]","").replace(",",", ")), inline=True)
            embed.add_field(name="Time: " , value=str(lastAnime["time"].replace(",- ", "")), inline=True)
            if lastAnime["Rating"] == "":
                embed.add_field(name="Rating: " , value="-", inline=True)
            else:
                embed.add_field(name="Rating: " , value=str(lastAnime["Rating"]), inline=True)
            embed.set_image(url=(str(lastAnime["img"])).replace("\\",""))
            try:
                await client.send_message(AnimeLogChannel, embed=embed)
            except discord.HTTPException:
                print(embed.to_dict())
                raise
        if message.content.startswith("AQ!character"):
            await client.delete_message(message)
    # Giveaway bot announcements get pinned automatically.
    if message.author.id == "464004630632988672":
        if message.content.startswith("🎉 "):
            await client.pin_message(message)
@client.event
async def on_member_join(member):
    """Give every newcomer the base role and log the join."""
    base_role = discord.utils.get(member.server.roles, name="Otaku")
    await client.add_roles(member, base_role)
    join_log = discord.Object(id=510846615880335361)
    note = discord.Embed(description=str(member.mention)+" joined.", color=0xffa500)
    await client.send_message(join_log, embed=note)
@client.event
async def on_member_remove(member):
    """Log departures to the membership log channel."""
    leave_log = discord.Object(id=510846615880335361)
    note = discord.Embed(description=str(member.mention)+" left.", color=0xffa500)
    await client.send_message(leave_log, embed=note)
@client.event
async def on_reaction_add(reaction, user):
    """Grant the matching self-assign role when a tracked emoji is added
    in the role-selection channel."""
    if reaction.message.channel.id != '503638566405013505':
        return
    if user == client.user:
        return
    emoji_to_role = {
        "<:TohruPoint:503633376524107776>": "Anime",
        "<:NekoChen:503633274594394123>": "Manga",
    }
    role_name = emoji_to_role.get(str(reaction.emoji))
    if role_name is not None:
        role = discord.utils.get(user.server.roles, name=role_name)
        await client.add_roles(user, role)
@client.event
async def on_reaction_remove(reaction, user):
    """Revoke the matching self-assign role when a tracked emoji is removed
    in the role-selection channel."""
    if reaction.message.channel.id != '503638566405013505':
        return
    if user == client.user:
        return
    emoji_to_role = {
        "<:TohruPoint:503633376524107776>": "Anime",
        "<:NekoChen:503633274594394123>": "Manga",
    }
    role_name = emoji_to_role.get(str(reaction.emoji))
    if role_name is not None:
        role = discord.utils.get(user.server.roles, name=role_name)
        await client.remove_roles(user, role)
@client.event
async def on_message_delete(message):
    """Log any non-bot message deletion (content, author, channel, time)
    to the moderation log channel."""
    now = datetime.datetime.now()
    if message.author.id != client.user.id:
        channel = discord.Object(id=503636188901539863)
        embed=discord.Embed(color=0xffa500)
        embed.add_field(name="**Message:**" , value=str(message.content), inline=False)
        embed.set_author(name=str(message.author), icon_url=str(message.author.avatar_url))
        embed.set_footer(text="Deleted message in #"+str(message.channel.name)+" at "+now.strftime("%Y/%m/%d %H:%M:%S"))
        await client.send_message(channel, embed=embed)
@client.event
async def on_message_edit(before, after):
    """Log before/after content of any non-bot message edit to the
    moderation log channel."""
    now = datetime.datetime.now()
    if after.author.id != client.user.id:
        channel = discord.Object(id=503636188901539863)
        embed=discord.Embed(color=0xffa500)
        embed.add_field(name="**Before:**" , value=str(before.content), inline=False)
        embed.add_field(name="**After:**" , value=str(after.content), inline=False)
        embed.set_author(name=str(after.author), icon_url=str(after.author.avatar_url))
        embed.set_footer(text="Edited message in #"+str(after.channel.name)+" at "+now.strftime("%Y/%m/%d %H:%M:%S"))
        await client.send_message(channel, embed=embed)
client.run('NTAzNjI2Mzg3MTQwMzc4NjM0.Dq5QDg.SK9utIEQ2EJaMoPvpy9RXvZ6R_k')
|
from flask_sqlalchemy import SQLAlchemy
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from marshmallow import Schema, fields, ValidationError
from passlib.apps import custom_app_context as pwd_context
db = SQLAlchemy()
class User(db.Model):
    '''
    Database model for an account: username plus a passlib password hash.
    '''
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    # NOTE(review): indexed but not unique -- duplicate usernames are
    # currently allowed; confirm that is intended.
    username = db.Column(db.String(32), index=True)
    password_hash = db.Column(db.String(64)) # To store hashed passwords.

    def hash_password(self, password):
        '''Hash the plaintext password and store it on the instance.'''
        self.password_hash = pwd_context.encrypt(password)

    def verify_password(self, password):
        '''Return True if the plaintext password matches the stored hash.'''
        return pwd_context.verify(password, self.password_hash)

    def generate_auth_token(self, SECRET_KEY, expiration=600):
        '''Return a signed token embedding this user's id; expiration is in
        seconds (default 10 minutes).'''
        s = Serializer(SECRET_KEY, expires_in=expiration)
        return s.dumps({'id': self.id})
class Bucketlist(db.Model):
    '''Database model for a bucketlist owned by a user, holding items.'''
    __tablename__ = 'bucketlists'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50), unique=False)
    date_created = db.Column(db.DateTime, default=db.func.current_timestamp())
    # NOTE(review): only a creation-time default; an onupdate= callback is
    # probably intended so this actually tracks modifications -- confirm.
    date_modified = db.Column(db.DateTime, default=db.func.current_timestamp())
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    # NOTE(review): the backref name "users.username" puts a dotted attribute
    # on User; a plain name (e.g. "bucketlists") looks intended. Left as-is
    # because callers may depend on it -- confirm before changing.
    created_by = db.relationship(
        "User", backref=db.backref("users.username", lazy="dynamic"))
    items = db.relationship("Item", backref=db.backref("bucketlists"))

    def __repr__(self):
        return "<Bucketlist(created_by='%s')>" % (
            self.created_by)

    def as_dict(self):
        '''Return {column name: value} for this row (JSON-friendly).'''
        return {c.name: getattr(self, c.name) for c in self.__table__.columns}
class Item(db.Model):
    '''Database model for a single to-do item inside a bucketlist.'''
    __tablename__ = 'items'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50), unique=False)
    bucketlist_id = db.Column(db.Integer, db.ForeignKey('bucketlists.id'))
    date_created = db.Column(db.DateTime, default=db.func.current_timestamp())
    # NOTE(review): same as Bucketlist -- no onupdate= callback; confirm.
    date_modified = db.Column(db.DateTime, default=db.func.current_timestamp())
    done = db.Column(db.Boolean, default=False)  # completion flag
class UserSchema(Schema):
    '''UserSchema for data serialization using marshmallow'''
    id = fields.Int(dump_only=True)
    username = fields.Str()

    # NOTE(review): defined without 'self' -- works only when referenced as a
    # plain function from the class body (e.g. validate=must_not_be_blank),
    # and breaks if called as an instance method; it likely belongs at module
    # level or should be a @staticmethod. Confirm call sites before moving.
    def must_not_be_blank(data):
        if not data:
            raise ValidationError('Data not provided.')
class ItemSchema(Schema):
    '''ItemSchema for data serialization using marshmallow'''
    id = fields.Int(dump_only=True)
    name = fields.Str()
    date_created = fields.DateTime(dump_only=True)
    date_modified = fields.DateTime(dump_only=True)
    done = fields.Boolean()
class BucketlistSchema(Schema):
    '''BucketlistSchema for data serialization using marshmallow'''
    id = fields.Int(dump_only=True)
    name = fields.Str()
    date_created = fields.DateTime(dump_only=True)
    date_modified = fields.DateTime(dump_only=True)
    # NOTE(review): ('username') is the string 'username', not a 1-tuple; in
    # marshmallow 2 a string `only` serializes the nested object to that single
    # field's bare value. If a dict {"username": ...} was intended, this should
    # be ('username',). Confirm the expected API response shape before fixing.
    created_by = fields.Nested(UserSchema, only=('username'))
    items = fields.Nested(ItemSchema, many=True)
# Module-level, reusable schema instances (singular = one object,
# plural = list serialization).
user_schema = UserSchema()
users_schema = UserSchema(many=True)
bucketlist_schema = BucketlistSchema()
bucketlists_schema = BucketlistSchema(many=True)
item_schema = ItemSchema()
items_schema = ItemSchema(many=True)
|
# Ask for a month number and print which season it falls in
# (Korean prompt and messages). Out-of-range input prints nothing.
month = int(input('월을 입력하세요 : '))
if 3 <= month <= 5:
    print('%d월은 봄입니다.' % month)
elif 6 <= month <= 8:
    print('%d월은 여름입니다.' % month)
elif 9 <= month <= 11:
    print('%d월은 가을입니다.' % month)
elif month == 12 or month == 1 or month == 2:
    print('%d월은 겨울입니다.' % month)
#coding=gbk
from os import path as ospath
from ConfigParser import ConfigParser
CONF = None
class myConfig(object):
    """Thin typed-accessor wrapper around config.ini.

    Parser state lives in self.conf; save() writes every change straight
    back to config.ini in the current working directory.
    """

    def __init__(self):
        self.conf = ConfigParser()
        # ConfigParser.read silently ignores a missing file.
        self.conf.read('config.ini')

    def getStatus(self):
        """Return the [service] status value as a string."""
        return self.conf.get('service', 'status')

    def getPort(self):
        """Return the [service] port as an int."""
        return int(self.conf.get('service', 'port'))

    def getServer(self):
        """Return the [service] server host string."""
        return self.conf.get('service', 'server')

    def getCltPath(self):
        """Return {flag: path} for every [localpath] entry that still exists.

        Entries whose path no longer exists on disk are blanked out in
        config.ini (note: this rewrites the file while iterating the
        section's option list).
        """
        path = {}
        for flag in self.conf.options('localpath'):
            tpath = self.conf.get('localpath', flag)
            if not ospath.exists(tpath):
                self.save('localpath', flag, '')
            else:
                path[flag] = tpath
        return path

    def save(self, section, item, value):
        """Set section/item to value and persist config.ini to disk."""
        self.conf.set(section, item, value)
        # Fixed: the original passed open('config.ini','w+') straight into
        # write(), leaking the file handle; 'with' guarantees close/flush.
        with open('config.ini', 'w+') as config_file:
            self.conf.write(config_file)

    #===============================================================================
    # control
    #===============================================================================
    def getCMD(self):
        """Return the [run] command entry split on '|'."""
        cmds = self.conf.get('run','command')
        cmds = cmds.split('|')
        return cmds

    def getCtrlPath(self):
        """Return the whole [path] section as a dict."""
        return dict(self.conf.items('path'))

    def getList(self, path, cmd):
        """Merge section `cmd`'s items into `path` (mutates and returns it)."""
        path.update(dict(self.conf.items(cmd)))
        return path

    #===============================================================================
    # vittual
    #===============================================================================
    def getUser(self):
        """Return the [Script] user name."""
        user = self.conf.get('Script','user')
        return user

    def getPasswd(self):
        """Return the [Script] password."""
        password = self.conf.get('Script', 'password')
        return password

    def getGMPort(self):
        """Return the [Script] gmport as an int."""
        return int(self.conf.get('Script', 'gmport'))

    def getDelayKill(self):
        """Return the [Script] delaykill value as an int (seconds, presumably)."""
        return int(self.conf.get('Script', 'delaykill'))
CONF = myConfig() |
# Filters edge_tuple for resolved tuples with h2 - h1 = 2
import pymongo as pm
import time
import csv
##
# @param edge_id ID of edge to extract info about
# return (ip1_id,ip2_id,hop_diff)
def get_ip12(edge_id):
    """Look up an edge by id and return (ip1_id, ip2_id, hop_diff) as ints."""
    record = c_edges.find_one({"edge": str(edge_id)})
    return tuple(int(record[key]) for key in ("ip1", "ip2", "l12"))
def resolve(ip1_r,ip2_r, ip1_p, ip2_n):
    """Report whether the edge to resolve (ip1_r -> ip2_r) matches the
    candidate endpoints (ip1_p -> ip2_n).

    Prints the outcome and returns True when both endpoints match."""
    if ip1_r == ip1_p and ip2_r == ip2_n:
        print "Edge", ip1_r, "->", ip2_r, "can be resolved."
        return True
    else:
        print "Edge", ip1_r, "->", ip2_r, "Can not be resolved."
        return False
client = pm.MongoClient()
db = client.dataset
c_tuples = db.tuples
c_edgetuple = db.edgetuple
c_edges = db.edges
tuples_checked = 0
tuples_resolved = 0
start_time = time.time()
# Walk the CSV of unresolved tuples (h2 - h1 == 2) and try to resolve each
# one through the "mid" tuple one hop away plus a neighbouring tuple.
with open('tuples_diff_2.csv', 'rb') as csvfile:
    next(csvfile) # To ignore the first line
    reader = csv.reader(csvfile, delimiter=';')
    for row in reader:
        # CSV columns: tuple id, source, destination, first hop, second hop
        t_resolve = row[0]
        src = row[1]
        dst = row[2]
        h1_resolve = row[3]
        h2_resolve = row[4]
        print 'tuple to be resolved: ', t_resolve
        # TODO get all edges not only one !!
        resolve_edges = c_edgetuple.find_one({"tuple": t_resolve})
        (ip1_r,ip2_r,h_diff_r) = get_ip12(resolve_edges['edge'])
        print "Edge to be resolved: ", ip1_r, "->", ip2_r
        # Tuple covering the intermediate hop (h1 -> h1+1)
        mid_tuple = c_tuples.find_one( {"src": src, "dst": dst, "h1": h1_resolve , "h2": str(int(h1_resolve)+1)})
        if mid_tuple != None:
            print 'Mid tuple: '+ str(mid_tuple['tuple'])
            mid_edge = c_edgetuple.find_one({"tuple": mid_tuple['tuple']})
            if mid_edge != None:
                (ip1_m,ip2_m,h_diff_m) = get_ip12(mid_edge['edge'])
                print "Mid Edge: ", ip1_m, "->", ip2_m
                if ip1_r == ip1_m:
                    # Same head: look one hop further (h1+1 -> h1+2)
                    next_tuple = c_tuples.find_one({"src": src,"dst": dst, "h1":str(int(h1_resolve)+1), "h2": str(int(h1_resolve)+2)})
                    if next_tuple != None:
                        print 'next tuple: '+ str(next_tuple['tuple'])
                        next_edge = c_edgetuple.find_one({"tuple": next_tuple['tuple']})
                        (ip1_n,ip2_n,h_diff_n) = get_ip12(next_edge['edge'])
                        print "Next Edge: ", ip1_n, "->", ip2_n
                        if(resolve(ip1_r,ip2_r, ip1_m, ip2_n)):
                            tuples_resolved += 1
                elif ip2_m == ip2_r:
                    # Same tail: look one hop back (h1-1 -> h1)
                    prev_tuple = c_tuples.find_one( {"src": src, "dst": dst, "h1": str(int(h1_resolve) - 1) , "h2": h1_resolve})
                    if prev_tuple != None:
                        print 'previous tuple: '+ str(prev_tuple['tuple'])
                        prev_edge = c_edgetuple.find_one({"tuple": prev_tuple['tuple']})
                        (ip1_p,ip2_p,h_diff_p) = get_ip12(prev_edge['edge'])
                        print "Prev Edge: ", ip1_p, "->", ip2_p
                        if(resolve(ip1_r,ip2_r,ip1_p,ip2_m)):
                            tuples_resolved += 1
        tuples_checked += 1
        # Running tally after each processed row
        print 'tuples resolved: ', tuples_resolved, '/', tuples_checked
        print '----------------------------------------------'
print 'Time elapsed %f' % (time.time() - start_time)
size=3
for i in range(1,size+1):
print ("-"*(2*(size-i)),end="")
s=chr(96+size)
for j in range(1,i):
s+=("-"+chr(96+size-j))
s2= s[::-1]
print (s+s2[1::],end="")
print ("-"*(2*(size-i)))
for i in range(size-1,0,-1):
print ("-"*(2*(size-i)),end="")
s=chr(96+size)
for j in range(1,i):
s+=("-"+chr(96+size-j))
s2= s[::-1]
print (s+s2[1::],end="")
print ("-"*(2*(size-i)))
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from data.models import Idol, Skill, SkillValue, Cartoon
# Register your models here.
class IdolAdmin(admin.ModelAdmin):
    """Admin configuration for Idol records."""
    # Columns shown in the change-list view.
    list_display = (
        'idol_id',
        'name',
        'type',
        'rarity',
        'cost',
        'offense',
        'defense',
        'max_offense',
        'max_defense',
        'skill_name',
        'skill',
        'skill2',
    )
    # Grouped layout of the edit form.
    fieldsets = [
        (None, {
            'fields': ['idol_id', 'name']
        }),
        ('Status', {
            'fields': ['type', 'rarity', 'cost', 'offense', 'defense', 'max_offense', 'max_defense', 'hash']
        }),
        ('Skill', {
            'fields': ['skill_name', 'skill', 'skill2']
        }),
    ]
    # Allow searching idols by name.
    search_fields = ['name']
admin.site.register(Idol, IdolAdmin)
class SkillAdmin(admin.ModelAdmin):
    """Admin configuration for Skill records."""
    # Columns shown in the change-list view.
    list_display = (
        'skill_id',
        'comment',
        'target_unit',
        'target_member',
        'target_type',
        'target_num',
        'target_param',
        'skill_value',
    )
    # Grouped layout of the edit form.
    fieldsets = [
        (None, {
            'fields': ['skill_id', 'comment']
        }),
        ('Effect', {
            'fields': ['target_unit', 'target_member', 'target_type', 'target_num', 'target_param', 'skill_value']
        }),
    ]
    # Allow searching skills by their comment text.
    search_fields = ['comment']
admin.site.register(Skill, SkillAdmin)
class SkillValueAdmin(admin.ModelAdmin):
    """Admin configuration for SkillValue records (twelve numeric slots)."""
    # Columns shown in the change-list view.
    list_display = (
        'id',
        'value1',
        'value2',
        'value3',
        'value4',
        'value5',
        'value6',
        'value7',
        'value8',
        'value9',
        'value10',
        'value11',
        'value12',
    )
    # Grouped layout of the edit form.
    # NOTE(review): 'id' is the primary key and is normally not editable in
    # the Django admin — confirm this fieldset renders as intended.
    fieldsets = [
        (None, {
            'fields': ['id']
        }),
        ('Param', {
            'fields': [
                'value1',
                'value2',
                'value3',
                'value4',
                'value5',
                'value6',
                'value7',
                'value8',
                'value9',
                'value10',
                'value11',
                'value12',
            ]
        }),
    ]
admin.site.register(SkillValue, SkillValueAdmin)
class CartoonAdmin(admin.ModelAdmin):
    """Admin configuration for Cartoon records."""
    # Columns shown in the change-list view.
    list_display = (
        'id',
        'title',
        'date',
        'idols',
        'comment',
    )
    # Single unnamed fieldset holding all editable fields.
    fieldsets = [
        (None, {
            'fields': ['id', 'title', 'date', 'idols', 'thumbnail_hash', 'comment']
        })
    ]
admin.site.register(Cartoon, CartoonAdmin)
import os
import sys
import re
import time
import logging
import hashlib
import threading
from configparser import ConfigParser
from pathlib import Path
from contextlib import contextmanager
from datetime import timedelta
from typing import NamedTuple, List
from docopt import docopt
from natural.date import compress as compress_date
import pandas as pd
from more_itertools import peekable
import psycopg2, psycopg2.errorcodes
from psycopg2 import sql
from psycopg2.pool import ThreadedConnectionPool
from sqlalchemy import create_engine
import sqlparse
import git
# Module-level logger for this file.
_log = logging.getLogger(__name__)
# Meta-schema for storing stage and file status in the database
# NOTE: read eagerly at import time — importing this module requires
# schemas/meta-schema.sql to exist relative to this package.
_ms_path = Path(__file__).parent.parent / 'schemas' / 'meta-schema.sql'
meta_schema = _ms_path.read_text()
# Lazily-initialized process-wide singletons (see connect() and engine()).
_pool = None
_engine = None
# DB configuration info
class DBConfig:
    """Database connection settings, sourced from db.cfg by Git branch."""
    host: str
    port: str
    database: str
    user: str
    password: str
    @classmethod
    def load(cls):
        """Read db.cfg at the repository root and return a DBConfig for the
        current Git branch, falling back to the DEFAULT section.

        Raises:
            RuntimeError: if the selected section names no database.
        """
        repo = git.Repo(search_parent_directories=True)
        parser = ConfigParser()
        _log.debug('reading config from db.cfg')
        parser.read([repo.working_tree_dir + '/db.cfg'])
        branch = repo.head.reference.name
        _log.info('reading database config for branch %s', branch)
        if branch in parser:
            section = parser[branch]
        else:
            _log.debug('No configuration for branch %s, using default', branch)
            section = parser['DEFAULT']
        cfg = cls()
        cfg.host = section.get('host', 'localhost')
        cfg.port = section.get('port', None)
        cfg.database = section.get('database', None)
        cfg.user = section.get('user', None)
        cfg.password = section.get('password', None)
        if cfg.database is None:
            _log.error('No database specified for branch %s', branch)
            raise RuntimeError('no database specified')
        return cfg
    def url(self) -> str:
        """Render these settings as a PostgreSQL connection URL."""
        auth = ''
        if self.user:
            auth = self.user
            if self.password:
                auth += ':' + self.password
            auth += '@'
        hostpart = self.host
        if self.port:
            hostpart += ':' + self.port
        return 'postgresql://' + auth + hostpart + '/' + self.database
def db_url():
    "Get the URL to connect to the database."
    # An explicit DB_URL environment variable always wins.
    env_url = os.environ.get('DB_URL')
    if env_url is not None:
        _log.info('using env var DB_URL')
        return env_url
    config = DBConfig.load()
    _log.info('using database %s', config.database)
    return config.url()
@contextmanager
def connect():
    "Connect to a database. This context manager yields the connection, and closes it when exited."
    global _pool
    # Create the shared connection pool on first use (1 to 5 connections).
    if _pool is None:
        _log.info('connecting to %s', db_url())
        _pool = ThreadedConnectionPool(1, 5, db_url())
    conn = _pool.getconn()
    try:
        yield conn
    finally:
        # Return the connection to the pool even if the body raised.
        _pool.putconn(conn)
def engine():
    "Get an SQLAlchemy engine"
    global _engine
    # Lazily create a single shared engine for the process.
    if _engine is None:
        _log.info('connecting to %s', db_url())
        _engine = create_engine(db_url())
    return _engine
def _tokens(s, start=-1, skip_ws=True, skip_cm=True):
    """Yield the tokens of parsed statement *s* following index *start*,
    optionally skipping whitespace and comments."""
    pos = start
    while True:
        pos, tok = s.token_next(pos, skip_ws=skip_ws, skip_cm=skip_cm)
        if tok is None:
            break
        yield tok
def describe_statement(s):
    "Describe an SQL statement. This utility function is used to summarize statements."
    label = s.get_type()
    li, lt = s.token_next(-1, skip_cm=True)
    if lt is None:
        # statement has no tokens at all
        return None
    if lt and lt.ttype == sqlparse.tokens.DDL:
        # DDL - build up!
        parts = []
        first = True
        skipping = False
        for t in _tokens(s, li):
            if not first:
                if isinstance(t, sqlparse.sql.Identifier) or isinstance(t, sqlparse.sql.Function):
                    # the object being created/dropped ends the summary
                    parts.append(t.normalized)
                    break
                elif t.ttype != sqlparse.tokens.Keyword:
                    break
            first = False
            if t.normalized == 'IF':
                # stop collecting once an 'IF ...' qualifier starts
                skipping = True
            if not skipping:
                parts.append(t.normalized)
        label = label + ' ' + ' '.join(parts)
    elif label == 'UNKNOWN':
        # fall back to the statement's leading keywords
        ls = []
        for t in _tokens(s):
            if t.ttype == sqlparse.tokens.Keyword:
                ls.append(t.normalized)
            else:
                break
        if ls:
            label = ' '.join(ls)
        name = s.get_real_name()
        if name:
            label += f' {name}'
    return label
def is_empty(s):
    "check if an SQL statement is empty"
    # No first token (ignoring whitespace and comments) means no content.
    return s.token_first(skip_cm=True, skip_ws=True) is None
class ScriptChunk(NamedTuple):
    "A single chunk of an SQL script."
    # Human-readable step name (from a '--- #step' header, or 'Step N').
    label: str
    # psycopg2 error codes this chunk may fail with without aborting the run.
    allowed_errors: List[str]
    # Raw SQL source text of the chunk.
    src: str
    # False when the chunk must run outside a transaction ('--- #notx').
    use_transaction: bool = True
    @property
    def statements(self):
        """The chunk's source parsed into non-empty sqlparse statements."""
        return [s for s in sqlparse.parse(self.src) if not is_empty(s)]
class SqlScript:
    """
    Class for processing & executing SQL scripts with the following features ``psql``
    does not have:

    * Splitting the script into (named) steps, to commit chunks in transactions
    * Recording metadata (currently just dependencies) for the script
    * Allowing chunks to fail with specific errors

    The last feature is to help with writing _idempotent_ scripts: by allowing a chunk
    to fail with a known error (e.g. creating a constraint that already exists), you
    can write a script that can run cleanly even if it has already been run.

    Args:
        file: the path to the SQL script to read.
    """

    # '--- <instruction>' separator lines, and '#code args' instructions.
    _sep_re = re.compile(r'^---\s*(?P<inst>.*)')
    _icode_re = re.compile(r'#(?P<code>\w+)\s*(?P<args>.*\S)?\s*$')

    chunks: List[ScriptChunk]

    def __init__(self, file):
        if hasattr(file, 'read'):
            self._parse(peekable(file))
        else:
            with open(file, 'r', encoding='utf8') as f:
                self._parse(peekable(f))

    def _parse(self, lines):
        "Parse the script: header metadata first, then successive chunks."
        self.chunks = []
        self.deps, self.tables = self._parse_script_header(lines)
        next_chunk = self._parse_chunk(lines, len(self.chunks) + 1)
        while next_chunk is not None:
            # False means an empty chunk: skip it but keep scanning.
            if next_chunk:
                self.chunks.append(next_chunk)
            next_chunk = self._parse_chunk(lines, len(self.chunks) + 1)

    @classmethod
    def _parse_script_header(cls, lines):
        """Consume '--- #dep' / '--- #table' lines at the top of the script.

        Returns:
            (deps, tables): dependency names and (schema, table) pairs.
        """
        deps = []
        tables = []
        line = lines.peek(None)
        while line is not None:
            hm = cls._sep_re.match(line)
            if hm is None:
                break
            inst = hm.group('inst')
            cm = cls._icode_re.match(inst)
            if cm is None:
                next(lines)  # eat line
                # Bug fix: re-peek before continuing — the loop previously
                # kept matching the stale 'line' and consumed the stream.
                line = lines.peek(None)
                continue
            code = cm.group('code')
            args = cm.group('args')
            if code == 'dep':
                deps.append(args)
                next(lines)  # eat line
            elif code == 'table':
                parts = args.split('.', 2)
                if len(parts) > 1:
                    ns, tbl = parts
                    tables.append((ns, tbl))
                else:
                    # no schema qualifier: assume 'public'
                    tables.append(('public', args))
                next(lines)  # eat line
            else:  # any other code, we're out of header
                break
            line = lines.peek(None)
        return deps, tables

    @classmethod
    def _parse_chunk(cls, lines: peekable, n: int):
        """Parse one chunk (header instructions, then query text).

        Returns a ScriptChunk, False for an empty chunk, or None at EOF.
        """
        qlines = []
        chunk = cls._read_header(lines)
        qlines = cls._read_query(lines)
        # end of file, do we have a chunk?
        if qlines:
            if chunk.label is None:
                chunk = chunk._replace(label=f'Step {n}')
            return chunk._replace(src='\n'.join(qlines))
        elif qlines is not None:
            return False  # empty chunk

    @classmethod
    def _read_header(cls, lines: peekable):
        """Read the '--- #code args' instruction lines preceding a chunk.

        Returns a ScriptChunk with src=None; label, allowed error codes and
        transaction mode are filled in from the instructions.
        """
        label = None
        errs = []
        tx = True
        line = lines.peek(None)
        while line is not None:
            hm = cls._sep_re.match(line)
            if hm is None:
                break
            next(lines)  # eat line
            line = lines.peek(None)
            inst = hm.group('inst')
            cm = cls._icode_re.match(inst)
            if cm is None:
                continue
            code = cm.group('code')
            args = cm.group('args')
            if code == 'step':
                label = args
            elif code == 'allow':
                # translate an error-code name into its numeric pgcode
                err = getattr(psycopg2.errorcodes, args.upper())
                _log.debug('step allows error %s (%s)', args, err)
                errs.append(err)
            elif code == 'notx':
                _log.debug('chunk will run outside a transaction')
                tx = False
            else:
                _log.error('unrecognized query instruction %s', code)
                raise ValueError(f'invalid query instruction {code}')
        return ScriptChunk(label=label, allowed_errors=errs, src=None,
                           use_transaction=tx)

    @classmethod
    def _read_query(cls, lines: peekable):
        """Collect query lines up to the next '---' separator or EOF.

        Returns the (trimmed) list of lines, or None at end of file.
        """
        qls = []
        line = lines.peek(None)
        while line is not None and not cls._sep_re.match(line):
            qls.append(next(lines))
            line = lines.peek(None)
        # trim lines
        while qls and not qls[0].strip():
            qls.pop(0)
        while qls and not qls[-1].strip():
            qls.pop(-1)
        if qls or line is not None:
            return qls
        else:
            return None  # end of file

    def execute(self, dbc, transcript=None):
        """
        Execute the SQL script.

        Args:
            dbc: the database connection.
            transcript: a file to receive the run transcript.
        """
        all_st = time.perf_counter()
        for step in self.chunks:
            start = time.perf_counter()
            _log.info('Running ‘%s’', step.label)
            if transcript is not None:
                print('CHUNK', step.label, file=transcript)
            if step.use_transaction:
                # run the whole chunk in one transaction (committed in _run_step)
                with dbc, dbc.cursor() as cur:
                    self._run_step(step, dbc, cur, True, transcript)
            else:
                # some statements (e.g. VACUUM) cannot run inside a transaction
                ac = dbc.autocommit
                try:
                    dbc.autocommit = True
                    with dbc.cursor() as cur:
                        self._run_step(step, dbc, cur, False, transcript)
                finally:
                    dbc.autocommit = ac
            elapsed = time.perf_counter() - start
            elapsed = timedelta(seconds=elapsed)
            # Bug fix: this print was unguarded, so with transcript=None it
            # went to stdout (file=None) instead of being skipped.
            if transcript is not None:
                print('CHUNK ELAPSED', elapsed, file=transcript)
            _log.info('Finished ‘%s’ in %s', step.label, compress_date(elapsed))
        elapsed = time.perf_counter() - all_st
        # Bug fix: this was assigned to a misspelled 'elasped', so the raw
        # float seconds (not a timedelta) reached compress_date below.
        elapsed = timedelta(seconds=elapsed)
        _log.info('Script completed in %s', compress_date(elapsed))

    def describe(self):
        "Log a summary of the script: dependencies, chunks and statements."
        for dep in self.deps:
            _log.info('Dependency ‘%s’', dep)
        for step in self.chunks:
            _log.info('Chunk ‘%s’', step.label)
            for s in step.statements:
                _log.info('Statement %s', describe_statement(s))

    def _run_step(self, step, dbc, cur, commit, transcript):
        """Run all statements of one chunk on cursor *cur*, committing when
        *commit* is set and tolerating the chunk's allowed error codes."""
        try:
            # NOTE: the loop variable was previously named 'sql', shadowing
            # the psycopg2.sql module imported at file level.
            for statement in step.statements:
                start = time.perf_counter()
                _log.debug('Executing %s', describe_statement(statement))
                _log.debug('Query: %s', statement)
                if transcript is not None:
                    print('STMT', describe_statement(statement), file=transcript)
                cur.execute(str(statement))
                elapsed = time.perf_counter() - start
                elapsed = timedelta(seconds=elapsed)
                rows = cur.rowcount
                if transcript is not None:
                    print('ELAPSED', elapsed, file=transcript)
                if rows is not None and rows >= 0:
                    if transcript is not None:
                        print('ROWS', rows, file=transcript)
                    _log.info('finished %s in %s (%d rows)', describe_statement(statement),
                              compress_date(elapsed), rows)
                else:
                    # row count unknown (e.g. DDL): don't report a bogus -1
                    _log.info('finished %s in %s', describe_statement(statement),
                              compress_date(elapsed))
            if commit:
                dbc.commit()
        except psycopg2.Error as e:
            if e.pgcode in step.allowed_errors:
                _log.info('Failed with acceptable error %s (%s)',
                          e.pgcode, psycopg2.errorcodes.lookup(e.pgcode))
                if transcript is not None:
                    print('ERROR', e.pgcode, psycopg2.errorcodes.lookup(e.pgcode), file=transcript)
            else:
                _log.error('Error in "%s" %s: %s: %s',
                           step.label, describe_statement(statement),
                           psycopg2.errorcodes.lookup(e.pgcode), e)
                if e.pgerror:
                    _log.info('Query diagnostics:\n%s', e.pgerror)
                raise e
class _LoadThread(threading.Thread):
    """
    Thread worker for copying database results to a stream we can read.
    """
    def __init__(self, dbc, query, dir='out'):
        """
        Args:
            dbc: the psycopg2 connection to run the COPY on.
            query: the COPY query to execute.
            dir: 'out' when the thread produces data (COPY TO STDOUT),
                anything else when it consumes data (COPY FROM STDIN).
        """
        super().__init__()
        self.database = dbc
        self.query = query
        # A pipe connects the COPY operation to the consumer/producer.
        rfd, wfd = os.pipe()
        self.reader = os.fdopen(rfd)
        self.writer = os.fdopen(wfd, 'w')
        # The end of the pipe this thread itself uses (and closes on exit).
        self.chan = self.writer if dir == 'out' else self.reader
    def run(self):
        # copy_expert streams COPY data through our end of the pipe; closing
        # self.chan when done signals EOF to the other side.
        with self.chan, self.database.cursor() as cur:
            cur.copy_expert(self.query, self.chan)
def load_table(dbc, query, **kwargs):
    """
    Load a query into a Pandas data frame.
    This is substantially more efficient than Pandas ``read_sql``, because it directly
    streams CSV data from the database instead of going through SQLAlchemy.

    Args:
        dbc: the psycopg2 connection to read from.
        query: SQL query text, wrapped in COPY ... TO STDOUT WITH CSV HEADER.
        kwargs: passed through to pandas.read_csv.
    """
    cq = sql.SQL('COPY ({}) TO STDOUT WITH CSV HEADER')
    q = sql.SQL(query)
    # Worker thread writes CSV into a pipe while pandas reads from it.
    thread = _LoadThread(dbc, cq.format(q))
    thread.start()
    data = pd.read_csv(thread.reader, **kwargs)
    thread.join()
    return data
def save_table(dbc, table, data: pd.DataFrame):
    """
    Save a table from a Pandas data frame.
    This is substantially more efficient than Pandas ``read_sql``, because it directly
    streams CSV data from the database instead of going through SQLAlchemy.

    Args:
        dbc: the psycopg2 connection to write through.
        table: the target table, as a psycopg2.sql composable (it is passed
            to sql.SQL.format, not to a plain string substitution).
        data: the frame to stream in (written without header or index).
    """
    cq = sql.SQL('COPY {} FROM STDIN WITH CSV')
    # Worker thread feeds our CSV from the pipe into COPY FROM STDIN.
    thread = _LoadThread(dbc, cq.format(table), 'in')
    thread.start()
    data.to_csv(thread.writer, header=False, index=False)
    # Close the write end so the COPY sees EOF, then wait for it to finish.
    thread.writer.close()
    thread.join()
|
import numpy as np
from scipy.optimize import linprog
import matplotlib.pyplot as plt
def nash_equilibrium(A):
    """Solve a zero-sum matrix game with payoff matrix A.

    Returns (p, q, v): the players' strategies and the game value.
    NOTE(review): A is shifted in place (A += min_value), so the caller's
    array is modified — confirm callers do not reuse A afterwards.
    """
    # Magnitude of the matrix minimum, plus one
    min_value = abs(np.amin(A)) + 1
    # Shift the matrix so every payoff is positive
    A += min_value
    # Constraint vectors for the two linear programs
    z = np.ones(A.shape[0])
    b_1 = -np.ones(A.shape[1])
    w = -np.ones(A.shape[1])
    b_2 = np.ones(A.shape[0])
    # Check for a pure-strategy (saddle point) Nash equilibrium first
    nash_i,nash_j = nash_equilibrium_point(A, min_value)
    if(nash_i != 0):
        # Unique pure equilibrium found: return indicator strategies
        p = np.zeros(A.shape[0])
        p[nash_i - 1] = 1
        q = np.zeros(A.shape[1])
        q[nash_j - 1] = 1
        v = A[nash_i - 1, nash_j - 1] - min_value
        return p,q,v
    # Otherwise find an optimal mixed strategy via linear programming
    # Player 1
    res = linprog(w, A, b_2, options = {"disp": True})
    q = res.get("x")
    # Player 2 (solved on the negated transpose)
    A = -np.transpose(A)
    res = linprog(z, A, b_1, options = {"disp": True})
    p = res.get("x")
    # Compute the game value and normalize the strategies
    v = 1 / np.sum(p)
    p *= v
    q *= v
    # Undo the positivity shift applied above
    v = v - min_value
    print("p: ", p)
    print("q: ", q)
    print("v: ", v)
    return p, q, v
def nash_equilibrium_point(A, min_value):
    """Search for pure-strategy (saddle point) equilibria of matrix A.

    A cell is marked when it is both a minimum of its row and a maximum of
    its column. Returns the 1-based (i, j) of the unique equilibrium, or
    (0, 0) when there is none or when several exist.
    """
    # Mark row minima (1), then add column maxima (1); a cell scoring 2
    # satisfies both conditions.
    max_nash = np.zeros((len(A),len(A[0])))
    for i, row in enumerate(A):
        max_nash[i] = np.array(row == row.min())
    for j, column in enumerate(A.T):
        max_nash[:, j] += np.array(column == column.max())
    sum = 0  # NOTE(review): shadows the builtin 'sum' within this function
    print ("Ситуации равновесия по Нэшу")
    for (i, j), value in np.ndenumerate(max_nash):
        if value == 2:
            sum += 1
            nash_i = i + 1
            nash_j = j + 1
            # Report each equilibrium with its (unshifted) game value
            print("Точка равновесия #", sum)
            print("v: ", A[i, j] - min_value)
            print("Стратегия первого: ", i + 1)
            print("Стратегия второго: ", j + 1)
    if(sum == 0):
        print("Таких точек нет")
        return 0, 0
    elif(sum == 1):
        return nash_i, nash_j
    else:
        print("Таких точек несколько")
        return 0, 0
def draw(p, q):
    """Plot the two mixed strategies *p* and *q* as stem plots, one figure
    each, with shared axis limits derived from the longer strategy and the
    larger probability value."""
    x = max(len(p), len(q))
    y = max(max(p), max(q))
    def _plot_strategy(values):
        # One blue point plus a vertical stem per strategy component.
        plt.xlim([0, x + x / 4])
        plt.ylim([0, y + y / 4])
        for i, value in enumerate(values):
            plt.scatter(1.0 * i + 1, value, color = 'blue')
            plt.plot([1.0 * i + 1, 1.0 * i + 1], [0.0, value], color = 'blue')
        plt.grid(True)  # auxiliary grid lines
        plt.show()
    # The original duplicated this plotting loop verbatim for p and q.
    _plot_strategy(p)
    _plot_strategy(q)
# CLI driver: read matrix A from stdin, solve the game, plot the strategies.
print("Введите число строк в матрице А:")
n = int(input())
A = []
print("Введите матрицу А по строкам")
for i in range(n):
    # Parse one whitespace-separated row of floats. (The original reused
    # the outer loop variable 'i' for the inner conversion loop.)
    A.append([float(token) for token in input().split()])
A = np.array(A)
p,q,v = nash_equilibrium(A)
draw(p, q)
class Solution:
    def getFolderNames(self, names):
        """Return the names actually assigned when creating folders in order.

        A requested name that is already taken receives the smallest suffix
        '(k)' (k >= 1) not yet assigned; assigned names are tracked so later
        duplicates keep counting upward from where the search left off.
        """
        from collections import defaultdict
        times_assigned = defaultdict(int)
        result = []
        for name in names:
            if times_assigned[name] == 0:
                # First time this exact name is requested: keep it as-is.
                result.append(name)
                times_assigned[name] += 1
            else:
                # Probe name(k), name(k+1), ... until an unused variant appears.
                k = times_assigned[name]
                while times_assigned[name + '(' + str(k) + ')'] > 0:
                    k += 1
                # Remember where the search stopped for the next duplicate.
                times_assigned[name] = k
                variant = name + '(' + str(k) + ')'
                result.append(variant)
                times_assigned[variant] += 1
        return result
if __name__ == "__main__":
    # One shared instance is enough; the per-call state is local to
    # getFolderNames (the original re-created Solution() before every call).
    sol = Solution()

    # Input:
    names = ["kaido","kaido(1)","kaido","kaido(1)"]
    # kaido -> kaido
    # kaido(1) -> kaido(1)
    # kaido -> kaido(1) -> kaido(2)
    # kaido(1)(1) -> kaido(1)(1)
    # memo = {"kaido":1, "kaido(1)":1} res = [kaido, kaido(1)]
    # cur kaido(1), res = [kaido, kaido(1), kaido(2)]
    # kaido(1)(1) res = [kaido, kaido(1), kaido(2), kaido(1)(1)]
    print(sol.getFolderNames(names))

    # Input:
    names = ["pes","fifa","gta","pes(2019)"]
    # Output: ["pes","fifa","gta","pes(2019)"]
    print(sol.getFolderNames(names))

    # Input:
    names = ["gta","gta(1)","gta","avalon"]
    # Output: ["gta","gta(1)","gta(2)","avalon"]
    # (Bug fix: this line was a bare `Output: [...]` annotation statement —
    # a typo for a comment — which built and discarded the list at runtime.)
    print(sol.getFolderNames(names))

    # Input:
    names = ["onepiece","onepiece(1)","onepiece(2)","onepiece(3)","onepiece"]
    # memo = {"onepiece":4, "onepiece(1)": 1, "onepiece(2)":1, "onepiece(3)":1 }
    # onepiece -> onepiece(1) -> onepiece(2) -> onepiece(3) -> onepiece(4)
    # Output: ["onepiece","onepiece(1)","onepiece(2)","onepiece(3)","onepiece(4)"]
    print(sol.getFolderNames(names))

    # Input:
    names = ["wano","wano","wano","wano"]
    # Output: ["wano","wano(1)","wano(2)","wano(3)"]
    print(sol.getFolderNames(names))

    # Input:
    names = ["kaido","kaido(1)","kaido","kaido(1)"]
    print(sol.getFolderNames(names))
# #-*- coding: utf-8 -*-
# from __future__ import unicode_literals
# from django.db import models
# from django.contrib.sites.models import *
# from django.utils.translation import ugettext, ugettext_lazy as _
# from settings import MEDIA_ROOT
# from mezzanine.pages.models import Page
# from mezzanine.core.models import RichText
# from mezzanine.core.fields import RichTextField, FileField
# from mezzanine.utils.models import upload_to
# from mezzanine.utils.sites import current_site_id, current_request
# class Category(Page):
# order = models.IntegerField(default=0, verbose_name='ordre de priorité')
# illustration = FileField(verbose_name=_("Illustration"), upload_to=upload_to("MAIN.Category.illustration", "category"),format="Image", max_length=255, null=True, blank=True)
# logo_parrain = FileField(verbose_name=_("Logo Parrain"), upload_to=upload_to("MAIN.Category.logo_parrain", "logo_parrain"),format="Image", max_length=255, null=True, blank=True)
# presentation_parrain =RichTextField(_("Présentation parrain"),blank=True)
# def save(self, *args, **kwargs):
# self.in_menus = []
# try:
# self.parent = Page.objects.get(title='CATEGORIES')
# except:
# pass
# super(Category, self).save(*args, **kwargs)
# class Product(Page):
# """
# title = company name
# richText = description
# """
# cat_product = models.ForeignKey('Category',null=True,blank=True, verbose_name='category for product')
# baseline = models.CharField(max_length=255,blank=True)
# productName = models.CharField(max_length=255)
# presentation_product = RichTextField(_("Content"),blank=True)
# town = models.CharField(max_length=255,blank=True)
# price = models.CharField(max_length=255,blank=True)
# discount = models.CharField(max_length=255,blank=True)
# illustration = FileField(verbose_name=_("illustration principale du produit"),
# upload_to=upload_to("MAIN.Product.illustration", "illustration"),
# format="Image", max_length=255, null=True, blank=True)
# illustration2 = FileField(upload_to=upload_to("MAIN.Product.illustration", "illustration 2"),
# format="Image", max_length=255, null=True, blank=True)
# illustration3 = FileField(upload_to=upload_to("MAIN.Product.illustration", "illustration 3"),
# format="Image", max_length=255, null=True, blank=True)
# illustration4 = FileField(upload_to=upload_to("MAIN.Product.illustration", "illustration 4"),
# format="Image", max_length=255, null=True, blank=True)
# presentation_sup = RichTextField(_("Presentation Start-up"),blank=True)
# logo = FileField(verbose_name=_("logo"),
# upload_to=upload_to("MAIN.Product.illustration", "logo Start-Up"),
# format="Image", max_length=255, null=True, blank=True)
# team_pic = FileField(upload_to=upload_to("MAIN.Product.illustration", "photo Team "),format="Image", max_length=255, null=True, blank=True)
# mainLink = models.URLField(null=True,blank=False)
# website = models.URLField(null=True,blank=True)
# facebook = models.URLField(null=True,blank=True)
# twitter = models.URLField(null=True,blank=True)
# instagram = models.URLField(null=True,blank=True)
# class Meta:
# verbose_name='PRODUIT'
# ordering = ['title']
# def save(self, *args, **kwargs):
# self.in_menus = []
# try:
# self.parent = Page.objects.get(title='PRODUITS')
# except:
# pass
# super(Product, self).save(*args, **kwargs)
|
# IMPORTS
import datetime
import subprocess
import sys
import os
# import time
from datetime import datetime
from genDipoles import buildSphere
from numpy import *
# Init formatted output file
# Opened at import time and shared by adda_run(); never explicitly closed.
file_output = open('outputExcelParsed.txt', 'w')
# Execute command and yield output as it is received
def execute(cmd):
    """Run *cmd* and yield its stdout line by line as it is produced.

    Raises:
        subprocess.CalledProcessError: if the process exits non-zero
            (checked only after all output has been consumed).
    """
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
    # iter(..., "") stops at EOF, when readline returns the empty string.
    yield from iter(proc.stdout.readline, "")
    proc.stdout.close()
    rc = proc.wait()
    if rc:
        raise subprocess.CalledProcessError(rc, cmd)
def gen_sphere(dpl, grid, x, y, z):
    """Thin wrapper around genDipoles.buildSphere: return the dipole list for
    a sphere with dipole size *dpl* on *grid*, centered at (x, y, z)."""
    return buildSphere(dpl, grid, x, y, z)
def write_dipoles(dipoles, filename, new_file=True):
    """Write dipole coordinates to *filename*, one 'x y z' line per dipole.

    Args:
        dipoles: iterable of (x, y, z) triples; values are truncated to int.
        filename: output path; the file is always (re)created.
        new_file: unused; kept for backward compatibility with callers.
    """
    # 'with' guarantees the handle is closed even if a write fails
    # (the original opened with 'w+' and left the file open on error).
    with open(filename, 'w') as f:
        for d in dipoles:
            f.write(str(int(d[0])) + ' ' + str(int(d[1])) + ' ' + str(int(d[2])) + '\n')
# Run ADDA and grab output as it runs
def adda_run(monomers, grid='30', dplSize='10', filename='./output/runs/outputDipoleXYZ.txt', lmbda='0.55', mode='seq'):
    """Build the dipole set for *monomers*, run ADDA on it, and append a
    tab-separated summary line to the module-level file_output.

    Args:
        monomers: list of (x, y, z) sphere centers.
        grid: grid size (string; used in file names and passed to buildSphere).
        dplSize: dipole size (string; passed to ADDA's '-dpl').
        filename: dipole file handed to ADDA's '-shape read'.
        lmbda: wavelength for ADDA's '-lambda'.
        mode: which ADDA build to invoke (e.g. 'seq').
    """
    dipoles = []
    print(f'grid: {grid} \tdplSize: {dplSize} \tlmbda: {lmbda} mode: {mode}')
    # Union of the dipoles of all monomer spheres (duplicates removed).
    for m in monomers:
        # print('monomer: {}'.format(m))
        dipoles = list(set(dipoles + buildSphere(dplSize, grid, m[0], m[1], m[2])))
    write_dipoles(dipoles, './output/runs/dipole_output_grid' + grid + '_dplsize' + dplSize + '.txt')
    # Run ADDA
    cmdRunADDA = ['code/adda/src/' + mode + '/adda', '-m', '1.85', '0.71', '-lambda', lmbda, '-shape', 'read', filename, '-dpl', dplSize, '-dir', 'output/runs/dipole_output_grid' + grid + '_dplsize' + dplSize]
    # s1 collects run metadata fields; s2 collects the optical quantities.
    s1 = ''
    s2 = ''
    t0 = -1
    # Scrape fields of interest from ADDA's streamed output.
    for output in execute(cmdRunADDA):
        # if t0 == -1:
        #     t0 = time.time()
        # Push output to console
        print(output, end="")
        # Run #
        if ('all data is saved in' in output):
            value = output[25:28]
            s1 += value + '\t'
        # Date + ADDA memory usage
        if ('Total memory usage' in output):
            value = output.split()[-2:]
            s1 += datetime.now().strftime("%m/%d/%Y") + '\t'
            s1 += value[0] + '\t'
        # Num dipoles
        if ('Total number of occupied dipoles' in output):
            value = output.split()[-1:]
            s1 += value[0] + '\t'
        # ADDA light data
        if ('Cext' in output or 'Qext' in output or 'Cabs' in output or 'Qabs' in output):
            value = output.split()[-1:]
            s2 += value[0] + '\t'
    # Time to execute
    # t1 = time.time()
    s1 += '\t' # str(t1 - t0)[:9] + '\t'
    # Write to file
    file_output.write(s1 + '\t' + s2 + '\n')
def readExcelInput(filename='./input/inputExcelRuns.txt'):
    """Read alternating (dplSize, grid) string pairs from a run file.

    The file is whitespace-separated; the first two tokens are headers and
    are skipped. Returns (dplSize_list, grid_list), both lists of strings.
    """
    # 'with' replaces the manual try/finally close of the original.
    with open(filename) as f:
        tokens = f.read().split()
    dplSize = []
    grid = []
    for i in range(2, len(tokens), 2):
        dplSize.append(tokens[i])
        grid.append(tokens[i + 1])
    return dplSize, grid
# Parse FracMAP output
def operate_shift(monomers, radius):
    """Translate the monomer centers so every coordinate is at least *radius*.

    Mutates the [x, y, z] lists in place (rounding each coordinate to an
    int) and returns the same monomers list.
    """
    # Per-axis minimum over all centers, capped at radius.
    mins = [radius, radius, radius]
    for center in monomers:
        for axis in range(3):
            if center[axis] < mins[axis]:
                mins[axis] = center[axis]
    # Shift each axis so its minimum lands on radius.
    for center in monomers:
        for axis in range(3):
            center[axis] = round(center[axis] - mins[axis] + radius)
    return monomers
# Read fracmap output file
def read_fracmap(filename):
    """Parse a FracMAP output file into (monomer_radius, monomers).

    The radius comes from the 'a:' line; monomer centers are read from the
    lines following the 'X Y Z' header up to the first blank line. Returns
    None (after printing an error) when the file cannot be read.
    """
    print('Reading fracmap output from file: {}'.format(filename))
    # Bug fix: open() previously sat outside the try block, so a missing
    # file crashed instead of reaching the error message below; the bare
    # 'except' is also narrowed to OSError.
    try:
        with open(filename) as f:
            text = f.read()
    except OSError:
        print('ERROR: Could not open fracmap output file')
        return
    text = text.split('\n')
    read_centers = False
    monomers = []
    monomer_radius = 0
    for line in text:
        if not read_centers and 'a:' in line:
            # e.g. 'a: 0.5' -> monomer radius
            monomer_radius = float(line.split()[1])
        elif read_centers and len(line) == 0:
            # a blank line terminates the coordinate table
            break
        if read_centers:
            x,y,z = line.split()
            monomers.append([float(x),float(y),float(z)])
        if line == 'X Y Z':
            # subsequent lines hold the monomer centers
            read_centers = True
    return monomer_radius, monomers
# usage: 1 2 3 4 5
# <monomer_input_filepath> <dpl_size> <grid> <lambda> <mode>
# string double int int string
def main():
    """Entry point: read run parameters from argv, load FracMAP monomers,
    shift them into positive coordinates, and launch one ADDA run.

    argv layout: 1=monomer_input_filepath, 2=dpl_size, 3=grid, 4=lambda, 5=mode.
    """
    # Read input parameters
    #dplSize, grid = readExcelInput()
    dplSize = sys.argv[2]
    grid = sys.argv[3]
    lmbda = sys.argv[4]
    mode = sys.argv[5]
    print('dplSize: {}\ngrid: {}'.format(dplSize, grid))
    # Read FracMAP input
    monomers = []
    monomer_radius = 0
    #if len(sys.argv) > 1:
    monomer_radius, monomers = read_fracmap(sys.argv[1])
    # Shift monomers to lose any negative values
    monomers = operate_shift(monomers, monomer_radius)
    # Iterate over run parameters - TODO: find way to work this in with new flow
    #for k in range(len(dplSize)):
    adda_run(monomers,
             grid,
             dplSize,
             './output/runs/dipole_output_grid' + str(grid) + '_dplsize' + str(dplSize) + '.txt',
             lmbda,
             mode)
# Runs unconditionally at import time (no __main__ guard).
main()
|
#!/usr/bin/env
# coding: utf-8
# Open Issue: Class cant run as thread: https://github.com/r9y9/pylibfreenect2/issues/25
# Based on: https://github.com/r9y9/pylibfreenect2/blob/master/examples/multiframe_listener.py
LIBFREENECT2_LIBRARY_PATH = '/home/vigitia/freenect2/lib/libfreenect2.so'
import numpy as np
import cv2
import sys
import threading
from ctypes import *
lib = cdll.LoadLibrary(LIBFREENECT2_LIBRARY_PATH)
from pylibfreenect2 import Freenect2, SyncMultiFrameListener
from pylibfreenect2 import FrameType, Registration, Frame
from pylibfreenect2 import createConsoleLogger, setGlobalLogger
from pylibfreenect2 import LoggerLevel
try:
from pylibfreenect2 import OpenGLPacketPipeline
#pipeline = OpenGLPacketPipeline()
except:
try:
from pylibfreenect2 import OpenCLPacketPipeline
#pipeline = OpenCLPacketPipeline()
except:
from pylibfreenect2 import CpuPacketPipeline
#pipeline = CpuPacketPipeline()
# Number of frames to skip while the sensor warms up (unused in this file).
FRAMES_TO_WAIT_WARM_UP = 100
# When True, every captured stream is shown in an OpenCV debug window.
DEBUG_MODE = True
class KinectV2:
    """Captures color/IR/depth frames from a Kinect v2 via pylibfreenect2
    and, in DEBUG_MODE, displays the streams with OpenCV."""
    # Class-level state. The frame caches below are only filled by the
    # commented-out locking code in loop(), so they stay None as written.
    need_color_depth_map = False
    current_frame = 0
    color_frame = None
    depth_frame = None
    ir_frame = None
    registered_frame = None
    bigdepth_frame = None
    def __init__(self):
        """Open the first Kinect device, start streaming, then run the
        capture loop (see NOTE below about the blocking call)."""
        # Create and set logger
        logger = createConsoleLogger(LoggerLevel.Debug)
        setGlobalLogger(logger)
        fn = Freenect2()
        num_devices = fn.enumerateDevices()
        if num_devices == 0:
            print("No device connected!")
            sys.exit(1)
        serial = fn.getDeviceSerialNumber(0)
        self.pipeline = OpenGLPacketPipeline()
        self.device = fn.openDevice(serial, pipeline=self.pipeline)
        # A single listener receives all three streams.
        self.listener = SyncMultiFrameListener(FrameType.Color | FrameType.Ir | FrameType.Depth)
        # Register listeners
        self.device.setColorFrameListener(self.listener)
        self.device.setIrAndDepthFrameListener(self.listener)
        self.device.start()
        # NOTE: must be called after device.start()
        self.registration = Registration(self.device.getIrCameraParams(),
                                         self.device.getColorCameraParams())
        # Working buffers for the registration step.
        self.undistorted = Frame(512, 424, 4)
        self.registered = Frame(512, 424, 4)
        self.bigdepth = Frame(1920, 1082, 4)
        self.color_depth_map = np.zeros((424, 512), np.int32).ravel() if self.need_color_depth_map else None
        #self.started = False
        #self.read_lock = threading.Lock()
        self.started = True
        print('Finished init in kinect')
        # NOTE(review): calling loop() here blocks the constructor until the
        # loop exits, and started=True makes start() a no-op afterwards —
        # confirm whether the threaded start() path was meant to be used.
        self.loop()
    def get_listener(self):
        """Return the SyncMultiFrameListener shared by all streams."""
        return self.listener
    def start(self):
        """Run loop() in a background thread; no-op if already running."""
        if self.started:
            print('Already running')
            return None
        else:
            print('stared kinect thread')
            self.started = True
            self.thread = threading.Thread(target=self.loop, args=())
            #self.thread.daemon = True
            self.thread.start()
            return self
    def loop(self):
        """Capture loop: wait for frames, register depth to color, and (in
        DEBUG_MODE) show the streams until 'q' is pressed or started is
        cleared; stops and closes the device on exit."""
        print('in kinect update')
        while self.started:
            self.current_frame += 1
            print('Frame:', self.current_frame)
            frames = self.listener.waitForNewFrame(1000)
            print('Frames arrived')
            color = frames["color"]
            ir = frames["ir"]
            depth = frames["depth"]
            # depth/10 -> cm, i.e. raw depth values are assumed millimetres.
            print('Distance measured at center: {} cm'.format(int(depth.asarray()[212][256] / 10)))
            self.registration.apply(color, depth, self.undistorted, self.registered, bigdepth=self.bigdepth,
                                    color_depth_map=self.color_depth_map)
            #print(depth.asarray()[200, 200])
            #print(self.bigdepth.asarray(np.float32)[200, 200])
            # with self.read_lock:
            #     self.color_frame = color.asarray()
            #     self.depth_frame = depth.asarray() / 4500.
            #     self.ir_frame = ir.asarray() / 65535.
            #     self.registered_frame = self.registered.asarray(np.uint8)
            #     self.bigdepth_frame = self.bigdepth.asarray(np.float32)
            if DEBUG_MODE:
                cv2.imshow("kinectv2_ir.png", ir.asarray() / 65535.)
                # cv2.imshow("kinectv2_depth.png", depth.asarray() / 4500.)
                # Keep only the depth band between the two distances below.
                table_min_dist_mm = 0
                table_max_dist_mm = 2000
                depth_filtered = cv2.inRange(depth.asarray(), np.array([table_min_dist_mm], dtype="uint16"),
                                             np.array([table_max_dist_mm], dtype="uint16"))
                cv2.imshow("kinectv2_depth.png", depth_filtered)
                cv2.imshow("kinectv2_color.png", color.asarray())
                cv2.imshow("kinectv2_registered.png", self.registered.asarray(np.uint8))
                cv2.imshow("kinectv2_bigdepth.png", self.bigdepth.asarray(np.float32))
                if self.need_color_depth_map:
                    cv2.imshow("kinectv2_color_depth_map.png", self.color_depth_map)
                key = cv2.waitKey(delay=1)
                if key == ord('q'):
                    break
            self.listener.release(frames)
        self.device.stop()
        self.device.close()
    def get_frames(self):
        """Return the cached frame tuple under the read lock.

        NOTE(review): self.read_lock is never created (its assignment in
        __init__ is commented out), so calling this raises AttributeError —
        confirm before relying on this accessor.
        """
        with self.read_lock:
            return self.color_frame, self.depth_frame, self.ir_frame, self.registered_frame, self.bigdepth_frame
    def stop(self):
        """Stop the capture loop and join the worker thread."""
        self.started = False
        self.thread.join()
    def __exit__(self, exec_type, exc_value, traceback):
        # NOTE(review): assumes the packet pipeline exposes stop() — verify.
        self.pipeline.stop()
# Instantiate at import time; note that __init__ runs the capture loop
# itself, so control only reaches start() after the loop exits.
kinect = KinectV2()
kinect.start()
# if __name__ == '__main__':
#     KinectV2()
|
# 什么叫变量
"""
变量就是一个存储数据的时候当前数据所在的内存地址的名字
num1 = 10
目的:为了快速找到数据
"""
# 定义变量
"""
变量名 = 值 (赋值,程序是先计算等号右边的数值,然后把值赋值给变量里)
有命名规则
:数字,字母,下划线组成
:不能数字开头
:不能使用内置关键字 共计33个关键字
严格区分大小写
:A != a
"""
# 命名习惯
"""
见名知意
大驼峰:MyName(首个英文字母大写)
小驼峰:myName(第二个含以后的单词首字母大写)
下划线: my_name
"""
# 使用变量
"""
1.定义变量 语法:变量名 = 值
2.使用变量
3.看变量的特点
"""
# Define a variable storing the name "Tom".
MyName = 'TOM'
print(MyName)
# Define a variable storing the school name ("itheima programmer").
school_name = '黑马程序员'
print(school_name)
|
import pymysql
from util.myutil import release
class Dbutil:
    """MySQL helper for the blog app: user auth, registration and listing."""

    def __init__(self, **kwargs):
        """Open a pymysql connection and keep a cursor on the instance.

        Keyword overrides: host, port, user, password, database, charset.
        NOTE(review): pymysql.connect raises on failure rather than
        returning a falsy value, so the else-branch below is defensive.
        """
        host = kwargs.get('host', 'localhost')
        port = kwargs.get('port', 3306)
        user = kwargs.get('user', 'root')
        password = kwargs.get('password', '123456')
        database = kwargs.get('database', 'blog_db')
        charset = kwargs.get('charset', 'utf8')
        connection = pymysql.connect(host=host,
                                     port=port,
                                     user=user,
                                     password=password,
                                     database=database,
                                     charset=charset)
        if connection:
            self.cursor = connection.cursor()
        else:
            raise Exception('数据库连接参数有误!')

    def sucess(self, uname, upwd):
        """Return True when a tb_user row matches both name and password.

        NOTE(review): method name kept as-is ('sucess') so existing
        callers keep working.
        """
        sql = 'select count(*) from tb_user WHERE user_name=%s and ' \
              'user_password=%s'
        self.cursor.execute(sql, (uname, upwd))
        result = self.cursor.fetchone()
        return bool(result[0])

    def saveuser(self, uname, upwd, city, avatar):
        """Insert a new user row; raise Exception('duplicate') for a
        duplicate user name, Exception('error') for any other DB error."""
        sql = 'insert into tb_user' \
              '(user_name, user_password, user_avatar, user_city)' \
              'VALUES (%s,%s,%s,%s)'
        params = (uname, upwd, avatar, city)
        try:
            self.cursor.execute(sql, params)
            self.cursor.connection.commit()
        except Exception as e:
            # MySQL errors stringify as "(errno, 'message')".
            err = str(e)
            code = err.split(',')[0].split('(')[-1]
            # BUG FIX: previously ANY non-empty code was reported as
            # 'duplicate'; only errno 1062 (ER_DUP_ENTRY) means that.
            r = 'duplicate' if code == '1062' else 'error'
            raise Exception(r)

    def getblogs(self):
        """Return all blogs joined with author, tags and comment count,
        as a list of dicts for the template layer."""
        sql = '''
                select user_name,blog_title,blog_content,user_avatar,tc,c
                from (select comment_blog_id,count(*)c
                    from tb_comment
                    group by comment_blog_id)t3
                right join(select user_name,user_avatar,blog_id,blog_title,blog_content,tc
                    from tb_user
                    join(select blog_id,blog_title,blog_content,tc,blog_user_id
                        from tb_blog
                        left join(select rel_blog_id, group_concat(tag_content)tc
                            from tb_tag
                            join( select rel_blog_id,rel_tag_id
                                from tb_blog_tag )t
                            on tag_id = rel_tag_id
                            group by rel_blog_id )t1
                        on blog_id = rel_blog_id)t2
                    on user_id = blog_user_id)t4
                on comment_blog_id = blog_id;
        '''
        self.cursor.execute(sql)
        rows = self.cursor.fetchall()
        blogs = []
        for row in rows:
            blogs.append({
                'author': row[0],
                'title': row[1],
                'cotent': row[2],  # key spelling kept: templates read 'cotent'
                'avatar': row[3],
                'tags': row[4],
                'count': row[5],
            })
        return blogs

    def isexists(self, uname):
        """Return True when a user with *uname* already exists."""
        sql = 'select count(*) from tb_user WHERE user_name=%s'
        self.cursor.execute(sql, (uname,))
        result = self.cursor.fetchone()
        return bool(result[0])

    def judgeimg(self, uname):
        """Return the user's avatar file name, or the default avatar when
        the user is missing or has no avatar set."""
        sql = 'select user_avatar from tb_user WHERE user_name=%s'
        self.cursor.execute(sql, (uname,))
        result = self.cursor.fetchone()
        if result and result[0]:
            return result[0]
        return 'default_avatar.png'
from astropy.io import fits
from astropy.wcs import WCS
from regions import write_ds9
from regions import PixCoord, LinePixelRegion
from multiprocessing import Pool
import matplotlib.pyplot as plt
from glob import glob
import numpy as np
import pandas as pd
import os
from pathlib import Path
def block_array(arr, nrows, ncols):
    """Split a 2-D array into equal (nrows x ncols) tiles, row-major.

    Returns an array of shape (h*w // (nrows*ncols), nrows, ncols).
    Thank you unutbu @ stack overflow.
    """
    h, w = arr.shape
    tiles = arr.reshape(h // nrows, nrows, -1, ncols)
    return tiles.swapaxes(1, 2).reshape(-1, nrows, ncols)
def plot_cut(cut, field_name, cut_idx):
    """Display a 128x128 cut with a red 32-px grid overlay for inspection.

    vmin/vmax 0..3.5 match the display range used elsewhere in this script.
    """
    title = "{}_{}".format(field_name, cut_idx)
    plt.figure(figsize=(10,10))
    plt.title(title)
    # flipud so the displayed orientation matches the source image
    # convention — presumably FITS row order; verify against the regions.
    plt.imshow(np.flipud(cut), cmap='gray', vmin=0, vmax=3.5)
    ax = plt.gca()
    ax.set_xticks(np.arange(0, 128, 32))
    ax.set_yticks(np.arange(0, 128, 32))
    plt.grid(color='r', linewidth='4')
    plt.show()
def load_print_cut(filename):
    """Load a saved cut stack (16 tiles of 32x32 float32) from a memmap
    file and show them in a 4x4 grid."""
    echo = np.memmap(filename, dtype='float32', mode='r', shape=(16, 32, 32))
    for idx, cut in enumerate(echo):
        plt.subplot(4,4,idx+1)
        plt.imshow(cut, cmap='gray', vmin=0, vmax=3.5)
    plt.show()
def cut_echo(data, midpoint):
    """Cut a 128x128 box (shifted by +16 px on both axes) around *midpoint*
    and split it into 32x32 training tiles.

    Returns (tiles, boundary) where boundary is (x1, x2, y1, y2) after the
    offset has been applied.
    """
    size = 128   # box edge length around the region midpoint
    offset = 16  # shift the cuts so exact centering doesn't split the echo oddly
    # Bounding box (int() applied to the whole expression, as before, so
    # float midpoints truncate identically).
    x1 = int(midpoint[0] - (size / 2))
    x2 = int(midpoint[0] + (size / 2))
    y1 = int(midpoint[1] - (size / 2))
    y2 = int(midpoint[1] + (size / 2))
    # Isolate the echo and tile it for training.
    echo = data[x1 + offset:x2 + offset, y1 + offset:y2 + offset]
    tiles = block_array(echo, 32, 32)
    boundary = (x1 + offset, x2 + offset, y1 + offset, y2 + offset)
    return tiles, boundary
def write_cut_region(boundaries, write_path, wcs):
    """Write a DS9 region file of grid lines marking the 32-px tile
    boundaries of a cut.

    NOTE(review): *boundaries* arrives as (x1, x2, y1, y2) from cut_echo
    (numpy first-axis first) but is unpacked here as Y1, Y2, X1, X2 — i.e.
    the array's first axis becomes the image y axis.  Confirm this swap is
    intentional.
    """
    Y1, Y2, X1, X2 = boundaries
    region_lines = []
    # Draw x lines
    for x_coord in np.arange(X1, X2+1, 32):
        line = LinePixelRegion(start=PixCoord(x=x_coord, y=Y1), end=PixCoord(x=x_coord, y=Y2))
        region_lines.append(line.to_sky(wcs))
    # Draw y lines
    for y_coord in np.arange(Y1, Y2+1, 32):
        line = LinePixelRegion(start=PixCoord(x=X1, y=y_coord), end=PixCoord(x=X2, y=y_coord))
        region_lines.append(line.to_sky(wcs))
    region_file = "{}.reg".format(write_path)
    write_ds9(region_lines, region_file)
def process_file(region_file, path, results_path):
    """Process one region file: cut every 'definitely' echo out of each
    subtraction FITS image for the field, writing tile stacks, DS9 grid
    regions and empty label files into a per-field results folder.

    Returns (number of definite echoes, number of subtraction images);
    (0, 0) when the file contains no definite hits.
    """
    regions = pd.read_csv(region_file, sep=' ', header=None, index_col=False,
                          names=['RA1', 'Dec1', 'RA2', 'Dec2', 'status'], skiprows=[0])
    # Only evaluate files with echoes marked as definite hits
    mask = regions['status'] == "definitely"
    definite = regions.loc[mask]
    if len(definite) == 0:
        return 0, 0
    # Get the field name and make a results sub-folder for that field.
    # BUG FIX: a stray trailing backslash glued this assignment onto the
    # following `if` statement, which is a syntax error.
    # NOTE(review): the [3] path component assumes a fixed directory depth.
    field = region_file.split("/")[3].split(".")[0]
    if not os.path.exists("{}/{}".format(results_path, field)):
        os.mkdir("{}/{}".format(results_path, field))
    # Get all subtractions for the field
    subtractions = glob("{}/{}/sub*".format(path, field))
    # Iterate through subtractions for individual FITS file
    for idx_s, sub in enumerate(subtractions):
        # Get data from fits file and a world coordinate system
        hdul = fits.open(sub)
        data = hdul[0].data
        wcs = WCS(sub)
        # Hard link (os.link, not a symlink) the subtraction into the
        # per-field results directory.
        sub_name = "sub_{}.fits".format(idx_s)
        os.link(sub, "{}/{}/{}".format(results_path, field, sub_name))
        # Iterate through 'definite' hits (each marked by a line in the REG file)
        for idx_e, echo in definite.iterrows():
            # xy midpoint of the echo line endpoints
            x1, y1 = wcs.wcs_world2pix(echo['RA1'], echo['Dec1'], 0)
            x2, y2 = wcs.wcs_world2pix(echo['RA2'], echo['Dec2'], 0)
            midpoint = ((x1 + x2) / 2,
                        (y1 + y2) / 2)
            # cuts = stack of 32x32 tiles; boundary = box limits of the cut
            cuts, boundary = cut_echo(data, midpoint)
            cut_path = "{}/{}/sub_{}_cut_{}".format(results_path, field, idx_s, idx_e)
            write_cut_region(boundary, cut_path, wcs)
            # Write out the cut stack as a raw float32 memmap
            cut_fname = "{}.dat".format(cut_path)
            fp = np.memmap(cut_fname, dtype='float32', mode='w+', shape=cuts.shape)
            fp[:] = cuts[:]
            del fp  # flush the memmap to disk
            # BUG FIX: Path(...) alone only builds a path object; .touch()
            # actually creates the empty label file.
            Path("{}.lab".format(cut_path)).touch()
        hdul.close()  # release the FITS file handle
    return len(definite), len(subtractions)
if __name__ == "__main__":
    # Root data directory and output location for accepted cuts.
    path = "./echoes"
    results_path = "{}/finds".format(path)
    # One region file per field, produced by the marking step.
    files = glob("{}/echo_regions/*.deg.reg".format(path))
    echo_sum = 0
    sub_sum = 0
    total = len(files)
    # Two worker processes; each async call handles one region file.
    with Pool(processes=2) as pool:
        results = [pool.apply_async(process_file, args=(region, path, results_path,)) for region in files]
        for idx, r in enumerate(results):
            sums = r.get()
            n_echos, n_subs = sums
            echo_sum += n_echos
            sub_sum += n_subs
            print('Completed processing %s of %s files' % (idx+1, total), end='\n')
    print("Found {} echoes in {} subtractions!".format(echo_sum, sub_sum))
|
from django.http import Http404,HttpResponse, HttpResponseRedirect
from django.template.loader import get_template
from django.template import Context
from django.shortcuts import render_to_response
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth.decorators import login_required
import datetime
## Vista que redirecciona a la vista de admin o de usuario segun sea el caso
@login_required
def principal(request):
    """Redirect an authenticated user to the admin or the student section."""
    target = "/administrador/" if request.user.is_staff else "/estudiante/"
    return HttpResponseRedirect(target)
## Vistas de la seccion de administrador
@login_required
def estudiante_base(request):
    # Render the student landing page.
    # NOTE(review): render_to_response was removed in Django 3.0 — confirm
    # the project's Django version still provides it (else use render()).
    return render_to_response("estudiante/estudiante_base.html")
|
import my_module.util as tools
import numpy as np
# Relative directory expected to hold unit files — TODO confirm where this
# is consumed (not referenced in the visible code).
_unitLoc = 'unit/'
def collect(location):
    """Parse the CSV at *location* as floats and return it transposed."""
    parsed = tools.floatParseCSVfile(location)
    return tools.transpose(parsed)
class classHandler:
    """Holds the name of a data file for later processing."""

    def __init__(self, fileName):
        # BUG FIX: removed a stray bare `temp` expression that raised
        # NameError on every instantiation.
        self.fileName = fileName
|
from interfaces import IIndividual, IPopulation
from parameters import *
from genomes import *
from operator import attrgetter
from utils import rand_probability
import copy
class Movement:
    """A single move: three genes holding the x, y and z components."""

    def __init__(self, empty=False):
        # An "empty" movement starts with no genes so crossover can fill it.
        self.genes = [] if empty else [Genes.random() for _ in range(3)]

    def __str__(self):
        return "Move(x={},y={},z={})".format(self.x(), self.y(), self.z())

    def __repr__(self):
        return str(self)

    def x(self):
        return self.genes[0].value

    def y(self):
        return self.genes[1].value

    def z(self):
        return self.genes[2].value

    def mutate(self):
        """Mutate each gene independently with probability mutation_rate."""
        for gene in self.genes:
            if rand_probability() <= mutation_rate:
                gene.mutate()

    def crossover(self, other):
        """Gene-wise crossover with *other*; returns two offspring moves."""
        child_a = Movement(empty=True)
        child_b = Movement(empty=True)
        for index in range(3):
            new_a, new_b = self.genes[index].crossover(other.genes[index])
            child_a.genes.append(new_a)
            child_b.genes.append(new_b)
        return child_a, child_b
class Individual(IIndividual):
    """A candidate solution: a list of Movements plus a fitness score."""

    def __init__(self, empty=False):
        IIndividual.__init__(self)
        self.fitness = 0.0
        # NOTE(review): with empty=True, self.moves is not set here — it is
        # presumably initialised by IIndividual or filled by crossover below;
        # confirm.
        if not empty:
            self.moves = [Movement() for _ in range(nb_move)]

    def __str__(self):
        return "Individual(fitness={}, moves={})".format(
            self.fitness, ",".join(str(move) for move in self.moves))

    def __repr__(self):
        return str(self)

    def copy(self):
        """Deep copy, detaching all genes from this individual."""
        return copy.deepcopy(self)

    def mutate(self):
        for move in self.moves:
            move.mutate()

    def crossover(self, other):
        """Move-wise crossover; offspring length is the longer parent's,
        the shorter parent wrapping around via move_at."""
        child_a = Individual(empty=True)
        child_b = Individual(empty=True)
        for index in range(max(len(self.moves), len(other.moves))):
            # Todo: preserve size of parent
            new_a, new_b = self.move_at(index).crossover(other.move_at(index))
            child_a.moves.append(new_a)
            child_b.moves.append(new_b)
        return [child_a, child_b]

    def move_at(self, index):
        # Wrap around so a shorter parent can be indexed past its end.
        return self.moves[index % len(self.moves)]
class Population(IPopulation):
    """Fixed-size collection of individuals."""

    def __init__(self, pop):
        IPopulation.__init__(self)
        self.size = pop

    def init(self):
        # (Re)create the population with freshly randomised individuals.
        self.individuals = [Individual() for _ in range(self.size)]

    def fitnesses(self):
        return [indiv.fitness for indiv in self.individuals]

    def best(self):
        # Deep-copied so callers can keep it safely across generations.
        return max(self.individuals, key=attrgetter('fitness')).copy()
|
# Module: sockets
# Date: 26th June 2006
# Author: James Mills, prologic at shortcircuit dot net dot au
"""Sockets Test Suite"""
import unittest
from time import sleep
from circuits import Component
from circuits.net.sockets import *
def wait():
    # Give the background socket threads ~100 ms to process pending events.
    sleep(0.1)
class Client(Component):
    """Test client component recording connection state and received data."""
    channel = "client"
    def __init__(self):
        super(Client, self).__init__()
        # NOTE(review): these instance attributes shadow the like-named
        # event-handler methods below on the instance; circuits appears to
        # dispatch handlers via the class — confirm the shadowing is intended.
        self.connected = False
        self.disconnected = False
        self.data = ""
    def connected(self, host, port):
        # 'connected' event handler: record that the connection succeeded.
        self.connected = True
    def disconnected(self):
        # 'disconnected' event handler.
        self.disconnected = True
    def read(self, data):
        # 'read' event handler: remember the last payload received.
        self.data = data
class Server(Component):
    """Test server component; greets each connection with "Ready"."""
    channel = "server"
    def __init__(self):
        super(Server, self).__init__()
        # NOTE(review): these instance attributes shadow the like-named
        # event-handler methods below — see Client for the same pattern.
        self.connected = False
        self.disconnected = False
        self.data = ""
    def connect(self, sock, host, port):
        # 'connect' event handler: mark connected and greet the client.
        self.connected = True
        self.push(Write(sock, "Ready"), "write")
    def disconnect(self, sock):
        # 'disconnect' event handler.
        self.disconnected = True
    def read(self, sock, data):
        # 'read' event handler: remember the last payload received.
        self.data = data
class TestCloseEvent(unittest.TestCase):
    """Test Close Event (assertEquals is a deprecated alias removed in
    Python 3.12; replaced with assertEqual)."""

    def runTest(self):
        # Client Mode: no arguments
        e = Close()
        self.assertFalse(e.args)
        # Server Mode: (sock,)
        e = Close(1)
        self.assertEqual(e.args[0], 1)
class TestConnectEvent(unittest.TestCase):
    """Test Connect Event (deprecated assertEquals replaced with assertEqual)."""

    def runTest(self):
        # Client Mode (host, port, ssl=False)
        e = Connect("localhost", 1234, ssl=True)
        self.assertEqual(e[0], "localhost")
        self.assertEqual(e[1], 1234)
        self.assertEqual(e["ssl"], True)
        e = Connect("localhost", 1234, ssl=False)
        self.assertEqual(e[0], "localhost")
        self.assertEqual(e[1], 1234)
        self.assertEqual(e["ssl"], False)
        # Server Mode (sock, host, port)
        e = Connect(1, "localhost", 1234)
        self.assertEqual(e[0], 1)
        self.assertEqual(e[1], "localhost")
        self.assertEqual(e[2], 1234)
class TestConnectedEvent(unittest.TestCase):
    """Test Connected Event (deprecated assertEquals replaced with assertEqual)."""

    def runTest(self):
        e = Connected("localhost", 1234)
        self.assertEqual(e[0], "localhost")
        self.assertEqual(e[1], 1234)
class TestDisconnectEvent(unittest.TestCase):
    """Test Disconnect Event (deprecated assertEquals replaced with assertEqual)."""

    def runTest(self):
        # Client Mode: no arguments
        e = Disconnect()
        self.assertFalse(e.args)
        # Server Mode: (sock,)
        e = Disconnect(1)
        self.assertEqual(e.args[0], 1)
class TestDisconnectedEvent(unittest.TestCase):
    """Test Disconnected Event"""

    def runTest(self):
        # Disconnected carries no payload in client mode.
        event = Disconnected()
        self.assertFalse(event.args)
class TestErrorEvent(unittest.TestCase):
    """Test Error Event (deprecated assertEquals replaced with assertEqual)."""

    def runTest(self):
        # Client Mode: (error,)
        e = Error("error")
        self.assertEqual(e[0], "error")
        # Server Mode: (sock, error)
        e = Error(1, "error")
        self.assertEqual(e[0], 1)
        self.assertEqual(e[1], "error")
class TestReadEvent(unittest.TestCase):
    """Test Read Event (deprecated assertEquals replaced with assertEqual)."""

    def runTest(self):
        # Client Mode: (data,)
        e = Read("data")
        self.assertEqual(e[0], "data")
        # Server Mode: (sock, data)
        e = Read(1, "data")
        self.assertEqual(e[0], 1)
        self.assertEqual(e[1], "data")
class TestWriteEvent(unittest.TestCase):
    """Test Write Event (deprecated assertEquals replaced with assertEqual)."""

    def runTest(self):
        # Client Mode: (data,)
        e = Write("data")
        self.assertEqual(e[0], "data")
        # Server Mode: (sock, data)
        e = Write(1, "data")
        self.assertEqual(e[0], 1)
        self.assertEqual(e[1], "data")
class SocketsTestCase(unittest.TestCase):
    # Integration tests: real sockets on fixed ports 9999/10000, with
    # fixed 0.1 s sleeps for synchronisation — may flake on loaded hosts.
    def testTCPClientServer(self):
        """Test sockets.TCPClient and sockets.TCPServer
        Test that communication between a TCPClient and
        TCPServer work correctly.
        """
        server = Server() + TCPServer(9999)
        client = Client() + TCPClient()
        server.start()
        client.start()
        try:
            client.push(Connect("127.0.0.1", 9999), "connect")
            wait()
            self.assertTrue(client.connected)
            self.assertTrue(server.connected)
            # Server greets each connection with "Ready" (see Server.connect).
            self.assertTrue(client.data == "Ready")
            client.push(Write("foo"), "write")
            wait()
            self.assertTrue(server.data == "foo")
            client.push(Close(), "close")
            wait()
            self.assertTrue(client.disconnected)
            self.assertTrue(server.disconnected)
        finally:
            server.stop()
            client.stop()
    def testUDPClientServer(self):
        """Test sockets.UDPClient and sockets.UDPServer
        Test that communication between a UDPClient and
        UDPServer work correctly.
        """
        server = Server() + UDPServer(9999)
        client = Client() + UDPClient(10000, channel="client")
        server.start()
        client.start()
        try:
            # UDP Write takes an explicit (host, port) destination tuple.
            client.push(Write(("127.0.0.1", 9999), "foo"), "write")
            wait()
            self.assertTrue(server.data == "foo")
        finally:
            server.stop()
            client.stop()
def suite():
    """Build the suite of SocketsTestCase tests.

    unittest.makeSuite was deprecated and removed in Python 3.13;
    TestLoader.loadTestsFromTestCase is the supported replacement and uses
    the same default "test" method prefix.
    """
    return unittest.TestLoader().loadTestsFromTestCase(SocketsTestCase)
# Run the full test module when executed directly.
if __name__ == "__main__":
    unittest.main()
|
# --coding:utf-8--
from rest_framework import routers
from .user import UserAPIView
from .goods import GoodsAPIView
from .active import ActiveAPIView,ActiveGoodsAPIView
# Create the API router (translated: "declare the api router").
api_router = routers.DefaultRouter()
# Register each ViewSet with the router (translated: "register ViewSets
# with the api router").
api_router.register('users',UserAPIView)
api_router.register('goods',GoodsAPIView)
api_router.register('active',ActiveAPIView)
api_router.register('activegoods',ActiveGoodsAPIView)
|
#!/usr/bin/python3
def isNumber(string):
    """Classify a calculator token.

    Returns True when *string* is a (possibly negative, possibly decimal)
    number, False when it is a single operator, and raises ValueError for
    anything else.  NOTE(review): an empty string reaches string[0] and
    raises IndexError — callers pre-check for "" before calling.
    """
    numbers = "0123456789"
    ops = "-+*/"
    num = 0
    op = 0
    char = 0
    for ch in string:
        if ch in numbers:
            num += 1
        elif ch in ops:
            op += 1
        elif ch == ".":
            # Dots are tolerated here; float() validates them later.
            continue
        else:
            char += 1
    if op == 0 and num >= 1 and char == 0:
        return True
    elif string[0] == "-" and num >= 1 and char == 0:
        # Leading minus sign: a negative number.
        return True
    elif op == 1 and num == 0 and char == 0:
        return False
    else:
        # BUG FIX: the original `return ValueError` returned the exception
        # CLASS — a truthy value — so garbage input was treated as a number.
        # The callers already catch ValueError, so raise it instead.
        raise ValueError("not a number or operator: {!r}".format(string))
def mathOperation(total, sign, number):
    """Apply the binary operation *sign* to (total, number).

    Unknown signs leave *total* unchanged; "/" may raise ZeroDivisionError,
    which the caller handles.
    """
    operations = {
        "+": lambda a, b: a + b,
        "-": lambda a, b: a - b,
        "*": lambda a, b: a * b,
        "/": lambda a, b: a / b,
    }
    apply_op = operations.get(sign)
    return apply_op(total, number) if apply_op else total
def main():
    """Interactive calculator: read a starting number, then alternate
    operator / number inputs, printing a running total after each step.
    Empty input offers to quit."""
    running_total = 0
    operation = 0  # stays 0 until the user has entered an operator
    # First loop: obtain the initial number.
    while True:
        try:
            user_input_1 = input("Введите число: ")
            if user_input_1 == "":
                user_input_exit = input("Вы уверены? (y/n): ")
                if user_input_exit == "y":
                    exit()
                else:
                    continue
            elif isNumber(user_input_1):
                running_total = float(user_input_1)
                break
            elif isNumber(user_input_1):
                # NOTE(review): unreachable — identical condition to the
                # branch above; probably meant `is False`.
                print("Нужно начать с числа!")
                continue
        except ValueError:
            print("Проверьте корректность ввода")
    # Second loop: alternate operators and numbers indefinitely.
    while True:
        try:
            user_input_2 = input("Введите следующее действие или число: ")
            if user_input_2 == "":
                user_input_exit = input("Вы уверены? (y/n): ")
                if user_input_exit == "y":
                    exit()
                else:
                    continue
            elif isNumber(user_input_2) and operation == 0:
                print("Сначала введите действие!")
                continue
            elif isNumber(user_input_2) is False:
                # An operator: remember it for the next number.
                operation = user_input_2
                continue
            else:
                user_input_2 = float(user_input_2)
                running_total = mathOperation(running_total, operation,\
                                              user_input_2)
        except ValueError:
            print("Проверьте корректность ввода")
            continue
        except ZeroDivisionError:
            print("Деление на ноль!!!")
            continue
        print("")
        # If the total is fractional, show two decimal places
        if (running_total % 1) > 0:
            print("Промежуточный итог: {:.2f}".format(running_total))
        # Otherwise show it as an integer
        else:
            print("Промежуточный итог: ", int(running_total))
        # Kept deliberately (translated author note): since the operator is
        # stored in a variable, the user does not need to re-enter it every
        # time and can chain e.g. a running sum with the same operator.
        print("Текущий символ следующей операции: ", operation)
        print("")
        continue
# Script entry point: start the interactive calculator loop.
main()
|
__author__ = 'mjohnpayne'
# Blast CI proteins from unnaligned contigs against Pm proteins
# if protein hits = duplicated protein
# if protein doesn't hit highly = new protein
from Bio.Blast import NCBIWWW
from Bio.Blast import NCBIXML
from Bio.Blast.Applications import NcbiblastpCommandline as blastp
from Bio import SeqIO
from Bio.Seq import Seq
import sys
import subprocess
# Paths to local BLAST protein databases (machine-specific).
pmdb = '/Users/mjohnpayne/Documents/PhD/wt_genome/pm_wt_dbs/all_pm_protein.fasta'
fungidb = '/Volumes/MP_HD/nr_fastas/All_fungi_refseq_prot_6-8-14.fasta'
##record = SeqIO.read("m_cold.fasta", format="fasta")
##result_handle = NCBIWWW.qblast("blastn", "nt", record.seq)
## first generate list of contigs in abacas bin
def extract_contigs(cont):
    """Return contig names from an open bin file, stripping the trailing
    ' \\n' each line carries."""
    return [line.replace(' \n', '') for line in cont]
## second extract genes (prot seq) from those contigs from augustus output
def get_seqs(an,con):
    """Parse an AUGUSTUS annotation handle *an* and return a dict mapping
    each contig named in *con* to a list of its predicted-protein
    SeqRecords."""
    ## generate dictionary of contigs attached to list of gene id:seq pairs
    an = an.readlines()
    dic = {}
    cont,gene,pseq = '','',''
    for i in range(len(an)):
        line = an[i]
        ids = ''  # NOTE(review): unused
        if '\tgene\t' in line:
            # A gene feature row: remember its contig and gene ID.
            col = line.split('\t')
            cont = col[0]
            gene = col[8].strip('ID=').replace('\n','')
            st,en = 0,0
        if '= [' in line:
            # Start of a "protein sequence = [...]" comment block: collect
            # lines until the "end gene" marker.
            st = i
            start = st
            curr = an[start]
            pseq = []
            while 'end gene' not in curr:
                pseq.append(an[start])
                start += 1
                curr = an[start]
            pseq = ''.join(pseq)
            # Strip the AUGUSTUS comment decoration around the sequence.
            pseq = pseq.replace('# ','').replace('protein sequence = [','').replace(']','').replace('\n','')
            pseq = Seq(pseq)
            if cont not in dic:
                dic[cont] = [SeqIO.SeqRecord(pseq,gene)]
            else:
                dic[cont] += [SeqIO.SeqRecord(pseq,gene)]
    # Keep only the contigs that appear in the abacas bin list.
    newd = {}
    for i in con:
        if i in dic:
            newd[i] = dic[i]
    return newd
# if 'end gene' in line:
# en = i
# print st
# print en
# pseq = ''.join(an[st:en])
# pseq.replace('# ','').replace('protein sequence = [','').replace(']','').strip('\n')
# #print pseq
## third blast these genes against ncbi NR database
def blast_gene(seq,database):
    """BLASTP *seq* against *database* and return a flat list of
    species / description / e-value strings for the top descriptions.

    Writes temp.fasta and temp.xml into the working directory.
    """
    tempfasta = open('temp.fasta','w')
    SeqIO.write(seq,tempfasta,'fasta')
    tempfasta.close()
    run = blastp(query='temp.fasta',db=database,num_descriptions=5,num_threads=6,outfmt=5,out='temp.xml')
    run()
    result_handle = open('temp.xml')
    result = NCBIXML.read(result_handle)
    rets = []
    for i in result.descriptions:
        ttl = i.title
        e = i.e
        # Custom database tags take precedence over the generic
        # "description [species]" NCBI title format handled in the else.
        if 'Tfl|' in ttl:
            species = 'T. flavus'
            d = ttl[ttl.find('Tfl'):]
        elif 'Pfu|' in ttl:
            species = 'P. funiculosum'
            d = ttl[ttl.find('Pfu'):]
        elif 'PMAA_' in ttl:
            species = 'T. marneffei'
            d = ttl[ttl.find('PMAA'):]
        else:
            species = ttl[ttl.find('[')+1:ttl.find(']')]
            d = ttl[ttl.find('| ')+1:ttl.find('[')-1]
        rets.append(species)
        rets.append(d)
        rets.append(str(e))
    return rets
## fourth parse blast output to determine if top hit is in Pm or another species
def main(contigs, annots, o):
    """BLAST every predicted protein from the binned contigs and write a
    TSV of contig, gene id and blast_gene() result columns to *o*."""
    cont = open(contigs, 'r')
    outfile = open(o, 'w')
    conts = extract_contigs(cont)
    cont.close()  # FIX: input handles were previously left open
    annot = open(annots, 'r')
    seqs = get_seqs(annot, conts)
    annot.close()
    for i in seqs:
        # FIX: converted Python-2 `print x` statements to print() calls,
        # which behave identically for a single argument on both versions.
        print(i)
        for j in seqs[i]:
            print(j.id)
            res = blast_gene(j, fungidb)
            outfile.write(i + '\t' + j.id + '\t' + '\t'.join(res) + '\n')
    outfile.close()
# Smoke-test fixtures (only used if the commented print below is enabled).
testseq = SeqIO.SeqRecord(Seq("MGMNINQILVESLTHLNYAFGYITPETYKIGVMPGVDASTFSDFTALKSKNSDLKTFITHLLAFMRHYGFDGVDFDWEYPGATDRQPNELNS"), id='g9865')
#print blast_gene(testseq)
testan = '/Volumes/MP_HD/CI_GENOME_SEQ/augustus_gene_finding(stats_for_fig)/velvet_assemblies_gff/012_vel_denovo.gff'
testcon = '/Volumes/MP_HD/CI_GENOME_SEQ/CI_denovo_assemblies(stats)/velvet_assemblies_scaf/abacas/012_vel_scaffolds.fasta_pmfa1_annot_scaf_concat.fasta.bin'
# Output path derived from the annotation file name; requires two CLI args:
# argv[1] = contig bin file, argv[2] = AUGUSTUS annotation file.
of = sys.argv[2][:-4] + '_unmapped_contig_blast.txt'
main(sys.argv[1],sys.argv[2],of)
|
"""
Django settings for shopping_junction project.
Generated by 'django-admin startproject' using Django 3.0.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# import environ
# env = environ.Env()
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control — rotate it and
# load it from the environment for production deployments.
SECRET_KEY = '3mtje9=uo#b=kosbg$v^k0@=a8%#w8*(5z2zj3ch4a_!!fy1yg'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# NOTE(review): wildcard host list is fine for development only.
ALLOWED_HOSTS = ["*"]
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'graphene_django',
    'social_django',
    'corsheaders',
    'rest_framework',
    'graphql_auth',
    # 'fcm_django',
    'app',
]
JWT_VERIFY_EXPIRATION = False
CORS_ORIGIN_ALLOW_ALL = True
# NOTE(review): a live-looking FCM server key is committed here — move it
# to the environment and revoke the exposed key.
FCM_DJANGO_SETTINGS = {
    "FCM_SERVER_KEY": "AAAAOxkCOyM:APA91bFm3HUcZAWHxwryNIR6mIZ0nxMdg8MS51GFW0xC5d0EQSNhr8AXI7gB6nN8Xaq5Ug4laTX6ycND8960a71EDuu5NDN5qHF4SutTWW4Dr9CEzUkWCVRwScTYeI4rek6B8fJLYGL7"
}
GRAPHENE = {
    'SCHEMA': 'schema.schema', # Where your Graphene schema lives
    'MIDDLEWARE': [
        'graphql_jwt.middleware.JSONWebTokenMiddleware',
    ],
}
# NOTE(review): django-cors-headers docs recommend placing CorsMiddleware
# as high as possible, above CommonMiddleware — confirm this ordering.
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    # 'shopping_junction.middleware.JWTMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'corsheaders.middleware.CorsMiddleware',
    # 'django.middleware.common.CommonMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    # 'graphql_jwt.middleware.JSONWebTokenMiddleware',
    # 'django.contrib.auth.middleware.AuthenticationMiddleware'
]
AUTHENTICATION_BACKENDS = [
    # 'social_core.backends.open_id.OpenIdAuth',
    # 'social_core.backends.google.GoogleOpenId',
    'social_core.backends.google.GoogleOAuth2',
    # 'social_core.backends.google.GoogleOAuth',
    # 'social_core.backends.twitter.TwitterOAuth',
    # 'social_core.backends.yahoo.YahooOpenId',
    'graphql_jwt.backends.JSONWebTokenBackend',
    'django.contrib.auth.backends.ModelBackend',
]
SOCIAL_AUTH_PIPELINE = [
    # Get the information we can about the user and return it in a simple
    # format to create the user instance later. On some cases the details are
    # already part of the auth response from the provider, but sometimes this
    # could hit a provider API.
    'social_core.pipeline.social_auth.social_details',
    # Get the social uid from whichever service we're authing thru. The uid is
    # the unique identifier of the given user in the provider.
    'social_core.pipeline.social_auth.social_uid',
    # Verifies that the current auth process is valid within the current
    # project, this is where emails and domains whitelists are applied (if
    # defined).
    'social_core.pipeline.social_auth.auth_allowed',
    # Checks if the current social-account is already associated in the site.
    'social_core.pipeline.social_auth.social_user',
    # Make up a username for this person, appends a random string at the end if
    # there's any collision.
    'social_core.pipeline.user.get_username',
    # Send a validation email to the user to verify its email address.
    # Disabled by default.
    # 'social_core.pipeline.mail.mail_validation',
    # Associates the current social details with another user account with
    # a similar email address. Disabled by default.
    # 'social_core.pipeline.social_auth.associate_by_email',
    # Create a user account if we haven't found one yet.
    'social_core.pipeline.user.create_user',
    # Create the record that associates the social account with the user.
    'social_core.pipeline.social_auth.associate_user',
    # Populate the extra_data field in the social record with the values
    # specified by settings (and the default ones like access_token, etc).
    'social_core.pipeline.social_auth.load_extra_data',
    # Update the user record with any changed info from the auth service.
    'social_core.pipeline.user.user_details',
]
# REST_FRAMEWORK = {
#     'DEFAULT_AUTHENTICATION_CLASSES': [
#         'rest_framework.authentication.BasicAuthentication',
#         'rest_framework.authentication.SessionAuthentication',
#     ]
# }
# SOCIAL_AUTH_GOOGLE_OAUTH2_KEY =\
#     env('SOCIAL_AUTH_GOOGLE_OAUTH2_KEY', default='')
# SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET =\
#     env('SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET', default='')
# SOCIAL_AUTH_FACEBOOK_KEY = env('SOCIAL_AUTH_FACEBOOK_KEY', default='')
# SOCIAL_AUTH_FACEBOOK_SECRET = env('SOCIAL_AUTH_FACEBOOK_SECRET', default='')
# SOCIAL_AUTH_TWITTER_KEY = env('SOCIAL_AUTH_TWITTER_KEY', default='')
# SOCIAL_AUTH_TWITTER_SECRET = env('SOCIAL_AUTH_TWITTER_SECRET', default='')
REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': [
        'rest_framework.authentication.BasicAuthentication',
        'rest_framework.authentication.SessionAuthentication',
    ]
}
ROOT_URLCONF = 'shopping_junction.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': ["template"],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'shopping_junction.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, "static"),
    # '/var/www/static/',
]
STATIC_URL = '/static/'
# MEDIA_ROOT = ''
MEDIA_ROOT = os.path.join(BASE_DIR, 'media/')
MEDIA_URL = '/media/'
# NOTE(review): two live-looking JWT bearer tokens were previously pasted
# here as comments; removed — never commit tokens to source control.
# Read the entry count m (n is read but unused by the check below).
m , n = map(int,input().split())
# Collect distinct entries in first-seen order.
data = []
for _ in range(m):
    entry = input()
    if entry not in data:
        data.append(entry)
# Fewer than 8 distinct entries is unsatisfactory.
print("unsatisfactory" if len(data) < 8 else "satisfactory")
|
import math
from typing import Any, List, Mapping, Type, Optional, Callable
import ray
from ray.rllib.core.rl_module.rl_module import RLModule, ModuleID
from ray.rllib.core.rl_trainer.rl_trainer import (
RLTrainer,
ParamOptimizerPairs,
Optimizer,
)
from ray.rllib.core.rl_trainer.tf.tf_rl_trainer import TfRLTrainer
from ray.rllib.core.rl_trainer.torch.torch_rl_trainer import TorchRLTrainer
from ray.rllib.policy.sample_batch import MultiAgentBatch
from ray.air.config import ScalingConfig
from ray.train._internal.backend_executor import BackendExecutor
class TrainerRunner:
"""Coordinator of RLTrainers.
Public API:
.update(batch) -> updates the RLModule based on gradient descent algos.
.additional_update() -> any additional non-gradient based updates will get
called from this entry point.
.get_state() -> returns the state of the RLModule and RLOptimizer from
all of the RLTrainers
.set_state() -> sets the state of all the RLTrainers
TODO(avnishn):
1. Add trainer runner with async operations
2. Use fault tolerant actor manager to handle failures
3. Add from_xxx constructor pattern. For example
add a `from_policy_map(self.local_worker().policy_map, cfg)`
constructor to make it easier to create a TrainerRunner from a
rollout worker.
"""
    def __init__(
        self,
        trainer_class: Type[RLTrainer],
        trainer_config: Mapping[str, Any],
        compute_config: Mapping[str, Any],
    ):
        """Spin up a group of RLTrainer workers via a Train BackendExecutor.

        Args:
            trainer_class: Torch or TF RLTrainer subclass to run on each worker.
            trainer_config: Passed to each trainer; scaling_config and
                distributed entries are injected below.
            compute_config: Reads "num_gpus" and "num_training_workers".
        """
        # TODO: trainer_config and compute_config should become dataclasses.
        # It's hard for the user to know what the trainer / compute parameters are
        # expected.
        self._trainer_config = trainer_config
        self._compute_config = compute_config
        # TODO: remove the _compute_necessary_resources and just use
        # trainer_config["use_gpu"] and trainer_config["num_workers"]
        resources = self._compute_necessary_resources()
        scaling_config = ScalingConfig(
            num_workers=resources["num_workers"],
            use_gpu=resources["use_gpu"],
        )
        # the only part of this class that is framework agnostic:
        if issubclass(trainer_class, TorchRLTrainer):
            from ray.train.torch import TorchConfig
            backend_config = TorchConfig()
        elif issubclass(trainer_class, TfRLTrainer):
            from ray.train.tensorflow import TensorflowConfig
            backend_config = TensorflowConfig()
        else:
            raise ValueError("framework must be either torch or tf")
        self.backend_executor = BackendExecutor(
            backend_config=backend_config,
            num_workers=scaling_config.num_workers,
            num_cpus_per_worker=scaling_config.num_cpus_per_worker,
            num_gpus_per_worker=scaling_config.num_gpus_per_worker,
            max_retries=0,  # TODO: make this configurable in trainer_config
            # with default 0
        )
        # TODO: let's not pass this into the config which will cause
        # information leakage into the SARLTrainer about other workers.
        scaling_config = {"world_size": resources["num_workers"]}
        trainer_config["scaling_config"] = scaling_config
        # NOTE(review): direct indexing here (unlike .get elsewhere) raises
        # KeyError when "num_gpus" is absent, and the distributed flag keys
        # off GPUs only, not the worker count — confirm both are intended.
        trainer_config["distributed"] = bool(self._compute_config["num_gpus"] > 1)
        self.backend_executor.start(
            train_cls=trainer_class, train_cls_kwargs=trainer_config
        )
        self.workers = [w.actor for w in self.backend_executor.worker_group.workers]
        # Build each trainer's module/optimizer state before first use.
        ray.get([w.build.remote() for w in self.workers])
def _compute_necessary_resources(self):
num_gpus = self._compute_config.get("num_gpus", 0)
num_workers = self._compute_config.get("num_training_workers", 0)
if num_workers and num_gpus:
assert num_workers == num_gpus, (
"If num_training_workers and "
"num_gpus are specified it must be equal to num_gpus"
)
elif num_gpus and not num_workers:
num_workers = num_gpus
elif not num_gpus and not num_workers:
num_workers = 1
return {"num_workers": num_workers, "use_gpu": bool(num_gpus)}
    def update(self, batch: MultiAgentBatch = None, **kwargs):
        """Shard *batch* per policy across the workers and run one gradient
        update on each; returns the per-worker results.

        Example:
            >>> trainer_runner.update(batch)  # updates the gradient
        """
        refs = []
        global_size = len(self.workers)
        # NOTE(review): this value only reaches MultiAgentBatch below when
        # batch.policy_batches is empty; otherwise the per-policy value from
        # the inner loop's LAST iteration overwrites it.
        batch_size = math.ceil(len(batch) / global_size)
        for i, worker in enumerate(self.workers):
            batch_to_send = {}
            # Give worker i the i-th contiguous slice of each policy batch.
            for pid, sub_batch in batch.policy_batches.items():
                batch_size = math.ceil(len(sub_batch) / global_size)
                start = batch_size * i
                end = min(start + batch_size, len(sub_batch))
                batch_to_send[pid] = sub_batch[int(start) : int(end)]
            new_batch = MultiAgentBatch(batch_to_send, int(batch_size))
            refs.append(worker.update.remote(new_batch))
        return ray.get(refs)
def additional_update(self, *args, **kwargs) -> List[Mapping[str, Any]]:
"""Apply additional non-gradient based updates to the RLTrainers.
For example, this could be used to do a polyak averaging update
of a target network in off policy algorithms like SAC or DQN.
By default this is a pass through that calls `RLTrainer.additional_update`
Args:
*args: Arguments to pass to each RLTrainer.
**kwargs: Keyword arguments to pass to each RLTrainer.
Returns:
A list of dictionaries of results from the updates from each worker.
"""
refs = []
for worker in self.workers:
refs.append(worker.additional_update.remote(*args, **kwargs))
return ray.get(refs)
def add_module(
self,
*,
module_id: ModuleID,
module_cls: Type[RLModule],
module_kwargs: Mapping[str, Any],
set_optimizer_fn: Optional[Callable[[RLModule], ParamOptimizerPairs]] = None,
optimizer_cls: Optional[Type[Optimizer]] = None,
) -> None:
"""Add a module to the RLTrainers maintained by this TrainerRunner.
Args:
module_id: The id of the module to add.
module_cls: The module class to add.
module_kwargs: The config for the module.
set_optimizer_fn: A function that takes in the module and returns a list of
(param, optimizer) pairs. Each element in the tuple describes a
parameter group that share the same optimizer object, if None, the
default optimizer (obtained from the exiting optimizer dictionary) will
be used.
optimizer_cls: The optimizer class to use. If None, the set_optimizer_fn
should be provided.
"""
refs = []
for worker in self.workers:
ref = worker.add_module.remote(
module_id=module_id,
module_cls=module_cls,
module_kwargs=module_kwargs,
set_optimizer_fn=set_optimizer_fn,
optimizer_cls=optimizer_cls,
)
refs.append(ref)
ray.get(refs)
def remove_module(self, module_id: ModuleID) -> None:
"""Remove a module from the RLTrainers maintained by this TrainerRunner.
Args:
module_id: The id of the module to remove.
"""
refs = []
for worker in self.workers:
ref = worker.remove_module.remote(module_id)
refs.append(ref)
ray.get(refs)
def get_state(self) -> List[Mapping[ModuleID, Mapping[str, Any]]]:
"""Get the states of the RLTrainers"""
refs = []
for worker in self.workers:
refs.append(worker.get_state.remote())
return ray.get(refs)
def set_state(self, state: List[Mapping[ModuleID, Mapping[str, Any]]]):
"""Sets the states of the RLTrainers.
Args:
state: The state of the RLTrainers
"""
refs = []
for worker in self.workers:
refs.append(worker.set_state.remote(state))
return ray.get(refs)
|
from django.conf.urls import url
from . import views
# Route table for the books app. All patterns are anchored with '^'.
urlpatterns = [
    url(r'^$', views.index),
    url(r'^register$', views.register),
    url(r'^login$', views.login),
    url(r'^books$', views.books),
    url(r'^logout$', views.logout),
    url(r'^books/add$', views.booksadd),
    url(r'^books/(?P<book_id>\d+)$', views.bookview),
    # Bug fix: the pattern lacked the leading '^' anchor, so it matched any
    # URL path merely *ending* in "process" (e.g. /books/process).
    url(r'^process$', views.process)
]
|
from GO4BehaviouralPatterns.ChainOfResponsibility.CoinHandlerBase import CoinHandlerBase
class FiveCentHandler(CoinHandlerBase):
    """Chain-of-responsibility link that accepts 5-cent coins.

    A coin is treated as a 5-cent piece when both its weight and diameter
    equal 5; any other coin is forwarded to the next handler in the chain.
    """
    def __init__(self):
        # Bug fix: the previous override was just ``pass``, which skipped
        # CoinHandlerBase.__init__ entirely, so attributes set up there
        # (e.g. self._successor) could be missing and handle_coin could
        # raise AttributeError. NOTE(review): assumes the base __init__
        # takes no required arguments — confirm against CoinHandlerBase.
        super(FiveCentHandler, self).__init__()
    def handle_coin(self, coin):
        """Consume the coin if it matches the 5-cent profile, else delegate."""
        if coin.get_weight() == 5 and coin.get_diameter() == 5:
            print ("Captured 5 Cent")
        elif self._successor is not None:
            self._successor.handle_coin(coin)
|
#-*- coding:utf-8 _*-
"""
--------------------------------------------------------------------
@function:
@time: 2018-01-29
author:baoquan3
@version:
@modify:
--------------------------------------------------------------------
"""
import sys
import hashlib
import redis
from Mapper1 import UserContribute
import datetime
import time
# Python 2 only: force the interpreter-wide default string encoding to UTF-8.
# reload(sys) is required because setdefaultencoding is deleted from the sys
# module after interpreter start-up.
defaultencoding = 'utf-8'
if sys.getdefaultencoding() != defaultencoding:
    reload(sys)
    sys.setdefaultencoding(defaultencoding)
def check():
    """Smoke-test UserContribute's ID-to-date helpers (Python 2 script)."""
    a = "2010-10-01 00:00:28"
    timeArray = time.strptime(a, "%Y-%m-%d %H:%M:%S")
    timestamp = time.mktime(timeArray)
    print a , " --> ", int(timestamp)
    # NOTE(review): this prints the length of the repr of the *module*
    # ``time`` — probably meant len(str(timestamp)); confirm intent.
    print len(str(time))
    uc = UserContribute()
    t1 = "4186776154039524"
    t2 = "4189977154059524"
    print len(t1)
    date1 , date2 = uc.getTimeById(t1), uc.getTimeById(t2)
    print date1
    print date2
    # "inteval" is a typo for "interval"; kept, since renaming is a code change.
    inteval = uc.getDateInterval(date1, date2)
    print inteval
# Script entry point.
if __name__ == "__main__":
    check()
|
class Solution(object):
    def permuteUnique(self, nums):
        """Return all unique permutations of nums.

        :type nums: List[int]
        :rtype: List[List[int]]

        Sorts a copy of the input so equal values are adjacent, then
        recursively fixes each distinct value as the head and permutes the
        remainder, skipping duplicate heads at the same depth.
        """
        ordered = sorted(nums)
        # Base case generalized from len == 1 to len <= 1 so an empty input
        # yields [[]] (the single empty permutation) instead of [].
        if len(ordered) <= 1:
            return [ordered]
        results = []
        for i, value in enumerate(ordered):
            # A value equal to its predecessor would reproduce permutations
            # already generated with the same head — skip it.
            if i > 0 and value == ordered[i - 1]:
                continue
            remainder = ordered[:i] + ordered[i + 1:]
            results.extend([value] + tail for tail in self.permuteUnique(remainder))
        return results
# Ad-hoc smoke test: prints the unique permutations of [1, 1, 4].
test = Solution()
print(test.permuteUnique([1,1,4]))
from ucloud.core import auth
def test_verify_ac():
    """Check request signing and serialization of a ucloud Credential."""
    public_key = "ucloudsomeone@example.com1296235120854146120"
    private_key = "46f09bb9fab4f12dfc160dae12273d5332b5debe"
    params = {
        "Action": "CreateUHostInstance",
        "CPU": 2,
        "ChargeType": "Month",
        "DiskSpace": 10,
        "ImageId": "f43736e1-65a5-4bea-ad2e-8a46e18883c2",
        "LoginMode": "Password",
        "Memory": 2048,
        "Name": "Host01",
        "Password": "VUNsb3VkLmNu",
        "PublicKey": public_key,
        "Quantity": 1,
        "Region": "cn-bj2",
        "Zone": "cn-bj2-04",
    }
    cred = auth.Credential(public_key, private_key)
    # Signature of a known payload must be stable.
    assert cred.verify_ac(params) == "4f9ef5df2abab2c6fccd1e9515cb7e2df8c6bb65"
    # Round-trip serialization must expose both keys.
    assert cred.to_dict() == {
        "public_key": public_key,
        "private_key": private_key,
    }
|
from django.contrib import admin
from .models import Loyal
from .models import Offer
from .models import Domain
from .models import Staff
# Register your models here.
class LoyalAdmin(admin.ModelAdmin):
    """Columns shown on the Loyal change-list page."""
    list_display = ['Name', 'Contact', 'Email', 'Last']
class OfferAdmin(admin.ModelAdmin):
    """Columns shown on the Offer change-list page."""
    list_display = ['Category', 'Name', 'Img', 'Valid']
class DomainAdmin(admin.ModelAdmin):
    """Columns shown on the Domain change-list page."""
    list_display = ['name', 'dom_id']
class StaffAdmin(admin.ModelAdmin):
    """Columns shown on the Staff change-list page."""
    list_display = ['s', 'name', 'gender']
# Attach each model to the Django admin with its customized list view.
admin.site.register(Offer, OfferAdmin)
admin.site.register(Loyal, LoyalAdmin)
admin.site.register(Domain, DomainAdmin)
admin.site.register(Staff, StaffAdmin)
|
# 2020-8-28
# 脑袋犹如静止,思维停滞
class Solution:
    def maxCoins(self, piles):
        """Greedy for LeetCode 1561: sort ascending, give the smallest third
        away, then take every second pile of the rest (you always get the
        second-largest of each triple)."""
        piles.sort()
        ours = piles[len(piles) // 3:]
        return sum(ours[k] for k in range(0, len(ours), 2))
# s = Solution()
# piles = [2,4,1,2,7,8]
# print(s.maxCoins(piles))
# 122478
# 2020-9-10
class Solution2:
    def maxCoins(self, piles):
        """Same greedy as Solution, indexing directly into the sorted list."""
        piles.sort()
        total = 0
        idx = len(piles) // 3
        while idx < len(piles):
            total += piles[idx]
            idx += 2
        return total
# Quick manual run; the result (9) is discarded — wrap in print() to inspect.
piles = [2,4,1,2,7,8]
s = Solution2()
s.maxCoins(piles)
|
'''问题1
编写一个程序,查找所有可以被7整除但不是5的倍数的数字。在2000到3200之间(均包括在内)。所获得的数字应以逗号分隔的顺序打印在一行上。
提示:
考虑使用范围(#begin,#end)方法'''
# list1 = []
# for i in range(2000,3201):
# if i%7 ==0 and i%5 !=0:
# list1.append(i)
# for i in list1:
# print(i,end=',')
'''问题2
编写一个程序,可以计算给定数字的阶乘。结果应以逗号分隔的顺序打印在一行上。
假设将以下输入提供给程序:8 然后,输出应为:40320
提示:
如果将输入数据提供给问题,则应假定它是控制台输入。'''
# while True:
# a = int(input("请输入一个数:"))
# b = 1
# if a ==0:
# print (0)
# else:
# for i in range(1,a+1):
# b=b*i
# print(b)
'''问题3
使用给定的整数n,编写程序以生成包含(i,i * i)的字典,该字典为1到n之间的整数(都包括在内)。然后程序应打印字典。
假设将以下输入提供给程序:8
然后,输出应为:{1:1、2:4、3:9、4:16、5:25、6:36、7:49、8:64}
提示:
如果将输入数据提供给问题,则应假定它是控制台输入。
考虑使用dict()'''
# n = int(input("请输入一个数字:"))
# dict_a = {}
# for i in range (1,n+1):
# dict_a[i] = i*i
# print(dict_a)
'''
问题4
编写一个程序,该程序从控制台接受一个逗号分隔的数字序列,并生成一个列表和一个包含每个数字的元组。
假设将以下输入提供给程序:34,67,55,33,12,98
然后,输出应为:
['34','67','55','33','12','98']
('34', '67', '55', '33', '12', '98')
提示:
如果将输入数据提供给问题,则应假定它是控制台输入。
tuple()方法可以将列表转换为元组
'''
# Exercise 4: read whitespace-separated numbers and show them both as a
# list and as a tuple.
raw = input("请输入6个数字,每个数字以空格为分隔符:")
tokens = raw.split()
as_tuple = tuple(tokens)
print(tokens)
print(as_tuple)
'''
问题5
定义一个至少具有两个方法的类:getString:从控制台输入获取字符串printString:以大写形式输出字符串。还请包括简单的测试功能来测试类方法。
提示:
使用__init__方法构造一些参数
''' |
import sys
import re
import cPickle as pickle
from utils import timeit
import glob
import ntpath
import pymongo
from urlunshort import resolve
import csv
import ast
from utils import timeout, TimeoutError, timeit, unshorten_url
import requests
import logging
"""
expand each url
see that it has domain in orgs
What tweets have domain name in [orgs]?
make sure to cache links that have been seens
In electome db:
reuters
huffingtonpost
mcclatchy
buzzfeed
politico
propublica
wsj
nytimes
npr
cnn
ap [ap.org]
foxnews
latimes
washingtonpost
"""
logging.basicConfig(filename='expand_url.log',level=logging.DEBUG)
def check_substr(substring_list, string):
    """Return True if any entry of substring_list occurs inside string."""
    for candidate in substring_list:
        if candidate in string:
            return True
    return False
# @timeout(2)
# def expand(url):
# r = requests.head(url)
# if r.status_code / 100 == 3:
# expanded_url = r.headers['Location']
# else:
# expanded_url = r.url
# return expanded_url
def unshort_and_check(_id, list_of_urls):
    """Filter a stringified list of URLs down to known news-org domains.

    ``list_of_urls`` is the repr of a Python list (parsed via
    ast.literal_eval); ``_id`` is only used for log messages.
    """
    DOMAINS = ['reuters', 'huffingtonpost', 'mcclatchy',
               'buzzfeed','politico', 'propublica',
               'wsj', 'nytimes', 'npr', 'cnn', 'ap.org',
               'foxnews', 'latimes', 'washingtonpost']
    urls = ast.literal_eval(list_of_urls)
    ls = []
    for url in urls:
        try:
            # NOTE(review): the expanded URL is discarded — the domain check
            # below runs on the original (possibly shortened) url. Presumably
            # unshorten_url caches expansions elsewhere; confirm intent.
            unshorten_url(url)
        except TimeoutError:
            # Timed-out URLs still fall through to the domain check below.
            logging.warning("TIMED OUT: " + _id + " " + url)
        except:
            # NOTE(review): bare except + break aborts the *remaining* URLs
            # of this tweet on any other error — confirm this is deliberate.
            #logging.warning("OTHER ERROR: " + _id + " " + url)
            break
        if check_substr(DOMAINS, url):
            ls.append(url)
    return ls
@timeit
def expand_urls(inpath, outpath):
    """Read a CSV of (_id, [urls]) rows and write only the news-domain URLs.

    Rows whose URL list contains no known news domain are dropped.
    Note: opening the input in 'rb' for csv.reader is the Python 2 idiom
    (this file is Python 2 — see the `print p` statements below).
    """
    with open(inpath, 'rb') as f:
        reader = csv.reader(f)
        with open(outpath, 'w') as out:
            writer = csv.writer(out)
            for row in reader:
                urls = unshort_and_check(row[0], row[1])
                if urls:
                    writer.writerow([row[0], urls])
def main(in_dir, out_dir):
    """Run expand_urls over a hard-coded batch of link CSVs.

    NOTE(review): ``in_dir`` is unused — the glob over it is commented out
    in favor of the explicit ``files`` list below (half the data set, split
    between two machines per the comment).
    """
    # split in half for Catbook and Shannon
    files = ['../DATA/LINKS_2016_ONLY/url_tweets_17_links.csv',
             '../DATA/LINKS_2016_ONLY/url_tweets_18_links.csv',
             '../DATA/LINKS_2016_ONLY/url_tweets_19_links.csv',
             '../DATA/LINKS_2016_ONLY/url_tweets_1_links.csv',
             '../DATA/LINKS_2016_ONLY/url_tweets_20_links.csv',
             '../DATA/LINKS_2016_ONLY/url_tweets_21_links.csv',
             '../DATA/LINKS_2016_ONLY/url_tweets_22_links.csv',
             '../DATA/LINKS_2016_ONLY/url_tweets_23_links.csv',
             '../DATA/LINKS_2016_ONLY/url_tweets_24_links.csv']
    #for p in glob.glob(in_dir + '/*'):
    for p in files:
        print p
        #get_links_and_pickle(p, out_dir + ntpath.basename(p).split('.')[0] + '_links.pkl')
        expand_urls(p, out_dir + ntpath.basename(p).split('.')[0] + '_newslinks.csv')
#glob.glob('/Users/Cat/thesis-extend/DATA/PICKLES/*')
# Usage: python expand_urls.py ../DATA/LINKS_2016_ONLY/ ../DATA/NEWS_ONLY_LINKS/
if __name__ == "__main__": main(sys.argv[1], sys.argv[2])
|
'''
Created on May 15, 2017
@author: Nate
'''
import numpy as np
import matplotlib.pyplot as plt
import random
def barOf(values):
    """Return the arithmetic mean (x-bar) of a non-empty sequence.

    The parameter was renamed from ``list``, which shadowed the builtin.
    Raises ValueError (instead of an opaque ZeroDivisionError) on empty input.
    """
    if not values:
        raise ValueError("cannot take the mean of an empty sequence")
    return sum(values) / len(values)
def sampleStandardDeviation(values):
    """Return the sample standard deviation (n - 1 denominator) of values.

    Requires at least two data points. The original shadowed the builtins
    ``list`` (parameter) and ``sum`` (accumulator); both are renamed.
    """
    xBar = barOf(values)
    squared_error = sum((v - xBar) ** 2 for v in values)
    return np.sqrt(squared_error / (len(values) - 1))
def pickInitialCentroids(xs, ys, k):
    """Choose k distinct random data points as the starting centroids.

    Returns a list of k [x, y] pairs sampled without replacement.
    """
    chosen = random.sample(range(0, len(xs)), k)
    return [[xs[idx], ys[idx]] for idx in chosen]
def calculateDistances(centroids, x, y, k):
    """Return, for each point n, its Euclidean distance to each of the k centroids.

    Bug fix: the original measured the y-offset against the centroid's
    *x*-coordinate (``centroids[i][0] - y[n]``); it now correctly uses
    ``centroids[i][1]``, giving true Euclidean distances.
    """
    distanceFromCentroids = [[] for _ in range(len(x))]
    for n in range(len(x)):
        for i in range(k):
            dx = centroids[i][0] - x[n]
            dy = centroids[i][1] - y[n]
            distanceFromCentroids[n].append(np.sqrt(dx ** 2 + dy ** 2))
    return distanceFromCentroids
def assignPoints(xData, yData, k, distanceFromCentroids):
    """Return, per point, the index of its nearest centroid.

    ``xData``, ``yData`` and ``k`` are unused but kept for interface
    compatibility with the caller.
    """
    clusterID = []
    nearest = -1
    for row in distanceFromCentroids:
        best = float('inf')
        for idx, dist in enumerate(row):
            if dist < best:
                best = dist
                nearest = idx
        # Note: if a row were empty, the previous point's index is appended
        # (same behavior as the original, which reused its loop variable).
        clusterID.append(nearest)
    return clusterID
def recalculateCentroids(xData, yData, clusterID, k):
    """Recompute each centroid as the mean of the points assigned to it.

    An empty cluster is re-seeded at a randomly chosen data point so the
    algorithm never loses a centroid.
    """
    centroids = []
    for cluster in range(k):
        sumX = 0.0
        sumY = 0.0
        num = 0
        for idx in range(len(xData)):
            if clusterID[idx] == cluster:
                sumX += xData[idx]
                sumY += yData[idx]
                num += 1
        print("sumX: {0} sumY: {1} number: {2}".format(sumX, sumY, num))
        if num:
            centroids.append([sumX / num, sumY / num])
        else:
            point = random.randint(0, len(xData) - 1)
            centroids.append([xData[point], yData[point]])
    return centroids
def mergeCentroids(xData, yData, clusterID, centroids):
    """Attempt to merge centroid pairs closer than the mean sample std-dev.

    NOTE(review): this function (commented out at the call site) looks
    unfinished — see the inline notes; confirm intent before re-enabling.
    """
    badClusters = []
    ssd = (sampleStandardDeviation(xData) + sampleStandardDeviation(yData)) / 2
    print(ssd)
    for n in range(0, len(centroids) - 1):
        # NOTE(review): inner bound is len(centroids[0]) (a 2-element point),
        # probably meant len(centroids); also i starts at n so every centroid
        # is compared with itself (distance 0 < ssd always).
        for i in range(n, len(centroids[0])):
            if np.sqrt((centroids[n][0] - centroids[i][0]) ** 2 + (centroids[n][1] - centroids[i][1]) ** 2) < ssd:
                badClusters.append([n])
                badClusters.append([i])
    for n in range(0, len(xData)):
        for clusterNum in range(0, len(badClusters)):
            # NOTE(review): clusterID holds ints but badClusters holds
            # 1-element lists, so this equality never fires; both sides of
            # the ``or`` are also identical.
            if clusterID[n] == badClusters[clusterNum] or clusterID[n] == badClusters[clusterNum]:
                clusterID[n] = badClusters[0]
    return clusterID
def shouldStop(old, new, iterations):
    """Stop when the centroids are unchanged or the 1000-iteration cap is hit."""
    if iterations < 1000:
        return old == new
    return True
def main():
    """Run k-means on a small hard-coded 2-D data set and plot the result."""
    colors = ['b', 'g', 'r', 'c', 'm', 'y', 'k', 'w']
    # Four visually separable clusters of points.
    xData = (1, 1, 2, 2, 7, 8, 7, 9, 10, 15, 16, 15, 17, 0, 1, 1, 0, 1)
    yData = (0, 1, 2, 1, 9, 7, 8, 9, 8, 15, 16, 16, 15, 10, 12, 10, 11, 12)
    # necessary variables
    k = 4
    lastRun = []
    i = 0
    print("Number of points= {}".format(len(xData)))
    newCentroids = pickInitialCentroids(xData, yData, k)
    print("centroids: {}".format(newCentroids))
    # Classic k-means loop: assign points, recompute centroids, repeat until
    # centroids stop moving (or the iteration cap in shouldStop is reached).
    while not shouldStop(lastRun, newCentroids, i):
        i += 1
        lastRun = newCentroids
        print("Run {}:".format(i))
        distances = calculateDistances(newCentroids, xData, yData, k)
        print("distances: {}".format(distances))
        clusterids = assignPoints(xData, yData, k, distances)
        #clusterids = mergeCentroids(xData, yData, clusterids, newCentroids)
        print("clusterids: {}".format(clusterids))
        newCentroids = recalculateCentroids(xData, yData, clusterids, k)
        print("newCentroids: {}".format(newCentroids))
        # Plot this run's centroids in the run's cycling color.
        for n in range(0, len(newCentroids)):
            plt.scatter(newCentroids[n][0], newCentroids[n][1], c=colors[i % len(colors)])
    # Dump final assignment details per point.
    for n in range(0, len(clusterids)):
        print("point: [{},{}] id: {} centroid location: {} distances: {}".format(xData[n], yData[n], clusterids[n], newCentroids[clusterids[n]], distances[n]))
    plt.scatter(xData, yData, marker='+')
    plt.axis([-1, 20, -1, 20])
    plt.show() # data works, two distinct clusters
main()
|
# Generated by Django 2.2 on 2019-04-17 12:59
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    # Auto-generated: drops Company and Personne, and alters requet.pub_date.
    dependencies = [
        ('users', '0005_auto_20190417_1459'),
        ('requets', '0008_auto_20190417_1408'),
    ]
    operations = [
        migrations.DeleteModel(
            name='Company',
        ),
        migrations.DeleteModel(
            name='Personne',
        ),
        migrations.AlterField(
            model_name='requet',
            name='pub_date',
            # NOTE(review): the default is frozen at the moment makemigrations
            # ran (2019-04-17 12:59:26 UTC). The model likely wants a callable
            # such as django.utils.timezone.now — confirm against the model.
            field=models.DateTimeField(default=datetime.datetime(2019, 4, 17, 12, 59, 26, 528982, tzinfo=utc)),
        ),
    ]
|
import numpy as np
import pandas as pd
import random
df_sampleDB = pd.DataFrame(columns=['name', 'birth', 'sex', 'district', 'grade', 'regist', 'lastlogin', 'logincnt', 'cartcnt', 'ordercnt', 'orderqty', 'orderprice', 'couponcnt', 'couponused', 'refund'])
# Empty customer-sample DataFrame with one column per generated field.
Lastname, Firstname = '김이박최정강조윤장임한오서신권황안송전홍', '시림서환원태예우재빈석진이정준선연율강하사랑소찬건한리훈람유승성수아양현채안윤후은희린주혜인가도호민온세름다나영결완지동규혁'
# Character pools for random Korean name generation (source: https://koreanname.me/)
birth_random = pd.date_range(start='19800101', end='20011231')
# Birth dates restricted so customers are roughly 20 to 40 years old.
list_district = ['도봉구', '강북구', '노원구', '은평구', '종로구', '성북구', '중랑구', '동대문구', '서대문구', '중구', '성동구', '광진구', '강동구', '마포구', '용산구', '강서구', '양천구', '영등포구', '구로구', '동작구', '금천구', '관악구', '서초구', '강남구','송파구','강동구']
# Residence limited to districts of Seoul. NOTE(review): '강동구' appears
# twice, doubling its sampling weight — confirm whether that is intended.
regist_random = pd.date_range(start='20180101', end='20191231')
# Registration dates start at the service launch date, 2018-01-01.
list_refundpcnt = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1]
# Refund probability set to 5% per *order* (not per purchased item).
# Generate synthetic customer rows. NOTE(review): range(0, 1) emits a single
# row — presumably a debugging value; raise it to build a real sample DB.
for i in range(0, 1):
    dt_name = random.choice(Lastname) + random.choice(Firstname) + random.choice(Firstname)
    dt_birth = random.choice(birth_random.strftime("%Y%m%d").tolist())
    dt_sex = random.sample(['male', 'female'], 1)[0]
    dt_district = random.sample(list_district, 1)[0]
    dt_grade = random.randrange(1, 4, 1)
    dt_regist = random.choice(regist_random.strftime("%Y%m%d").tolist())
    # Last login can only happen on or after the registration date.
    lastlogin_random = pd.date_range(start=dt_regist, end='20191231')
    # Bug fix: the last-login date was drawn from regist_random, silently
    # ignoring the lastlogin_random range computed just above (which could
    # produce a login date *before* registration).
    dt_lastlogin = random.choice(lastlogin_random.strftime("%Y%m%d").tolist())
    # Activity counters scale with the customer grade.
    dt_logincnt = (2**(dt_grade-1)) + random.randrange(0, 31, 1)
    dt_cartcnt = (3**(dt_grade-1)) + random.randrange(0, 11, 1)
    dt_ordercnt = random.randrange(0,int(dt_cartcnt/2)+1)+int(2**random.randrange(dt_grade-2,dt_grade+2)) + dt_grade-1
    dt_orderqty = sum(int(random.sample([1, 1, 1, 1, 1, 1, 2, 2, 2, 3], 1)[0]) for qty in range(0,dt_ordercnt))
    dt_orderprice = sum([random.randrange(5000, 35001, 5000) for odcnt in range(0, dt_ordercnt)])
    dt_couponcnt = sum([random.randint(0,1) for cpn in range(0,int(dt_logincnt/3))])
    dt_couponused = dt_couponcnt - sum([random.randint(0,1) for cpn in range(0, dt_couponcnt)])
    dt_refund = sum([int(random.sample(list_refundpcnt, 1)[0]) for rfd in range(0,dt_ordercnt)])
    df_sampleDB.loc[i] = {'name': dt_name,
                          'birth': dt_birth,
                          'sex': dt_sex,
                          'district': dt_district,
                          'grade': dt_grade,
                          'regist': dt_regist,
                          'lastlogin': dt_lastlogin,
                          'logincnt': dt_logincnt,
                          'cartcnt': dt_cartcnt,
                          'ordercnt': dt_ordercnt,
                          'orderqty': dt_orderqty,
                          'orderprice': dt_orderprice,
                          'couponcnt': dt_couponcnt,
                          'couponused': dt_couponused,
                          'refund': dt_refund
                          }
|
import torch
from torch.autograd import Variable
from .certificate import Certificate
def optimize_isotropic_dds(
    model: torch.nn.Module, batch: torch.Tensor,
    certificate: Certificate, learning_rate: float,
    sig_0: torch.Tensor, iterations: int, samples: int,
    device: str = 'cuda:0'
) -> torch.Tensor:
    """Optimize smoothing parameters for a batch.

    Runs gradient *ascent* on one isotropic sigma per input to maximize the
    certified radius estimated by Monte-Carlo sampling.

    Args:
        model (torch.nn.Module): trained network (its weights are frozen for
            the duration of the optimization and unfrozen before returning)
        batch (torch.Tensor): inputs to certify around
        certificate (Certificate): instance of desired certification object
        learning_rate (float): optimization learning rate for ANCER
        sig_0 (torch.Tensor): initialization value per input in batch
        iterations (int): number of iterations to run the optimization
        samples (int): number of noise samples per input and iteration
        device (str, optional): NOTE(review) — currently unused; tensors stay
            on whatever device they arrive on. Confirm intended behavior.
    Returns:
        torch.Tensor: optimized isotropic thetas, flattened to shape
        (batch_size,)
    """
    batch_size = batch.shape[0]
    # One learnable sigma per input, broadcastable over (C, H, W).
    # NOTE(review): torch.autograd.Variable is deprecated; kept as-is since
    # replacing it may alter aliasing of sig_0's storage.
    sig = Variable(sig_0, requires_grad=True).view(batch_size, 1, 1, 1)
    # Freeze the network so gradients flow only into sigma.
    for param in model.parameters():
        param.requires_grad_(False)
    # Reshape so each input is replicated `samples` times along the batch dim.
    new_shape = [batch_size * samples]
    new_shape.extend(batch[0].shape)
    new_batch = batch.repeat((1, samples, 1, 1)).view(new_shape)
    for _ in range(iterations):
        sigma_repeated = sig.repeat((1, samples, 1, 1)).view(-1, 1, 1, 1)
        eps = certificate.sample_noise(
            new_batch, sigma_repeated)  # Reparametrization trick
        out = model(new_batch + eps).reshape(
            batch_size, samples, - 1).mean(1)  # This is \psi in the algorithm
        # Top-2 class scores feed the certificate's radius estimate.
        vals, _ = torch.topk(out, 2)
        radius = certificate.compute_radius_estimate(vals, sig.reshape(-1))
        grad = torch.autograd.grad(radius.sum(), sig)
        # Manual parameter update on .data, outside the autograd graph.
        sig.data += learning_rate*grad[0]  # Gradient Ascent step
    # Unfreeze the network for any subsequent training.
    for param in model.parameters():
        param.requires_grad_(True)
    return sig.reshape(-1)
|
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn import datasets as ds
import tensorflow as tf
import Utils as myutils
def load_dataset(verbose=False):
    """Load the Boston housing data and trim it to a tiny debug subset.

    NOTE(review): sklearn.datasets.load_boston was removed in scikit-learn
    1.2; this script requires an older scikit-learn (and TF 1.x below).

    Returns:
        (features, target, feature_names) — 5 samples, 3 selected features.
    """
    data = ds.load_boston()
    if(verbose):
        for key in data:
            print('{0} : {1}'.format(key, type(data[key])))
    features = data['data']
    target = data['target']
    feature_names = data['feature_names']
    # NOTE(review): "lables" typo is inside a runtime string; left unchanged.
    print('Loaded : features.shape = {0}, lables.shape = {1}'.format(features.shape, target.shape))
    if(verbose):
        print('features = {0}'.format(feature_names))
        print('description = {0}'.format(data['DESCR']))
    # reduce features to simplify debug
    used_feature_idx = [5, 7, 8]
    # reduce sample count to simplify debug
    used_sample_count = 5
    feature_names = feature_names[used_feature_idx]
    features = features[:used_sample_count, used_feature_idx]
    target = target[:used_sample_count]
    print('Used features : {0}'.format(feature_names))
    print('Used : features.shape = {0}, target.shape = {1}'.format(features.shape, target.shape))
    return features, target, feature_names
def main():
    """Fit a 1-unit linear layer on a toy dataset with TF1 graph mode.

    NOTE(review): the Boston data loaded above is discarded — the model is
    trained on the hard-coded x_val/y_val pairs (y = -x + 1); confirm intent.
    Uses TF 1.x APIs (tf.Session, tf.layers, tf.train) throughout.
    """
    myutils.start_banner()
    [features, target, _] = load_dataset()
    x_val = [
        [1.], [2.], [3.], [4.]
    ]
    y_val = [
        [0.], [-1.], [-2.], [-3.]
    ]
    x = tf.constant(x_val, dtype=tf.float32)
    y_actual = tf.constant(y_val, dtype=tf.float32)
    lin_model = tf.layers.Dense(units=1)
    y_pred = lin_model(x)
    loss = tf.losses.mean_squared_error(labels=y_actual, predictions=y_pred)
    optimizer = tf.train.GradientDescentOptimizer(0.1)
    train = optimizer.minimize(loss)
    loss_history = []
    training_iterations = range(200)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        # Each run executes one gradient step and records the loss.
        for i in training_iterations:
            _, loss_val = sess.run((train, loss))
            loss_history.append(loss_val)
        y_pred_val = sess.run(y_pred)
        # Learning-curve plot.
        plt.plot(training_iterations, loss_history)
        plt.xlabel('interations')
        plt.ylabel('error')
        plt.grid(True)
        plt.show()
        # Fitted line versus training points.
        plt.plot(x_val, y_val, 'x')
        plt.plot(x_val, y_pred_val)
        plt.xlabel('x')
        plt.ylabel('y')
        plt.grid(True)
        plt.show()
        for i in range(3):
            print(y_val[i], y_pred_val[i])
        print(sess.run(tf.trainable_variables()))
        myutils.dump_graph(tf.get_default_graph())
    myutils.end_banner()
main()
from shinra_error import ShinraError
def get_title(html: str) -> str:
    """Extract the page title from raw HTML, stripping a Wikipedia-dump suffix.

    Args:
        html: Raw HTML text containing a <title>...</title> element.

    Returns:
        The title text, with a trailing " - Wikipedia Dump" suffix removed.

    Raises:
        ShinraError: if the opening or closing title tag is missing.
    """
    open_tag = "<title>"
    st = html.find(open_tag)
    ed = html.find("</title>")
    # Bug fix: the original added len("<title>") to st *before* the -1 check,
    # so a missing <title> produced st == 6 and the error was never raised.
    if st == -1 or ed == -1:
        raise ShinraError("Title Not found")
    text = html[st + len(open_tag):ed]
    suffix = text.find(" - Wikipedia Dump")
    return text[:suffix] if suffix != -1 else text
|
import json
from functools import wraps
from django.utils.safestring import mark_safe
from django.http import HttpResponse, HttpResponseNotAllowed, Http404
from django.http import HttpResponseForbidden, HttpResponseBadRequest
def ajax_view(only_POST=False, only_authenticated=False, **jsonargs):
    """
    Decorator for AJAX views that:
    - rejects non-AJAX requests (unless settings.DEBUG is on)
    - if only_POST is True, only allows POST/REQUEST methods
    - if only_authenticated is True, requires an authenticated user
    - serializes the view's returned dictionary to a JSON HttpResponse

    Extra keyword arguments are forwarded to json.dumps ('indent' defaults
    to 4).

    NOTE(review): ``settings``, ``_403_ERROR``, ``_405_ERROR`` and
    ``AJAX_ERROR`` are not imported in this file's visible import block —
    confirm they are provided elsewhere in the module.
    """
    def decorator(f):
        @wraps(f)
        def _ajax_view(request, *args, **kwargs):
            # check for ajax request
            if not request.is_ajax() and not settings.DEBUG:
                return HttpResponseForbidden(
                    mark_safe(_403_ERROR % 'Request must be set via AJAX.'))
            # check for POST request
            if only_POST:
                # NOTE(review): 'REQUEST' is not a standard HTTP method and
                # "mehtod" is a typo inside the runtime string (a code change
                # would be needed to fix it) — confirm both are intentional.
                if request.method != 'POST' and request.method != 'REQUEST':
                    return HttpResponseNotAllowed(
                        mark_safe(_405_ERROR % (
                            'Request mehtod must be POST or REQUEST.')))
            # check if the user is authenticated or not
            if only_authenticated:
                user = request.user
                # NOTE(review): calling is_authenticated() is pre-Django-1.10
                # style; on modern Django it is a property, not a callable.
                if not user.is_authenticated():
                    return HttpResponseForbidden(
                        mark_safe(_403_ERROR % 'User must be authenticated!'))
            # get the result
            result = f(request, *args, **kwargs)
            # a falsy result (None, {}) is reported as AJAX_ERROR
            if not result:
                result = { 'status': AJAX_ERROR }
            # determine the indentation of the json output
            indent = jsonargs.pop('indent', 4)
            # Dump as json
            return HttpResponse(json.dumps(result, indent=indent, **jsonargs))
        return _ajax_view
    return decorator
import serial
import cv2
import numpy as np
from .commander import Commander
from .processor import Processor
def nothing(x):
pass
class App(object):
    """Camera + serial-commander application driving the vision pipeline.

    Python 2 module (see the ``except Exception, e`` syntax in run()).
    """
    def __init__(self, port='/dev/cu.HC-05-DevB', baud=9600, cam=0):
        # Serial link to the robot (Bluetooth HC-05 by default).
        self.commander = Commander(port, baud)
        print('Commander initialized.')
        self.camera = cv2.VideoCapture(cam)
        print('Camera initialized')
        self.processer = Processor()
    def adjust_Cam(self):
        """Show a live preview until the user confirms the camera with 'q'."""
        print('Adjust camera position. Enter q to continue.')
        while True:
            ret, frame = self.camera.read()
            if ret:
                cv2.imshow("Test", frame)
            if cv2.waitKey(20) & 0xFF == ord('q'):
                break
        cv2.destroyAllWindows()
        print('Camera position confirmed.')
    def configure(self):
        """Interactive setup: position the camera, then pick the work area."""
        self.adjust_Cam()
        self.processer.set_boundary(self.camera)
        print('done!')
    def update(self):
        # Placeholder; no per-frame state to refresh yet.
        pass
    def run(self):
        """Main loop: preview each processing stage until 'q', then execute.

        Pipeline per frame: perspective warp -> grayscale+blur -> threshold
        (trackbar-tunable) -> thinning -> corner detection. NOTE(review):
        the frame is read once *before* the loop, so the preview shows a
        static image — confirm whether a per-iteration read was intended.
        """
        try:
            self.configure()
            cv2.namedWindow('webcam')
            cv2.namedWindow('persp')
            cv2.namedWindow('thresh')
            cv2.namedWindow('thin')
            cv2.namedWindow('corner')
            cv2.createTrackbar('th', 'thresh', 120,255, nothing)
            ret, frame = self.camera.read()
            while True:
                th = cv2.getTrackbarPos('th', 'thresh')
                # perspective tranform
                warp = self.processer.perspectiveTransform(frame)
                warp = cv2.cvtColor(warp, cv2.COLOR_BGR2GRAY)
                warp = cv2.GaussianBlur(warp, (5,5), 0)
                cv2.imshow('persp', warp)
                # thresh
                thresh = self.processer.thresh(warp, th)
                cv2.imshow('thresh', thresh)
                #thinning
                thinned = self.processer.thin(thresh)
                cv2.imshow('thin', thinned)
                # # #
                corner = self.processer.corner(thinned, 25, thresh)
                cv2.imshow('corner', corner)
                k = cv2.waitKey(20) & 0xFF
                if k == ord('q'):
                    break
            cv2.destroyAllWindows()
            self.processer.process_corners()
            self.processer.work(self.camera, self.commander)
            self.camera.release()
            print('END.')
        except Exception, e:
            print e
        finally:
            # Always close the serial link, even on error.
            self.commander.disconnect()
|
# Small demo of list mutation: append an element, then replace the first one.
games = ['LOL', 'AOV', 'Guitar']
print(games)
games.append('PUBG')
print(games)
target_index = 0
new_title = 'DXD'
games[target_index] = new_title
print(games)
from ..structures import Dataset2
from .. import formats
from .. import utils
###############################################################################
class BBMRI2(Dataset2):
    """
    The BBMRI complete genomics sequencing set.
    Provides functionality to query the genetic variants in the study.
    objects:
    vcf_*, where * is a chromosome identifier, 1-22,M,X,Y
    phenotypes: A description of phenotypes for each individual in the study
    ids: An ID mapping for individuals in the study
    percentiles: Data on the percentiles of ageing for the individuals in the study
    Example usage:
    ---------------
    lls = biu.db.BBMRI2()
    v = lls.filter(15, 5001234, 5002345)
    """
    # Known dataset versions and, per version, the chromosomes available.
    versions = { "current":
        { "chrs" : [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12",
                     "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "M", "X", "Y" ] }
    }
    version = None
    vcf = None
    def __init__(self, version=list(versions.keys())[0], *pargs, **kwargs):
        """
        Initialize the objet.
        See parameters for biu.structures.Dataset2 for details about *pargs and **kwargs
        """
        super(BBMRI2, self).__init__("BBMRI/%s" % version, *pargs, **kwargs)
        self.version = version
        # Register one tabix-indexed VCF object per chromosome.
        for chrom in self.versions[self.version]["chrs"]:
            vcf = "vcf_%s.vcf" % chrom
            tbi = "vcf_%s.vcf.tbi" % chrom
            vcf_file = utils.Acquire2("/exports/molepi/BBMRISEQ/tbx/merged.bbmri.chr%s.vcf.bgz" % chrom)
            tbi_file = utils.Acquire2("/exports/molepi/BBMRISEQ/tbx/merged.bbmri.chr%s.vcf.bgz.tbi" % chrom)
            self._obj.add_file(vcf, vcf_file, finalize=False)
            self._obj.add_file(tbi, tbi_file, finalize=False)
            # The vcf=vcf default argument binds the *current* filename at
            # definition time, avoiding Python's late-binding closure pitfall.
            self._obj.register("vcf_%s" % chrom, [vcf, tbi], lambda f, vcf=vcf: formats.VCF2(f[vcf], tabix=True))
        #efor
    #edef
    def filter(self, chrom, start, end, *pargs, **kwargs):
        """
        Perform a filter for a single region
        parameters:
        -----------
        chrom: String|int. Chromosome of interest
        start: int. Start of region of interest
        end: int. End of region of interest
        *pargs, **kwargs: See additional arguments for VCF2.filter
        Returns: VCF2 object
        """
        oname = "vcf_%s" % str(chrom)
        if oname not in self._obj:
            raise AttributeError("Could not find chromosome '%s'" % chrom)
        #fi
        return self._obj[oname].filter(chrom, start, end, *pargs, **kwargs)
    #edef
    def filter_regions(self, regions, chrom=None, start=None, end=None, *pargs, **kwargs):
        """
        Perform a filter for several regions
        parameters:
        -----------
        regions: A list of 3-tuples (chrom, start, end) for each region of interest
        chrom, start, end: Ignored
        *pargs, **kwargs: See additional arguments for VCF2.filter
        Returns: VCF2 object
        """
        # Filter each region independently, then merge into one VCF2 object.
        rets = [ self.filter(c, s, e, *pargs, **kwargs) for (c,s,e) in regions ]
        return rets[0].merge(rets)
    #edef
    def get_var(self, chrom, *pargs, **kwargs):
        """
        Get the variant record for a specific variant
        parameters:
        -----------
        chrom: str|int. Which chromosome the variant is on
        pos: int. What position the variant is on.
        ref: str. What is the reference allele?
        alt: str. What is the alternative allele?
        returns:
        --------
        A cyvcf2.VCF.Variant object if the variant exists. Otherwise None
        """
        oname = "vcf_%s" % str(chrom)
        if oname not in self._obj:
            raise AttributeError("Could not find chromosome '%s'" % chrom)
        #fi
        return self._obj[oname].get_var(chrom, *pargs, **kwargs)
    #edef
    def who_has(self, chrom, *pargs, **kwargs):
        """
        Determine who has a specific variant
        parameters:
        -----------
        chrom: str|int. Which chromosome the variant is on
        pos: int. What position the variant is on.
        ref: str. What is the reference allele?
        alt: str. What is the alternative allele?
        returns:
        --------
        List of sample IDs for who has the variant.
        """
        oname = "vcf_%s" % str(chrom)
        if oname not in self._obj:
            raise AttributeError("Could not find chromosome '%s'" % chrom)
        #fi
        return self._obj[oname].who_has(chrom, *pargs, **kwargs)
    #edef
#eclass
|
import time
import schedule
from plyer import notification
from src.notifier import Notifier
from src.utils.configurer import config
def job(notifier: Notifier, system_notifier: notification, delay: int = 2):
    """
    Job to check if a delivery slot gets available for the default selected address in your bigbasket website.
    @param system_notifier: notification - To notify users (cross-platform) via balloon tiles.
    @param notifier: Notifier - Notifier class - To monitor bigbasket website.
    @param delay: int - Just a preventive measure to not make too many requests at the same time.
    """
    notifier.visit_main_page()
    time.sleep(delay)
    addr_id = notifier.visit_cart_page_and_get_address_id()
    time.sleep(delay)
    status, resp = notifier.check_if_delivery_slot_available(addr_id)
    # Guard clause: nothing to announce unless a slot was found.
    if not status:
        return
    system_notifier.notify(
        title='BigBasket Notifier',
        message='A free delivery slot is found for your address',
        app_name='bigbasket-notifier'
    )
if __name__ == "__main__":
n = Notifier(
config.get_configuration('phone_number', "APP"),
config.get_configuration('session_pickle_filename', "SYSTEM"),
load_session=True
)
schedule.every(
int(config.get_configuration("interval", "APP"))
).minutes.do(job, n, notification)
while True:
schedule.run_pending()
time.sleep(1)
|
#!/usr/bin/python3
import argparse
import sys
import yaml
import textwrap
from string import Template
def parse_arguments():
    """Build and run the CLI parser for the Arcane Strike generator.

    Returns the parsed argparse.Namespace with attributes:
    speed (int, required), input (str), output (str), readable (bool).
    """
    argParse = argparse.ArgumentParser()
    argParse.add_argument("-s", "--speed", help="REQUIRED: The speed of your weapon", type=int, required=True)
    argParse.add_argument("-i", "--input", help="OPTIONAL: The YML file containing your spells. Defaults to 'spells.yml'", default="spells.yml")
    argParse.add_argument("-o", "--output", help="OPTIONAL: The output file containing the Arcane Strike maneuvers. \
        Defaults to 'arcane_strike.out'", default="arcane_strike.out")
    argParse.add_argument("-r", "--readable", help="OPTIONAL: Create a file for importing into the maneuver calculator", default=False, action="store_true")
    args = argParse.parse_args()
    return args
arguments = parse_arguments()
weaponSpeed = arguments.speed
# NOTE(review): humanReadable is assigned but never used below — the
# "readable" output mode appears unimplemented; confirm.
humanReadable = arguments.readable
with open(arguments.input, 'r') as inputFile:
    spell_data = inputFile.read()
# parse the yml file here
spells = yaml.full_load(spell_data)
with open(arguments.output, 'w') as outputFile:
    # One Arcane Strike maneuver per spell: the maneuver is as slow as the
    # slower of weapon/spell, and recovers at the faster of the two.
    for spell in spells.items():
        maneuver_name = "Arcane Strike(%s)" % spell[0]
        maneuver_speed = max(weaponSpeed, spell[1]['Speed'])
        recovery = min(weaponSpeed, spell[1]['Speed'])
        strain = spell[1]['Strain']
        description = spell[1]['Description']
        ctn = spell[1]['CTN']
        defense = spell[1]['Defense']
        # Need to implement manevuer output file later
        header_data = Template("$maneuver_name\n  Speed(Recovery, Strain): $maneuver_speed($recovery, $strain)\n\
  CTN: $ctn\n  Defense: $defense\n").safe_substitute(maneuver_name=maneuver_name, \
        maneuver_speed=maneuver_speed, recovery=recovery, strain=strain, ctn=ctn, defense=defense)
        # Wrap the description to 50 columns and indent each wrapped line.
        description_data = textwrap.wrap(description, 50, break_long_words=False)
        outputFile.write(header_data)
        for lines in description_data:
            outputFile.write("  ")
            outputFile.write(lines)
            outputFile.write("\n")
        outputFile.write("\n\n")
|
from src.messages import errors
class Error(Exception):
    """Application base error; str() falls back to the class name when the
    exception carries no message."""
    def __str__(self):
        message = Exception.__str__(self)
        return message if message != '' else self.__class__.__name__
class BadRequest(Error):
    """The request was malformed or invalid."""


class Unauthorized(Error):
    """Authentication is missing or failed."""


class Forbidden(Error):
    """The caller is not allowed to perform this action."""


class NotFound(Error):
    """The requested resource does not exist."""


class MethodNotAllowed(Error):
    """The method is not supported for this resource."""


class PersistenceError(Error):
    """A storage-layer operation failed."""


class NotUniqueError(Error):
    """A uniqueness constraint was violated."""
class EmailAlreadyExists(Error):
    """Raised when registering an email address that is already taken."""

    def __init__(self, email, *args, **kwargs):
        # Bake the offending address into the message up front.
        message = "Email '{}' already exists".format(email)
        Error.__init__(self, message, *args, **kwargs)
class InvalidCredentials(Error):
    """Supplied login credentials did not match any account."""
|
"""Tornado Webserver staff.
This module based on Tornado whicn is a Python web framework
and asynchronous networking library.
"""
import os
import tornado.ioloop
import tornado.web
import tornado.wsgi
import utils
from crawler import Admin, Crawler
# This tells tornado where to find the template files
# Tornado application settings: template lookup directory plus debug mode.
settings = dict(
    template_path=os.path.join(os.path.dirname(__file__), 'templates'),
    # NOTE(review): debug=True enables autoreload and verbose tracebacks;
    # confirm this is not deployed to production as-is.
    debug=True
)
class IndexPageHandler(tornado.web.RequestHandler):
    """Handles requests for the index page."""

    def get(self):
        """Render the index page."""
        self.render("index.html")

    def post(self):
        """Crawl the submitted URL and render its top-100 word counts."""
        uri = self.get_argument('url')
        # Fix: test emptiness before calling the validator so an empty
        # submission short-circuits and never reaches utils.uri_validator
        # (the original evaluated the validator first, making the `and uri`
        # guard useless as a precondition).
        if uri and utils.uri_validator(uri):
            words = Crawler(uri).counters
            self.render("top100.html", items=words)
        else:
            self.write('Wrong url format. \
Tips: Should start from schema http(s)://abc.abc')
class AdminHandler(tornado.web.RequestHandler):
    """Handles admin page requests."""

    def get(self):
        """Render every stored word with its counters on the admin page."""
        words_and_counters = Admin().get_data()
        self.render("admin.html", items=words_and_counters)
def make_app():
    """Build the Tornado application with its URL routes."""
    routes = [
        (r"/", IndexPageHandler),
        (r"/admin", AdminHandler),
    ]
    return tornado.web.Application(routes, **settings)
# WSGI entry point: wraps the Tornado app so any WSGI container can serve it.
application = tornado.wsgi.WSGIAdapter(make_app())
|
#while문을 사용해 1부터 1000까지의 자연수 중 3의 배수의 합을 구해 보자.
# result = 0
# i = 1
# while i <= 1000:
# if i % 3 == 0:
# result += i
# i += 1
# print(result)
#while문을 사용하여 다음과 같이 별(*)을 표시하는 프로그램을 작성해 보자.
# *
# **
# ***
# ****
# *****
# i = 0
# while True:
# i += 1
# if i > 5: break
# print("*" * i)
#for문을 사용해 1부터 100까지의 숫자를 출력해 보자.
# for i in range(1,101):
# print(i)
#for문을 사용하여 A 학급의 평균 점수를 구해 보자.
# A = [70, 60, 55, 75, 95, 90, 80, 80, 85, 100]
# total = 0
#
# for score in A:
# total += score #A학급 점수 모두 더하기
#
# average = total / len(A)
# print(average)
#리스트 중에서 홀수에만 2를 곱하여 저장하는 다음과 같은 코드가 있다.
#
# numbers = [1, 2, 3, 4, 5]
#
# result = []
# for n in numbers:
# if n % 2 == 1:
# result.append(n*2)
#위 코드를 리스트 내포(list comprehension)를 사용하여 표현해 보자.
# numbers = [1, 2, 3, 4, 5]
#
# result = [n*2 for n in numbers if n%2 ==1]
# print(result)
|
import os
import glob
from chwall.utils import get_logger
import gettext
# Uncomment the following line during development.
# Please, be cautious to NOT commit the following line uncommented.
# gettext.bindtextdomain("chwall", "./locale")
gettext.textdomain("chwall")
_ = gettext.gettext
logger = get_logger(__name__)
def fetch_pictures(config):
    """Collect local wallpaper files described by the "local" config section.

    Returns a dict mapping each picture path to its chwall metadata dict
    ({} when no repository path is configured).
    """
    conf = config.get("local", {})
    # Fix: copy before mutating. The original inserted fav_dir into the
    # very list stored inside `config`, so every call prepended it again.
    paths = list(conf.get("paths", []))
    include_fav = conf.get("favorites", True)
    fav_dir = config["general"]["favorites_path"]
    try:
        # Check the cheap flag first; only then touch the filesystem.
        if include_fav and os.path.exists(fav_dir):
            paths.insert(0, fav_dir)
    except PermissionError as e:
        logger.error(e)
    if len(paths) == 0:
        return {}
    pictures = {}
    for path in paths:
        path = os.path.expanduser(path)
        try:
            for ext in ["jpg", "jpeg", "png"]:
                # NOTE(review): recursive=True only matters with a "**"
                # pattern; this glob stays one level deep -- confirm intent.
                glob_path = "{}/*.{}".format(path, ext)
                for f in glob.iglob(glob_path, recursive=True):
                    pictures[f] = {
                        "image": f,
                        "type": "local",
                        "url": f,
                        "copyright": _("Local wallpaper")
                    }
        except PermissionError as e:
            logger.error(e)
    return pictures
def preferences():
    """Describe the configuration options exposed by the local source."""
    paths_option = {
        "widget": "list",
        "label": _("Wallpaper repositories")
    }
    favorites_option = {
        "label": _("Include favorites wallpapers"),
        "widget": "toggle",
        "default": True
    }
    return {
        "name": _("Local files"),
        "options": {
            "paths": paths_option,
            "favorites": favorites_option
        }
    }
|
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# JeroR.java
# Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
import jarray
import sys
import weka.classifiers.Classifier as Classifier
import weka.classifiers.Evaluation as Evaluation
import weka.core.Capabilities as Capabilities
import weka.core.Capabilities.Capability as Capability
import weka.core.Instance as Instance
import weka.core.Instances as Instances
import weka.core.JythonSerializableObject as JythonSerializableObject
import weka.core.Utils as Utils
class JeroR (Classifier, JythonSerializableObject):
    """
    JeroR is a Jython implementation of the Weka classifier ZeroR:
    it always predicts the majority class (nominal) or the mean class
    value (numeric), ignoring all attributes.
    'author' -- FracPete (fracpete at waikato dot ac dot nz)
    'version' -- $Revision: 5628 $
    """
    # the documentation can be generated with HappyDoc:
    # http://happydoc.sourceforge.net/
    # Example command:
    # happydoc --title Weka -d ./doc ./src

    # the chosen class value: mean for numeric classes, majority-class
    # index for nominal classes
    __ClassValue = Instance.missingValue()
    # the class attribute of the training data
    __Class = None
    # the (Laplace-smoothed) counts for each class label; None for numeric
    __Counts = None

    def listOptions(self):
        """
        Returns an enumeration describing the available options.
        Return:
        an enumeration of all the available options.
        """
        return Classifier.listOptions(self)

    def setOptions(self, options):
        """
        Parses a given list of options.
        Parameter(s):
        'options' -- the list of options as an array of strings
        """
        Classifier.setOptions(self, options)
        return

    def getOptions(self):
        """
        Gets the current settings of the Classifier as string array.
        Return:
        an array of strings suitable for passing to setOptions
        """
        return Classifier.getOptions(self)

    def getCapabilities(self):
        """
        returns the capabilities of this classifier
        Return:
        the capabilities of this classifier
        """
        result = Classifier.getCapabilities(self)
        # attributes: ZeroR accepts everything since attributes are ignored
        result.enable(Capability.NOMINAL_ATTRIBUTES)
        result.enable(Capability.NUMERIC_ATTRIBUTES)
        result.enable(Capability.DATE_ATTRIBUTES)
        result.enable(Capability.STRING_ATTRIBUTES)
        result.enable(Capability.RELATIONAL_ATTRIBUTES)
        result.enable(Capability.MISSING_VALUES)
        # class
        result.enable(Capability.NOMINAL_CLASS)
        result.enable(Capability.NUMERIC_CLASS)
        result.enable(Capability.DATE_CLASS)
        result.enable(Capability.MISSING_CLASS_VALUES)
        # instances: can be built even from an empty dataset
        result.setMinimumNumberInstances(0)
        return result

    def buildClassifier(self, instances):
        """
        builds the ZeroR classifier with the given data
        Parameter(s):
        'instances' -- the data to build the classifier from
        """
        self.getCapabilities().testWithFail(instances)
        # remove instances with missing class (work on a copy)
        instances = Instances(instances)
        instances.deleteWithMissingClass()
        sumOfWeights = 0
        self.__Class = instances.classAttribute()
        self.__ClassValue = 0
        self.__Counts = None
        if (instances.classAttribute().isNumeric()):
            self.__Counts = None
        elif (instances.classAttribute().isNominal()):
            # Laplace smoothing: start every class count at 1
            self.__Counts = jarray.zeros(instances.numClasses(), 'd')
            for i in range(len(self.__Counts)):
                self.__Counts[i] = 1
            sumOfWeights = instances.numClasses()
        enu = instances.enumerateInstances()
        while (enu.hasMoreElements()):
            instance = enu.nextElement()
            if (not instance.classIsMissing()):
                if (instances.classAttribute().isNominal()):
                    self.__Counts[int(instance.classValue())] += instance.weight()
                else:
                    # numeric: accumulate weighted class values for the mean
                    self.__ClassValue += instance.weight() * instance.classValue()
                sumOfWeights += instance.weight()
        if (instances.classAttribute().isNumeric()):
            if (Utils.gr(sumOfWeights, 0)):
                self.__ClassValue /= sumOfWeights
        else:
            # nominal: predict the majority class, keep a normalized distribution
            self.__ClassValue = Utils.maxIndex(self.__Counts)
            Utils.normalize(self.__Counts, sumOfWeights)
        return

    def classifyInstance(self, instance):
        """
        returns the prediction for the given instance
        (constant -- the instance itself is ignored)
        Parameter(s):
        'instance' -- the instance to predict the class value for
        Return:
        the prediction for the given instance
        """
        return self.__ClassValue

    def distributionForInstance(self, instance):
        """
        returns the class distribution for the given instance
        (constant -- the instance itself is ignored)
        Parameter(s):
        'instance' -- the instance to calculate the class distribution for
        Return:
        the class distribution for the given instance
        """
        result = None
        if (self.__Counts == None):
            # numeric class: single-element array holding the mean
            result = jarray.zeros(1, 'd')
            result[0] = self.__ClassValue
        else:
            # nominal class: return a copy so callers cannot mutate our counts
            result = self.__Counts[:]
        return result

    def toString(self):
        """
        Prints a string representation of the classifier
        Return:
        string representation of the classifier
        """
        if (self.__Class == None):
            return "JeroR: No model built yet."
        if (self.__Counts == None):
            return "JeroR predicts class value: " + str(self.__ClassValue)
        else:
            return "JeroR predicts class value: " + str(self.__Class.value(int(self.__ClassValue)))
# simulating the Java "main" method: train/evaluate JeroR from the CLI
if __name__ == "__main__":
    # need to set the following jython registory value:
    # python.security.respectJavaAccessibility=false
    #
    # Links:
    # - Python registry
    # http://www.jython.org/docs/registry.html
    # - Accessing Java protected static members:
    # http://www.jython.org/cgi-bin/faqw.py?req=all#3.5
    Classifier.runClassifier(JeroR(), sys.argv[1:])
|
#coding:utf-8
import threading
import time
'''在add(), remove()方法中添加获取和释放锁是为了体现rlock与lock的区别,在这里省去也可以'''
class Box(object):
    """Item counter guarded by a single class-level re-entrant lock.

    An RLock (rather than a plain Lock) is required because add()/remove()
    call execute() while already holding the lock.
    """

    lock = threading.RLock()

    def __init__(self):
        self.total_items = 0

    def execute(self, n):
        # Re-acquires the lock the caller may already hold (RLock semantics).
        with Box.lock:
            self.total_items += n

    def add(self):
        with Box.lock:
            self.execute(1)

    def remove(self):
        with Box.lock:
            self.execute(-1)
def adder(box, items):
    """Call box.add() `items` times (no-op when items <= 0)."""
    remaining = items
    while remaining > 0:
        box.add()
        remaining -= 1
def remover(box, items):
    """Call box.remove() `items` times (no-op when items <= 0)."""
    remaining = items
    while remaining > 0:
        box.remove()
        remaining -= 1
# Demo: one thread adds 1,000,000 items while another removes the same
# number; with correct locking the box must end at 0.
items = 1000000
box = Box()
t1 = threading.Thread(target=adder, args=(box, items))
t2 = threading.Thread(target=remover, args=(box, items))
t1.start()
t2.start()
t1.join()
t2.join()
print('%s items still remain in the box' % box.total_items)
|
# Dynamo/Revit (IronPython) node: collect the facing-orientation vector of
# each input family instance. IN/OUT are supplied by the Dynamo environment.
import clr
clr.AddReference('RevitAPI')
from Autodesk.Revit.DB import *
clr.AddReference("RevitNodes")
import Revit
clr.ImportExtensions(Revit.Elements)
clr.ImportExtensions(Revit.GeometryConversion)
# IN[0]: list of wrapped Revit elements.
objinstances = UnwrapElement(IN[0])
vectorlist = list()
for item in objinstances:
    try:
        vectorlist.append(item.FacingOrientation.ToVector())
    # Bare except kept deliberately: in IronPython it also catches CLR
    # exceptions from elements without a FacingOrientation property;
    # an empty list is appended as a placeholder to keep lists aligned.
    except:
        vectorlist.append(list())
OUT = vectorlist
from django.contrib.auth.models import AbstractUser
from django.db import models
from django.utils.timezone import now
class UserModel(AbstractUser):
    """Custom user model extending Django's AbstractUser with profile fields.

    Verbose names are user-facing Chinese labels (email/gender/avatar/
    birthday/registration time); they are runtime strings and kept as-is.
    """

    email = models.EmailField(verbose_name='邮箱', blank=True, null=True, default='')
    gender = models.CharField(choices=(('male', '男'), ('female', '女')), default='male',
                              max_length=10, verbose_name='性别')
    # Avatar uploads are sharded into date-based folders (%Y/%m/%d).
    profile_photo = models.ImageField(verbose_name='头像', upload_to='%Y/%m/%d', default='user_normal.jpg')
    birthday = models.DateField(verbose_name='出生年月', blank=True, null=True)
    # Set once on insert, never updated afterwards (auto_now_add).
    add_time = models.DateTimeField(verbose_name='注册时间', auto_now_add=True)

    class Meta:
        verbose_name_plural = '用户管理'
        verbose_name = verbose_name_plural

    def __str__(self):
        return self.username
class CodeModel(models.Model):
    """Email verification code, tagged with the flow it was issued for."""

    email = models.EmailField(verbose_name='邮箱')
    # 6-character verification code sent to the address above.
    code = models.CharField(max_length=6, verbose_name='验证码')
    # Which flow requested the code: register / login / change password.
    email_type = models.CharField(choices=(('register', '注册'),
                                           ('login', '登录'),
                                           ('change_pwd', '修改密码')), max_length=10)
    add_time = models.DateTimeField(verbose_name='发送时间', default=now)

    class Meta:
        verbose_name_plural = '验证码管理'
        verbose_name = verbose_name_plural
        # Newest codes first.
        ordering = ['-add_time']

    def __str__(self):
        return self.code
|
"""Main codrspace views"""
import requests
from datetime import datetime
from StringIO import StringIO
from zipfile import ZipFile
from django.http import Http404, HttpResponse
from django.shortcuts import render, redirect, get_object_or_404
from django.utils import simplejson
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.conf import settings
from django.contrib import messages
from django.db.models import Q
from django.core.cache import cache
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from codrspace.models import Post, Profile, Media, Setting
from codrspace.forms import PostForm, MediaForm, \
SettingForm, FeedBackForm
class GithubAuthError(Exception):
    """Raised when a GitHub OAuth response is not a clean 200."""
def index(request, template_name="home_shutdown.html"):
    """Render the (shutdown notice) landing page."""
    return render(request, template_name)
def post_detail(request, username, slug, template_name="post_detail.html"):
    """Show a single post; drafts 404 for anyone but their author."""
    user = get_object_or_404(User, username=username)
    post = get_object_or_404(
        Post,
        author=user,
        slug=slug,)
    try:
        user_settings = Setting.objects.get(user=user)
    # NOTE(review): bare except swallows all errors; Setting.DoesNotExist
    # is presumably the intended case.
    except:
        user_settings = None
    if post.status == 'draft':
        # Drafts are private to their author.
        if post.author != request.user:
            raise Http404
    return render(request, template_name, {
        'username': username,
        'post': post,
        'meta': user.profile.get_meta(),
        'user_settings': user_settings
    })
def post_list(request, username, post_type='published',
              template_name="post_list_shutdown.html"):
    """List a user's posts ('published' by default, or 'drafts'), 3 per page."""
    user = get_object_or_404(User, username=username)
    try:
        user_settings = Setting.objects.get(user=user)
    # NOTE(review): should catch Setting.DoesNotExist, not everything.
    except:
        user_settings = None
    if post_type == 'published':
        post_type = 'posts'
        status_query = Q(status="published")
    else:
        post_type = 'drafts'
        status_query = Q(status="draft")
    # Only posts whose publish date has passed -- or was never set.
    posts = Post.objects.filter(
        status_query,
        Q(publish_dt__lte=datetime.now()) | Q(publish_dt=None),
        author=user,
    )
    posts = posts.order_by('-publish_dt')
    # paginate posts
    paginator = Paginator(posts, 3)
    page = request.GET.get('page')
    try:
        posts = paginator.page(page)
    except PageNotAnInteger:
        # If page is not an integer, deliver first page.
        posts = paginator.page(1)
    except EmptyPage:
        # If page is out of range (e.g. 9999), deliver last page of results.
        posts = paginator.page(paginator.num_pages)
    return render(request, template_name, {
        'username': username,
        'posts': posts,
        'post_type': post_type,
        'meta': user.profile.get_meta(),
        'user_settings': user_settings
    })
@login_required
def drafts(request):
    """Shortcut: list the logged-in user's own draft posts."""
    return post_list(request, request.user.username, post_type='drafts')
@login_required
def add(request, template_name="add.html"):
    """Add a post; the same form also accepts a media upload in one POST."""
    posts = Post.objects.filter(
        author=request.user,
        status__in=['draft', 'published']
    ).order_by('-pk')
    media_set = Media.objects.filter(uploader=request.user).order_by('-pk')
    media_form = MediaForm()
    if request.method == "POST":
        # media: attempt the upload regardless of whether a post was submitted
        media_form = MediaForm(request.POST, request.FILES)
        if media_form.is_valid():
            media = media_form.save(commit=False)
            media.uploader = request.user
            media.filename = unicode(media_form.cleaned_data.get('file', ''))
            media.save()
            messages.info(
                request,
                'Media %s has been uploaded.' % media.filename,
                extra_tags='alert-success'
            )
        # post
        form = PostForm(request.POST, user=request.user)
        if form.is_valid() and 'submit_post' in request.POST:
            post = form.save(commit=False)
            # if something to submit (skip entirely empty posts)
            if post.title or post.content:
                post.author = request.user
                # Stamp the publish time on first publication only.
                if post.status == 'published' and not post.publish_dt:
                    post.publish_dt = datetime.now()
                post.save()
                messages.info(
                    request,
                    'Added post "%s".' % post,
                    extra_tags='alert-success')
                return redirect('edit', pk=post.pk)
    else:
        form = PostForm(user=request.user)
    return render(request, template_name, {
        'form': form,
        'posts': posts,
        'media_set': media_set,
        'media_form': media_form,
    })
@login_required
def user_settings(request, template_name="settings.html"):
    """Add/edit the current user's Setting record."""
    user = get_object_or_404(User, username=request.user.username)
    try:
        # NOTE(review): this local name shadows the django.conf `settings`
        # import for the rest of the function.
        settings = Setting.objects.get(user=user)
    except Setting.DoesNotExist:
        settings = None
    form = SettingForm(instance=settings)
    if request.method == 'POST':
        form = SettingForm(request.POST, instance=settings)
        if form.is_valid():
            msg = "Edited settings successfully."
            messages.info(request, msg, extra_tags='alert-success')
            settings = form.save(commit=False)
            settings.user = user
            settings.save()
            # clear settings cache so the next read refetches from the DB
            cache_key = '%s_user_settings' % user.pk
            cache.set(cache_key, None)
    return render(request, template_name, {
        'form': form,
    })
@login_required
def api_settings(request, template_name="api_settings.html"):
    """Show the current user's tastypie API key (404 when none exists)."""
    # Function-local import: tastypie is only needed by this view.
    from tastypie.models import ApiKey
    api_key = get_object_or_404(ApiKey, user=request.user)
    return render(request, template_name, {
        'api_key': api_key,
    })
@login_required
def delete(request, pk=0, template_name="delete.html"):
    """Confirm and soft-delete a post owned by the current user."""
    post = get_object_or_404(Post, pk=pk, author=request.user)
    user = get_object_or_404(User, username=request.user.username)
    if request.method == 'POST':
        if 'delete-post' in request.POST:
            # Soft delete: the row is kept, only its status changes.
            post.status = 'deleted'
            post.save()
            messages.info(request, 'Post deleted', extra_tags='alert-success')
            return redirect(reverse('post_list', args=[user.username]))
    return render(request, template_name, {
        'post': post,
    })
@login_required
def edit(request, pk=0, template_name="edit.html"):
    """Edit a post; the same POST may also carry a media upload."""
    post = get_object_or_404(Post, pk=pk, author=request.user)
    # Sidebar list: the user's other posts, excluding the one being edited.
    posts = Post.objects.filter(
        ~Q(id=post.pk),
        author=request.user,
        status__in=['draft', 'published']
    ).order_by('-pk')
    media_set = Media.objects.filter(uploader=request.user).order_by('-pk')
    media_form = MediaForm()
    if request.method == "POST":
        # media post
        if 'file' in request.FILES:
            media_form = MediaForm(request.POST, request.FILES)
            if media_form.is_valid():
                media = media_form.save(commit=False)
                media.uploader = request.user
                media.filename = unicode(media_form.cleaned_data.get(
                    'file', ''))
                media.save()
        # post post hehe
        if 'title' in request.POST:
            form = PostForm(request.POST, instance=post, user=request.user)
            if form.is_valid() and 'submit_post' in request.POST:
                post = form.save(commit=False)
                # First publication stamps publish_dt; reverting to draft
                # clears it again.
                if post.status == 'published':
                    if not post.publish_dt:
                        post.publish_dt = datetime.now()
                if post.status == "draft":
                    post.publish_dt = None
                post.save()
                messages.info(
                    request,
                    'Edited post "%s".' % post,
                    extra_tags='alert-success')
                return render(request, template_name, {
                    'form': form,
                    'post': post,
                    'posts': posts,
                    'media_set': media_set,
                    'media_form': media_form,
                })
            # Invalid form (or media-only submit): re-render with errors.
            return render(request, template_name, {
                'form': form,
                'post': post,
                'posts': posts,
                'media_set': media_set,
                'media_form': media_form,
            })
    # GET (or POST without a title field): fresh form bound to the post.
    form = PostForm(instance=post, user=request.user)
    return render(request, template_name, {
        'form': form,
        'post': post,
        'posts': posts,
        'media_set': media_set,
        'media_form': media_form,
    })
def signin_start(request, slug=None, template_name="signin.html"):
    """Start of OAuth signin: redirect to GitHub's authorize endpoint."""
    # NOTE(review): no `state` parameter is included in the redirect --
    # confirm the OAuth flow has CSRF protection elsewhere.
    return redirect('%s?client_id=%s&redirect_uri=%s' % (
        settings.GITHUB_AUTH['auth_url'],
        settings.GITHUB_AUTH['client_id'],
        settings.GITHUB_AUTH['callback_url']))
def signout(request):
    """Log the current user out and return to the homepage."""
    # NOTE(review): `is_authenticated` is called (pre-Django-1.10 style);
    # on modern Django it is a property -- confirm the target version.
    if request.user.is_authenticated():
        logout(request)
    return redirect(reverse('homepage'))
def _validate_github_response(resp):
"""Raise exception if given response has error"""
if resp.status_code != 200 or 'error' in resp.content:
raise GithubAuthError('code: %u content: %s' % (resp.status_code,
resp.content))
def _parse_github_access_token(content):
"""Super hackish way of parsing github access token from request"""
# FIXME: Awful parsing w/ lots of assumptions
# String looks like this currently
# access_token=1c21852a9f19b685d6f67f4409b5b4980a0c9d4f&token_type=bearer
return content.split('&')[0].split('=')[1]
def signin_callback(request, slug=None, template_name="base.html"):
    """Callback from Github OAuth: exchange code for token, sign the user in.

    Creates the User, Profile and default Setting rows on first login.
    (Python 2 module: `except X, err` syntax, `resp.content` is a str.)
    """
    try:
        code = request.GET['code']
    except KeyError:
        return render(request, 'auth_error.html', dictionary={
            'err': 'Unable to get request code from Github'})
    # Exchange the temporary code for an access token.
    resp = requests.post(url=settings.GITHUB_AUTH['access_token_url'],
                         data={'client_id': settings.GITHUB_AUTH['client_id'],
                               'client_secret': settings.GITHUB_AUTH['secret'],
                               'code': code})
    try:
        _validate_github_response(resp)
    except GithubAuthError, err:
        return render(request, 'auth_error.html', dictionary={'err': err})
    token = _parse_github_access_token(resp.content)
    # Don't use token unless running in production b/c mocked service won't
    # know a valid token
    user_url = settings.GITHUB_AUTH['user_url']
    if not settings.GITHUB_AUTH['debug']:
        user_url = '%s?access_token=%s' % (user_url, token)
    resp = requests.get(user_url)
    try:
        _validate_github_response(resp)
    except GithubAuthError, err:
        return redirect(reverse('auth_error', args=[err]))
    github_user = simplejson.loads(resp.content)
    try:
        user = User.objects.get(username=github_user['login'])
    # NOTE(review): bare except; User.DoesNotExist is presumably intended.
    except:
        password = User.objects.make_random_password()
        user_defaults = {
            'username': github_user['login'],
            'is_active': True,
            'is_superuser': False,
            'password': password}
        # NOTE(review): assigning `password` directly stores it unhashed
        # (set_password is not called); harmless only if password auth is
        # never used -- confirm.
        user = User(**user_defaults)
    if user:
        user.save()
        # Get/Create the user profile
        try:
            profile = user.get_profile()
        except:
            profile = Profile(
                git_access_token=token,
                user=user,
                meta=resp.content
            )
        # update meta information and token
        profile.git_access_token = token
        profile.meta = resp.content
        profile.save()
        # Create settings for user
        try:
            user_settings = Setting.objects.get(user=user)
        except:
            user_settings = None
        if not user_settings:
            s = Setting()
            s.user = user
            s.timezone = "US/Central"
            s.save()
    # Fake auth b/c github already verified them and we aren't using our
    # own #passwords...yet?
    user.auto_login = True
    user = authenticate(user=user)
    login(request, user)
    return redirect(reverse('post_list', args=[user.username]))
@login_required
def feedback(request, template_name='feedback.html'):
    """Email user feedback to the site operators, Reply-To set to the user."""
    # Function-local import keeps mail machinery out of module import time.
    from django.core.mail import EmailMessage
    user = get_object_or_404(User, username=request.user.username)
    form = FeedBackForm(initial={'email': user.email})
    if request.method == 'POST':
        form = FeedBackForm(request.POST)
        if form.is_valid():
            msg = "Thanks for send us feedback. We hope to make the product better."
            messages.info(request, msg, extra_tags='alert-success')
            subject = 'Codrspace feedback from %s' % user.username
            message = '%s (%s), %s' % (
                request.user.username,
                form.cleaned_data['email'],
                form.cleaned_data['comments'],
            )
            email = EmailMessage(
                subject,
                message,
                settings.DEFAULT_FROM_EMAIL,
                [settings.SERVER_EMAIL,],
                headers = {'Reply-To': form.cleaned_data['email']}
            )
            email.send(fail_silently=False)
    return render(request, template_name, {
        'form': form,
    })
@login_required
def posts_download(request, username):
    """Download all of the user's posts as a zip of markdown files."""
    user = get_object_or_404(User, username=username)
    # Users may only download their own archive.
    if request.user.username != username:
        raise Http404
    try:
        # NOTE(review): user_settings is fetched but never used below.
        user_settings = Setting.objects.get(user=user)
    except:
        user_settings = None
    posts = Post.objects.filter(author=user)
    # Build the zip entirely in memory.
    io_buffer = StringIO()
    zip = ZipFile(io_buffer, "a")
    for post in posts:
        zip.writestr("{}.md".format(post.slug), post.content.encode('utf-8'))
    # fix for Linux zip files read in Windows
    for file in zip.filelist:
        file.create_system = 0
    zip.close()
    # `mimetype` is the pre-Django-1.7 keyword (this is a Python 2 module).
    response = HttpResponse(mimetype="application/zip")
    response["Content-Disposition"] = "attachment; filename=codrspace_post_archive_{}.zip".format(username)
    io_buffer.seek(0)
    response.write(io_buffer.read())
    return response
@login_required
def render_preview(request, template_name='preview.html'):
    """Ajax view rendering a live preview of the post being edited."""
    # Mock post: blank defaults, overwritten by whatever fields were POSTed.
    preview = {
        'title': '',
        'content': ''
    }
    if request.method == 'POST':
        for field in ('title', 'content'):
            if field in request.POST:
                preview[field] = request.POST[field]
    return render(request, template_name, {
        'post': preview,
    })
def donate(request, template_name='donate.html'):
    """Render the static donation page."""
    return render(request, template_name)
# NOTE(review): shadows the `help` builtin at module level; renaming would
# require touching the URL conf, so it is only flagged here.
def help(request, template_name='help.html'):
    """Render the static help page."""
    return render(request, template_name)
def handler500(request, template_name='500.html'):
    """Custom 500 handler rendering the error template with status 500.

    Fix: the original set response.status_code = 500 but then returned a
    *second* render() call, so clients received the page with status 200.
    Return the response whose status was actually set.
    """
    response = render(request, template_name)
    response.status_code = 500
    return response
|
import pytest
from magma.config import config as magma_config
from magma.util import reset_global_context
@pytest.fixture(autouse=True)
def riscv_mini_test():
    """Autouse fixture: reset magma global state before every test here."""
    # NOTE(review): 'normal' appears to select magma's default compile
    # directory behavior -- confirm against magma.config docs.
    magma_config.compile_dir = 'normal'
    reset_global_context()
|
import math
import numpy as np
import torch
from torch import nn
from torch.nn import Conv2d, BatchNorm2d, PReLU, Sequential, Module
from models.encoders.helpers import get_blocks, bottleneck_IR, bottleneck_IR_SE, _upsample_add
from models.stylegan2.model import EqualLinear,ScaledLeakyReLU,EqualConv2d
class GradualStyleBlock(Module):
    """Maps an (in_c, spatial, spatial) feature map to one style vector.

    log2(spatial) stride-2 convs downsample to 1x1, then an EqualLinear
    projects to out_c. (Code kept byte-identical: layer registration order
    determines checkpoint state-dict keys.)
    """

    def __init__(self, in_c, out_c, spatial):
        super(GradualStyleBlock, self).__init__()
        self.out_c = out_c
        self.spatial = spatial
        # Each stride-2 conv halves the spatial size; num_pools of them
        # reduce `spatial` down to 1.
        num_pools = int(np.log2(spatial))
        modules = []
        modules += [Conv2d(in_c, out_c, kernel_size=3, stride=2, padding=1),
                    nn.LeakyReLU()]
        for i in range(num_pools - 1):
            modules += [
                Conv2d(out_c, out_c, kernel_size=3, stride=2, padding=1),
                nn.LeakyReLU()
            ]
        self.convs = nn.Sequential(*modules)
        self.linear = EqualLinear(out_c, out_c, lr_mul=1)

    def forward(self, x):
        x = self.convs(x)
        # Flatten the 1x1 map to (batch, out_c) before the linear head.
        x = x.view(-1, self.out_c)
        x = self.linear(x)
        return x
class GradualStyleEncoder(Module):
    """Feature-pyramid encoder producing one latent per StyleGAN style input.

    Coarse styles (< coarse_ind) come from the deepest features, middle
    styles from a merged mid-level map, fine styles from the shallowest.
    """

    def __init__(self, num_layers, mode='ir', opts=None):
        super(GradualStyleEncoder, self).__init__()
        assert num_layers in [50, 100, 152], 'num_layers should be 50,100, or 152'
        assert mode in ['ir', 'ir_se'], 'mode should be ir or ir_se'
        blocks = get_blocks(num_layers)
        if mode == 'ir':
            unit_module = bottleneck_IR
        elif mode == 'ir_se':
            unit_module = bottleneck_IR_SE
        self.input_layer = Sequential(Conv2d(3, 64, (3, 3), 1, 1, bias=False),
                                      BatchNorm2d(64),
                                      PReLU(64))
        modules = []
        for block in blocks:
            for bottleneck in block:
                modules.append(unit_module(bottleneck.in_channel,
                                           bottleneck.depth,
                                           bottleneck.stride))
        self.body = Sequential(*modules)
        self.styles = nn.ModuleList()
        # One style per StyleGAN input: 2*log2(size) - 2.
        log_size = int(math.log(opts.stylegan_size, 2))
        self.style_count = 2 * log_size - 2
        self.coarse_ind = 3
        self.middle_ind = 7
        for i in range(self.style_count):
            if i < self.coarse_ind:
                style = GradualStyleBlock(512, 512, 16)
            elif i < self.middle_ind:
                style = GradualStyleBlock(512, 512, 32)
            else:
                style = GradualStyleBlock(512, 512, 64)
            self.styles.append(style)
        # 1x1 lateral convs merging mid/fine backbone features (FPN-style).
        self.latlayer1 = nn.Conv2d(256, 512, kernel_size=1, stride=1, padding=0)
        self.latlayer2 = nn.Conv2d(128, 512, kernel_size=1, stride=1, padding=0)

    def forward(self, x):
        x = self.input_layer(x)
        latents = []
        modulelist = list(self.body._modules.values())
        # Tap intermediate feature maps at fixed backbone block indices.
        for i, l in enumerate(modulelist):
            x = l(x)
            if i == 6:
                c1 = x
            elif i == 20:
                c2 = x
            elif i == 23:
                c3 = x
        for j in range(self.coarse_ind):
            latents.append(self.styles[j](c3))
        p2 = _upsample_add(c3, self.latlayer1(c2))
        for j in range(self.coarse_ind, self.middle_ind):
            latents.append(self.styles[j](p2))
        p1 = _upsample_add(p2, self.latlayer2(c1))
        for j in range(self.middle_ind, self.style_count):
            latents.append(self.styles[j](p1))
        # Stack to (batch, style_count, 512).
        out = torch.stack(latents, dim=1)
        return out
class Encoder4Editing(Module):
    """e4e encoder: predicts a base latent w0 plus per-style delta offsets.

    The first style head produces w0, which is duplicated across all style
    slots; heads 1..style_count-1 then add deltas computed from
    progressively finer feature-pyramid levels.
    """

    def __init__(self, num_layers, mode='ir', opts=None):
        super(Encoder4Editing, self).__init__()
        assert num_layers in [50, 100, 152], 'num_layers should be 50,100, or 152'
        assert mode in ['ir', 'ir_se'], 'mode should be ir or ir_se'
        blocks = get_blocks(num_layers)
        if mode == 'ir':
            unit_module = bottleneck_IR
        elif mode == 'ir_se':
            unit_module = bottleneck_IR_SE
        self.input_layer = Sequential(Conv2d(3, 64, (3, 3), 1, 1, bias=False),
                                      BatchNorm2d(64),
                                      PReLU(64))
        modules = []
        for block in blocks:
            for bottleneck in block:
                modules.append(unit_module(bottleneck.in_channel,
                                           bottleneck.depth,
                                           bottleneck.stride))
        self.body = Sequential(*modules)
        self.styles = nn.ModuleList()
        # One style per StyleGAN input: 2*log2(size) - 2.
        log_size = int(math.log(opts.stylegan_size, 2))
        self.style_count = 2 * log_size - 2
        self.coarse_ind = 3
        self.middle_ind = 7
        for i in range(self.style_count):
            if i < self.coarse_ind:
                style = GradualStyleBlock(512, 512, 16)
            elif i < self.middle_ind:
                style = GradualStyleBlock(512, 512, 32)
            else:
                style = GradualStyleBlock(512, 512, 64)
            self.styles.append(style)
        # 1x1 lateral convs merging mid/fine backbone features (FPN-style).
        self.latlayer1 = nn.Conv2d(256, 512, kernel_size=1, stride=1, padding=0)
        self.latlayer2 = nn.Conv2d(128, 512, kernel_size=1, stride=1, padding=0)

    def get_deltas_starting_dimensions(self):
        ''' Get a list of the initial dimension of every delta from which it is applied '''
        return list(range(self.style_count))  # Each dimension has a delta applied to it

    def forward(self, x):
        x = self.input_layer(x)
        modulelist = list(self.body._modules.values())
        # Tap intermediate feature maps at fixed backbone block indices.
        for i, l in enumerate(modulelist):
            x = l(x)
            if i == 6:
                c1 = x
            elif i == 20:
                c2 = x
            elif i == 23:
                c3 = x
        # Infer main W and duplicate it
        w0 = self.styles[0](c3)
        w = w0.repeat(self.style_count, 1, 1).permute(1, 0, 2)
        features = c3
        # Fix: iterate over the configured number of styles instead of the
        # hard-coded 18 (only correct for stylegan_size == 1024; any other
        # resolution indexed self.styles/w out of range).
        for i in range(1, self.style_count):  # Infer additional deltas
            if i == self.coarse_ind:
                p2 = _upsample_add(c3, self.latlayer1(c2))  # FPN's middle features
                features = p2
            elif i == self.middle_ind:
                p1 = _upsample_add(p2, self.latlayer2(c1))  # FPN's fine features
                features = p1
            delta_i = self.styles[i](features)
            w[:, i] += delta_i
        return w
# Consultation encoder
class ResidualEncoder(Module):
    """Consultation encoder: predicts per-pixel scale/shift condition maps.

    Returns [scale, shift], each interpolated to 64x64 with 512 channels.
    """

    def __init__(self, opts=None):
        super(ResidualEncoder, self).__init__()
        self.conv_layer1 = Sequential(Conv2d(3, 32, (3, 3), 1, 1, bias=False),
                                      BatchNorm2d(32),
                                      PReLU(32))
        self.conv_layer2 = Sequential(*[bottleneck_IR(32,48,2), bottleneck_IR(48,48,1), bottleneck_IR(48,48,1)])
        self.conv_layer3 = Sequential(*[bottleneck_IR(48,64,2), bottleneck_IR(64,64,1), bottleneck_IR(64,64,1)])
        self.condition_scale3 = nn.Sequential(
                    EqualConv2d(64, 512, 3, stride=1, padding=1, bias=True ),
                    ScaledLeakyReLU(0.2),
                    EqualConv2d(512, 512, 3, stride=1, padding=1, bias=True ))
        self.condition_shift3 = nn.Sequential(
                    EqualConv2d(64, 512, 3, stride=1, padding=1, bias=True ),
                    ScaledLeakyReLU(0.2),
                    EqualConv2d(512, 512, 3, stride=1, padding=1, bias=True ))

    def get_deltas_starting_dimensions(self):
        ''' Get a list of the initial dimension of every delta from which it is applied '''
        # NOTE(review): self.style_count is never defined on this class, so
        # calling this method raises AttributeError (copied from the
        # style-encoder classes) -- confirm it is actually unused.
        return list(range(self.style_count))  # Each dimension has a delta applied to it

    def forward(self, x):
        conditions = []
        feat1 = self.conv_layer1(x)
        feat2 = self.conv_layer2(feat1)
        feat3 = self.conv_layer3(feat2)
        # Scale map, resized to the 64x64 feature resolution.
        scale = self.condition_scale3(feat3)
        scale = torch.nn.functional.interpolate(scale, size=(64,64) , mode='bilinear')
        conditions.append(scale.clone())
        # Shift map at the same resolution.
        shift = self.condition_shift3(feat3)
        shift = torch.nn.functional.interpolate(shift, size=(64,64) , mode='bilinear')
        conditions.append(shift.clone())
        return conditions
# ADA
# ADA
class ResidualAligner(Module):
    """Encoder-decoder (U-Net-like) aligner producing a 3-channel residual.

    Takes a 6-channel input (presumably two stacked RGB images -- confirm
    against the caller) and upsamples back through skip concatenations.
    """

    def __init__(self, opts=None):
        super(ResidualAligner, self).__init__()
        self.conv_layer1 = Sequential(Conv2d(6, 16, (3, 3), 1, 1, bias=False),
                                      BatchNorm2d(16),
                                      PReLU(16))
        self.conv_layer2 = Sequential(*[bottleneck_IR(16,32,2), bottleneck_IR(32,32,1), bottleneck_IR(32,32,1)])
        self.conv_layer3 = Sequential(*[bottleneck_IR(32,48,2), bottleneck_IR(48,48,1), bottleneck_IR(48,48,1)])
        self.conv_layer4 = Sequential(*[bottleneck_IR(48,64,2), bottleneck_IR(64,64,1), bottleneck_IR(64,64,1)])
        # Decoder blocks; input channels are encoder+skip concatenations.
        self.dconv_layer1 = Sequential(*[bottleneck_IR(112,64,1), bottleneck_IR(64,32,1), bottleneck_IR(32,32,1)])
        self.dconv_layer2 = Sequential(*[bottleneck_IR(64,32,1), bottleneck_IR(32,16,1), bottleneck_IR(16,16,1)])
        self.dconv_layer3 = Sequential(*[bottleneck_IR(32,16,1), bottleneck_IR(16,3,1), bottleneck_IR(3,3,1)])

    def forward(self, x):
        feat1 = self.conv_layer1(x)
        feat2 = self.conv_layer2(feat1)
        feat3 = self.conv_layer3(feat2)
        feat4 = self.conv_layer4(feat3)
        # Upsample and fuse with the matching encoder feature at each level.
        feat4 = torch.nn.functional.interpolate(feat4, size=(64,64) , mode='bilinear')
        dfea1 = self.dconv_layer1(torch.cat((feat4, feat3),1))
        dfea1 = torch.nn.functional.interpolate(dfea1, size=(128,128) , mode='bilinear')
        dfea2 = self.dconv_layer2(torch.cat( (dfea1, feat2),1))
        dfea2 = torch.nn.functional.interpolate(dfea2, size=(256,256) , mode='bilinear')
        dfea3 = self.dconv_layer3(torch.cat( (dfea2, feat1),1))
        res_aligned = dfea3
        return res_aligned
|
import numpy as np
import matplotlib.pyplot as plt
from scipy.integrate import ode
def fun(t, x):
    """RHS of the 4-compartment (S, E, I, C) ODE model, for scipy's ode()."""
    y=np.zeros((4, 1))
    # Parameters, in order: bet, eps, ro, gam, sigma, wa, taw, mu.
    bet,eps,ro,gam,sigma,wa,taw,mu=0.6,0.084,0.95,0.1,0.0714,0.0588, 9.1324e-4,6.8493e-5
    a=np.array([bet,eps,ro,gam,sigma,wa,taw,mu])
    #x=np.array([S,E,I,C])
    # NOTE(review): the C-infection coefficient is ro*bet (a[2]*a[0]) in
    # y[0] but eps*bet (a[1]*a[0]) in y[1], while the R0 formula below uses
    # eps*ro*gam -- these look inconsistent; confirm which is intended.
    y[0]=(a[7]+a[5])-(a[0]*x[2]+a[2]*a[0]*x[3]+(a[7]+a[5]))*x[0]-a[5]*(x[1]+x[2]+x[3])
    y[1]=(a[0]*x[2]+a[1]*a[0]*x[3])*x[0]-(a[4]+a[7])*x[1]
    y[2]=a[4]*x[1]-(a[3]+a[7])*x[2]
    y[3]=a[2]*a[3]*x[2]-(a[5]+a[7])*x[3]
    #print(y)
    return y
# --- Equilibrium analysis -------------------------------------------------
bet, eps, ro, gam, sigma, wa, taw, mu = 0.6, 0.084, 0.95, 0.1, 0.0714, 0.0588, 9.1324e-4, 6.8493e-5
a = np.array([bet, eps, ro, gam, sigma, wa, taw, mu])
A = np.zeros((4, 1))
# Basic reproduction number of the model.
R0 = bet * sigma * ((taw + mu) + eps * ro * gam) / ((sigma + mu) * (gam + mu) * (taw + mu))
# Fixed: Python-2-only 'print R0' statements were a SyntaxError on Python 3.
print(R0)
if R0 > 1:
    # Endemic equilibrium.
    # NOTE(review): 'a[3] + a[7] / a[4]' divides only a[7] by a[4] — confirm
    # the intended parenthesization against the source paper.
    D = (a[4] + a[5] + a[7]) * (a[3] + a[7] / a[4] + a[5] * (1 + a[2] * a[3]) / (a[6] + a[7]))
    A[2] = Ieq = (a[5] + a[7]) / (D * (1 - 1 / R0))
    A[1] = Eeq = (a[3] + a[7]) / a[4] * Ieq
    A[3] = Ceq = a[2] * a[3] * Ieq / (a[6] + a[7])
    A[0] = Seq = 1 / R0
else:
    # Disease-free equilibrium.
    Seq = 1
    Ieq = 0
    Eeq = (a[3] + a[7]) / a[4] * Ieq
    Ceq = (a[2] * a[3] * Ieq) / (a[6] + a[7])
print(Seq, Eeq, Ieq, Ceq)

# --- Numerical integration ------------------------------------------------
t0 = 0.0
x0 = [0.86, 0.03, 0.02, 0.01]  # initial S, E, I, C (duplicate assignment removed)
solver = ode(fun)
solver.set_integrator('dopri5', rtol=1e-6)
# Only integrate for a feasible initial state (proportions must sum below 1).
if sum(x0) < 1:
    solver.set_initial_value(x0, t0)
    t1 = 3 * 365
    N = 75
    t = np.linspace(t0, t1, N)
    sol = np.empty((N, 4))
    sol[0] = x0
    k = 1
    # Guard k < N so float round-off in solver.t cannot index past the array.
    while solver.successful() and solver.t < t1 and k < N:
        solver.integrate(t[k])
        sol[k] = solver.y
        k += 1
    S = sol[:, 0]
    E = sol[:, 1]
    I = sol[:, 2]
    C = sol[:, 3]
"""
plt.plot(t, sol[:,0], label='S')
plt.plot(t, sol[:,1], label='E')
plt.plot(t, sol[:,2], label='I')
plt.plot(t, sol[:,3], label='C')
plt.xlabel('t')
plt.grid(True)
plt.legend()
plt.show()
"""
#R=1-(S+E+I+C)
|
# py2exe packaging script: builds a Windows console executable from
# kiya/base.py, bundling the listed GTK/cairo modules.
from distutils.core import setup
import py2exe  # side-effect import: registers the 'py2exe' distutils command
requires = []
setup(
    name='kiya',
    version='0.1',
    packages=['kiya'],
    requires=requires,
    # Entry point compiled into a console .exe by py2exe.
    console=['kiya/base.py'],
    options={
        'py2exe': {
            'packages': [],
            # GUI stack modules py2exe cannot discover automatically.
            'includes': ['cairo',
                         'pygtk',
                         'gio',
                         'pango',
                         'pangocairo',
                         'atk',
                         'gobject']
        },
        'sdist': {
            'formats': 'zip'
        }
    }
)
|
##General Calls
import time
import serial
##Pubnub Calls
from pubnub.callbacks import SubscribeCallback
from pubnub.enums import PNStatusCategory
from pubnub.pnconfiguration import PNConfiguration
from pubnub.pubnub import PubNub
##Pubnub Configuration
# NOTE(review): keys are hard-coded and TLS is disabled — consider loading
# credentials from the environment and setting ssl = True.
pnconfig = PNConfiguration()
pnconfig.subscribe_key = 'sub-c-c3322f58-e285-11e6-8d2d-02ee2ddab7fe'
pnconfig.publish_key = 'pub-c-3942c648-843c-496f-92cd-72c9f7208d04'
pnconfig.ssl = False
# Channel names; only channel1 is actually used below.
channel1 = 'EAP_ts1'
channel2 = 'EAP_ts2'
pubnub = PubNub(pnconfig)
def my_publish_callback(envelope, status):
    """Completion hook handed to PubNub's async publish calls.

    Both outcomes are intentionally no-ops; a failed publish could be
    retried via the status object if that ever becomes necessary.
    """
    if status.is_error():
        # Publish failed; 'category' on the status describes the cause and
        # the request could be resent.
        pass
    else:
        # Message successfully delivered to the channel.
        pass
class MySubscribeCallback(SubscribeCallback):
    """PubNub subscription listener: publishes a greeting once connected.

    All other lifecycle events are intentionally ignored by this simple
    streaming script.
    """
    def presence(self, pubnub, presence):
        pass  # handle incoming presence data

    def status(self, pubnub, status):
        if status.category == PNStatusCategory.PNUnexpectedDisconnectCategory:
            pass  # This event happens when radio / connectivity is lost
        elif status.category == PNStatusCategory.PNConnectedCategory:
            # Connect event: safe to publish, confirms we are subscribed.
            # Fixed: '.async(' is a SyntaxError on Python 3.7+ ('async' became
            # a reserved keyword); PubNub SDK v4.1+ renamed it to 'pn_async'.
            pubnub.publish().channel(channel1).message("Connected").pn_async(my_publish_callback)
        elif status.category == PNStatusCategory.PNReconnectedCategory:
            # Connectivity was lost and then regained; nothing extra to do.
            pass
        elif status.category == PNStatusCategory.PNDecryptionErrorCategory:
            # Message decryption failed (client configured for encryption but
            # live feed delivered plain text); ignored here.
            pass

    def message(self, pubnub, message):
        pass  # Handle new message stored in message.message
pubnub.add_listener(MySubscribeCallback())
pubnub.subscribe().channels(channel1).execute()

# Read strain-gauge samples from the serial port and stream them to PubNub.
ser = serial.Serial('/dev/ttyUSB0', 9600)
var = 0
t = []
y = []
# Discard the first two (possibly partial) lines after opening the port.
ser.readline()
ser.readline()
while var < 5000:
    raw_data = ser.readline(17)
    # Fixed: str(bytes) on Python 3 yields "b'...'", which corrupts the
    # slice-based parse below; decode the raw bytes instead.
    s = raw_data.decode('ascii', errors='ignore')
    # Sample format: one lead-in character, digits, then a '%' terminator
    # — presumably; confirm against the device protocol.
    end = s.find('%')
    cnts = float(s[1:end])
    strain = cnts
    # t.append(var)
    # y.append(cnts)
    var = var + 1
    # Fixed: '.async(' is a reserved-keyword SyntaxError on Python 3.7+;
    # PubNub SDK v4.1+ renamed the method to '.pn_async'.
    pubnub.publish().channel(channel1).message({'eon': [strain]}).pn_async(my_publish_callback)
    print(var, strain)
    # time.sleep(1)
print("All Done!!")
pubnub.publish().channel(channel1).message("All Done!!").pn_async(my_publish_callback)
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# use the cgi library
import cgi
# enable debugging
#import cgitb
#cgitb.enable()
# use internal sqlite3 database
import sqlite3
#Define Main Function
def main():
    """Handle a LOGIN admin CGI POST: add, change, delete or logout a login.

    Reads LOGID/LOGPW/LOGEM/MODE from the POSTed form and prints a plain-text
    response: "OK" on success, otherwise an error description.
    """
    # use the cgi interface to get POSTED values
    fs = cgi.FieldStorage()
    logid = fs.getvalue("LOGID")
    logpw = fs.getvalue("LOGPW")
    logem = fs.getvalue("LOGEM")
    dbmode = fs.getvalue("MODE")
    # set the response text to "OK" by default
    dbg = "OK"
    dbc = None  # predefine so the finally clause is safe if connect() raises
    try:
        dbc = sqlite3.connect("data/rsp-admin.db")
        # All queries below use parameterized SQL; the original concatenated
        # raw form values into the statements and was SQL-injectable.
        # ****** DEL MODE *********
        if dbmode == "DEL":
            # ID Should Exist - go ahead and try to delete it
            try:
                dbc.execute("DELETE FROM LOGIN WHERE ID=?", (logid,))
                dbc.commit()
            # Fixed: 'except (sqlite3.Error, e)' was invalid — it treats the
            # (undefined) name 'e' as a second exception type.
            except sqlite3.Error as e:
                dbg = "Failed to DELETE Login: " + logid + " Err: " + e.args[0]
        else:
            # ****** ADD OR CHANGE MODE *************************
            # execute SQL SELECT on CGI values
            csr = dbc.cursor()
            csr.execute("SELECT ID FROM LOGIN WHERE ID=?", (logid,))
            # get first DB table row from cursor after select
            chkrow = csr.fetchone()
            if chkrow is None:
                if dbmode == "LOGOUT":
                    dbg = "Failed to Logout: " + logid + " Record not in Database."
                else:
                    # No ID Exists - go ahead and try to ADD it
                    try:
                        dbc.execute("INSERT INTO LOGIN (ID,PW,EMAIL) VALUES (?, ?, ?)",
                                    (logid, logpw, logem))
                        dbc.commit()
                    except sqlite3.Error:
                        dbg = "Failed to ADD Login: " + logid + " to DB LOGIN Table."
            else:
                if dbmode == "LOGOUT":
                    # Have a match so Logout.
                    try:
                        dbc.execute("UPDATE LOGIN SET LOGKEY=NULL WHERE ID=?", (logid,))
                        dbc.commit()
                    except sqlite3.Error:
                        dbg = "Failed to Logout: " + logid + " in DB LOGIN Table."
                else:
                    # Have a match so Change it.
                    try:
                        dbc.execute("UPDATE LOGIN SET PW=?, EMAIL=? WHERE ID=?",
                                    (logpw, logem, logid))
                        dbc.commit()
                    except sqlite3.Error:
                        dbg = "Failed to CHANGE Login: " + logid + " in DB LOGIN Table."
    except sqlite3.Error:
        dbg = "Could not Open Admin DB."
    finally:
        if dbc:
            # Close the Database
            dbc.close()
    #
    # Print HTTP Response text: "OK" if no errors, else error string.
    # print() with one argument behaves the same on Python 2 and 3; the
    # original used Python-2-only print statements.
    #
    print("Content-Type: text/plain")
    print("")
    print(dbg)
# Run It
main()
|
import logging
from abc import ABC
class LoggingClass(ABC):
    """Mixin that equips each subclass with a logger named after the class.

    The logger name is the CamelCase class name split into space-separated
    words, e.g. ``FooBarBaz`` -> ``"Foo Bar Baz"``.
    """

    def __init__(self):
        self.logger: logging.Logger = logging.getLogger(self.logger_name)

    @property
    def logger_name(self) -> str:
        return self.__split_class_name_into_words()

    def __split_class_name_into_words(self) -> str:
        name: str = type(self).__name__
        pieces: list = []
        for idx, ch in enumerate(name):
            pieces.append(ch)
            # Insert a space before an Upper+lower pair, i.e. at the start of
            # the next CamelCase word ("".isupper() is False, so the slices
            # safely handle the end of the string).
            if name[idx + 1:idx + 2].isupper() and name[idx + 2:idx + 3].islower():
                pieces.append(" ")
        return "".join(pieces)
|
from __future__ import print_function
import urllib
import boto3
import zipfile
import json
#-------- aws variables -----------
# Module-level clients are created once per Lambda container and reused
# across invocations.
s3_client= boto3.client('s3')
lambda_client = boto3.client('lambda')
#-------- functions begin---------
def lambda_handler(event, context):
    """S3-trigger entry point: download a zip of check images, collect the
    front images (names ending in 'f.tif') and fan chunks of them out to the
    'uploadImages' Lambda.

    :param event: S3 event payload with bucket name and object key.
    :param context: Lambda context (unused).
    :raises Exception: re-raises any failure so the invocation is marked failed.
    """
    # Get the object from the event and show its content type
    zipped_bucket_name = event['Records'][0]['s3']['bucket']['name']
    zipped_file_key = event['Records'][0]['s3']['object']['key']
    try:
        # Download and save the zip to tmp storage
        s3_client.download_file(zipped_bucket_name, zipped_file_key, '/tmp/file.zip')
        # 'with' ensures the archive handle is closed (the original leaked
        # it); 'member' avoids shadowing the builtin 'file'.
        front_image_files = []
        with zipfile.ZipFile('/tmp/file.zip') as archive:
            for member in archive.namelist():
                if member.endswith('f.tif'):
                    # insert(0, ...) preserves the original (reversed) order.
                    front_image_files.insert(0, member)
        # Divide our list of images into smaller arrays of size n images
        # [1,2,3,4,5,6,7,8,9,10] -> [[1,2,3], [4,5,6], [7,8,9], [10]]
        n = 3
        split_front_images = [front_image_files[i * n:(i + 1) * n]
                              for i in range((len(front_image_files) + n - 1) // n)]
        for front_images_chunk in split_front_images:
            payload = {
                "key": zipped_file_key,
                "bucket": zipped_bucket_name,
                "image_list": front_images_chunk
            }
            # Fire-and-forget ('Event') invocation per chunk.
            response = lambda_client.invoke(
                FunctionName="uploadImages",
                InvocationType='Event',
                Payload=json.dumps(payload)
            )
            print("RESPONSE")  # fixed typo: was "REPSPONSE"
            print(response)
    except Exception as e:
        print(e)
        print('Error unzipping {} from bucket {}.'.format(zipped_file_key, zipped_bucket_name))
        # Bare raise preserves the original traceback.
        raise
|
import argparse
import io
from nltk.translate.bleu_score import corpus_bleu
import sys
def main():
    """Compute corpus BLEU for hypotheses (file or stdin) against a test file.

    --input: hypothesis file; stdin is read when omitted.
    --test:  reference file (required).
    --order: max n-gram order (uniform weights).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--input", required=False, type=str)
    parser.add_argument("--test", required=True, type=str)
    parser.add_argument("--order", type=int, default=4)
    args = parser.parse_args()
    weights = [1. / args.order] * args.order
    if args.input is None:
        input_lines = io.TextIOWrapper(sys.stdin.buffer, encoding='utf-8')
        input_sentences = [line.strip().split(' ') for line in input_lines]
    else:
        # 'with' closes the file (the original never closed either handle).
        with open(args.input, encoding='utf-8') as f:
            input_sentences = [line.strip().split(' ') for line in f]
    with open(args.test, encoding='utf-8') as f:
        test_sentences = [line.strip().split(' ') for line in f]
    hypotheses = input_sentences
    # NOTE(review): this gives EVERY hypothesis the full reference corpus as
    # its reference set; the usual pairing is one reference list per line —
    # preserved as-is, confirm intended.
    references = [test_sentences] * len(hypotheses)
    bleu = corpus_bleu(references, hypotheses, weights=weights)
    print(bleu)

if __name__ == '__main__':
    main()
|
# URL routes for the 'deal' app.
# NOTE(review): django.conf.urls.patterns() was deprecated in Django 1.8 and
# removed in 1.10 — on upgrade, switch to a plain list of url()/path() entries.
from django.conf.urls import patterns, include, url
from deal import views
urlpatterns = patterns('',
    # Default index, forced to type '0' via extra kwargs.
    url(r'^$', views.index,{'type': '0'}, name='deal.index'),
    url(r'^index/(\d+)/$', views.index,name='deal.index'),
    url(r'^rank_deal_type', views.rank_deal_type, name='deal.rank_deal_type'),
    url(r'^rank', views.rank, name='deal.rank'),
    url(r'^industry/(\d+)/$', views.industry, name='deal.industry'),
    url(r'^country/(\d+)/$', views.country, name='deal.country'),
    # Paged list endpoint filtered by industry and country (AJAX).
    url(r'^ajax_get_list/(?P<industry_id>\d+)/(?P<country_id>\d+)/(?P<page>\d+)/(?P<pagesize>\d+)$', views.ajax_get_list, name='deal.ajax_get_list'),
)
# -*- coding: utf-8 -*-
from BaseObject import BaseObject
# BaseStage(id, name, < teams = [], competitionId > )
class BaseStage(BaseObject):
    """Base class for a competition stage: an ordered list of fixture ids, a
    set of participating team ids, and result bookkeeping in ``self.data``
    (provided by BaseObject — presumably a dict; confirm in BaseObject).
    """
    def set_up(self, **kwargs):
        # Per-stage configuration; 'stype' and 'legs' come from the caller.
        self.stype = kwargs.get('stype')
        self.fixtures = []
        self.teams = set()
        self.data['finished'] = False
        self.data['winners'] = []
        self.data['losers'] = []
        self.data['current_fixture'] = 0
        self.data['legs'] = kwargs.get('legs')
    ### Fixture handling toolset
    def add_fixture(self, fid):
        """Append fixture id once; return True only when it was added."""
        if fid not in self.fixtures:
            self.fixtures.append(fid)
            return True
        return False
    def get_fixture_by_order_id(self, oid):
        """Return the oid-th fixture id, or None when out of range."""
        if oid >= len(self.fixtures) or oid < 0:
            return None
        return self.fixtures[oid]
    def get_fixtures(self):
        """Yield fixture ids in insertion order.

        Fixed: the original did ``yield self.fixtures[i]`` while iterating
        the list's *values*, using each fixture id as an index — a TypeError
        for non-integer ids and the wrong element otherwise (compare
        get_teams, which yields the element directly).
        """
        for fid in self.fixtures:
            yield fid
    def del_fixture(self, fid):
        """Remove fixture id if present; silently ignore unknown ids."""
        if fid in self.fixtures:
            self.fixtures.remove(fid)
    ### Teams handling toolset
    def add_team(self, tid):
        """Add team id once; return True only when it was added."""
        if tid not in self.teams:
            self.teams.add(tid)
            return True
        return False
    def get_teams(self):
        """Yield team ids (set iteration order is arbitrary)."""
        for team in self.teams:
            yield team
    ### Placeholders
    def generate_fixture(self):
        # Subclasses implement real fixture generation.
        return False
|
"""
Copyright (c) 2018 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import numpy as np
def refer_to_faq_msg(question_num: int):
    """Build the standard suffix pointing the user at Model Optimizer FAQ
    question number *question_num*.
    """
    template = ('\n For more information please refer to Model Optimizer FAQ'
                ' (<INSTALL_DIR>/deployment_tools/documentation/docs/MO_FAQ.html),'
                ' question #{num}. ')
    return template.format(num=question_num)
class NamedAttrsClass:
    """Lightweight attribute bag: every key of *class_attrs* becomes an
    instance attribute with the corresponding value.
    """
    def __init__(self, class_attrs: dict):
        for name, value in class_attrs.items():
            setattr(self, name, value)
def match_shapes(pattern: np.array, shape: np.array):
    """ Check if shape matches shape pattern handling -1 and 0 in the pattern. """
    # Sizes must agree; -1 and 0 entries in the pattern act as wildcards.
    if pattern.size != shape.size:
        return False
    wildcard = (pattern == 0) | (pattern == -1)
    return np.array_equal(pattern[~wildcard], shape[~wildcard])
def symm_match_shapes(shape1: np.array, shape2: np.array):
    """ Check if shape matches shape pattern handling -1 and 0 in the pattern. """
    # -1/0 entries are ignored whichever side they appear on, so try the
    # directed match both ways.
    if match_shapes(shape1, shape2):
        return True
    return match_shapes(shape2, shape1)
|
import argparse
import time
from max_sat import MaxSatClause, MaxSat
from genetic_algorithm import MaxSatGeneticAlgorithm
if __name__ == '__main__':
    # CLI driver: '-question' selects which coursework task to run.
    cli = argparse.ArgumentParser(description='SATMAX genetic algorithm.')
    cli.add_argument('-question', help='Question number', type=int, required=True)
    cli.add_argument('-clause', help='A SATMAX clause description', type=str)
    cli.add_argument('-assignment', help='An assignment as a bitstring', type=str)
    cli.add_argument('-wdimacs', help='Name of file on WDIMACS format', type=str)
    cli.add_argument('-time_budget', help='Number of seconds per repetition', type=float)
    cli.add_argument('-repetitions', help='The number of repetitions of the algorithm', type=int)
    opts = cli.parse_args()

    if opts.question == 1:
        # Single-clause satisfaction check.
        print(MaxSatClause(opts.clause).check_sat(opts.assignment))
    elif opts.question == 2:
        # Count how many clauses the assignment satisfies.
        instance = MaxSat()
        instance.load_clauses(opts.wdimacs)
        print(instance.count_sat_clauses(opts.assignment))
    elif opts.question == 3:
        # Repeated GA runs, each limited by the time budget.
        instance = MaxSat()
        instance.load_clauses(opts.wdimacs)
        print('Clauses loaded...')
        ga = MaxSatGeneticAlgorithm(pop_size=20, tourn_size=4, mutation_rate=1.5,
                                    time_limit=opts.time_budget,
                                    max_sat_instance=instance)
        for _ in range(opts.repetitions):
            started = time.time()
            t, nsat, xsat = ga.run_ga()
            print(t, nsat)
            print('Elapsed time: {}'.format(time.time() - started))
    else:
        print('Incorrect question number.')
|
#-*- coding:utf-8 -*-
# Python exercise: normalize the capitalization of each name in a list.
def format_name(s):
    """Return *s* with its first letter upper-cased and the rest lower-cased.

    Safe for the empty string thanks to the s[:1] slice.
    """
    return s[:1].upper() + s[1:].lower()

# Fixed: the Python-2-only 'print map(...)' statement is a SyntaxError on
# Python 3; map is also lazy there, so materialize it with list().
print(list(map(format_name, ['adam', 'LISA', 'barT'])))
def note():
    # Doctest-style usage note for count_chars below; this function exists
    # only to hold the example and is never called for a value.
    """
    >>> magic_str = "abra cadabra"
    >>> count_chars(magic_str)
    {'a': 5, 'b': 2, 'r': 2, 'c': 1, 'd': 1}
    """
def count_chars(my_str):
    """Return a dict mapping every non-space character of *my_str* to the
    number of times it appears, keyed in first-seen order.
    """
    # str.count does the tallying; duplicate characters simply overwrite
    # their own entry, which keeps first-occurrence key order — equivalent to
    # the original find_keys/find_appearance helpers. The original also
    # printed a debug line ("looked at the string..."), removed here as
    # leftover debug output.
    return {ch: my_str.count(ch) for ch in my_str if ch != " "}
def find_keys(my_str):
    """Return the distinct non-space characters of *my_str*, in the order
    they first appear.
    """
    seen = []
    for ch in my_str:
        if ch != " " and ch not in seen:
            seen.append(ch)
    return seen
def find_appearance(key, my_str):
    """Return how many times *key* occurs in *my_str*."""
    return my_str.count(key)
# Demo run: count the characters of a sample string.
magic_str = "aviel zwebner"
print(count_chars(magic_str))
from collections import namedtuple
from datetime import timedelta
import json
from django.contrib.auth.models import User
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils import timezone
from django.utils.html import format_html
from django.utils.six import text_type
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailcore.models import Page
from wagtail.wagtailadmin.edit_handlers import MultiFieldPanel, InlinePanel
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from modelcluster.fields import ParentalKey
from wagtailsurveys.models import AbstractSurvey, AbstractFormField, AbstractFormSubmission
from unidecode import unidecode
from content.edit_handlers import ReadOnlyPanel
from users.models import RegUser
class CoachSurveyIndex(Page):
    # Container page: only CoachSurvey pages may be created beneath it.
    subpage_types = ['CoachSurvey']
class CoachSurvey(AbstractSurvey):
    """A survey delivered through the coach chat UI.

    Holds the coach's intro/outro dialogue, the push-notification copy, the
    delivery delay in days after registration, and which bot conversation
    (baseline / EA tool / endline) the survey belongs to.
    """
    parent_page_types = ['CoachSurveyIndex']
    subpage_types = []
    # String values submitted for yes/no answers.
    ANSWER_YES = '1'
    ANSWER_NO = '0'
    # Form-data key under which the user's consent is recorded.
    CONSENT_KEY = 'survey_consent'
    # Bot conversation type codes.
    NONE = 0
    BASELINE = 1
    EATOOL = 2
    ENDLINE = 3
    # Maps symbolic conversation names (see get_conversation_type) to codes.
    _REVERSE = {
        'SURVEY_BASELINE': BASELINE,
        'SURVEY_EATOOL': EATOOL,
        'SURVEY_ENDLINE': ENDLINE
    }
    intro = models.TextField(
        # Translators: Field name on CMS
        verbose_name=_('intro dialogue'),
        # Translators: Help text on CMS
        help_text=_("The opening line said by the Coach when introducing the Survey."),
        blank=True, null=False
    )
    outro = models.TextField(
        # Translators: Field name on CMS
        verbose_name=_('outro dialogue'),
        # Translators: Help text on CMS
        help_text=_("The closing line said by the Coach when finishing the Survey."),
        blank=True, null=False
    )
    notification_body = models.TextField(
        # Translators: Field name on CMS
        verbose_name=_('notification body'),
        # Translators: Help text on CMS
        help_text=_("The content body of the first Survey notification the user receives."),
        blank=True, null=False
    )
    reminder_notification_body = models.TextField(
        # Translators: Field name on CMS
        verbose_name=_('reminder notification body'),
        # Translators: Help text on CMS
        help_text=_("The content body of the repeating Survey notifications the user receives."),
        blank=True, null=False
    )
    deliver_after = models.PositiveIntegerField(
        # Translators: Field name on CMS
        verbose_name=_('days to deliver'),
        # Translators: Help text on CMS
        help_text=_("The number of days after user registration that the Survey will be available."),
        default=1
    )
    bot_conversation = models.IntegerField(choices=(
        (NONE, _('none')),
        (BASELINE, _('baseline')),
        (EATOOL, _('ea tool')),
        (ENDLINE, _('endline'))
    ), default=NONE)
    def get_data_fields(self):
        """Prepend user/profile columns to the survey's export fields."""
        data_fields = [
            ('user_id', _('Unique User ID')),
            ('name', _('Name')),
            ('username', _('Username')),
            ('mobile', _('Mobile Number')),
            ('gender', _('Gender')),
            ('age', _('Age')),
            ('email', _('Email')),
            ('consent', _('Consented to Survey')),
        ]
        data_fields += super(CoachSurvey, self).get_data_fields()
        return data_fields
    def get_form_fields(self):
        """Return all CoachFormField rows attached to this survey."""
        return self.form_fields.all()
    def get_submission_class(self):
        """Use the richer submission model that freezes user profile data."""
        return CoachSurveySubmission
    def process_consented_submission(self, consent, form):
        """Persist a submission, snapshotting the user's profile fields so the
        export survives later user deletion.
        """
        return self.get_submission_class().objects.create(
            form_data=json.dumps(form.cleaned_data, cls=DjangoJSONEncoder),
            page=self, survey=self, user=form.user,
            consent=consent,
            # To preserve historic information
            user_unique_id=form.user.id,
            name=form.user.get_full_name(),
            username=form.user.username,
            mobile=form.user.profile.mobile,
            gender=str(form.user.profile.get_gender_display() if form.user.profile.gender is not None else ""),
            age=str(form.user.profile.age if form.user.profile.age is not None else ""),
            email=form.user.email
        )
    @classmethod
    def get_current(cls, user):
        """
        Returns the current survey a user can complete. Surveys are delivered in the order of their delivery days
        field. If the user has already submitted to a survey, it will no longer be available.
        :param user: The user for checking their submissions and date registered.
        :return: A tuple containing the survey and its age. Age is measured in days since the survey is first
                 available for the provided user.
        """
        surveys = cls.objects \
            .filter(live=True) \
            .order_by('deliver_after', '-latest_revision_created_at') \
            .exclude(page_ptr__in=CoachSurveySubmission.objects.filter(user=user).values('page'))
        if user.profile:
            # Only surveys whose delivery delay has elapsed for this user.
            surveys = list(filter(lambda s: user.profile.is_joined_days_passed(s.deliver_after), surveys))
        user_endline = EndlineSurveySelectUser.objects.filter(user=user).first()
        if surveys:
            # Check to see whether use should receive the Endline Survey
            if user_endline and surveys[0].bot_conversation == CoachSurvey.ENDLINE:
                if user_endline.is_endline_completed or not user_endline.receive_survey:
                    return None, 0
            survey = surveys[0]
            inactivity_age = (timezone.now() - user.date_joined).days - survey.deliver_after
            return survey, inactivity_age
        return None, 0
    @staticmethod
    def get_conversation_type(bot_conversation_name):
        """Translate a symbolic name (e.g. 'SURVEY_BASELINE') to its code, or
        None when unknown.
        """
        return CoachSurvey._REVERSE.get(bot_conversation_name, None)
# Admin edit UI for CoachSurvey: notification settings, coach dialogue, and
# the survey's form fields.
CoachSurvey.content_panels = AbstractSurvey.content_panels + [
    MultiFieldPanel(
        [
            FieldPanel('deliver_after'),
            FieldPanel('notification_body'),
            FieldPanel('reminder_notification_body'),
        ],
        # Translators: Admin field name
        heading=_('Push Notifications')
    ),
    MultiFieldPanel(
        [
            FieldPanel('intro'),
            FieldPanel('outro'),
            FieldPanel('bot_conversation'),
        ],
        # Translators: Admin field name
        heading=_('Coach UI')),
    InlinePanel('form_fields', label=_("Form fields")),
]
class CoachFormField(AbstractFormField):
    """Survey form field with a stable 'key', so the human-facing label can
    change without orphaning fields in previously exported submissions.
    """
    # Explicit key so that the Label can be changed without breaking submissions
    key = models.CharField(
        _('key'),
        max_length=255,
        help_text=_(
            "Field identifier. Warning: Changing this will prevent existing submissions' fields from being exported."),
        blank=True
    )
    page = ParentalKey(CoachSurvey, related_name='form_fields')
    @property
    def clean_name(self):
        # Prefer the explicit key; otherwise defer to the parent class.
        # NOTE(review): this branch *calls* super().clean_name() while save()
        # below reads it as a plain attribute — one of the two is wrong
        # depending on whether AbstractFormField.clean_name is a property in
        # the installed wagtailsurveys version; confirm and align them.
        if self.key:
            return self.key
        else:
            return super(CoachFormField, self).clean_name()
    def save(self, *args, **kwargs):
        # Normalize the key to a slug; default it from the parent clean_name.
        if self.key:
            self.key = self.slugify(self.key)
        else:
            self.key = super(CoachFormField, self).clean_name
        super(CoachFormField, self).save(*args, **kwargs)
    @staticmethod
    def slugify(name):
        """Taken from `wagtailsurveys.models.AbstractFormField`
        unidecode will return an ascii string while slugify wants a
        unicode string on the other hand, slugify returns a safe-string
        which will be converted to a normal str
        """
        return str(slugify(text_type(unidecode(name))))
# Show the stable 'key' field first in the admin form-field editor.
CoachFormField.panels = [
    FieldPanel('key'),
] + CoachFormField.panels
class CoachSurveySubmission(AbstractFormSubmission):
    """A user's completed survey plus a frozen snapshot of their profile, so
    exports keep working after the user is deleted.
    """
    user = models.ForeignKey(User, on_delete=models.SET_NULL, blank=False, null=True)
    consent = models.BooleanField(default=False)
    # The abstract base class has a `page` field which references the survey, but it has no related name. To find
    # submissions from the survey, we create another foreign key relation. Deleting the survey will delete submission
    # because of the `page` CASCADE option.
    survey = models.ForeignKey(CoachSurvey, on_delete=models.SET_NULL, related_name='submissions', null=True)
    # Fields stored at time of submission, to preserve historic data if the user is deleted
    user_unique_id = models.IntegerField(default=-1)
    name = models.CharField(max_length=100, default='')
    username = models.CharField(max_length=150, default='')
    mobile = models.CharField(max_length=15, default='')
    gender = models.CharField(max_length=10, default='')
    age = models.CharField(max_length=10, default='')
    email = models.CharField(max_length=150, default='')
    def get_data(self):
        """Export row: prefer live profile data, otherwise the frozen copy."""
        form_data = super(CoachSurveySubmission, self).get_data()
        if self.user and self.user.profile:
            # Populate from live user data
            form_data.update({
                'user_id': str(self.user.id),
                'name': self.user.get_full_name(),
                'username': self.user.username,
                'mobile': self.user.profile.mobile,
                'gender': self.user.profile.get_gender_display(),
                'age': str(self.user.profile.age),
                'email': self.user.email,
                'consent': str(self.consent)
            })
        else:
            # Populate from historic user data
            form_data.update({
                'user_id': self.user_unique_id,
                'name': self.name,
                'username': self.username,
                'mobile': self.mobile,
                'gender': self.gender,
                'age': self.age,
                'email': self.email,
                'consent': str(self.consent)
            })
        return form_data
# Lightweight API response wrapper: availability flag, days the survey has
# been waiting for the user, and the survey itself (or None).
CoachSurveyResponse = namedtuple('CoachSurveyResponse', ['available', 'inactivity_age', 'survey'])
class CoachSurveySubmissionDraft(models.Model):
    """Drafts are to save the user's progress through a survey. As the user answers survey questions, an update is done
    on the appropriate draft.
    """
    user = models.ForeignKey(User)
    survey = models.ForeignKey(CoachSurvey, related_name='drafts')
    consent = models.BooleanField(default=False)
    # Submission is stored as JSON
    submission_data = models.TextField()
    # Submission relation is set when draft is completed.
    submission = models.ForeignKey(CoachSurveySubmission, null=True)
    complete = models.BooleanField(default=False)
    # Incremented on every save() — see below.
    version = models.IntegerField(default=0)
    created_at = models.DateTimeField(default=timezone.now)
    modified_at = models.DateTimeField(default=timezone.now)
    class Meta:
        verbose_name = _('coach survey submission draft')
        verbose_name_plural = _('coach survey submission drafts')
    @property
    def has_submission(self):
        # True once any draft answers have been stored.
        return bool(self.submission_data)
    def save(self, *args, **kwargs):
        # Bump the version and touch modified_at on every write.
        self.version += 1
        self.modified_at = timezone.now()
        super(CoachSurveySubmissionDraft, self).save(*args, **kwargs)
###############################
# Endline Survey User Chooser #
###############################
class EndlineSurveySelectUser(models.Model):
    """Per-user opt-in record controlling who receives the Endline Survey."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    receive_survey = models.BooleanField(default=False, help_text=_('Should the user receive the Endline Survey'))
    survey_completed = models.BooleanField(default=False, help_text=_('Has the user already completed the survey'))
    class Meta:
        # Translators: Collection name on CMS
        verbose_name = _('endline survey selected user')
        # Translators: Plural collection name on CMS
        verbose_name_plural = _('endline survey selected users')
    def receive_endline_survey(self):
        """Render the admin-list checkbox; presumably admin JS hooks onto the
        'mark-receive-survey' class — confirm in the CMS templates.
        """
        if self.receive_survey:
            return format_html(
                "<input type='checkbox' id='{}' class='mark-receive-survey' value='{}' checked='checked' />",
                'participant-is-shortlisted-%d' % self.id, self.id)
        else:
            return format_html("<input type='checkbox' id='{}' class='mark-receive-survey' value='{}' />",
                               'participant-is-shortlisted-%d' % self.id, self.id)
    @property
    def is_baseline_completed(self):
        # True if the user submitted to the (first) baseline survey.
        baseline_surveys = CoachSurvey.objects.filter(bot_conversation=CoachSurvey.BASELINE).first()
        completed = CoachSurveySubmission.objects.filter(survey=baseline_surveys, user=self.user).first()
        if not completed:
            return False
        return True
    @property
    def is_endline_completed(self):
        # True if the user submitted to the (first) endline survey.
        endline_surveys = CoachSurvey.objects.filter(bot_conversation=CoachSurvey.ENDLINE).first()
        completed = CoachSurveySubmission.objects.filter(survey=endline_surveys, user=self.user).first()
        if not completed:
            return False
        return True
# Admin edit UI: the completion flags are read-only, derived properties.
EndlineSurveySelectUser.panels = [
    MultiFieldPanel([
        FieldPanel('user'),
        FieldPanel('receive_survey'),
        ReadOnlyPanel('is_baseline_completed'),
        ReadOnlyPanel('is_endline_completed',)
    ])
]
# When signal is attached to User, it won't be fired on registration (RegUser create)
@receiver(post_save, sender=RegUser)
def create_survey_link(sender, instance, created, **kwargs):
    """Ensure survey link is created"""
    # Idempotent: get_or_create guards against duplicate rows.
    if created:
        EndlineSurveySelectUser.objects.get_or_create(user=instance)
|
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django import forms
from django.core.validators import validate_slug, validate_email
from .models import Image
from .models import Clusters
class ImageForm(forms.ModelForm):
    # ModelForm over Image exposing all fields; the 'photo' label is blanked
    # so only the upload widget shows.
    class Meta:
        model = Image
        fields = '__all__'
        labels = {'photo':''}
# Status options for ClustersForm.status; 'NA' shows a friendlier label.
CHOICES=[
    ('fixed','fixed'),
    ('working on it','working on it'),
    ('NA','Nothing done till now'),
]
class ClustersForm(forms.ModelForm):
    """Form for updating a Cluster's status; the cluster id travels in a
    hidden input.
    """
    cluster = forms.IntegerField(widget=forms.HiddenInput())
    status= forms.CharField(label='What is the current status?', widget=forms.Select(choices=CHOICES))
    class Meta:
        model = Clusters
        fields = ('status','cluster',)
        # labels = {'photo':''}
    def email(self):
        """Reject email addresses already registered to another user.

        NOTE(review): for Django to invoke this automatically during
        validation it would need to be named ``clean_email``; the name is
        kept to preserve the existing interface — confirm intended usage.

        :raises forms.ValidationError: if the address is already in use.
        """
        email = self.cleaned_data.get("email")
        user_count = User.objects.filter(email=email).count()
        print(user_count)
        if user_count > 0:
            # Fixed: 'forms.validationError' does not exist and raised an
            # AttributeError; the correct class is forms.ValidationError.
            raise forms.ValidationError("tttttttttt")
        return email
class SignUpForm(UserCreationForm):
    """User registration form: styled username/name/email/password widgets on
    top of Django's UserCreationForm validation.
    """
    first_name = forms.CharField(widget=forms.TextInput(attrs={'class': 'input','placeholder':'First Name'}))
    last_name = forms.CharField(widget=forms.TextInput(attrs={'class': 'input','placeholder':'Last Name'}))
    email = forms.EmailField(widget=forms.TextInput(attrs={'class': 'input','placeholder':'Email'}))
    password1 = forms.CharField(
        widget=forms.PasswordInput(attrs={'class':'input', 'type':'password', 'placeholder':'password'}),
    )
    password2 = forms.CharField(
        widget=forms.PasswordInput(attrs={'class':'input', 'type':'password', 'placeholder':'password'}),
    )
    class Meta:
        model = User
        fields = ('username','first_name','last_name','email','password1','password2')
        widgets={
            'username': forms.TextInput(attrs={'class': 'input','placeholder':'Username'}),
        }
#Program to print multiplication table of a given number using while loop
# Reads an integer and prints its 1..10 multiplication table.
num = int(input("Enter the number "))
counter = 1
while counter <= 10:
    print(f"{num} X {counter} = {num * counter}")
    counter += 1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019/3/21 12:56
# @Author : Fred Yangxiaofei
# @File : paid_write_redis.py
# @Role : 用于提醒,如:将要过期的电信线路
import json
from libs.database import model_to_dict
from libs.database import db_session
from libs.redis_connect import redis_conn
from biz.get_userinfo import get_user_info
from models.paid_mg import PaidMG
import datetime
from websdk.consts import const
from websdk.utils import SendMail
def get_paid_info():
    """Load every PaidMG row as a dict, with the datetime columns stringified
    so the records are redis/JSON friendly.

    :return: list of dicts, one per paid-management record.
    """
    rows = db_session.query(PaidMG).all()
    db_session.close()
    records = []
    for row in rows:
        rec = model_to_dict(row)
        for col in ('paid_start_time', 'paid_end_time', 'create_at', 'update_at'):
            rec[col] = str(rec[col])
        records.append(rec)
    return records
def save_data():
    """Write reminder recipients into redis: for every paid item, store a
    hash (keyed by the item's paid_name) mapping each subscribed user's
    phone number to their email address.
    """
    # Paid-item records and CODO user profiles.
    paid_data = get_paid_info()
    user_data = get_user_info()
    users = [json.loads(x) for x in user_data]
    with redis_conn.pipeline(transaction=False) as p:
        for remind in paid_data:
            # Hoisted out of the user loop: the subscription list is a
            # property of the item, not of the user.
            nicknames = remind.get('nicknames')
            if not nicknames:
                continue
            subscribed = nicknames.split(',')
            for u in users:
                if u.get('nickname') in subscribed:
                    # Renamed from 'save_data', which shadowed this function.
                    recipient = {u.get('tel'): u.get('email')}
                    p.hmset(remind.get('paid_name'), recipient)
        p.execute()
def check_reminder():
    """Check which items need an email reminder and send the alerts.

    Logic:
      1. Fetch every item's expiry time.
      2. Read each item's advance-notice window in days.
      3. Compute when reminders should start (expiry - notice days).
      4. Email an alert for every item whose start date is <= now.
    :return:
    """
    # Mailbox configuration, stored in redis app settings.
    config_info = redis_conn.hgetall(const.APP_SETTINGS)
    sm = SendMail(mail_host=config_info.get(const.EMAIL_HOST), mail_port=config_info.get(const.EMAIL_PORT),
                  mail_user=config_info.get(const.EMAIL_HOST_USER),
                  mail_password=config_info.get(const.EMAIL_HOST_PASSWORD),
                  mail_ssl=True if config_info.get(const.EMAIL_USE_SSL) == '1' else False)
    for msg in db_session.query(PaidMG).all():
        # Date from which this item should start alerting.
        reminder_time = msg.paid_end_time - datetime.timedelta(days=int(msg.reminder_day))
        if reminder_time <= datetime.datetime.now():
            # Recipient emails were stored per item by save_data().
            emails_list = redis_conn.hvals(msg.paid_name)
            print('msg_name---->',msg.paid_name)
            print('email_list---->',emails_list)
            content = """
            <!DOCTYPE html>
            <html lang="en">
            <head>
                <meta charset="UTF-8">
                <title>OpenDevOps运维提醒邮件</title>
                <style type="text/css">
                    p {
                        width: 100%;
                        margin: 30px 0 30px 0;
                        height: 30px;
                        line-height: 30px;
                        text-align: center;
                    }
                    table {
                        width: 100%;
                        text-align: center;
                        border-collapse: collapse;
                    }
                    tr.desc {
                        background-color: #E8E8E8;
                        height: 30px;
                    }
                    tr.desc td {
                        border-color: black;
                    }
                    td {
                        height: 30px;
                    }
                </style>
                <style>
                    .bodydiv {
                        width: 60%;
                        margin: 0 auto;
                    }
                    .tc {
                        text-align: center;
                    }
                    .content {
                        margin: 10px 0 10px 30px;
                    }
                </style>
            </head>
            """
            content += """
            <div class="bodydiv">
                Hi, Ops:
                <div class="content">
                    你有以下事项提醒需要关注
                </div>
                <table>
                    <tr class="desc">
                        <td>名称</td>
                        <td>过期时间</td>
                        <td>提前通知天数</td>
                    </tr>
            """
            content += """
                    <tr>
                        <td>{}</td>
                        <td>{}</td>
                        <td>{}</td>
                    </tr>""".format(msg.paid_name, msg.paid_end_time, msg.reminder_day)
            content += """
                </table>
            </div>
            </body>
            </html>
            """
            # send_msg = msg.paid_name + "\n到期时间:" + str(msg.paid_end_time)
            #sm.send_mail("yanghongfei@shinezone.com", "运维信息提醒", send_msg)
            sm.send_mail(",".join(emails_list), '运维提醒信息', content, subtype='html')
def main():
    """Write reminder data to redis, then email alerts for items that are
    about to expire.
    :return:
    """
    save_data()
    check_reminder()
if __name__ == '__main__':
    main()
    #pass
|
###############################################################
#
# Skeleton top job options for DQHistogramMerge_trf
#
# Merges the per-job monitoring-histogram ROOT files listed in
# runArgs.inputFile into the single file runArgs.outputHISTFile by
# running DQHistogramMerge.py as a subprocess.  Python 2 / Athena
# job-transform code: theApp and runArgs are injected by the framework.
#
#==============================================================
#hack: we are forced to use athena (as a dummy) within the current PyJobTransformsCore
theApp.EvtMax=1
# merge and/or rename monitoring histogram file
# Write the list of input ROOT files, one per line, for DQHistogramMerge.py.
mergeListFile=open('hist_merge_list.txt','w')
inFiles=runArgs.inputFile
for f in inFiles:
    mergeListFile.write( str(f) + '\n' )
mergeListFile.close()
#
#
# This is the old method which was causing stray DQHistogramMerge processes.
#
# call DQHistogramMerge
#cmd = 'DQHistogramMerge.py hist_merge_list.txt %s False' % (runArgs.outputHISTFile)
#cmd = 'sleep 500'
#import commands
#(status, output) = commands.getstatusoutput(cmd)
import os,subprocess
# Writing to a temp file should allow termination eventually even if the
# parent is killed.  NOTE: os.tmpfile() is Python-2 only (removed in Py3).
tmpbuff=os.tmpfile()
dqhistpipe=subprocess.Popen(["DQHistogramMerge.py", "hist_merge_list.txt", runArgs.outputHISTFile, "False"],
                            stdout=tmpbuff, stderr=tmpbuff, shell=False)
status=dqhistpipe.wait()
print "---------------------------------------------------------------------------------------"
print '## Output of \'DQHistogramMerge.py hist_merge_list.txt ' + runArgs.outputHISTFile + ' False\':'
# Echo the captured stdout/stderr of the merge job into this job's log.
try:
    tmpbuff.seek(0)
    for line in tmpbuff:
        print line,
finally:
    tmpbuff.close()
print '## DQHistogramMerge.py finished with retcode = %s' % (status)
print "---------------------------------------------------------------------------------------"
# Abort the transform on any non-zero return code from the merge step.
if not status==0:
    raise RuntimeError("DQ HiST merging did NOT work. Stopping!")
|
import os
import logging
from slack_bolt import App
from slack_bolt.adapter.socket_mode import SocketModeHandler
from googleapiclient import discovery
from google.oauth2 import service_account
from datetime import datetime
# Initialize the Bolt app with the bot token; the socket-mode handler is
# created in the __main__ block below using SLACK_APP_TOKEN.
app = App(token=os.environ.get("SLACK_BOT_TOKEN"))
# Uncomment for verbose Slack SDK debugging:
# logging.basicConfig(level=logging.DEBUG)
@app.event("message")
def handle_message_events(ack):
    # Plain messages are not processed; acknowledge so Slack does not retry.
    ack()
@app.event("reaction_removed")
def handle_reaction_removed_events(ack):
    # Removing a reaction has no effect; acknowledge to suppress retries.
    ack()
@app.event("reaction_added")
def handle_reaction_added_events(ack, body, say, client):
    """Drive the support-rating workflow off emoji reactions.

    * ``:white_check_mark:`` on a request message -> ask the author, in a
      thread, to rate the result with a :one:..:five: reaction.
    * ``:one:``..``:five:`` on that thread reply -> resolve the original
      request, its author (customer) and the agent who checkmarked it
      (executor), and append a row to the Google Sheet via app_to_file().
    """
    # Acknowledge action request
    ack()
    # logging.warning(body)
    if body['event']['reaction'] == 'white_check_mark':
        # Fetch the message that was just marked as solved.
        messages = client.conversations_replies(
            channel=body["event"]["item"]["channel"],
            ts=body["event"]["item"]["ts"]
        )
        origin = messages["messages"][0]
        user_id = origin["user"]
        # Ask the requester (in a thread) to rate the solution 1-5.
        client.chat_postMessage(
            channel=body["event"]["item"]["channel"],
            text=f"<@{user_id}> мы решили твою задачу, оцени результат реакцией от :one: до :five:",
            thread_ts=body["event"]["item"]["ts"]
        )
    elif body['event']['reaction'] in ['one', 'two', 'three', 'four', 'five']:
        # The rating reaction sits on the bot's thread reply; walk up to the
        # thread root to find the original request message.
        t_messages = client.conversations_replies(
            channel=body["event"]["item"]["channel"],
            ts=body["event"]["item"]["ts"]
        )
        messages = client.conversations_replies(
            channel=body["event"]["item"]["channel"],
            ts=t_messages["messages"][0]["thread_ts"]
        )
        origin = messages["messages"][0]
        origin_link = client.chat_getPermalink(
            channel=body["event"]["item"]["channel"],
            message_ts=origin["ts"]
        )
        customer = client.users_info(user=origin["user"])
        # The agent who added :white_check_mark: is treated as the executor.
        # NOTE(review): assumes the root message carries a "reactions" list
        # containing a white_check_mark entry — an unmarked root would raise
        # KeyError/IndexError here; confirm intended.
        origin_check = list(filter(lambda f: (f["name"] == "white_check_mark"), origin["reactions"]))
        # logging.warning(origin_check)
        executor = client.users_info(user=origin_check[0]["users"][0])
        # Only a rating given by the original requester is recorded.
        if body['event']['user'] == origin["user"]:
            now = datetime.now()
            row = [
                now.strftime("%m/%d/%Y"),
                origin_link["permalink"],
                customer["user"]["real_name"],
                executor["user"]["real_name"],
                body['event']['reaction']
            ]
            app_to_file(row)
def app_to_file(body, spreadsheet_id='1XjX4OJbU0sK2f03-BmUkUWoV38uEtVwL8TbmLi78hLU',
                service_account_file='token.json'):
    """Append one row to the ratings Google Sheet.

    Parameters
    ----------
    body : list
        Cell values for the new row (date, permalink, customer name,
        executor name, rating emoji name).
    spreadsheet_id : str, optional
        Target spreadsheet; defaults to the team's ratings sheet so existing
        callers are unaffected.
    service_account_file : str, optional
        Path to the Google service-account JSON credentials file.
    """
    # NOTE(review): the sqlservice.admin scope looks unrelated to Sheets —
    # confirm whether it can be dropped from this credential.
    SCOPES = [
        'https://www.googleapis.com/auth/sqlservice.admin',
        'https://www.googleapis.com/auth/drive',
        'https://www.googleapis.com/auth/drive.file',
        'https://www.googleapis.com/auth/spreadsheets'
    ]
    credentials = service_account.Credentials.from_service_account_file(
        service_account_file, scopes=SCOPES)
    service = discovery.build('sheets', 'v4', credentials=credentials)
    # With INSERT_ROWS, the append call finds the table containing A1:E1 and
    # inserts the new row after its last row; RAW writes values unparsed.
    value_range_body = {
        "values": [body],
        "majorDimension": "ROWS"
    }
    request = service.spreadsheets().values().append(
        spreadsheetId=spreadsheet_id,
        range='A1:E1',
        valueInputOption='RAW',
        insertDataOption='INSERT_ROWS',
        body=value_range_body
    )
    request.execute()
# Start your app.
if __name__ == "__main__":
    # Socket mode avoids exposing a public HTTP endpoint; it requires an
    # app-level token (xapp-...) in SLACK_APP_TOKEN.
    handler = SocketModeHandler(app, os.environ["SLACK_APP_TOKEN"])
    handler.start()
|
import bpy

# Export the active mesh as flat JS-style vertex/face index arrays,
# converting Blender's Z-up coordinates to Y-up and applying a fixed scale.
decimals = 0
scale = 1000

obj = bpy.context.active_object.data


def _fmt(value):
    # Round to the configured precision; when decimals == 0 emit plain ints.
    rounded = round(value, decimals)
    if decimals == 0:
        rounded = int(rounded)
    return str(rounded)


verts = []
for v in obj.vertices:
    # (x, y, z) -> (x, z, -y): swap the up axis for the target engine.
    verts.extend([_fmt(v.co.x * scale), _fmt(v.co.z * scale), _fmt(-v.co.y * scale)])

# Flatten every polygon's vertex indices in order.
faces = [str(idx) for poly in obj.polygons for idx in poly.vertices]

print()
print(obj.name)
print()
print('vertices: [' + ','.join(verts) + '],')
print('faces: [' + ','.join(faces) + ']')
|
from django.db import models
# Create your models here.
class Notes(models.Model):
    """A single note with a short title, free-form body and creation time."""
    # Short title shown in listings (max 30 characters).
    Notes_Title = models.CharField(max_length=30)
    # Free-form note body.
    Notes_Desc = models.TextField()
    # Set automatically when the row is first created.
    time = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        # Human-readable representation used by the admin and shell.
        return self.Notes_Title
#!/usr/bin/python
#coding:utf-8
# https://a244.hateblo.jp/entry/2018/06/02/224659
#python ocr_api_sample.py
import os
import base64
import json
from requests import Request, Session
import requests
import json
import base64 # 画像はbase64でエンコードする必要があるため
API_KEY = os.environ["GOOGLE_VISION_API"]
def text_detection(image_path):
    """Run Google Vision TEXT_DETECTION on a local image file and return
    the parsed JSON response as a dict."""
    endpoint = 'https://vision.googleapis.com/v1/images:annotate?key={}'.format(API_KEY)
    # The Vision API expects the image bytes base64-encoded inside the JSON
    # body, so encode then decode back to a plain utf-8 string.
    with open(image_path, "rb") as img:
        encoded = base64.b64encode(img.read()).decode('utf-8')
    payload = {
        'requests': [
            {
                'image': {'content': encoded},
                'features': [{'type': 'TEXT_DETECTION'}]
            }
        ]
    }
    response = requests.post(endpoint, data=json.dumps(payload))
    return response.json()
if __name__ == "__main__":
    # Sample run: OCR a local image and print/dump the recognized text.
    img_path = "eng-768x1024.jpg"
    res_json = text_detection(img_path)
    # print(res_json)
    # The first textAnnotations entry holds the full recognized text block.
    res_text = res_json["responses"][0]["textAnnotations"][0]["description"]
    print(json.dumps(res_json, indent=4, sort_keys=True, ensure_ascii=False))
    print(res_text)
    # NOTE(review): despite the .json name, data.json receives the plain
    # recognized text, not JSON — the json.dump line is commented out.
    with open("data.json", "w") as js:
        #json.dump(res_json, js, indent=4, ensure_ascii=False)
        js.write(res_text)
|
# Othello flip routine: called after each stone is placed to flip every
# captured opposing stone.
def Othello(y, x, dol):
    """Resolve captures for the stone of colour *dol* just placed at (y, x).

    Scans the eight compass directions; any contiguous run of opposing
    stones terminated by one of our own stones is flipped to *dol*.
    Reads/writes the module-level board ``matrix`` of size ``N``.
    """
    # The eight directions, clockwise starting from "up".
    directions = [(-1, 0), (-1, 1), (0, 1), (1, 1), (1, 0), (1, -1), (0, -1), (-1, -1)]
    for dy, dx in directions:
        step_y, step_x = dy, dx
        captured = []  # opposing stones seen so far along this ray
        while True:
            ny, nx = y + step_y, x + step_x
            # Stop at the board edge or an empty cell: nothing to flip.
            if not (0 <= ny < N and 0 <= nx < N) or matrix[ny][nx] == 0:
                break
            if matrix[ny][nx] == dol:
                # Our own stone closes the run: flip everything in between.
                for cy, cx in captured:
                    matrix[cy][cx] = dol
                break
            # An opposing stone: remember it and keep walking outward.
            captured.append((ny, nx))
            step_y += dy
            step_x += dx
T = int(input())
for tc in range(1, T + 1):
    N, M = map(int, input().split())
    # N x N board: 0 = empty, 1 = black stone, 2 = white stone.
    matrix = [[0] * N for _ in range(N)]
    # Standard opening position: two white and two black stones in the centre.
    mid = N // 2
    matrix[mid - 1][mid - 1] = matrix[mid][mid] = 2
    matrix[mid][mid - 1] = matrix[mid - 1][mid] = 1
    for _ in range(M):
        y, x, dol = map(int, input().split())
        # Place the stone (input coordinates are 1-based) ...
        matrix[y - 1][x - 1] = dol
        # ... then flip every stone this move captures.
        Othello(y - 1, x - 1, dol)
    # Count the final stones of each colour.
    b_cnt = sum(row.count(1) for row in matrix)
    w_cnt = sum(row.count(2) for row in matrix)
    print('#{} {} {}'.format(tc, b_cnt, w_cnt))
import os
import pandas as pd
import datetime
import shutil
# Rebuild a clean `data/` working directory containing one experiment
# properties file, then leave the process cwd inside `data/`.
parent_dir = os.getcwd()  # remembered so callers can chdir back if needed
# Remove any previous data folder and its contents; ignore_errors also
# covers the folder simply not existing (replaces the old bare except).
shutil.rmtree('data', ignore_errors=True)
# Make a fresh data folder and work from inside it.
dataFolder = 'data'
os.makedirs(dataFolder)
os.chdir(dataFolder)
# Write the experiment property file: a timestamp header plus the
# fileNamePrefix entry pointing at data/experiment1/.
foldername = 'experiment1'
os.makedirs(foldername)
# Context manager guarantees the file is closed even if a write fails.
with open('{}/experiment.properties'.format(foldername), 'w') as file:
    file.write('#{}\n'.format(str(datetime.datetime.now())))
    file.write('fileNamePrefix.value={}/{}/\n'.format(dataFolder, foldername))
import os
import subprocess
import tarfile
def split_asts(project, giant_ast_file, n_per_file):
    """
    Divide the list of ASTs into sub-files, to make it easier to process them
    with Ben's pipeline. The sub-files will be placed in the project folder,
    and also put into a tar file.

    Parameters
    ----------
    project : string
        name of the project from datamodel.project, which defines the folder name
    giant_ast_file : string
        Name of the AST file created by AST section of run_beast.py
    n_per_file : integer
        number of lines per output file
    """
    with open(giant_ast_file, "r") as f:
        ast_data = f.readlines()
    # remove the first line (header, not an AST entry)
    del ast_data[0]
    # length of file
    n_lines = len(ast_data)
    print("AST file contains " + str(n_lines) + " lines")
    # number of new files (round up so a partial chunk gets its own file)
    n_file = n_lines // n_per_file
    if n_lines % n_per_file != 0:
        n_file += 1
    print("Splitting AST file into " + str(n_file) + " sub-files")
    # write each chunk; slicing naturally truncates the final partial chunk
    file_list = []
    for i in range(n_file):
        fname = "./" + project + "/fake_" + str(i + 1) + ".lst"
        with open(fname, "w") as f_out:
            f_out.writelines(ast_data[i * n_per_file:(i + 1) * n_per_file])
        file_list.append(fname)
    # combine AST files into a tar file; tarfile replaces the old
    # shell-string `tar` invocation, which broke on paths with spaces
    # and spawned an extra shell per call
    tar_path = "./" + project + "/" + project + "_ASTs.tar"
    with tarfile.open(tar_path, "w") as tar:
        for fname in file_list:
            tar.add(fname)
    # remove the individual files to avoid clutter (no shell `rm` needed)
    for fname in file_list:
        os.remove(fname)
|
import sqlite3
import random
class BankingSystem:
    """Minimal interactive card-banking system backed by SQLite (card.s3db)."""

    def __init__(self):
        # One connection/cursor pair is reused for the whole session.
        self.conn = sqlite3.connect('card.s3db')
        self.cur = self.conn.cursor()
        # self.cur.execute('DROP TABLE card')
        # NOTE: raises sqlite3.OperationalError if the table already exists
        # (pre-existing behaviour when card.s3db survives from a prior run).
        self.cur.execute('CREATE TABLE card (id INTEGER PRIMARY KEY AUTOINCREMENT, '
                         'number TEXT, pin TEXT, balance INTEGER DEFAULT 0)')

    @staticmethod
    def luhn(card):
        """Return True when *card* (a string of digits) passes the Luhn check."""
        n = list(map(int, card))
        # Double every second digit counting from the left (16-digit layout);
        # a doubled two-digit value is reduced by summing its digits (== -9).
        for i in range(len(n) - 1):
            if i % 2 == 0:
                n[i] *= 2
                if n[i] > 9:
                    n[i] -= 9
        control_sum = sum(n[:-1])
        # The check digit brings the total up to the next multiple of 10.
        # Bug fix: the old formula ((control_sum // 10) + 1) * 10 - control_sum
        # yields 10 (never a digit) when control_sum is already a multiple of
        # 10, wrongly rejecting every valid number whose check digit is 0.
        check = (10 - control_sum % 10) % 10
        return check == n[-1]

    def create_card(self):
        """Generate a random 16-digit number with BIN 400000 that passes Luhn."""
        while True:
            card = random.randint(4000000000000000, 4000009999999999)
            if self.luhn(str(card)):
                return card

    def menu_acc(self, card):
        """Logged-in account menu loop.

        Returns True to go back to the main menu, False to exit the program.
        All queries are parameterized: card numbers and amounts come from
        user input, so they are never interpolated into the SQL text.
        """
        while True:
            print('''1. Balance
2. Add income
3. Do transfer
4. Close account
5. Log out
0. Exit''')
            query = input()
            if query == '1':
                self.cur.execute('SELECT balance FROM card WHERE number = ?', (card,))
                print('Balance:', self.cur.fetchone()[0])
            elif query == '2':
                print('Enter income:')
                self.cur.execute('UPDATE card SET balance = balance + ? WHERE number = ?',
                                 (int(input()), card))
                self.conn.commit()
                print('Income was added!')
            elif query == '3':
                print('''Transfer
Enter card number:''')
                trans_card = input()
                # Look up the target card first; the result is consumed below.
                self.cur.execute('SELECT number FROM card WHERE number = ?', (trans_card,))
                if trans_card == card:
                    print("You can't transfer money to the same account!")
                elif not self.luhn(trans_card):
                    print('Probably you made a mistake in the card number. Please try again!')
                elif not self.cur.fetchone():
                    print('Such a card does not exist.')
                else:
                    print('Enter how much money you want to transfer:')
                    money = int(input())
                    self.cur.execute('SELECT balance FROM card WHERE number = ?', (card,))
                    if money > self.cur.fetchone()[0]:
                        print('Not enough money!')
                    else:
                        # Credit the target, then debit the source.
                        self.cur.execute(
                            'UPDATE card SET balance = balance + ? WHERE number = ?',
                            (money, trans_card))
                        self.conn.commit()
                        self.cur.execute(
                            'UPDATE card SET balance = balance - ? WHERE number = ?',
                            (money, card))
                        self.conn.commit()
                        print('Success!')
            elif query == '4':
                self.cur.execute('DELETE FROM card WHERE number = ?', (card,))
                self.conn.commit()
                print('The account has been closed!')
                return True
            elif query == '5':
                print('You have successfully logged out!')
                return True
            elif query == '0':
                return False

    def main_menu(self):
        """Top-level menu loop: create an account, log in, or exit."""
        query = True
        while query:
            print('''1. Create an account
2. Log into account
0. Exit''')
            query = input()
            if query == '0':
                query = False
            elif query == '1':
                # Store number and PIN explicitly as text (the column type).
                self.cur.execute('INSERT INTO card(number, pin) VALUES (?, ?)',
                                 (str(self.create_card()), str(random.randint(1000, 9999))))
                self.conn.commit()
                print('''Your card has been created
Your card number:''')
                self.cur.execute('SELECT number FROM card WHERE id = (SELECT MAX(id) FROM card)')
                print(self.cur.fetchone()[0])
                print('Your card PIN:')
                self.cur.execute('SELECT pin FROM card WHERE id = (SELECT MAX(id) FROM card)')
                print(self.cur.fetchone()[0])
            elif query == '2':
                print('Enter your card number:')
                card = input()
                print('Enter your PIN:')
                pin = input()
                self.cur.execute('SELECT number FROM card WHERE number = ?', (card,))
                cards = self.cur.fetchone()
                self.cur.execute('SELECT pin FROM card WHERE number = ?', (card,))
                pin_card = self.cur.fetchone()
                if cards is not None and card in cards and pin in pin_card:
                    print('You have successfully logged in!')
                    # menu_acc tells us whether to keep looping or exit.
                    query = self.menu_acc(card)
                else:
                    print('Wrong card number or PIN!')
        print('Bye!')
# Entry point: build the system (creates the card table) and run the
# interactive menu until the user exits.
acc = BankingSystem()
acc.main_menu()
|
import sqlite3
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
# open a database connection to the survey SQLite file
conn = sqlite3.connect("source/m4_survey_data.sqlite")
# print how many rows there are in the table named 'master'
QUERY = """SELECT COUNT(*) FROM master""" # triple quote marks let the query contain '' strings unescaped
# read_sql_query runs the SQL query and returns the result as a dataframe
df = pd.read_sql_query(QUERY, conn)
print(df.head())
# list all tables
# print all the table names in the database (sqlite_master is SQLite's catalog)
QUERY = """
SELECT name as Table_Name FROM
sqlite_master WHERE
type = 'table'
"""
# the read_sql_query runs the sql query and returns the data as a dataframe
pd.read_sql_query(QUERY, conn)
# run a group-by query: respondent count per Age value
QUERY = """
SELECT Age,COUNT(*) as count
FROM master
group by age
order by age
"""
pd.read_sql_query(QUERY, conn)
# describe a table: fetch its CREATE TABLE statement from the catalog
table_name = 'master'  # the table you wish to describe
QUERY = """
SELECT sql FROM sqlite_master
WHERE name= '{}'
""".format(table_name)
df = pd.read_sql_query(QUERY, conn)
# print(df.iat[0,0])
# load the whole master table into a dataframe for the plots below
QUERY = """
SELECT *
FROM master
"""
df = pd.read_sql_query(QUERY, conn)
# Plot a histogram of ConvertedComp (compensation; units/currency not shown
# here — presumably annual USD, verify against the survey schema).
ax2 = plt.hist(df['ConvertedComp'])
plt.show()
# Plot a box plot of Age.
ax4 = sns.boxplot(data=df['Age'])
# Create a scatter plot of Age and WorkWeekHrs.
plt.figure(figsize=(10, 5))
sns.scatterplot(x=df.Age, y=df.WorkWeekHrs, data=df)
plt.show()
# Create a bubble plot of WorkWeekHrs and CodeRevHrs, use Age column as bubble size.
plt.figure(figsize=(10, 5))
plt.scatter(x=df.WorkWeekHrs, y=df.CodeRevHrs, c='green', alpha=0.2, s=df.Age)
plt.show()
# pie chart of the top-5 desired databases for next year
df2 = pd.read_sql_query('select * from DatabaseDesireNextYear', conn)
# NOTE(review): the dataframe loaded above is immediately overwritten below.
df2 = pd.read_sql_query(
    'select count(DatabaseDesireNextYear) as nextYear, DatabaseDesireNextYear from DatabaseDesireNextYear group by DatabaseDesireNextYear order by nextYear desc limit 5',
    conn)
df2.head()
# print(df2['nextYear'])
df2.groupby(['DatabaseDesireNextYear']).sum().plot(kind='pie', y='nextYear', startangle=90, figsize=(15, 10),
                                                   autopct='%1.1f%%')
plt.show()
# Create a stacked chart of median WorkWeekHrs and CodeRevHrs for the age group 30 to 35.
df_age = pd.read_sql_query('SELECT WorkWeekHrs, CodeRevHrs, Age FROM master', conn)
# group respondents by age and apply median() function
# (the axis= keyword is deprecated in newer pandas; axis=0 is the default)
df_age = df_age.groupby('Age', axis=0).median()
# step 2: plot data
# NOTE(review): Age is now the index, and [30:35] on a DataFrame with an
# integer index slices by position, not by label — confirm these really are
# ages 30-35 rather than the 30th-34th grouped rows.
df_age[30:35].plot(kind='bar', figsize=(10, 6), stacked=True)
plt.xlabel('Age')  # add to x-label to the plot
plt.ylabel('Hours')  # add y-label to the plot
plt.title('Median Hours by Age')  # add title to the plot
plt.show()
# line chart
# step 1: get the data needed
df_comp = pd.read_sql_query('SELECT ConvertedComp, Age FROM master', conn)
# group respondents by age and apply median() function
df_comp = df_comp.groupby('Age', axis=0).median()
# step 2: plot data (same positional-slice caveat as above)
df_comp[25:30].plot(kind='line', figsize=(10, 6), stacked=True)
plt.xlabel('Age')  # add to x-label to the plot
plt.ylabel('$')  # add y-label to the plot
plt.title('Median Compensation by Age')  # add title to the plot
plt.show()
# horizontal bar chart of respondent counts per MainBranch
df_main = pd.read_sql_query('SELECT MainBranch, count(MainBranch) as Count FROM master GROUP BY MainBranch', conn)
df_main.head()
df_main.plot(kind='barh', figsize=(10, 6))
plt.xlabel('Number of Respondents')  # add to x-label to the plot
plt.ylabel('Main Branch')  # add y-label to the plot
plt.title('Number of Respondents by Main Branch')  # add title to the plot
plt.show()
conn.close()
|
class Circle:
    """Base shape holding a radius and a (rounded) pi constant."""
    def __init__(self, radius):
        # instance variable: the circle's radius
        self.radius = radius
        # NOTE: truncated pi; use math.pi if full precision is ever needed
        self.pi = 3.14
class Area(Circle):
    """Circle subclass that reports the enclosed area."""

    def findArea(self):
        """Return the area as a formatted string, rounded to an integer."""
        # Bug fix: Circle defines no get_radius() accessor — the radius is
        # stored directly as self.radius, so the old call raised AttributeError.
        return f"Area: {round(self.pi * self.radius * self.radius)}"
#
# s
class Circumference(Circle):
    """Circle subclass that reports the perimeter."""

    def findCircum(self):
        """Return the circumference as a formatted string."""
        # Bug fix: use the stored self.radius attribute; Circle has no
        # get_radius() method, so the old call raised AttributeError.
        return f"Circumference: {2 * self.pi * self.radius}"
# Demo: area of a radius-6 circle and circumference of a radius-7 circle.
area = Area(6)
print(area.findArea())
peri = Circumference(7)
print(peri.findCircum())
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.