text stringlengths 38 1.54M |
|---|
import asyncio
import pytest
from yawf import BaseHandler
@pytest.fixture
def evloop():
    """Expose the current asyncio event loop as a pytest fixture."""
    loop = asyncio.get_event_loop()
    return loop
def test_basehandler_without_handle(evloop):
    """A bare BaseHandler (no handle() override) must raise NotImplementedError."""
    handler = BaseHandler()

    @asyncio.coroutine
    def invoke():
        yield from handler(None)

    with pytest.raises(NotImplementedError):
        evloop.run_until_complete(invoke())
def test_stupid_handler(evloop):
    """A minimal subclass's handle() runs and sees its websocket registered."""

    class Nothing(BaseHandler):
        @asyncio.coroutine
        def handle(self, ws, *args, **kwargs):
            assert ws in self.websockets
            yield from asyncio.sleep(0.1)

    handler = Nothing()

    @asyncio.coroutine
    def invoke():
        yield from handler("cool")

    evloop.run_until_complete(invoke())
|
#There is an array with some numbers. All numbers are equal except for one. Try to find it!
#It's guaranteed that array contains at least 3 numbers.
#The tests contain some very huge arrays, so think about performance.
def find_uniq(arr):
    """Return the single element of `arr` that differs from all the others.

    The common value is identified from the first three elements alone
    (at least two of them must be equal), then a single early-exit scan
    finds the outlier -- no set construction or full `count()` pass over
    a potentially huge array.
    """
    a, b, c = arr[0], arr[1], arr[2]
    # If a matches b or c it is the common value; otherwise b (== c) is.
    common = a if (a == b or a == c) else b
    return next(x for x in arr if x != common)
class EurecomDataset(Dataset):
    """Dataset of EURECOM visible/thermal face images.

    Expects ``training_dir`` to contain one directory per subject, each with
    a "VIS" (visible) and a "TH" (thermal) sub-directory.  Images are bucketed
    by capture variation using filename tags ("_L" illumination, "_N"/"_E"
    expression, "_P" pose, "_O" occlusion), then one bucket is selected by
    ``domain`` ("thermal", anything else = visible) and ``variation``
    ("illu", "exp", "pose", anything else = occlusion).
    """
    def __init__(self,domain,variation,training_dir=None,transform=None):
        # Optional transform (torchvision-style callable) applied per image.
        self.transform = transform
        self.training_dir = training_dir
        # For each variant keep a list of (image_path, class_label) tuples.
        self.thermal_illu = []
        self.thermal_exp = []
        self.thermal_pose = []
        self.thermal_occ = []
        self.visible_illu = []
        self.visible_exp = []
        self.visible_pose = []
        self.visible_occ = []
        # Get all subject directories; one class per subject directory.
        subjects = [subject for subject in os.listdir(training_dir)]
        self.num_classes = len(subjects)
        for sub in subjects:
            sub_p = os.path.join(training_dir,sub)
            visible_pth = os.path.join(sub_p,"VIS")
            thermal_pth = os.path.join(sub_p,"TH")
            # Labels are parsed from filename digits (chars [4:7] for VIS,
            # [3:6] for TH), converted from 1-based to 0-based -- assumes the
            # EURECOM fixed naming scheme; TODO confirm against actual files.
            self.visible_illu.extend([ (os.path.join(visible_pth,x),int(x[4:7])-1) for x in os.listdir(visible_pth) if "_L" in x ])
            self.visible_exp.extend([ (os.path.join(visible_pth,x) ,int(x[4:7])-1) for x in os.listdir(visible_pth) if "_N" in x or "_E" in x ])
            self.visible_pose.extend([ (os.path.join(visible_pth,x),int(x[4:7])-1) for x in os.listdir(visible_pth) if "_P" in x ])
            self.visible_occ.extend([ (os.path.join(visible_pth,x) ,int(x[4:7])-1) for x in os.listdir(visible_pth) if "_O" in x ])
            self.thermal_illu.extend([ (os.path.join(thermal_pth,x),int(x[3:6])-1) for x in os.listdir(thermal_pth) if "_L" in x ])
            self.thermal_exp.extend([ (os.path.join(thermal_pth,x) ,int(x[3:6])-1) for x in os.listdir(thermal_pth) if "_N" in x or "_E" in x ])
            self.thermal_pose.extend([ (os.path.join(thermal_pth,x),int(x[3:6])-1) for x in os.listdir(thermal_pth) if "_P" in x ])
            self.thermal_occ.extend([ (os.path.join(thermal_pth,x) ,int(x[3:6])-1) for x in os.listdir(thermal_pth) if "_O" in x ])
        # Set dataset to the intended domain/variation bucket.
        if domain == "thermal":
            if variation == "illu":
                self.dataset = self.thermal_illu
            elif variation == "exp":
                self.dataset = self.thermal_exp
            elif variation == "pose":
                self.dataset = self.thermal_pose
            else:
                self.dataset = self.thermal_occ
        else:
            if variation == "illu":
                self.dataset = self.visible_illu
            elif variation == "exp":
                self.dataset = self.visible_exp
            elif variation == "pose":
                self.dataset = self.visible_pose
            else:
                self.dataset = self.visible_occ
        self.count = len(self.dataset)

    def __getitem__(self, index):
        """Load the index-th image (as RGB), apply the transform, return (image, label)."""
        image,label = self.dataset[index]
        img_a = Image.open(image).convert('RGB')
        if self.transform is not None:
            img_a = self.transform(img_a)
        # return image and label
        return img_a,label

    def __len__(self):
        """Number of samples in the selected bucket."""
        return self.count
import csv
import json
from datetime import datetime
from elasticsearch import Elasticsearch
from elasticsearch import helpers
import pandas as pd
import uuid
# Client for the local Elasticsearch node (library default host/port).
es = Elasticsearch()
def convert_csv_to_json(file):
    """Read a CSV file and return a JSON array of cleaned row objects.

    Only columns whose stripped name contains 'new_' (case-insensitive) or is
    exactly 'LotIRN' / 'EMu Catalog IRN' are kept.  Keys and string values
    are whitespace-stripped; empty cells stay as empty strings (na_filter off).

    file -- path or file-like object accepted by pandas.read_csv
    """
    workbook = pd.read_csv(file, na_filter=False)
    select_columns = [
        'LotIRN',
        'EMu Catalog IRN'
    ]
    # Keep the original (unstripped) column names for indexing the frame.
    doc_columns = [
        column for column in workbook.columns
        if 'new_' in column.strip().lower() or column.strip() in select_columns
    ]
    converted_dict = workbook[doc_columns].to_dict('records')
    output = []
    for row in converted_dict:
        # Strip whitespace from keys and from string values only.
        cleaned = {
            key.strip(): (value.strip() if isinstance(value, str) else value)
            for key, value in row.items()
        }
        output.append(cleaned)
    return json.dumps(output)
def persist_doc_to_elasticsearch(documents):
    """Bulk-index a JSON array of documents into the 'collections-data3' index.

    Each document's id is taken from 'LotIRN' (preferred) or 'EMu Catalog IRN';
    documents with neither key are skipped.

    documents -- JSON string (or object convertible via str()) holding a list of dicts
    """
    doc_list = json.loads(str(documents))
    print(len(doc_list))  # py3 print() (the original used a py2 print statement)
    actions = []
    for doc in doc_list:
        if 'LotIRN' in doc:
            doc_id = doc['LotIRN']
        elif 'EMu Catalog IRN' in doc:
            doc_id = doc['EMu Catalog IRN']
        else:
            # BUG FIX: the original while-loop `continue`d here without
            # incrementing its index, looping forever on any document that
            # had no id field.  A for-loop skips it safely.
            continue
        actions.append({
            "_index": "collections-data3",
            "_type": "collection",
            "_id": doc_id,
            "_source": doc
        })
    helpers.bulk(es, actions)
# Convert the cleaned spreadsheet and bulk-index it.
persist_doc_to_elasticsearch(convert_csv_to_json('CleanedDataSet.csv'))
def query_from_elasticsearch():
    """Fetch and print every document in 'collections-data3' (match_all query)."""
    res = es.search(index="collections-data3", body={"query": {"match_all": {}}})
    print("Got %d Hits:" % res['hits']['total'])
    # py3 print() -- the original `print res` was Python-2-only syntax.
    print(res)
# Print everything that was just indexed.
query_from_elasticsearch()
|
# =============================================================================================
# Classes
# =============================================================================================
class Salario(object):
    """Represents a salary value (Salario)."""

    def __init__(self, v):
        """Create a salary of value `v`; raises ValueError if v is negative."""
        if v < 0:
            raise ValueError("Salário não pode ser negativo")
        # Private member holding the salary value.
        self.__valor = v

    def aumentar(self, percentual):
        """Increase the salary by `percentual` (e.g. 0.10 = +10%).

        Raises Exception for a None or negative percentage.
        """
        # BUG FIX: check None first -- in Python 3 `None < 0` raises
        # TypeError, so the original `percentual < 0 or percentual is None`
        # could never reach its None branch.
        if percentual is None or percentual < 0:
            raise Exception("Valor inválido")
        self.__valor = self.__valor * (1 + percentual)

    def getSalario(self):
        """Return the current salary value."""
        return self.__valor
# Create an instance of the class (initial salary 1000.0)
salario = Salario(1000.0)
print(salario.getSalario())
# Invoke the raise method: +10%
salario.aumentar(0.10)
print(salario.getSalario())
from nautipy import nautipy
def test_bearing():
'''
make sure bearing is calculated correctly
'''
assert False, 'Incorrect bearing!'
|
# -*- coding: utf-8 -*-
"""
Created on Sun Sep 22 20:54:30 2019
@author: Valenzuela
"""
import numpy as np
import pandas as pd
# Possible attribute values of the classic "play tennis" weather dataset.
outlook=np.array(['sunny','overcast','rainy'])
temperature = np.array(['hot','mild','cool'])
humildy=np.array(['high','normal'])
windy=np.array(['TRUE','FALSE'])
play=np.array(['yes','no'])
# NOTE(review): '.cvs' looks like a typo for '.csv' -- confirm the file name.
datos=pd.read_csv('datos.cvs')#read the training data with pandas
X=datos.iloc[:,range(0,3)]#feature columns
clases = datos.iloc[:,4]#last column: the class label
K=int(np.round(len(datos)))#number of training rows
numclima=datos.iloc[:,0]
cont=0
for i in range(0,K):#count the rows labelled 'yes'
    if('yes'==datos.iloc[i,4]):
        cont=cont+1
jugarno=np.absolute(K-cont)#complement: rows labelled 'no'
# Per-attribute-value counters among the 'yes' rows.
a=0
b=0
c=0
d=0
e=0
f=0
h=0
j=0
o=0
p=0
for i in range(0,K):#tally each attribute value among the 'yes' rows
    if('sunny'==datos.iloc[i,0] and 'yes'==datos.iloc[i,4]):
        a=a+1
    if('overcast'==datos.iloc[i,0] and 'yes'==datos.iloc[i,4]):
        b=b+1
    if('rainy'==datos.iloc[i,0] and 'yes'==datos.iloc[i,4]):
        c=c+1
    if('hot'==datos.iloc[i,1] and 'yes'==datos.iloc[i,4]):
        d=d+1
    if('mild'==datos.iloc[i,1] and 'yes'==datos.iloc[i,4]):
        e=e+1
    if('cool'==datos.iloc[i,1] and 'yes'==datos.iloc[i,4]):
        f=f+1
    if('high'==datos.iloc[i,2] and 'yes'==datos.iloc[i,4]):
        h=h+1
    if('normal'==datos.iloc[i,2] and 'yes'==datos.iloc[i,4]):
        j=j+1
    if(False==datos.iloc[i,3] and 'yes'==datos.iloc[i,4]):
        o=o+1
    if(True==datos.iloc[i,3] and 'yes'==datos.iloc[i,4]):
        p=p+1
# Conditional probabilities P(attribute value | play = yes).
precS=a/cont
precO=b/cont
precR=c/cont
precHot=d/cont
precM=e/cont
precC=f/cont
precH=h/cont
precN=j/cont
preco=o/cont
precp=p/cont
probclase=cont/K#prior probability of the 'play = yes' class
datos2=pd.read_csv('datosprueba.cvs')#read the rows to classify
X=datos2.iloc[:,range(0,3)]
clases = datos2.iloc[:,4]
K1=int(np.round(len(datos2)))#number of test rows
for i in range(0,K1):#compute the naive-Bayes product for each test row
    resultado=1
    r2=1
    r3=1
    r4=1
    r5=1
    var1 = datos2.iloc[i,0]
    var2=datos2.iloc[i,1]
    var3=datos2.iloc[i,2]
    var4=datos2.iloc[i,3]
    var5=datos2.iloc[i,4]
    # NOTE(review): the 'sunny' branch uses `*=` while every other branch
    # plainly assigns (`=`) -- the assignments overwrite instead of
    # accumulating; confirm which was intended.
    if(var1=='sunny'):#multiply in the outlook likelihood
        resultado*=precS
    if(var1=='overcast'):
        resultado=precO
    if(var1=='rainy'):
        resultado=precR
    if(var2=='hot'):
        r2=precHot
    if(var2=='mild'):#temperature likelihood
        r2=precM
    if(var2=='cool'):
        r2=precC
    if(var3=='high'):
        r3=precH
    if(var3=='normal'):#humidity likelihood
        r3=precN
    if(var4==False):
        r4=preco
    if(var4==True):
        r4=precp
    if(var5=='yes'):
        r5=probclase
    if(var5=='no'):
        r5=np.absolute(1-probclase)
    print('Resultado numero',i+1,': ')
    print(resultado*r2*r3*r4*r5)#product of the factors: the row's probability
|
import os
import sys
import importlib
# noinspection PyUnresolvedReferences
import tests.mock_tables.dbconnector
modules_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(modules_path, 'src'))
from unittest import TestCase
from ax_interface import ValueType
from ax_interface.pdu_implementations import GetPDU, GetNextPDU
from ax_interface.encodings import ObjectIdentifier
from ax_interface.constants import PduTypes
from ax_interface.pdu import PDU, PDUHeader
from ax_interface.mib import MIBTable
from sonic_ax_impl.mibs.vendor.cisco import ciscoSwitchQosMIB
from sonic_ax_impl import mibs
class TestQueueCounters(TestCase):
    """Queue-counter MIB tests run against mocked namespace Redis tables."""
    @classmethod
    def setUpClass(cls):
        # Load the mock DB config, reload the MIB module so it binds to the
        # mock connector, then build and populate the MIB table once for all tests.
        tests.mock_tables.dbconnector.load_namespace_config()
        importlib.reload(ciscoSwitchQosMIB)
        cls.lut = MIBTable(ciscoSwitchQosMIB.csqIfQosGroupStatsTable)
        # Update MIBs
        for updater in cls.lut.updater_instances:
            updater.reinit_data()
            updater.update_data()

    def test_getQueueCounters(self):
        # GET a queue-counter OID; expect COUNTER_64 with value 1.
        # NOTE(review): `counter_id` is never used inside the loop, so the
        # same OID is queried 7 times -- confirm whether it should vary.
        for counter_id in range(1, 8):
            oid = ObjectIdentifier(8, 0, 0, 0, (1, 3, 6, 1, 4, 1, 9, 9, 580, 1, 5, 5, 1, 4, 1, 2, 1, 1))
            get_pdu = GetPDU(
                header=PDUHeader(1, PduTypes.GET, 16, 0, 42, 0, 0, 0),
                oids=[oid]
            )
            encoded = get_pdu.encode()
            response = get_pdu.make_response(self.lut)
            print(response)
            value0 = response.values[0]
            self.assertEqual(value0.type_, ValueType.COUNTER_64)
            self.assertEqual(str(value0.name), str(oid))
            self.assertEqual(value0.data, 1)

    def test_getNextPduForQueueCounter(self):
        # GET-NEXT must advance to the next counter index and truncate to 64 bits.
        oid = ObjectIdentifier(8, 0, 0, 0, (1, 3, 6, 1, 4, 1, 9, 9, 580, 1, 5, 5, 1, 4, 1, 2, 1, 1))
        expected_oid = ObjectIdentifier(8, 0, 0, 0, (1, 3, 6, 1, 4, 1, 9, 9, 580, 1, 5, 5, 1, 4, 1, 2, 1, 2))
        get_pdu = GetNextPDU(
            header=PDUHeader(1, PduTypes.GET_NEXT, 16, 0, 42, 0, 0, 0),
            oids=[oid]
        )
        encoded = get_pdu.encode()
        response = get_pdu.make_response(self.lut)
        print(response)
        value0 = response.values[0]
        self.assertEqual(value0.type_, ValueType.COUNTER_64)
        self.assertEqual(str(value0.name), str(expected_oid))
        self.assertEqual(value0.data, 23492723984237432 % pow(2, 64)) # Test integer truncation

    def test_getNextPduForQueueCounter_asic2(self):
        # Same GET-NEXT behavior for a port on the second ASIC (ifindex 9016).
        oid = ObjectIdentifier(8, 0, 0, 0, (1, 3, 6, 1, 4, 1, 9, 9, 580, 1, 5, 5, 1, 4, 9016, 2, 1, 1))
        expected_oid = ObjectIdentifier(8, 0, 0, 0, (1, 3, 6, 1, 4, 1, 9, 9, 580, 1, 5, 5, 1, 4, 9016, 2, 1, 2))
        get_pdu = GetNextPDU(
            header=PDUHeader(1, PduTypes.GET_NEXT, 16, 0, 42, 0, 0, 0),
            oids=[oid]
        )
        encoded = get_pdu.encode()
        response = get_pdu.make_response(self.lut)
        print(response)
        value0 = response.values[0]
        self.assertEqual(value0.type_, ValueType.COUNTER_64)
        self.assertEqual(str(value0.name), str(expected_oid))
        self.assertEqual(value0.data, 24) # Test integer truncation

    def test_getNextPduForQueueCounter_wrapped(self):
        # GET-NEXT across a counter that wrapped in the mock data.
        oid = ObjectIdentifier(8, 0, 0, 0, (1, 3, 6, 1, 4, 1, 9, 9, 580, 1, 5, 5, 1, 4, 1, 2, 1, 2))
        expected_oid = ObjectIdentifier(8, 0, 0, 0, (1, 3, 6, 1, 4, 1, 9, 9, 580, 1, 5, 5, 1, 4, 1, 2, 1, 3))
        get_pdu = GetNextPDU(
            header=PDUHeader(1, PduTypes.GET_NEXT, 16, 0, 42, 0, 0, 0),
            oids=[oid]
        )
        encoded = get_pdu.encode()
        response = get_pdu.make_response(self.lut)
        print(response)
        value0 = response.values[0]
        self.assertEqual(value0.type_, ValueType.COUNTER_64)
        self.assertEqual(str(value0.name), str(expected_oid))
        self.assertEqual(value0.data, 123459) # Test integer truncation

    def test_getIngressQueueCounters(self):
        # Ingress direction (index ...1.1...) is not populated: NO_SUCH_INSTANCE.
        oid = ObjectIdentifier(8, 0, 0, 0, (1, 3, 6, 1, 4, 1, 9, 9, 580, 1, 5, 5, 1, 4, 1, 1, 1, 1))
        get_pdu = GetPDU(
            header=PDUHeader(1, PduTypes.GET, 16, 0, 42, 0, 0, 0),
            oids=[oid]
        )
        encoded = get_pdu.encode()
        response = get_pdu.make_response(self.lut)
        print(response)
        value0 = response.values[0]
        self.assertEqual(value0.type_, ValueType.NO_SUCH_INSTANCE)
        self.assertEqual(str(value0.name), str(oid))
        self.assertEqual(value0.data, None)

    def test_getMulticastQueueCountersWrapped(self):
        # Direct GET of the wrapped multicast counter value.
        oid = ObjectIdentifier(8, 0, 0, 0, (1, 3, 6, 1, 4, 1, 9, 9, 580, 1, 5, 5, 1, 4, 1, 2, 1, 3))
        get_pdu = GetPDU(
            header=PDUHeader(1, PduTypes.GET, 16, 0, 42, 0, 0, 0),
            oids=[oid]
        )
        encoded = get_pdu.encode()
        response = get_pdu.make_response(self.lut)
        print(response)
        value0 = response.values[0]
        self.assertEqual(value0.type_, ValueType.COUNTER_64)
        self.assertEqual(str(value0.name), str(oid))
        self.assertEqual(value0.data, 123459)

    def test_getMulticastQueueCountersWrapped_asic1(self):
        # Wrapped multicast counter for a port on the first ASIC (ifindex 9).
        oid = ObjectIdentifier(8, 0, 0, 0, (1, 3, 6, 1, 4, 1, 9, 9, 580, 1, 5, 5, 1, 4, 9, 2, 1, 3))
        get_pdu = GetPDU(
            header=PDUHeader(1, PduTypes.GET, 16, 0, 42, 0, 0, 0),
            oids=[oid]
        )
        encoded = get_pdu.encode()
        response = get_pdu.make_response(self.lut)
        print(response)
        value0 = response.values[0]
        self.assertEqual(value0.type_, ValueType.COUNTER_64)
        self.assertEqual(str(value0.name), str(oid))
        self.assertEqual(value0.data, 10)

    def test_getMulticastQueueCounters(self):
        # A counter id with no data in the mock tables: NO_SUCH_INSTANCE.
        oid = ObjectIdentifier(8, 0, 0, 0, (1, 3, 6, 1, 4, 1, 9, 9, 580, 1, 5, 5, 1, 4, 5, 2, 1, 9))
        get_pdu = GetPDU(
            header=PDUHeader(1, PduTypes.GET, 16, 0, 42, 0, 0, 0),
            oids=[oid]
        )
        encoded = get_pdu.encode()
        response = get_pdu.make_response(self.lut)
        print(response)
        value0 = response.values[0]
        self.assertEqual(value0.type_, ValueType.NO_SUCH_INSTANCE)
        self.assertEqual(str(value0.name), str(oid))
        self.assertEqual(value0.data, None)

    def test_getSubtreeForQueueCounters(self):
        # GET-NEXT on the table root must land on the first populated leaf.
        oid = ObjectIdentifier(8, 0, 0, 0, (1, 3, 6, 1, 4, 1, 9, 9, 580, 1, 5, 5))
        expected_oid = ObjectIdentifier(8, 0, 0, 0, (1, 3, 6, 1, 4, 1, 9, 9, 580, 1, 5, 5, 1, 4, 1, 2, 1, 1))
        get_pdu = GetNextPDU(
            header=PDUHeader(1, PduTypes.GET_NEXT, 16, 0, 42, 0, 0, 0),
            oids=[oid]
        )
        encoded = get_pdu.encode()
        response = get_pdu.make_response(self.lut)
        value0 = response.values[0]
        self.assertEqual(value0.type_, ValueType.COUNTER_64)
        self.assertEqual(str(value0.name), str(expected_oid))
        self.assertEqual(value0.data, 1)
|
import statistics
import timeit
import gc
import problog
from problog import get_evaluatable
from problog.program import PrologString
from problog.engine import DefaultEngine
from problog.logic import Term
def test_query_method1(engine, model_db, query_term, n_runs=100):
    """Time repeated `engine.query` calls on a prepared ProbLog model.

    engine     -- a ProbLog engine (e.g. DefaultEngine)
    model_db   -- database produced by engine.prepare()
    query_term -- the Term to query
    n_runs     -- number of timed repetitions (default 100, matching the
                  previously hard-coded count)

    Returns the list of per-run durations in seconds.
    """
    times_query = []
    for _ in range(n_runs):
        start = timeit.default_timer()
        engine.query(model_db, query_term)
        end = timeit.default_timer()
        # Collect between runs so GC pauses don't leak into the next timing.
        gc.collect()
        times_query.append(end - start)
    return times_query
def test_query_method2(engine, model_db, query_term, n_runs=100):
    """Time repeated compile-and-evaluate runs via problog.get_evaluatable().

    The model is extended once with a query/1 directive; each timed run
    compiles the extended database and evaluates it.

    n_runs -- number of timed repetitions (default 100, matching the
              previously hard-coded count)

    Returns the list of per-run durations in seconds.
    """
    times_query = []
    extended_model_db = model_db.extend()
    extended_model_db += Term('query')(query_term)
    evaluatable = problog.get_evaluatable()
    for _ in range(n_runs):
        start = timeit.default_timer()
        evaluatable.create_from(extended_model_db, engine=engine).evaluate()
        end = timeit.default_timer()
        # Collect between runs so GC pauses don't leak into the next timing.
        gc.collect()
        times_query.append(end - start)
    return times_query
def main():
    """Build a small family-tree model, benchmark both query methods, print means."""
    p = PrologString("""
mother_child(trude, sally).
father_child(tom, sally).
father_child(tom, erica).
father_child(mike, tom).
sibling(X, Y) :- parent_child(Z, X), parent_child(Z, Y).
parent_child(X, Y) :- father_child(X, Y).
parent_child(X, Y) :- mother_child(X, Y).
""")
    sibling = Term('sibling')
    query_term = sibling(None, None)  # None arguments act as free variables
    engine = DefaultEngine()
    # prepare the model for querying
    model_db = engine.prepare(p)  # This compiles the Prolog model into an internal format.
                                  # This step is optional, but it might be worthwhile if you
                                  # want to query the same model multiple times.
    times_query = test_query_method1(engine, model_db, query_term)
    times_query_extended = test_query_method2(engine, model_db, query_term)
    # NOTE(review): both labels read the same; the second line reports the
    # extended-database (compile+evaluate) method.
    print("average duration query:", statistics.mean(times_query), "seconds")
    print("average duration query:", statistics.mean(times_query_extended), "seconds")
    # for statement in p:
    #     print(statement)
    #
    # knowledge = get_evaluatable().create_from(p)
    #
    # print(knowledge.evaluate())


if __name__ == '__main__':
    main()
|
import struct, os
def parse_keyvalue(line):
    """Parse one `"key" "value"` entity line; return (key, value) or None.

    Text after `//` is treated as a comment and discarded.  The remaining
    line must contain at least four double quotes to yield a pair.
    """
    comment_at = line.find("//")
    if comment_at != -1:
        line = line[:comment_at]
    quote_positions = [i for i, ch in enumerate(line) if ch == '"']
    if len(quote_positions) < 4:
        return None
    q0, q1, q2, q3 = quote_positions[:4]
    return (line[q0 + 1:q1], line[q2 + 1:q3])
def parse_ents(path, ent_text):
    """Parse BSP entity-lump text into a list of key/value dicts.

    path     -- map name, used only in warning messages
    ent_text -- decoded text of the entity lump

    Malformed bracket nesting is reported to stdout but parsing continues
    best-effort, matching the lenient behavior of the map tools.
    """
    ents = []
    line_num = 0
    last_bracket = -1  # -1 = nothing seen yet, 0 = inside an entity, 1 = just closed one
    ent = None
    for line in ent_text.splitlines():
        line_num += 1
        if len(line) < 1 or line[0] == '\n':
            continue
        if line[0] == '{':
            if last_bracket == 0:
                print("\n%s.bsp ent data (line %d): Unexpected '{'\n" % (path, line_num))
                continue
            last_bracket = 0
            ent = {}
        elif line[0] == '}':
            if last_bracket == 1:
                print("\n%s.bsp ent data (line %d): Unexpected '}'\n" % (path, line_num))
            last_bracket = 1
            if ent is None:  # idiom fix: `is None`, not `== None`
                continue
            ents.append(ent)
            ent = None
            # a new ent can start on the same line as the previous one ends
            if line.find("{") != -1:
                ent = {}
                last_bracket = 0
        elif last_bracket == 0 and ent is not None:  # currently defining an entity
            keyvalue = parse_keyvalue(line)
            if keyvalue:
                ent[keyvalue[0]] = keyvalue[1]
    return ents
def load_entities(bsp_path):
    """Read a BSP file and return its parsed entity list, or None on open failure.

    Assumed BSP layout: int32 version at offset 0, then the entity lump's
    int32 offset and int32 length at bytes 4..12 -- TODO confirm this holds
    for all BSP versions being scanned.
    """
    try:
        # `with` guarantees the handle is closed; `data` avoids shadowing
        # the builtin `bytes` as the original did.
        with open(bsp_path, mode='rb') as f:
            data = f.read()
    except OSError:
        # BUG FIX: the original placed this message after `return`, making it
        # unreachable; report the failure and signal it with None as intended.
        print("\nFailed to open %s" % bsp_path)
        return None
    offset = struct.unpack("i", data[4:8])[0]
    length = struct.unpack("i", data[8:12])[0]
    ent_text = data[offset:offset + length].decode("ascii", "ignore")
    return parse_ents(bsp_path, ent_text)
def get_all_maps(maps_dir):
    """Return all .bsp filenames in `maps_dir`, sorted case-insensitively.

    Filenames containing '@' are skipped -- they are old/alternate versions
    of maps (w00tguy's scmapdb content pool).
    """
    bsp_names = [
        name for name in os.listdir(maps_dir)
        if name.lower().endswith('.bsp') and '@' not in name
    ]
    return sorted(bsp_names, key=str.upper)
# Scan every map's entity lump for monster_* entities with spawnflag bit 1 set.
maps_dir = "./content_pool/maps"
LIFTABLE_FLAG = 1024
all_maps = get_all_maps(maps_dir)
broken_maps = []
skip_maps = []
# Maps already listed in the rotation file are skipped below.
with open("mapcycle.txt") as f:
    skip_maps = f.read().splitlines()
last_progress_str = ''
for idx, map_name in enumerate(all_maps):
    map_path = os.path.join(maps_dir, map_name)
    progress_str = "Progress: %s / %s (%s)" % (idx, len(all_maps), map_name)
    # Pad with spaces so a shorter line fully overwrites the previous one
    # when re-printed with '\r'.
    padded_progress_str = progress_str
    if len(progress_str) < len(last_progress_str):
        padded_progress_str += ' '*(len(last_progress_str) - len(progress_str))
    last_progress_str = progress_str
    print(padded_progress_str, end='\r')
    if map_name.lower().replace(".bsp", "") in skip_maps:
        continue
    for ent in load_entities(map_path):
        # Match any monster entity whose spawnflags integer has bit 1 set.
        if ('classname' in ent and ('monster_' in ent['classname'])) and 'spawnflags' in ent and int(ent['spawnflags']) & 1 != 0:
            broken_maps.append(map_name)
            print("\nMATCHED %s" % map_name)
            break
# Dead code kept for reference: an earlier scan for liftable func_pushables.
'''
for ent in load_entities(map_path):
if 'classname' in ent and ent["classname"] == 'func_pushable':
if 'spawnflags' in ent and int(ent['spawnflags']) & LIFTABLE_FLAG:
broken_maps.append(map_name)
print("\nOH NO %s" % map_name)
break
'''
print("\n\nResults:")
for map in broken_maps:
    print(map)
#!/usr/bin/python
import sys

# Raw "push" instructions whose immediates encode an ASCII string: the
# classic shellcode trick of building a string on the stack with pushes.
ASM_INPUT = """
push 0x736c2f2f
push 0x6e69622f
"""

# Split into non-empty instruction lines, dropping tab characters.
# (Renamed from `input`, which shadowed the builtin.)
instructions = [line.replace('\t', '') for line in ASM_INPUT.splitlines() if line]
decoded = ""
print('\nNumber of instructions: ' + str(len(instructions)) + '\n')

# Walk the pushes in reverse (stack order) and decode each immediate's
# bytes in reverse (little-endian) to recover the string.
for inst in instructions[::-1]:
    print(inst)
    if inst.startswith("push 0x"):
        # Slice off the prefix instead of strip("push 0x"): strip() removes
        # a *set* of characters from both ends and can eat hex digits.
        hex_digits = inst[len("push 0x"):]
        byte_list = [hex_digits[i:i + 2] for i in range(0, len(hex_digits), 2)]
        for item in byte_list[::-1]:
            # py3 replacement for the py2-only item.decode('hex').
            ch = bytes.fromhex(item).decode('latin-1')
            print(item + ' : ' + ch)
            decoded += ch
print('\nDecoded string: ' + decoded)
import string

# Map every ASCII letter to the previous letter in string.ascii_letters
# ('a' maps to 'Z' via index -1 wrap-around, matching the original table).
# Renamed from `dict`, which shadowed the builtin.
shift_table = {}
for i in range(len(string.ascii_letters)):
    shift_table[string.ascii_letters[i]] = string.ascii_letters[i - 1]
print(shift_table)

# BUG FIX: the original wrote to an undefined `file` object and never opened
# an output file (and `file.close()` after the loop would also have failed).
# NOTE(review): output name "file_decoded.txt" is a guess -- confirm intent.
with open("file.txt") as src, open("file_decoded.txt", "w") as out:
    while True:
        c = src.read(1)
        if not c:
            print("end of file")
            break
        # Shift letters; pass any other character through unchanged.
        data = shift_table.get(c, c)
        out.write(data)
        print(data)
|
#!/usr/bin/python
import datetime, openpyxl as xl, os
from argparse import Namespace
import code
import operator, collections, re, argparse
from django.core.management.base import BaseCommand, CommandError
import contacts.models as cont
class Command(BaseCommand):
    ''' Manage 1mo and 1yr calls
    actions:
    - nightly: run nightly tasks
    - init: schedule calls for clients that need them.
    '''
    help = "Manage 1mo and 1yr calls"

    def add_arguments(self,parser):
        # Sub-commands: `init` (schedule calls + print report) and `test`.
        subparsers = parser.add_subparsers(help='manage scheduled calls')
        init_parser = subparsers.add_parser('init',cmd=parser.cmd,help='initialize phone calls and print report')
        init_parser.set_defaults(action='initialize')
        test_parser = subparsers.add_parser('test',cmd=parser.cmd,help='run test command')
        test_parser.set_defaults(action='test')

    def handle(self,*args,**options):
        # Dispatch to the method named by the chosen sub-command's `action`.
        self.options = options
        getattr(self,options['action'])()

    def initialize(self):
        """ Find all postpartum participants and schedule 1mo and 1yr call """
        self.stdout.write( "{0} Initializing Phonecalls {0}\n".format('*'*5) )
        post = cont.Contact.objects.filter(status='post').order_by('delivery_date')
        total_post , total_created = 0 , 0
        for c in post:
            total_post += 1
            month_created , month_call = None , None
            # Only schedule the 1-month call while still within the first 30 days.
            if c.delta_days() < 30:
                month_created , month_call = c.schedule_month_call(created=True)
            year_created , year_call = c.schedule_year_call(created=True)
            if month_created or year_created:
                total_created += 1
            # NOTE(review): original indentation was lost; this per-contact
            # report line may have been inside the `if` above -- confirm.
            self.stdout.write( "{!r:35} {} ({}) M[{} {}] Y[{} {}]".format(
                c,c.delivery_date,c.delta_days(),
                month_created, month_call,
                year_created, year_call
            ) )
        self.stdout.write( "Total Post: {} Created: {} Not-Created: {}\n".format(total_post,total_created,total_post-total_created) )
        # Schedule calls for postdate participants
        today = datetime.date.today()
        over = cont.Contact.objects.filter(status='pregnant',due_date__lte=today).order_by('due_date')
        total_over , total_created = 0 , 0
        for c in over:
            total_over += 1
            month_created , month_call = c.schedule_edd_call(created=True)
            if month_created:
                total_created += 1
            # NOTE(review): as above, this line may have been nested in the `if`.
            self.stdout.write ( "{!r:35} {} ({})".format(c,c.due_date,c.delta_days()) )
        self.stdout.write( "Total Over: {} Created: {} Not-Created: {}\n".format(total_over,total_created,total_over-total_created) )

    def test(self):
        # Smoke-test entry point for the `test` sub-command.
        self.stdout.write( "{0} Running Test Command {0}".format('*'*5) )
|
# Generated by Django 3.0.7 on 2020-07-08 15:50
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add optional text fields `autor`, `dimensoes` and `tecnica` to Produto."""

    dependencies = [
        ('core', '0007_produto_tema'),
    ]

    operations = [
        migrations.AddField(
            model_name='produto',
            name='autor',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AddField(
            model_name='produto',
            name='dimensoes',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AddField(
            model_name='produto',
            name='tecnica',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
    ]
|
class Room:
    """A single room: a description plus the adjacent room in each direction.

    Each of north/east/south/west holds the index of the neighbouring room
    in the caller's room list, or None when there is no exit that way.
    """
    def __init__(self, description, north, east, south, west):
        self.description = description
        # Assign the four exits in one pass instead of four statements.
        for direction, target in zip(("north", "east", "south", "west"),
                                     (north, east, south, west)):
            setattr(self, direction, target)
def main():
    """Tiny text adventure: build the room map, then move on user commands.

    Accepted input (case-insensitive): north/n, east/e, south/s, west/w,
    quit/q.  Unknown input just re-prints the current room.
    """
    room_list = []
    room_list.append(Room("You are in the hidden chamber.\nThere is a door to the south.", None, None, 1, None))
    room_list.append(Room("You are now in the west corridor.\nThere is a door to the north and the east", 0, 2, None, None))
    room_list.append(Room("You have entered the north hall.\nThere are doors to the east, south, and west.", None, 5, 3, 1))
    room_list.append(Room("You are now in the art gallery.\nThere are doors in every direction.", 2, 5, 4, 1))
    room_list.append(Room("You have walked onto the terrace.\nThere are doors to the north and the east.", 3, 6, None, None))
    room_list.append(Room("You are in the east quarters.\nThere are doors to the south and the west.", None, None, 6, 3))
    room_list.append(Room("You walked into the greenhouse.\nThere is one door to the north and one to the west.", 5, None, None, 4))
    # Map both the full word and its initial to the Room attribute name,
    # replacing four copy-pasted if/else branches with one lookup.
    directions = {"north": "north", "n": "north",
                  "east": "east", "e": "east",
                  "south": "south", "s": "south",
                  "west": "west", "w": "west"}
    current_room = 0
    done = False
    while not done:
        print()
        print(room_list[current_room].description)
        userinput = input("What do you want to do? ").lower()
        if userinput in ("quit", "q"):
            done = True
            print("You quit.")
        elif userinput in directions:
            next_room = getattr(room_list[current_room], directions[userinput])
            if next_room is None:
                print("You can't go that way.")
            else:
                current_room = next_room


main()
import datetime
from typing import Optional
from fastapi_users import models
class User(models.BaseUser):
    """User read model with extra profile fields.

    BUG FIX: the original wrote ``first_name = str`` / ``birthday =
    Optional[...]``, which assigns the *type object* as a plain class
    attribute instead of declaring a pydantic field -- field declarations
    require annotations (``:``).  Defaults of ``None`` keep callers that
    never supplied these fields working.
    """
    first_name: Optional[str] = None
    birthday: Optional[datetime.date] = None

    class Config:
        arbitrary_types_allowed = True
        orm_mode = True
class UserCreate(models.BaseUserCreate):
    """Registration payload.

    BUG FIX: ``first_name = str`` assigned the type object instead of
    declaring a pydantic field; annotations are required.
    """
    first_name: Optional[str] = None
    birthday: Optional[datetime.date] = None
class UserUpdate(models.BaseUserUpdate):
    """Partial-update payload; both fields optional.

    BUG FIX: ``first_name = Optional[str]`` assigned the typing object
    instead of declaring an annotated pydantic field.
    """
    first_name: Optional[str] = None
    birthday: Optional[datetime.date] = None
class UserDB(User, models.BaseUserDB):
    """Database representation of a User (BaseUserDB adds stored credentials)."""
    pass
class SUser(models.BaseUser, models.BaseOAuthAccountMixin):
    """Base user variant that also carries linked OAuth accounts."""
    pass
|
from rest_framework import viewsets
from .models import Platillo
from .serializer import PlatillosSerializer
class PlatillosViewSet(viewsets.ModelViewSet):
    """Full CRUD REST endpoints for Platillo (dish) objects."""
    queryset = Platillo.objects.all()
    serializer_class = PlatillosSerializer
from gps import *
import threading
import datetime
# Module-level handle to the gpsd session; set by GPSlocation_start.__init__.
runGPS = None
class GPSlocation_start(threading.Thread):
    """Thread wrapper that opens a gpsd streaming session on construction.

    NOTE(review): run() is commented out, so start() executes the default
    Thread.run (a no-op) and the thread exits immediately; the fix data is
    only refreshed if something else pumps `runGPS` -- confirm intended.
    """
    def __init__(self):
        global runGPS
        threading.Thread.__init__(self)
        # Open the gpsd session in streaming (WATCH_ENABLE) mode.
        runGPS = gps(mode=WATCH_ENABLE)
        self.current_value = None
        self.running = True
    # def run(self):
    #     global runGPS
    #     while GPSlocation.running:
    #         runGPS.next()
# Construct (which opens the gpsd session) and start the worker thread.
GPSlocation = GPSlocation_start()
GPSlocation.start()
def mainLoop():
    """Return (location, altitude/speed) strings built from the current GPS fix."""
    fix = runGPS.fix
    location = f"Lat: {fix.latitude}\tLong: {fix.longitude}"
    alt = f"Altitude: {fix.altitude}\tSpeed {fix.speed} (m/s) / {fix.speed * 2.237} (mph)"
    return location, alt
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function, division
import numpy as np
import collections
import matplotlib.pyplot as py
import matplotlib.gridspec as gridspec
import sys
from scipy.stats.stats import pearsonr
from scipy import stats
from operator import itemgetter
class BookImporter(object):
def __init__(self, book_file, id_min=1, id_max=1, t_min=0, t_max=1, atom_min=0, atom_max=5):
    '''
    Load an MP-decomposition "book" file and draw the mean reconstructed signal.

    input arguments:
    book_file - path (or open binary file) of the book to read
    id_min - index of minimal trial to plot
    id_max - index of maximal trial to plot
    t_min - down edge of time period to reconstruct
    t_max - upper edge of time period to reconstruct
    atom_min - minimal atom to plot
    atom_max - maximal atom to plot
    comment:
    if we want to reconstruct only one atom then atom_min should equal atom_max
    '''
    super(BookImporter, self).__init__()
    f = open(book_file,'rb')
    data, signals, atoms, epoch_s = self._read_book(f)
    self.epoch_s = epoch_s  # samples per epoch
    self.atoms = atoms      # per-channel atom records keyed by atom-type id
    self.signals = signals  # {epoch_nr: [[channel, samples], ...]}
    self.fs = data[5]['Fs']  # sampling frequency from the signal-info header
    # Hard-coded points-per-mV scaling -- source unknown; TODO confirm.
    self.ptspmV = 0.0715
    self.id_min = id_min
    self.id_max = id_max
    self.t_min = t_min
    self.t_max = t_max
    self.atom_min = atom_min
    self.atom_max = atom_max
    # amps = self._get_atoms_peak2peak_amplitudes()
    # self.draw_reconstructed_signal()
    self.draw_mean_reconstructed_signal(atoms, signals)
    # amps = self._get_atoms_amplitudes()
    # self.amps = amps
    # print(np.median(amps))
    # print(amps[:10])
    # self.perform_linear_regression(amps)
    # t, f, E_a, sigt, signal_a, signal_reconstruction_a, alpha_mean = self.calculate_mean_map(df = 0.2, dt = 0.008, f_a = [2, 20])
    # self.alpha_mean = alpha_mean
    # print(pearsonr(amps,alpha_mean)) ### (Pearson's correlation coefficient, 2-tailed p-value)
    # self.perform_linear_regression(alpha_mean)
    # print(np.median(alpha_mean))
    # t, f, E_a,sigt, signal_a, signal_reconstruction_a = self.calculate_map(self.atoms[2],self.signals[1][1][1], df = 0.2, dt = 0.008, f_a = [2, 20])
    # self.only_draw_map(t, f, E_a, sigt, signal_a, signal_reconstruction_a)
    py.show()
def _get_type(self, ident, f):
    '''Return the numpy dtype of the record that follows a block/atom id,
    consuming the record's size field from the stream as a side effect.
    (Original Polish comment: "generate dtype for reading fragments,
    called right after reading the identifier".)'''
    # print(ident)
    if ident == 1:
        com_s = np.fromfile(f, '>u4', count=1)[0]
        if not com_s==0: ## comment block
            return np.dtype([('comment', 'S'+str(com_s))])
        else:
            return None
    elif ident == 2: ## header
        head_s = np.fromfile(f, '>u4', count=1)[0]
        return None
    elif ident == 3: ## www address
        www_s = np.fromfile(f, '>u1', count=1)[0]
        return np.dtype([('www', 'S'+str(www_s))])
    elif ident == 4: ## file creation date
        date_s = np.fromfile(f, '>u1', count=1)[0]
        return np.dtype([('date', 'S'+str(date_s))])
    elif ident == 5: ## signal info
        sig_info_s = np.fromfile(f, '>u1', count=1)[0]
        return np.dtype([('Fs', '>f4'), ('ptspmV', '>f4'),
                         ('chnl_cnt', '>u2')])
    elif ident == 6: ## decomposition info
        dec_info_s = np.fromfile(f, '>u1', count=1)[0]
        return np.dtype([('percent', '>f4'), ('maxiterations', '>u4'),
                         ('dict_size', '>u4'), ('dict_type', '>S1')])
    elif ident == 10: #dirac
        # NOTE(review): this bare `return` (None) makes the lines below
        # unreachable, and the record's size byte is not consumed either --
        # looks like a deliberate skip of dirac atoms, but confirm.
        return
        atom_s = np.fromfile(f, '>u1', count=1)[0]
        return np.dtype([('modulus', '>f4'), ('amplitude', '>f4'),
                         ('t', '>f4')])
    elif ident == 11: #gauss
        atom_s = np.fromfile(f, '>u1', count=1)[0]
        return np.dtype([('modulus', '>f4'), ('amplitude', '>f4'),
                         ('t', '>f4'), ('scale', '>f4')])
    elif ident == 12: #sinus
        # NOTE(review): unreachable code after `return`, as in the dirac branch.
        return
        atom_s = np.fromfile(f, '>u1', count=1)[0]
        return np.dtype([('modulus', '>f4'), ('amplitude', '>f4'),
                         ('f', '>f4'), ('phase', '>f4')])
    elif ident == 13: #gabor
        atom_s = np.fromfile(f, '>u1', count=1)[0]
        return np.dtype([('modulus', '>f4'), ('amplitude', '>f4'),
                         ('t', '>f4'), ('scale', '>f4'),
                         ('f', '>f4'), ('phase', '>f4')])
    else:
        return None
def _get_signal(self, f, epoch_nr, epoch_s):
'''uruchamiana przy odnalezieniu bloku sygnału'''
sig_s = np.fromfile(f, '>u4', count=1)[0]
chnl_nr = np.fromfile(f, '>u2', count=1)[0]
signal = np.fromfile(f, '>f4', count= epoch_s)
return chnl_nr, signal
def _get_atoms(self, f):
    '''Read one atoms block: total size (u32), channel number (u16), then a
    run of atom records, each tagged by a one-byte type id in 10..13.
    Returns ({type_id: [records]}, channel_number).
    (Original Polish comment: "run when the atoms block is found".)'''
    atoms = collections.defaultdict(list)
    atoms_s = np.fromfile(f, '>u4', count=1)[0]
    a_chnl_nr = np.fromfile(f, '>u2', count=1)[0]
    ident = np.fromfile(f, '>u1', count=1)
    while ident in [10, 11, 12, 13]:
        atom = np.fromfile(f, self._get_type(ident[0], f), count=1)[0]
        atoms[ident[0]].append(atom)
        ident = np.fromfile(f, '>u1', count=1)
    # Un-read the byte that terminated the atom run so the caller's main
    # loop sees it as the next block id.
    f.seek(f.tell()-1)
    return atoms, a_chnl_nr
def _gabor(self, amplitude, position, scale, afrequency, phase):
time = np.linspace(0, self.epoch_s/self.fs, self.epoch_s)
width = scale
frequency = afrequency*2*np.pi
signal = amplitude*np.exp(-np.pi*((time-position)/width)**2)*np.cos(frequency*(time-position) + phase)
return signal
    def _read_book(self, f):
        '''Parse a whole MP book file.

        :param f: path to the book file or an already-open binary file
        :return: (header data dict keyed by block identifier,
                  signals per epoch, atoms per channel,
                  epoch size in samples)
        '''
        # Accept either a filename or a file object.
        try:
            f = open(f, 'rb')
        except Exception:
            f = f
        version = np.fromfile(f, 'S6', count=1)
        print('version: ',version[0])
        data = {}  # header blocks keyed by identifier
        ident = np.fromfile(f, 'u1', count=1)[0]
        ct = self._get_type(ident, f)
        signals = collections.defaultdict(list)
        atoms = collections.defaultdict(list)
        while ident:
            if ct:
                # Fixed-layout header block described by dtype `ct`.
                point = np.fromfile(f,ct, count=1)[0]
                data[ident] = point
            elif ident ==7:
                # Epoch descriptor; `epoch_nr`/`epoch_s` are consumed by the
                # signal blocks (ident 8) that must follow it in the file.
                data_s = np.fromfile(f, '>u4', count=1)[0]
                epoch_nr = np.fromfile(f, '>u2', count=1)[0]
                epoch_s = np.fromfile(f, '>u4', count=1)[0]
                #print('epoch_s', epoch_s)
            elif ident == 8:
                chnl_nr, signal = self._get_signal(f, epoch_nr, epoch_s)
                signals[epoch_nr].append([chnl_nr, signal])
            elif ident == 9:
                pl = f.tell()
                atom, a_chnl_nr = self._get_atoms(f)
                atoms[a_chnl_nr] = atom
            ident = np.fromfile(f, '>u1', count=1)
            # At EOF `fromfile` returns an empty (falsy) array, ending the loop.
            if ident:
                ident = ident[0]
                ct = self._get_type(ident, f)
        return data, signals, atoms, epoch_s
    def _get_atoms_amplitudes(self):
        '''Collect, per trial, the largest Gabor-atom amplitude among atoms
        inside the (self.t_min, self.t_max) time window and the
        [self.atom_min, self.atom_max] iteration-index range.

        :return: list with one maximal amplitude per trial
        NOTE(review): max() raises ValueError for a trial in which no atom
        matches both filters -- confirm every trial has at least one match.
        '''
        amps = []
        for trial in self.atoms.keys():
            amps_temp = []
            for i,atom in enumerate(self.atoms[trial][13]):
                # Convert book units to seconds/Hz via the sampling rate.
                position = atom['t']/self.fs
                width = atom['scale']/self.fs/2
                frequency = atom['f']*self.fs/2
                amplitude = atom['amplitude']
                phase = atom['phase']
                if self.t_min<position<self.t_max:
                    if self.atom_min <= i <= self.atom_max:
                        amps_temp.append(amplitude)
#            print(amps_temp)
            amps.append(max(amps_temp))
        return amps
    def _find_extrema(self, signal):
        '''Return the largest peak-to-peak amplitude over the rising
        segments of `signal`.

        The sign of the gradient is quantised to -1/+1, runs of consecutive
        +1 values are collected as rising slopes, and the absolute signal
        difference across each slope is measured.

        :param signal: 1-D array
        :return: maximal slope amplitude (float); raises ValueError via
            max() if no rising slope is found
        '''
        deriv = np.gradient(signal)
        deriv[np.where(deriv < 0)] = -1
        deriv[np.where(deriv > 0)] = 1
        d = list(deriv)
        amps = []
        temp = 0
        data = []  # list of (slope start index, slope length)
        for j,i in enumerate(d):
            # NOTE(review): at j == 0, d[j-1] wraps to the LAST element
            # (negative indexing), so the first comparison is circular and
            # the try/except never actually fires.
            try:
                if i == d[j-1] and i == 1:
                    if temp == 0:
                        start = j-1
                    temp += 1
                else:
                    if temp != 0:
                        data.append((start,temp))
                        temp = 0
            except:
                pass
        for frag in data:
            slope_start = frag[0]
            slope_end = frag[0] + frag[1]
            amps.append(abs(signal[slope_end] - signal[slope_start]))
        return max(amps)
def _get_atoms_peak2peak_amplitudes(self):
amps = []
for trial in self.atoms.keys():
signal_reconstruction = self.reconstruct_signal_freq(self.atoms[trial])
amps.append(self._find_extrema(signal_reconstruction))
# py.figure(str(trial))
# py.plot(signal_reconstruction)
# py.show()
return amps
    def draw_mean_reconstructed_signal(self, atoms, signals):
        '''Plot the trial-averaged recorded signal against the averaged
        Gabor reconstruction in a figure named "Mean".

        :param atoms: dict trial -> atom dict (as returned by _read_book)
        :param signals: dict epoch -> list of [channel, signal] pairs
        '''
        N = len(atoms.keys())
        tpr = len(signals[1][0][1])
        t = np.arange(0, tpr/self.fs, 1/self.fs)
        for i,trial in enumerate(atoms.keys()):
            signal_reconstruction = self.reconstruct_signal_freq(atoms[trial])
            signal = signals[1][i][1]
            # Running sums are initialised via the NameError raised on the
            # first pass (caught and printed below).
            # NOTE(review): the in-place += makes `signal_a` alias the first
            # trial's array, so that array in `signals` gets mutated --
            # confirm this is acceptable to callers.
            try:
                signal_a += signal
                signal_reconstruction_a += signal_reconstruction
            except Exception as a:
                print(a, 'objection')
                signal_a = signal
                signal_reconstruction_a = signal_reconstruction
        signal_a /= N
        signal_reconstruction_a /= N
        py.figure('Mean')
        t = np.linspace(-0.5, 1, tpr)
        py.plot(t,signal_reconstruction_a,color='g',label='rekonstrukcja')
        py.plot(t,signal_a,color='m',label=u'sygnał')
#        py.ylim(-15,20)
        py.xlim(-0.5,1)
        py.ylabel(u'Amplituda [$\\mu$V]')
        py.xlabel(u'Czas [s]')
        py.axvline(x=0,color='r')
#        py.axvline(x=0.3,color='r')
        py.legend()
def draw_reconstructed_signal(self):
tpr = len(self.signals[1][0][1])
# t = np.arange(-0.5, tpr/self.fs, 1/self.fs)
t = np.linspace(-0.5, 1, tpr)
for i,trial in enumerate(self.atoms.keys()):
if i in xrange(self.id_min, self.id_max+1):
signal_reconstruction = self.reconstruct_signal_freq(self.atoms[trial])
signal = self.signals[1][i][1] #- np.mean(self.signals[1][i][1][:0.5*self.fs])
# amp = np.mean(self.signals[1][i][1][0.25*self.fs:0.35*self.fs])
# amp_sig.append(amp)
py.figure('Trial '+str(i))
py.plot(t,signal_reconstruction,color='g',label='rekonstrukcja')
py.plot(t,signal,color='m',label='sygnal')
# py.axvline(x=0.3,color='r')
py.axvline(x=0,color='r')
py.ylabel(u'Amplituda [$\\mu$V]')
py.xlabel(u'Czas [s]')
py.ylim(-30,40)
py.xlim(-0.5,1)
py.legend()
def reconstruct_signal_freq(self, atoms):
reconstruction = np.zeros(self.epoch_s)
for i,atom in enumerate(atoms[13]):
position = atom['t']/self.fs
width = atom['scale']/self.fs
frequency = atom['f']*self.fs/2
amplitude = atom['amplitude']
phase = atom['phase']
# if self.t_min < position < self.t_max:
if self.atom_min <= i < self.atom_max+1:
reconstruction = reconstruction + self._gabor(amplitude, position, width, frequency, phase)
return reconstruction
def perform_linear_regression(self, amps):
y = np.linspace(1,len(amps),len(amps))
A = np.array([y, np.ones(len(amps))])
w = np.linalg.lstsq(A.T,amps)[0]
line = w[0]*y+w[1]
py.figure()
py.plot(y,line,'r-',y,amps,'o')
py.ylim(-5,22)
py.ylabel(u'Amplituda [$\\mu$V]')
py.xlabel('Realizacje')
################ map drawing
# alpha-atom selection criteria used below:
# 8 Hz < f < 12 Hz
# a > 5 µV
# s > 1.5 s
    def calculate_mean_map(self,df = 0.05, dt = 1/256., f_a = [0, 64.]):
        '''Average the time-frequency energy maps, signals and
        reconstructions over all trials.

        :param df: frequency resolution of the map [Hz]
        :param dt: time resolution of the map [s]
        :param f_a: [min, max] frequency range of the map
        :return: (t mesh, f mesh, mean map, signal time axis, mean signal,
                  mean reconstruction, per-trial alpha power sums)
        '''
        N = len(self.atoms.keys())
        tpr = len(self.signals[1][0][1])
#        sigt = np.arange(0, tpr/self.fs, 1/self.fs)
        sigt = np.linspace(-0.5, 1, tpr)
        alpha_mean = []
        for nr, chnl in enumerate(self.atoms.keys()):
            t, f, E, sigt_s,s_s,sr_s = self.calculate_map(self.atoms[chnl],self.signals[1][nr][1], df, dt, f_a = f_a)
            alpha_recon = self._calculate_alpha_power(self.atoms[chnl],self.signals[1][nr][1], df, dt, f_a = f_a)
            alpha_mean.append(sum(alpha_recon))
            signal_reconstruction = self._reconstruct_signal(self.atoms[chnl])
            signal = self.signals[1][nr][1]
            # Running sums are initialised via the NameError on the first
            # pass.  NOTE(review): the in-place += mutates the first
            # trial's arrays -- confirm that is acceptable.
            try:
                signal_a += signal
                E_a += E
                signal_recontruction_a += signal_reconstruction
            except Exception as a:
#                print(a)
                signal_a = signal
                E_a = E
                signal_recontruction_a = signal_reconstruction
        signal_a /= N
        signal_recontruction_a /= N
        E_a /= N
        return t, f, E_a, sigt, signal_a, signal_recontruction_a,alpha_mean
    def calculate_map(self,atoms,signal, df, dt, contour=0, f_a = [0, 64.]):
        ''' atoms - dictionary with trials/channels; for each dictionary with
        atoms 4 different atom's types (10,11,12,13-Gabors)

        Builds a Wigner-style time-frequency energy map by summing the
        analytic contribution of every atom type.

        :param signal: trial signal (only its length is used for the axes;
            it is returned unchanged)
        :param df: frequency resolution [Hz]
        :param dt: time resolution [s]
        :param contour: unused here; kept for interface compatibility
        :param f_a: [min, max] frequency range
        :return: (t mesh, f mesh, log-energy map, signal time axis,
                  signal, reconstruction)
        '''
        tpr = len(signal)
        f = np.arange(f_a[0], f_a[1], df)
        t = np.arange(0, tpr/self.fs, dt)
        lent = len(t)
        lenf = len(f)
        E = np.zeros((lent, lenf)).T
        t, f = np.meshgrid(t, f)
        sigt = np.arange(0, tpr/self.fs, 1/self.fs)
        # Gabor atoms (13): Gaussian blob localised in time and frequency.
        for atom in atoms[13]:
            exp1 = np.exp(-2*(atom['scale']/self.fs)**(-2)*(t-atom['t']/self.fs)**2)
            exp2 = np.exp(-2*np.pi**2 *(atom['scale']/self.fs)**2*(atom['f']*self.fs/2-f)**2)
            wigners = ((atom['amplitude']/self.ptspmV)**2 * (2*np.pi)**0.5 *
                       atom['scale']/self.fs*exp1*exp2)
            E += atom['modulus']**2 * wigners
        # Sinusoid atoms (12): a horizontal line at the atom frequency.
        for atom in atoms[12]:
            amp = atom['modulus']**2*(atom['amplitude']/self.ptspmV)**2
            E[:,int(len(f)*atom['f']/(2*np.pi))] += amp
        # Gauss atoms (11): blob centred at zero frequency.
        for atom in atoms[11]:
            exp1 = np.exp(-2*(atom['scale']/self.fs)**(-2)*(t-atom['t']/self.fs)**2)
            exp2 = np.exp(-2*np.pi**2 *(atom['scale']/self.fs)**2*(-f)**2)
            wigners = ((atom['amplitude']/self.ptspmV)**2 * (2*np.pi)**0.5 *
                       atom['scale']/self.fs*exp1*exp2)
            E += atom['modulus']**2 * wigners
        # Dirac atoms (10): a vertical line at the atom position.
        for atom in atoms[10]:
            amp = atom['modulus']**2*(atom['amplitude']/self.ptspmV)**2
            E[int(lent*atom['t']/tpr)] += amp
        signal_reconstruction = self._reconstruct_signal(atoms)
        return t, f, np.log(E), sigt, signal, signal_reconstruction
# def _calculate_alpha_power(self,atoms,signal, df, dt, contour=0, f_a = [0, 64.]):
# tpr = len(signal)
# f = np.arange(f_a[0], f_a[1], df)
# t = np.arange(0, tpr/self.fs, dt)
# lent = len(t)
# lenf = len(f)
# E = np.zeros((lent, lenf)).T
# t, f = np.meshgrid(t, f)
# sigt = np.arange(0, tpr/self.fs, 1/self.fs)
# for atom in atoms[13]:
# freq = atom['f']*self.fs/2
# amp = 2*atom['amplitude']/self.ptspmV
# scale = atom['scale']/self.fs
# position = atom['t']/self.fs
# # print(freq,position,scale)
# if 8 < freq < 12: # and position < 0.5:
# print(freq,position,scale)
# exp1 = np.exp(-2*(atom['scale']/self.fs)**(-2)*(t-atom['t']/self.fs)**2)
# exp2 = np.exp(-2*np.pi**2 *(atom['scale']/self.fs)**2*(atom['f']*self.fs/2-f)**2)
# wigners = ((atom['amplitude']/self.ptspmV)**2 * (2*np.pi)**0.5 *
# atom['scale']/self.fs*exp1*exp2)
# E += atom['modulus']**2 * wigners
# return np.mean(np.mean(E))
def _calculate_alpha_power(self,atoms,signal, df, dt, contour=0, f_a = [0, 64.]):
reconstruction = np.zeros(self.epoch_s)
for atom in atoms[13]:
amplitude = atom['amplitude']
position = atom['t']/self.fs
width = atom['scale']/self.fs
frequency = atom['f']*self.fs/2
phase = atom['phase']
if 8 < frequency < 12 and position < 0.5:
# if 13 < frequency < 30 and position < 0.5: #and amplitude > 5:
# print(frequency,position,width)
reconstruction = reconstruction + self._gabor(amplitude,position,width,frequency,phase)
return np.abs(reconstruction)**2
    def only_draw_map(self, t, f, E_a, sigt, signal_a, signal_recontruction_a, contour=False):
        '''Draw a time-frequency map with the signal and its reconstruction
        in a smaller panel below it.

        :param t: time meshgrid of the map
        :param f: frequency meshgrid of the map
        :param E_a: energy map (already log-scaled by calculate_map)
        :param sigt: time axis of the signal panel
        :param signal_a: recorded (averaged) signal
        :param signal_recontruction_a: its reconstruction
        :param contour: draw contour lines instead of a pcolor map
        '''
        fig = py.figure()
        gs = gridspec.GridSpec(2,1, height_ratios=[3,1])
        ax1 = fig.add_subplot(gs[0])
        ax1.set_ylabel(u'Czestosc [Hz]')
        # NOTE(review): the figure title is taken from the last command-line
        # argument -- confirm callers run this as a script.
        ax1.set_title(sys.argv[-1])
        if contour:
            ax1.contour(t, f, E_a)
        else:
            ax1.pcolor(t, f, E_a)
        ax2 = fig.add_subplot(gs[1])
        ax2.plot(sigt, signal_a, 'red')
        ax2.plot(sigt, signal_recontruction_a, 'blue')
        ax2.axvline(x=0,color='r')
        ax2.set_ylabel(u'Amplituda [$\\mu$V]')
        ax2.set_xlabel(u'Czas [s]')
        ax2.set_xlim(-0.5,1)
def _reconstruct_signal(self, atoms):
reconstruction = np.zeros(self.epoch_s)
for atom in atoms[13]:
position = atom['t']/self.fs
width = atom['scale']/self.fs
frequency = atom['f']*self.fs/2
amplitude = atom['amplitude']
phase = atom['phase']
# print(amplitude)
reconstruction = reconstruction + self._gabor(amplitude,position,width,frequency,phase)
return reconstruction
if __name__ == '__main__':
    # Example analysis run.  Earlier experiments -- alternative book files,
    # group-vs-individual rank-sum tests and amplitude/alpha-power
    # correlations -- were left commented out here; see version control
    # history for the full list of input paths and statistics.
    ####### single-trial decomposition, GO condition
    fstr = './wybrane/newMP/Cz/MMP1_compatible_ind_go_longer_mmp.b'
    b = BookImporter(fstr, id_min=0, id_max=10, t_min=0, t_max=1.5, atom_min=0, atom_max=1)
|
# -*- coding: utf-8 -*-
"""Simple console login: keep asking for a user name and a (hidden)
password until the hard-coded credentials match."""
import getpass

felhasznalo = "bori99"
jelszo = "qwerty"

engedely = False
while not engedely:
    # Bug fix: the prompt was passed as bytes (`.encode('cp1252')`), which
    # makes input() display the bytes' repr (b'...') on Python 3 instead
    # of the prompt text.
    user = input("Add meg a felhasználó nevét:")
    # getpass hides the typed password.
    passw = getpass.getpass(prompt="A jelszó:")
    if user == felhasznalo and passw == jelszo:
        print("Belépés OK")
        engedely = True
    else:
        print("Belépés megtagadva!")
|
# Show a 10-second live camera preview on a Raspberry Pi.
from picamera import PiCamera
from picamera.array import PiRGBArray
from time import sleep
camera = PiCamera()
camera.resolution = (640,480)
camera.framerate = 16
# Raw RGB capture buffer matching the preview resolution.
# NOTE(review): rawCapture is prepared but never used in this snippet.
rawCapture = PiRGBArray(camera, size=(640,480))
camera.start_preview()
sleep(10)
camera.stop_preview()
|
import numpy as np
from typing import Tuple
# Use for testing ransac
from gen_point_cloud import rodrigues
def main():
    """
    Main entry point for rigid estimation code.

    Estimates the pose (R, T) between the two bundled point clouds with
    RANSAC and reports the residual Frobenius error.

    Func. of interest
    =================
    *estimate_rigid_motion: Estimate the pose using RANSAC
    *_estimate_rigid_motion: Estimate the pose without RANSAC
    *test_ransac: test estimation using RANSAC
    """
    # test_ransac()
    P, Q = get_data()
    R, T = estimate_rigid_motion(P, Q, thresh=1, valid=0.2)
    # Apply the estimated motion: Q_hat = R*P + T (T broadcast over points).
    num_pts = P.shape[0]
    Q_hat = (np.dot(R, P.transpose()) +
             np.dot(T[:, np.newaxis], np.ones((1, num_pts)))).transpose()
    diff = Q - Q_hat
    res = np.trace(np.dot(diff.transpose(), diff))
    # Print pose and squared Frobenius norm of Q - Q_hat
    print('R={0}'.format(R))
    print('T={0}'.format(T))
    print('||Q-(R*P+T)||_F^2 = {0}'.format(res))
def estimate_rigid_motion(pt1: np.ndarray, pt2: np.ndarray,
        numIter: int = 100, minPts: int = 3 , thresh: float = 0.5,
        valid: float = 0.1) -> Tuple[np.ndarray,np.ndarray]:
    """
    Estimate the rigid motion (R,T) such that X_2 = RX_1 + T using RANSAC.

    :input pt1: point cloud, X_1, has dimensions Nx3
    :input pt2: point cloud, X_2, has dimensions Nx3
    :input numIter: (optional) no. of RANSAC iterations
    :input minPts: (optional) no. of points used for initial pose hypothesis
    :input thresh: (optional) squared-residual threshold for inliers
    :input valid: (optional) min. fraction of points that are inliers
    :return: the pose (R, T) with the smallest inlier MSE found
    """
    # Check formatting: both clouds must be Nx3 with equal N.
    n1, d1 = pt1.shape
    n2, d2 = pt2.shape
    assert d1 == 3 and d2 == 3 and n1 == n2 and d1 == d2
    best_err = np.inf
    best_R = np.eye(3)
    best_T = np.zeros(3)
    for _ in range(numIter):
        # Hypothesis from a minimal random sample of correspondences.
        sample = np.random.choice(n1, minPts, replace=False)
        R, T = _estimate_rigid_motion(pt1[sample, :], pt2[sample, :])
        resid = pt2.transpose() - (np.dot(R, pt1.transpose()) +
                                   np.dot(T[:, np.newaxis], np.ones((1, n1))))
        resid = np.sum(resid**2, axis=0)
        inliers = resid < thresh
        n_in = np.sum(inliers)
        if n_in > valid * n1:
            # Refit on the whole consensus set and score it by MSE.
            R, T = _estimate_rigid_motion(pt1[inliers, :], pt2[inliers, :])
            refit = pt2[inliers, :].transpose() - \
                (np.dot(R, pt1[inliers, :].transpose()) +
                 np.dot(T[:, np.newaxis], np.ones((1, n_in))))
            mse = np.sum(refit**2) / n_in
            # Retain the pose with the smallest MSE.
            if mse < best_err:
                best_err = mse
                best_R = R
                best_T = T
    return best_R, best_T
def _estimate_rigid_motion(pt1: np.ndarray, pt2: np.ndarray) \
-> Tuple[np.ndarray,np.ndarray]:
"""
Estimate the rigid motion (R,T) such that X_2 = RX_1 + T
:input pt1: point cloud, X_1, has dimensions Nx3
:input pt2: point cloud, X_2, has dimensions Nx3
:return: the pose
"""
h1,w1 = pt1.shape
h2,w2 = pt2.shape
# Make everything is formatted correctly and
# have same dimensinos
assert w1 == 3 and w2 == 3 and h1 == h2 and \
w1 == w2
# Center the points
pt1_centroid = np.mean(pt1,axis=0)
pt2_centroid = np.mean(pt2,axis=0)
pt1_cent = pt1 - np.dot( np.ones((h1,1)),
pt1_centroid[np.newaxis,:] )
pt2_cent = pt2 - np.dot( np.ones((h1,1)),
pt2_centroid[np.newaxis,:] )
# Perform SVD to get R, using quaternions may avoid handedness
# issue
U,S,V = np.linalg.svd(np.dot(pt1_cent.transpose(),pt2_cent))
V = V.transpose()
U = U[:3,:]
V = V[:3,:]
# Ensure handedness
tmp = np.dot(V,U.transpose())
d = np.linalg.det(tmp)
U[2,:] *= d
R = np.dot(V,U.transpose())
# Estimate T
T = np.mean(pt2.transpose()-np.dot(R,pt1.transpose()),axis=1)
return R,T
def test_ransac():
    """Sanity-check RANSAC: corrupt a majority of correspondences with
    noise and print estimates with and without the robust loop for visual
    comparison against the ground truth."""
    # Fixed seed keeps the run reproducible; the exact sequence of random
    # calls below must not change or the printed numbers change too.
    np.random.seed(0)
    # Generate point cloud
    pt1 = np.random.random((3,100))
    R = rodrigues(np.random.random(3))
    T = np.random.random(3)
    pt2 = np.dot(R,pt1)+np.dot(T[:,np.newaxis],np.ones((1,100)))
    # Add Gaussian noise with high var to a subset (60%) of the points to
    # test RANSAC
    outliers = np.random.choice(100,60,replace=False)
    pt2[:,outliers]= pt2[:,outliers] + \
        np.random.normal(size=(3,60),scale=1)
    print('Ground Truth')
    print('R={0}'.format(R))
    print('T={0}'.format(T))
    Rbad,Tbad = _estimate_rigid_motion(pt1.transpose(),pt2.transpose())
    print('Estimated w/out RANSAC')
    print('R={0}'.format(Rbad))
    print('T={0}'.format(Tbad))
    Rest,Test = estimate_rigid_motion(pt1.transpose(),pt2.transpose(),
        numIter=100, minPts=3, thresh=0.5, valid=0.2)
    print('Estimated w/RANSAC')
    print('R={0}'.format(Rest))
    print('T={0}'.format(Test))
def get_data():
    """
    data from starter code

    :return: (P, Q) -- two 20x3 point clouds related by an unknown rigid
        motion (plus noise); used by main() as the demo input.
    """
    P = np.array([[ 0.85715536, 0.19169091, 0.37547468],
        [ 0.36011534, 0.51080372, 0.15273612],
        [ 0.76553819, 0.96956225, 0.07534131],
        [ 0.99562617, 0.51500125, 0.66700672],
        [ 0.75873823, 0.57802293, 0.68742226],
        [ 0.40803516, 0.86300463, 0.84899731],
        [ 0.3681771 , 0.11123281, 0.56888489],
        [ 0.42113594, 0.01455699, 0.02298089],
        [ 0.41690264, 0.49362725, 0.12992772],
        [ 0.7648861 , 0.99328892, 0.131986 ],
        [ 0.24313837, 0.49029503, 0.45695072],
        [ 0.52637675, 0.97293938, 0.55972334],
        [ 0.84064045, 0.1832065 , 0.82720688],
        [ 0.82169952, 0.66711466, 0.32593019],
        [ 0.24323087, 0.1992479 , 0.1700658 ],
        [ 0.6131605 , 0.51652155, 0.30193314],
        [ 0.1025926 , 0.87449354, 0.2651951 ],
        [ 0.8714256 , 0.46842985, 0.84525766],
        [ 0.83809381, 0.28522291, 0.06991397],
        [ 0.76453123, 0.97233898, 0.93372074]])
    Q = np.array([[ 0.40901504, 0.83198029, 0.87697249],
        [ 0.24356397, 0.24470076, 0.71503754],
        [-0.18577546, 0.30922866, 1.15358153],
        [ 0.43260626, 0.75349715, 1.3263927 ],
        [ 0.49888288, 0.52382659, 1.26839902],
        [ 0.60616086, 0.07094228, 1.38545005],
        [ 0.7710987 , 0.47837833, 0.71313771],
        [ 0.3663448 , 0.58408716, 0.34549392],
        [ 0.21444126, 0.30117119, 0.71677242],
        [-0.15225962, 0.29424925, 1.20281878],
        [ 0.53620743, 0.15779029, 0.83104238],
        [ 0.28042234, 0.10685548, 1.33672152],
        [ 0.77861652, 0.81812 , 1.13699047],
        [ 0.14504543, 0.52761744, 1.13663154],
        [ 0.45264174, 0.33027575, 0.4701034 ],
        [ 0.27256995, 0.44559883, 0.9274591 ],
        [ 0.24052756, -0.17745923, 0.89671581],
        [ 0.6404062 , 0.67733176, 1.34604188],
        [ 0.12568749, 0.76576281, 0.74348215],
        [ 0.4966933 , 0.29664203, 1.67406394]])
    return P,Q
# Script entry point.
if __name__ == '__main__':
    main()
|
"""
This model translates default strings into localized strings.
"""
from sqlalchemy import select, update, delete
from muddery.server.mappings.element_set import ELEMENT
from muddery.server.database.worlddata_db import WorldDataDB
from muddery.worldeditor.settings import SETTINGS
from muddery.worldeditor.dao import general_querys
class CommonMapper(object):
    """
    Common data mapper.

    Thin CRUD wrapper around one SQLAlchemy model of the world database.
    Every `condition` argument is a dict mapping field name -> value;
    multiple entries are combined with AND.
    """
    def __init__(self, model_name):
        self.model_name = model_name
        # Session and model class are resolved once from the DB singleton.
        self.session = WorldDataDB.inst().get_session()
        self.model = WorldDataDB.inst().get_model(model_name)
    def all(self):
        """
        Get all records of this model.
        """
        stmt = select(self.model)
        result = self.session.execute(stmt)
        return result.scalars().all()
    def get(self, condition, for_update=False):
        """
        Get exactly one record matching `condition`.

        :param condition: dict of field name -> value (ANDed together)
        :param for_update: lock the row with SELECT ... FOR UPDATE
        :raises sqlalchemy.exc.NoResultFound: if nothing matches
        :raises sqlalchemy.exc.MultipleResultsFound: if several rows match
        """
        stmt = select(self.model)
        if for_update:
            stmt = stmt.with_for_update()
        for field, value in condition.items():
            stmt = stmt.where(getattr(self.model, field) == value)
        result = self.session.execute(stmt)
        return result.scalars().one()
    def filter(self, condition, order=(), for_update=False):
        """
        Get a list of records matching `condition`.

        :param condition: dict of field name -> value (ANDed together)
        :param order: optional ORDER BY expressions
        :param for_update: lock the rows with SELECT ... FOR UPDATE
        """
        stmt = select(self.model)
        if for_update:
            stmt = stmt.with_for_update()
        for field, value in condition.items():
            stmt = stmt.where(getattr(self.model, field) == value)
        if order:
            stmt = stmt.order_by(*order)
        result = self.session.execute(stmt)
        return result.scalars().all()
    def count(self, condition):
        """
        Count the number of records matching `condition`.
        """
        return general_querys.count(self.model_name, condition)
    def add(self, values):
        """
        Insert a new record with the given field values.
        """
        record = self.model(**values)
        self.session.add(record)
        self.session.flush()
    def update_or_add(self, condition, values):
        """
        Update records matching `condition` with `values`; if no record
        matched, insert a new one combining both dicts.
        """
        stmt = update(self.model).values(**values)
        for field, value in condition.items():
            stmt = stmt.where(getattr(self.model, field) == value)
        result = self.session.execute(stmt)
        if result.rowcount == 0:
            # Can not found the record to update, insert a new record.
            data = dict(condition, **values)
            record = self.model(**data)
            self.session.add(record)
            self.session.flush()
    def delete(self, condition):
        """
        Delete all records matching `condition`.
        """
        stmt = delete(self.model)
        for field, value in condition.items():
            stmt = stmt.where(getattr(self.model, field) == value)
        self.session.execute(stmt)
class ElementsMapper(CommonMapper):
    """
    Object data's mapper.

    Extends CommonMapper with queries that join an element's own table
    with its base-class table (when they differ).
    """
    def __init__(self, element_type):
        element_class = ELEMENT(element_type)
        super(ElementsMapper, self).__init__(element_class.model_name)
        # Base table name; may equal model_name when no separate base exists.
        self.base_model_name = element_class.get_base_model()
    def all_with_base(self):
        """
        Get all records with its base data.
        """
        if self.base_model_name == self.model_name:
            return general_querys.get_all_from_tables([self.model_name])
        else:
            return general_querys.get_all_from_tables([self.base_model_name, self.model_name])
    def get_by_key_with_base(self, key):
        """
        Get a record with its base data.

        Args:
            key: (string) object's key.
        """
        if self.base_model_name == self.model_name:
            return general_querys.get_tables_record_by_key([self.model_name], key)
        else:
            return general_querys.get_tables_record_by_key([self.base_model_name, self.model_name], key)
|
from util.Constants import Constants as cs
def compare_cards(trump, card_1, card_2):
    """
    Decide which card wins a trick comparison.

    :param trump: the trump suit
    :param card_1: first card (wins ties and off-suit clashes)
    :param card_2: second card
    :return: 0 if card_1 won, 1 if card_2 won
    """
    values = cs.STANDARD_CARDS_VALUE
    if card_1 == card_2:
        return 0
    c1_is_trump = card_1.suit == trump
    c2_is_trump = card_2.suit == trump
    # A lone trump beats any non-trump card.
    if c1_is_trump and not c2_is_trump:
        return 0
    if c2_is_trump and not c1_is_trump:
        return 1
    # From here both cards are trumps or neither is: a same-suit clash is
    # decided by value (card_1 wins ties); mismatched off-suits go to card_1.
    if card_1.suit == card_2.suit:
        return 0 if values[card_1.value] >= values[card_2.value] else 1
    return 0
def compare_cards_non_trick(trump, card_1, card_2):
    """
    :param trump
    :param card_1:
    :param card_2:
    :return: the number that represents the card that won. Card_1 gets
        priority in case of tie.

    Unlike the trick comparison, two non-trump cards are compared by
    value even when their suits differ.
    """
    value_map = cs.STANDARD_CARDS_VALUE
    if card_1 == card_2:
        return 0
    if card_1.suit == trump:
        if card_2.suit == trump:
            if value_map[card_1.value] < value_map[card_2.value]:
                return 1
            return 0
        # Bug fix: a lone trump always wins; the original fell through
        # this branch and implicitly returned None.
        return 0
    elif card_2.suit == trump:
        return 1
    elif value_map[card_1.value] < value_map[card_2.value]:
        return 1
    else:
        return 0
|
# Number-guessing game: keep guessing until the random target is hit,
# with "too small"/"too big" hints along the way.
import random

# Pick the secret number in [1, 99].
n = random.randrange(1, 100)
guess = int(input("输入任意数值: "))
while n != guess:
    if guess < n:
        print("太小了")
    else:
        print("太大了!")
    # One shared re-prompt replaces the duplicated input() calls (and the
    # unreachable `else: break`) of the original.
    guess = int(input("再次输入数值: "))
print("真棒,你猜对了!!")
import torch
import torch.optim as optim
import numpy as np
from acvae import AdversarialcVAE
from train_acvae import train, test
import argparse
import pickle
from math import floor
from keras.utils import to_categorical
from tqdm import tqdm
PIK1 = 'data.dat'
PIK2 = 'movement_labels.dat'
PIK3 = 'surface_labels.dat'
def str2bool(v):
    """Parse a human-friendly boolean string into a bool.

    Accepts 'yes'/'no', 'true'/'false', 't'/'f', 'y'/'n', '1'/'0' in any
    letter case.

    :raises argparse.ArgumentTypeError: for any unrecognised value
    """
    truthy = ('yes', 'true', 't', 'y', '1')
    falsy = ('no', 'false', 'f', 'n', '0')
    lowered = v.lower()
    if lowered in truthy:
        return True
    if lowered in falsy:
        return False
    raise argparse.ArgumentTypeError('Boolean value expected.')
def parseArgs():
    """Build and parse the command-line options for ACVAE training.

    :return: argparse.Namespace with all hyper-parameters
    """
    parser = argparse.ArgumentParser()
    # (flag, type, default) triples keep the option list compact; string
    # defaults for str2bool options are parsed by argparse exactly as a
    # command-line value would be.
    options = [
        ("--lam", float, 1.),
        ("--n_nuisance", int, 2),
        ("--n_latent", int, 1000),
        ("--n_kernel", int, 80),
        ("--adversarial", str2bool, 'true'),
        ("--n_epoch", int, 100),
        ("--batch_size", int, 32),
        ("--n_chan", int, 14),
        ("--n_sample", int, 400),
        ("--inference", str2bool, 'false'),
    ]
    for flag, flag_type, default in options:
        parser.add_argument(flag, type=flag_type, default=default)
    return parser.parse_args()
def load_data(PIK1, PIK2, PIK3):
    """Load the pickled dataset and its two label sets.

    :param PIK1: path to the pickled data
    :param PIK2: path to the pickled movement labels
    :param PIK3: path to the pickled surface labels
    :return: (data, movement_labels, surface_labels)
    """
    def _unpickle(path):
        # Helper keeps the three identical loads in one place.
        with open(path, "rb") as fh:
            return pickle.load(fh)
    return _unpickle(PIK1), _unpickle(PIK2), _unpickle(PIK3)
def get_batches(data, movement_labels, surface_labels, batch_size, n_chan, n_sample):
    """Shuffle, batch and split the dataset into train/test iterators.

    :param data: list of arrays, each [trials, num_channels, num_samples]
    :param movement_labels: list of 1-D label arrays (class targets)
    :param surface_labels: list of 1-D label arrays (nuisance variable)
    :param batch_size: trials per batch; trailing trials that do not fill
        a whole batch are dropped
    :param n_chan: channels per trial
    :param n_sample: samples per trial
    :return: (train iterator of (x, y) tensors, test iterator,
              train surface-label batches, test surface-label batches)
    """
    ratio = 0.70 # use 70 percent of the data for training and rest for testing
    train_iterator, test_iterator = [], []
    # concatenate a list of arrays into a single array
    data = np.concatenate(data) # [total_trials, num_channels, num_samples]
    movement_labels = np.concatenate(movement_labels) # [total_trials]
    surface_labels = np.concatenate(surface_labels) # [total_trials]
    # shuffle data; both label arrays use the same permutation so the
    # trials stay aligned
    indices = np.arange(data.shape[0])
    np.random.shuffle(indices)
    data = torch.from_numpy(data[indices])
    movement_labels = movement_labels[indices]
    surface_labels = surface_labels[indices]
    # get one hot encodings of movement and surface labels:
    movement_labels = torch.from_numpy(to_categorical(movement_labels))
    surface_labels = torch.from_numpy(to_categorical(surface_labels))
    # split data into batches
    n_batches = floor(data.shape[0]/batch_size)
    end = n_batches * batch_size
    data = torch.split(data[:end], batch_size)
    movement_labels = torch.split(movement_labels[:end], batch_size)
    surface_labels = torch.split(surface_labels[:end], batch_size)
    # get the train and test data (split at whole-batch granularity)
    end = int(ratio * n_batches)
    train_surf_labels, test_surf_labels = surface_labels[:end], surface_labels[end:]
    # create data iterators
    for i in range(end):
        # reshape the batch of train data to NCHW for the conv layers
        x = data[i].view(batch_size, 1, n_chan, n_sample).float()
        y = movement_labels[i].float()
        train_iterator.append((x, y))
    for i in range(end, n_batches):
        # reshape the batch of test data the same way
        x = data[i].view(batch_size, 1, n_chan, n_sample).float()
        y = movement_labels[i].float()
        test_iterator.append((x, y))
    return train_iterator, test_iterator, train_surf_labels, test_surf_labels
def main():
    """Train the (adversarial) cVAE, save its weights and, in the
    adversarial case, report the nuisance-classifier accuracy on the
    held-out batches."""
    args = parseArgs()
    data, movement_labels, surface_labels = load_data(PIK1, PIK2, PIK3)
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    # NOTE(review): `device` is passed to train() but the model is never
    # moved with model.to(device) here -- confirm train() handles that.
    model = AdversarialcVAE(args.adversarial, args.n_sample, args.n_nuisance,
                args.n_chan, args.n_latent, args.n_kernel, args.lam)
    # weight decay is L2 regularization in parameter updates
    optimizer = optim.Adam(model.parameters(), lr=1e-3, weight_decay=1e-2)
    train_iterator, test_iterator, train_surf_labels, test_surf_labels = \
        get_batches(data, movement_labels, surface_labels, args.batch_size,
                    args.n_chan, args.n_sample)
    print("\n\nData parsed.")
    train(model, device, optimizer, args.adversarial, train_iterator,
          test_iterator, args.batch_size, args.n_epoch)
    print('Finished Training')
    # Save our trained model
    PATH = 'saved_model/inference_net.pth'
    torch.save(model.state_dict(), PATH)
    if args.adversarial:
        # Evaluate how well the adversary predicts the nuisance variable.
        correct, total = 0, 0
        with torch.no_grad():
            for batch in tqdm(test_iterator):
                test_data, test_nuisance = batch
                outputs = model(test_data, test_nuisance)
                # outputs[3] is the adversary's nuisance prediction s_hat.
                s_hat = outputs[3]
                _, predicted = torch.max(s_hat.data, 1)
                _, actual = torch.max(test_nuisance.data, 1)
                total += test_nuisance.size(0)
                correct += (predicted == actual).sum().item()
        print('Accuracy of the network on the test dataset: %d %%' % (
            100 * correct / total))
# -*- coding: utf-8 -*-
class DummyMultiDict(dict):
    """Minimal stand-in for a MultiDict, used in tests."""

    def getall(self, key):
        """Return the stored value for `key`, or [] when absent."""
        if key in self:
            return self[key]
        return []
class DummyFileParam(object):
    """Minimal stand-in for an uploaded-file parameter, used in tests."""

    def __init__(self, filename=None, file=None):
        # Mirror the two attributes a real file parameter exposes.
        self.filename = filename
        self.file = file
class DummySession(object):
    """Minimal session stand-in that records flash messages, for tests."""

    def __init__(self):
        # Flash messages accumulate here in call order.
        self.messages = []

    def flash(self, message):
        """Record `message` for later inspection by the test."""
        self.messages.append(message)
|
from __future__ import division
from gaussianMixturesTesting import Gaussian
from gaussianMixturesTesting import GM
import numpy as np
from scipy.stats import multivariate_normal as mvn
import copy
from random import random
import time
import matplotlib.pyplot as plt
if __name__ == "__main__":
    # Compare two mixture-condensation strategies (plain Runnalls vs.
    # k-means + Runnalls) on a random two-cluster mixture, reporting
    # runtimes and ISD errors and plotting all three mixtures.
    #Initialize some random gaussians to try merging
    testGM = GM();
    numFinal = 5
    numInit = 10*41;
    for i in range(0,numInit//2):
        tmpMean = [random()*2+3,random()*2+3];
        offCov = random()*0.02;
        tmpVar = [[offCov+random()*0.05,offCov],[offCov,offCov+random()*0.05]];
        weight = random();
        testGM.addG(Gaussian(tmpMean,tmpVar,weight));
    for i in range(0,numInit//2):
        tmpMean = [random()*2,random()*2];
        offCov = random()*0.02;
        tmpVar = [[(offCov+random()*0.02),offCov],[offCov,(offCov+random()*0.02)]];
        weight = random();
        testGM.addG(Gaussian(tmpMean,tmpVar,weight));
    testGM.normalizeWeights();
    testGM2 = copy.deepcopy(testGM);
    testGMOrig = copy.deepcopy(testGM);
    [x1,y1,c1] = testGM.plot2D(vis=False);
    # NOTE(review): time.clock() was removed in Python 3.8; switch to
    # time.perf_counter() when migrating off Python 2.
    firstCondenseTime = time.clock();
    testGM.condense(numFinal);
    testGM.normalizeWeights();
    firstCondenseTime = time.clock() - firstCondenseTime;
    print("The time to condense without k-means: {0:.2f} seconds".format(firstCondenseTime));
    firstCondenseTimestr = str(firstCondenseTime)[0:5];
    [x2,y2,c2] = testGM.plot2D(vis=False);
    isd1 = testGMOrig.ISD(testGM);
    print("The ISD without k-means: " + str(isd1));
    secondCondenseTime = time.clock();
    testGM2 = testGM2.kmeansCondensationN(k=5);
    testGM2.normalizeWeights();
    secondCondenseTime = time.clock() - secondCondenseTime;
    secondCondenseTimestr = str(secondCondenseTime)[0:5];
    print("Time to condense with k-means: " + str(secondCondenseTime) + " seconds");
    [x3,y3,c3] = testGM2.plot2D(vis = False);
    isd2 = testGMOrig.ISD(testGM2);
    print("The ISD with k-means: " + str(isd2));
    print("");
    print("Time Ratio of k-means/runnals = " + str(secondCondenseTime/firstCondenseTime));
    print("Error Ratio of k-means/runnals = " + str(isd2/isd1));
    if(testGM.size > numFinal):
        print('Error: testGM size is: '+ str(testGM.size));
        # Bug fix: this referenced the undefined name `testGM1`, raising
        # NameError whenever the size check failed.
        testGM.display();
    if(testGM2.size > numFinal):
        print('Error: testGM2 size is: ' + str(testGM2.size));
        testGM2.display();
    fig = plt.figure()
    ax1 = fig.add_subplot(131)
    con1 = ax1.contourf(x1,y1,c1, cmap=plt.get_cmap('viridis'));
    ax1.set_title('Original Mixture',fontsize=15);
    plt.colorbar(con1);
    # Find a global colour range across the three panels.
    minNum = 100000;
    maxNum = -100000;
    for i in range(0,len(c1)):
        for j in range(0,len(c1[i])):
            if(c1[i][j]<minNum):
                minNum = c1[i][j];
            if(c1[i][j] > maxNum):
                maxNum = c1[i][j];
    ax2 = fig.add_subplot(132)
    con2 = ax2.contourf(x2,y2,c2, cmap=plt.get_cmap('viridis'));
    ax2.set_title('Runnalls: {0:.2f} seconds'.format(firstCondenseTime),fontsize=15);
    ax2.set_xlabel("ISD with Runnalls only: {0:.3f}".format(isd1));
    plt.colorbar(con2,boundaries = np.linspace(minNum,0.0001,maxNum));
    ax3 = fig.add_subplot(133)
    con3 = ax3.contourf(x3,y3,c3, cmap=plt.get_cmap('viridis'));
    ax3.set_title('Clustering: {0:.2f} seconds'.format(secondCondenseTime),fontsize=15);
    ax3.set_xlabel("ISD with Clustering: {0:.3f}".format(isd2))
    plt.colorbar(con3,boundaries = np.linspace(minNum,0.0001,maxNum));
    fig.suptitle("#Initial = " + str(numInit) + ", #Final = " +str(numFinal),fontsize=30);
    plt.show();
|
# Packaging configuration for the CSV-to-Parquet converter.
from setuptools import setup, find_packages
setup(name='ConvertCSVtoParquet',
      version='0.2',
      description='CSV to Parquet converter',
      author='KL',
      packages=find_packages(),
      # zip_safe=False forces an unzipped install so package files stay
      # accessible on the filesystem.
      zip_safe=False)
|
# -*- coding: utf-8 -*-
"""
工程中我们使用一个名字为config.py的Python模块用来保存全局的配置,
由于logging在工程中每个源代码文件都可能用到,
因此我们把logging模块在config.py中生成一个实例,
这样其它模块只需要引用这个实例就可以了。
在其它模块中,我们使用这样的语句引用logger对象:
# from config import logger
"""
import logging
import logging.config
import os
# Create a "Log" folder under the current working directory.
def logger():
    """Create ./Log under the CWD and return the 'cisdi' logger.

    Handlers and formatters are read from 'logging.conf' in the current
    working directory; the Log directory is created first so any file
    handlers configured there have somewhere to write.
    """
    # os.path.join keeps this portable instead of hard-coding "\\" separators.
    path = os.path.join(os.getcwd(), "Log")
    # exist_ok avoids the race between an exists() check and mkdir().
    os.makedirs(path, exist_ok=True)
    logging.config.fileConfig('logging.conf')
    return logging.getLogger('cisdi')
|
# Two-pointer search (over a sorted array) for the pair whose sum is closest
# to zero. Input format: N on the first line, N integers on the second.
import sys
input = sys.stdin.readline  # NOTE: intentionally shadows builtins.input for fast reads

N = int(input())
values = sorted(list(map(int, input().split())))

start = 0
end = N-1
left = 0          # indices of the best pair found so far
right = N - 1
ans = abs(values[0] + values[N-1])  # smallest |sum| seen so far

while start < end:
    temp = values[start] + values[end]
    if abs(temp) < ans:
        ans = abs(temp)
        left = start
        right = end
        if ans == 0:
            # An exact zero sum cannot be beaten; stop early.
            break
    if temp > 0:
        # Sum too large: move the right pointer toward smaller values.
        end -= 1
    else:
        # Sum too small (or zero): move the left pointer toward larger values.
        start += 1

print(values[left], values[right])
#!/usr/bin/env python3
import os
import codekit.codetools as codetools
import pytest
def test_tempdir():
    """Test temporary directory context manager"""
    with codetools.TempDir() as temp_dir:
        # Directory exists while the context manager is active...
        assert os.path.exists(temp_dir)
    # ...and is removed once the context exits.
    assert os.path.exists(temp_dir) is False
def test_debug_lvl_from_env():
    """fetching default debug level from DM_SQUARE_DEBUG env var"""
    # default when unset
    os.environ['DM_SQUARE_DEBUG'] = ''
    assert codetools.debug_lvl_from_env() == 0

    # a non-numeric value must be rejected
    with pytest.raises(RuntimeError):
        os.environ['DM_SQUARE_DEBUG'] = 'foo'
        codetools.debug_lvl_from_env()

    os.environ['DM_SQUARE_DEBUG'] = '42'
    # BUG FIX: the comparison result was previously discarded (no `assert`),
    # so this case never actually checked anything.
    assert codetools.debug_lvl_from_env() == 42
|
import os
from Coverage import decision, coverage
from time import sleep

# Interactive entry point: greet the user, ask for the target module name,
# and delete any stale generated test/engine files from a previous run.
print(" =^..^= Welcome to the Statement - Decision Coverage tool v0.01 =^..^= ")
print("""\033[2;37;40m Made by (Khaled Redhwan) AKA
^___^ ╔═╗┬┬─┐ ╔═╗┌─┐┌┬┐ ^___^
( o.o ) ╚═╗│├┬┘ ║ ├─┤ │ ( o.o )
> ^ < ╚═╝┴┴└─o╚═╝┴ ┴ ┴ > ^ <
\033[0;37;0m""")

name = input("To begin, What's the name of the file you want to test? ")

# Remove leftover generated files so stale results never leak into this run.
if os.path.isfile(name + '_Cov_test.py'):
    os.remove(name + '_Cov_test.py')
if os.path.isfile(name + '_Cov_engine.py'):
    os.remove(name + '_Cov_engine.py')
if os.path.isfile(name + '_Dec_test.py'):
    os.remove(name + '_Dec_test.py')
if os.path.isfile(name + '_Dec_engine.py'):
    os.remove(name + '_Dec_engine.py')
def engineCov():
    """Emit <name>_Cov_engine.py, a driver script that prints the statement
    coverage percentage for the instrumented module."""
    generated = [
        "import Coverage\n",
        f"import {name}_Cov_test\n",
        f"_coverage = {name}_Cov_test._TheTestingCounter_/Coverage.coverage('{name}')\n\n\n",
        "print('-------------------------------------------------------------------')\n",
        "print('The statement coverage percentage is ',round(_coverage*100),'%')\n",
        "print('-------------------------------------------------------------------')\n",
    ]
    with open(name + "_Cov_engine.py", "a") as out:
        out.writelines(generated)
def enginDec():
    """Emit <name>_Dec_engine.py, a driver script that prints the decision
    coverage percentage for the instrumented module."""
    rule = "-------------------------------------------------------------------"
    generated = (
        "import Coverage\n"
        f"import {name}_Dec_test\n"
        f"_coverage = {name}_Dec_test._TheTestingCounter_/Coverage.decision('{name}')\n\n\n"
        f"print('{rule}')\n"
        "print('The decision coverage percentage is ',round(_coverage*100),'%')\n"
        f"print('{rule}')\n"
    )
    with open(name + "_Dec_engine.py", 'a') as out:
        out.write(generated)
# Instrument the target for both metrics, generate the driver scripts,
# then run each driver and clean up its generated files.
coverage(name)
decision(name)
enginDec()
engineCov()

print("\033[1;36;40m Please enter the input for the statement coverage Now!\033[0m")
# Windows-specific invocation (cmd /c + the py launcher).
os.system(f"cmd /c py {name + '_Cov_engine.py'}")
os.remove(name + '_Cov_test.py')
os.remove(name + '_Cov_engine.py')

print("\n\n\n\n\n\033[1;31;40m Please enter the input for the decision coverage Now!\033[0m")
os.system(f"cmd /c py {name + '_Dec_engine.py'}")
os.remove(name + '_Dec_test.py')
os.remove(name + '_Dec_engine.py')

print("Done!")
sleep(3)  # keep the console window open briefly before exiting
|
from django.contrib import admin
from portfolio.forms import Login

# Register your models here.
# NOTE(review): Login is imported from portfolio.forms — admin.site.register
# expects a Model class; confirm this is a model and not a form.
admin.site.register(Login)  # I want to see this model inside the admin
|
from pandas_vb_common import *  # provides pd, concat, ... (asv benchmark harness)


class concat_categorical(object):
    # asv benchmark: time concatenating two large categorical Series.
    goal_time = 0.2

    def setup(self):
        # 6M-element categorical Series built from a small repeating alphabet.
        self.s = pd.Series((list('aabbcd') * 1000000)).astype('category')

    def time_concat_categorical(self):
        concat([self.s, self.s])
# -*- coding: utf-8 -*-
'''
基于LDA,对人脸数据集进行维数约减,并计算分类准确率
Created by Longrui Dong --2018.01.05
'''
from __future__ import print_function
import matplotlib.pyplot as plt
from time import time
import logging
import itertools
import numpy as np
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.model_selection import train_test_split
from sklearn.model_selection import cross_val_score
from sklearn.metrics import cohen_kappa_score
from sklearn.metrics import confusion_matrix
print(__doc__)

# Display progress logs on stdout
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')

PICTURE_PATH1 = "D:\\mycollege\\term7\\keshe\\CroppedYale"  # YaleB face data: 10 subjects x 65 images
PICTURE_PATH2 = "D:\\mycollege\\term7\\keshe\\att_faces"  # AT&T face data: 40 subjects x 10 images

# Load the pre-exported .npy files: one ndarray per dataset plus its labels.
data_1=np.load(PICTURE_PATH1 + "\\"+"alldataset.npy")
label_1=np.load(PICTURE_PATH1 + "\\"+"alldatalabel.npy")
data_2=np.load(PICTURE_PATH2 + "\\"+"alldataset.npy")
label_2=np.load(PICTURE_PATH2 + "\\"+"alldatalabel.npy")

n_samples1,n_features1=data_1.shape
n_classes1 = len(np.unique(label_1))
n_samples2,n_features2=data_2.shape
n_classes2 = len(np.unique(label_2))

# Image height/width for each dataset (used when reshaping rows for display).
h1=192
w1=168
h2=112
w2=92

# Human-readable class names: "person1" ... "personN".
target_names1 = []
target_names2 = []
for i in range(1,11):
    names = "person" + str(i)
    target_names1.append(names)
for i in range(1,41):
    names = "person" + str(i)
    target_names2.append(names)

# Print dataset summaries.
print("Total 1th dataset size:")
print("n_samples: %d" % n_samples1)
print("n_features: %d" % n_features1)
print("n_classes: %d" % n_classes1)
print("Total 2nd dataset size:")
print("n_samples: %d" % n_samples2)
print("n_features: %d" % n_features2)
print("n_classes: %d" % n_classes2)

# Stratified train/test split for both datasets.
X_train1,X_test1,y_train1,y_test1=train_test_split(data_1,label_1,test_size=130,random_state=0,stratify=label_1)
X_train2,X_test2,y_train2,y_test2=train_test_split(data_2,label_2,test_size=80,random_state=0,stratify=label_2)

# Dimensionality reduction: fit LDA on the training split, project both splits.
print("对训练数据和测试数据降维")
t0=time()
lda_1=LinearDiscriminantAnalysis(solver='svd')
lda_1=lda_1.fit(X_train1,y_train1)
X_train1_new=lda_1.transform(X_train1)
X_test1_new=lda_1.transform(X_test1)
print("transformed on data1 done in %0.3fs" % (time() - t0))
t0=time()
lda_2=LinearDiscriminantAnalysis(solver='svd')
lda_2=lda_2.fit(X_train2,y_train2)
X_train2_new=lda_2.transform(X_train2)
X_test2_new=lda_2.transform(X_test2)
# NOTE(review): message says "data1" but this timing is for data2.
print("transformed on data1 done in %0.3fs" % (time() - t0))

# Print the shapes of the reduced datasets.
print("Total 1th transformed dataset size:")
print("n_samples_train: %d" % X_train1_new.shape[0])
print("n_features_train: %d" % X_train1_new.shape[1])
print("n_samples_test: %d" % X_test1_new.shape[0])
print("n_features_test: %d" % X_test1_new.shape[1])
print("Total 2nd transformed dataset size:")
print("n_samples_train: %d" % X_train2_new.shape[0])
print("n_features_train: %d" % X_train2_new.shape[1])
print("n_samples_test: %d" % X_test2_new.shape[0])
print("n_features_test: %d" % X_test2_new.shape[1])

# Classification check with LDA, via 5-fold cross-validation on training data.
print("使用5折交叉验证来展示对训练数据的分类效果")
t0=time()
clf1=LinearDiscriminantAnalysis(solver='lsqr',shrinkage='auto')
clf2=LinearDiscriminantAnalysis(solver='lsqr',shrinkage='auto')
scores1=cross_val_score(clf1, X_train1_new, y_train1, cv=5)
scores2=cross_val_score(clf2, X_train2_new, y_train2, cv=5)
print("done in %0.3fs" % (time() - t0))
#print("5-fold cross validation scores:")
print("5-fold cross validation accuracy on data1: %0.2f (+/- %0.2f)" % (scores1.mean(), scores1.std() * 2))
print("5-fold cross validation accuracy on data2: %0.2f (+/- %0.2f)" % (scores2.mean(), scores2.std() * 2))

# Refit on the full training split, then predict the held-out test split.
print("使用lda分类器先对训练数据进行训练,再对测试数据进行预测:")
t0=time()
y_test1_pred=clf1.fit(X_train1_new, y_train1).predict(X_test1_new)
y_test2_pred=clf2.fit(X_train2_new, y_train2).predict(X_test2_new)
print("done in %0.3fs" % (time() - t0))

# Test-set accuracy: count positions where prediction equals the true label.
comp1=y_test1_pred-y_test1  # zero where the prediction matches the true label
comp2=y_test2_pred-y_test2
ac_rate1=np.sum(comp1==0)/X_test1_new.shape[0]  # fraction of correct predictions
ac_rate2=np.sum(comp2==0)/X_test2_new.shape[0]
print("Accuracy on test data on data1: %0.4f" %ac_rate1)
print("Accuracy on test data on data2: %0.4f" %ac_rate2)

# Cohen's kappa (chance-corrected agreement with the true labels).
kappa_1=cohen_kappa_score(y_test1, y_test1_pred)
kappa_2=cohen_kappa_score(y_test2, y_test2_pred)
print("Cohen’s kappa score on test data on data1: %0.4f" %kappa_1)
print("Cohen’s kappa score on test data on data2: %0.4f" %kappa_2)
def plot_confusion_matrix(cm, path, classes,
                          normalize=False,
                          title='Confusion matrix',
                          cmap=plt.cm.Blues):
    """
    This function prints and plots the confusion matrix.
    Normalization can be applied by setting `normalize=True`.
    The figure is also saved as <path>\\confusionmatrix.png.
    """
    if normalize:
        # Row-normalize so each row sums to 1 (per-true-class rates).
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        print("Normalized confusion matrix")
    else:
        print('Confusion matrix, without normalization')
    print(cm)

    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    tick_marks = np.arange(len(classes))
    plt.xticks(tick_marks, classes, rotation=45)
    plt.yticks(tick_marks, classes)

    fmt = '.2f' if normalize else 'd'
    thresh = cm.max() / 2.
    # Annotate every cell with its value, flipping text color for contrast
    # against the colormap.
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        plt.text(j, i, format(cm[i, j], fmt),
                 horizontalalignment="center",
                 color="white" if cm[i, j] > thresh else "black")

    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    # Save at a fixed large figure size next to the dataset.
    fig = plt.gcf()
    fig.set_size_inches(18.5, 10.5)
    fig.savefig(path+"\\"+"confusionmatrix.png", dpi=100)
# Compute and plot the confusion matrix for each test set.
cnf_matrix_1 = confusion_matrix(y_test1, y_test1_pred)
cnf_matrix_2 = confusion_matrix(y_test2, y_test2_pred)
np.set_printoptions(precision=2)
plt.figure()
plot_confusion_matrix(cnf_matrix_1, PICTURE_PATH1, classes=target_names1, normalize=True,
                      title='Normalized confusion matrix on data1')
plt.figure()
plot_confusion_matrix(cnf_matrix_2, PICTURE_PATH2, classes=target_names2, normalize=True,
                      title='Normalized confusion matrix on data2')
plt.show()
def plot_gallery(images, titles, h, w, n_row, n_col):
    """Render an n_row x n_col grid of grayscale portraits with captions."""
    plt.figure(figsize=(1.8 * n_col, 2.4 * n_row))
    plt.subplots_adjust(bottom=0, left=.01, right=.99, top=.90, hspace=.35)
    # Subplot slots are 1-based in matplotlib.
    for cell in range(1, n_row * n_col + 1):
        plt.subplot(n_row, n_col, cell)
        portrait = images[cell - 1].reshape((h, w))
        plt.imshow(portrait, cmap=plt.cm.gray)
        plt.title(titles[cell - 1], size=12)
        plt.xticks(())
        plt.yticks(())
def title(y_pred, y_test, target_names, i):
    """Build the caption for test image *i*: predicted vs. true person name.

    Labels are 1-based, hence the -1 when indexing target_names.
    """
    return 'predicted: %s\ntrue: %s' % (
        target_names[y_pred[i] - 1],
        target_names[y_test[i] - 1],
    )
# Captions ("predicted vs true") for every test image, then show both galleries.
prediction_titles1 = [title(y_test1_pred, y_test1, target_names1, i)
                      for i in range(y_test1_pred.shape[0])]
prediction_titles2 = [title(y_test2_pred, y_test2, target_names2, i)
                      for i in range(y_test2_pred.shape[0])]

print("画图展示分类效果:")
plot_gallery(X_test1, prediction_titles1, h1, w1, 10, 13)
plot_gallery(X_test2, prediction_titles2, h2, w2, 10, 8)
from django.contrib import admin
from . import models
from django.utils.safestring import mark_safe
from actions import Actions
# Register your models here.
class AcceuilAdmin(Actions):
    # Admin for home-page ("Acceuil") content: image, title, video, status.
    fieldsets = [
        ('Presentation',{'fields': ['image','titre']}),
        ('Video', {'fields': ['video']}),
        ('status', {'fields': ['status']})
    ]
    list_display = ('image_views','titre','date_add','status')
    list_filter = ('status',)
    search_fields = ('titre',)
    date_hierarchy = "date_add"
    list_display_links = ['titre']
    ordering = ['titre']
    list_per_page = 10

    def image_views(self,obj):
        # Inline thumbnail shown in the changelist.
        return mark_safe("<img src='{url}' width= 100px height=50px >".format(url=obj.image.url))
class ArtisteAdmin(Actions):
    """Admin for Artiste: identity fields plus an inline photo preview."""
    fieldsets = [
        ('Presentation',{'fields': ['nom','prenom','bio']}),
        ('Image',{'fields': ['photo']}),
        ('Status',{'fields': ['status']})
    ]
    list_display = ('nom','prenom','date_add','status','image_views')
    list_filter = ('status',)
    search_fields = ('nom',)
    date_hierarchy = "date_add"
    list_display_links = ['nom']
    ordering = ['nom']
    list_per_page = 10

    def image_views(self,obj):
        # BUG FIX: Artiste's image field is named 'photo' (see fieldsets),
        # not 'image' — `obj.image.url` raised AttributeError in the
        # changelist (copy/paste from AcceuilAdmin).
        return mark_safe("<img src='{url}' width= 100px height=50px >".format(url=obj.photo.url))
class GalerieAdmin(Actions):
    # Admin for gallery entries: image + title + publication status.
    fieldsets = [
        ('Presentation',{'fields': ['image','titre']}),
        ('status', {'fields': ['status']})
    ]
    list_display = ('image_views','titre','date_add','status')
    list_filter = ('status',)
    search_fields = ('titre',)
    date_hierarchy = "date_add"
    list_display_links = ['titre']
    ordering = ['titre']
    list_per_page = 10

    def image_views(self,obj):
        # Inline thumbnail shown in the changelist.
        return mark_safe("<img src='{url}' width= 100px height=50px >".format(url=obj.image.url))
class PlaylistAdmin(Actions):
    # Admin for playlist entries: artist name + video + publication status.
    fieldsets = [
        ('Presentation',{'fields': ['nom_artiste','video']}),
        ('Status',{'fields': ['status']})
    ]
    list_display = ('nom_artiste','video','date_add','status')
    list_filter = ('status',)
    search_fields = ('nom_artiste',)
    date_hierarchy = "date_add"
    list_display_links = ['nom_artiste']
    ordering = ['nom_artiste']
    list_per_page = 10
def _register(model,Admin_class):
    # Tiny helper so all registrations below read uniformly.
    admin.site.register(model,Admin_class)

_register(models.Acceuil, AcceuilAdmin)
_register(models.Artiste, ArtisteAdmin)
_register(models.Playlist, PlaylistAdmin)
_register(models.Galerie, GalerieAdmin)
"""
Uses FilterReg algorithm to match two point clouds
"""
import numpy as np
import open3d as o3d
from probreg import filterreg
from scanner3d.registration.pair.base_pair_reg import BasePairReg
class FilterReg(BasePairReg):
    """Pairwise point-cloud registration using probreg's FilterReg algorithm."""

    def register(self, pcd1, pcd2):
        """Register pcd2 onto pcd1; return a 4x4 homogeneous transform matrix.

        The estimated similarity transform (rotation, translation, scale)
        is packed into one homogeneous matrix.
        """
        f_reg = filterreg.registration_filterreg(pcd1, pcd2)
        trans = f_reg.transformation
        transformation_matrix = np.identity(4)
        # BUG FIX: the scale must be folded into the 3x3 rotation block only.
        # The old code multiplied the entire 4x4 by the scale, which also
        # scaled the translation column and set the homogeneous corner
        # [3, 3] to `scale` instead of 1 — an invalid homogeneous transform.
        # (The previously-computed `scale_matrix` was never used.)
        transformation_matrix[0:3, 0:3] = trans.scale * trans.rot
        transformation_matrix[0:3, 3] = trans.t
        return transformation_matrix
# Generated by Django 2.2.6 on 2019-10-15 13:55
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add optional metadata fields (compiler + three sub-titles) to Book."""

    dependencies = [("shop", "0009_book")]

    operations = [
        migrations.AddField(
            model_name="book",
            name="compiler",
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name="book",
            name="sub_title1",
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name="book",
            name="sub_title2",
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name="book",
            name="sub_title3",
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
    ]
|
import csv
import random
from numpy import genfromtxt
import sqlalchemy
from sqlalchemy import BigInteger
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

#db connect
# NOTE(review): credentials and host are hard-coded in source; they should be
# loaded from configuration/environment instead.
engine = create_engine('postgresql://postgres:postgres@121.138.81.143:5432/')
Session = sessionmaker(bind=engine)
Session.configure(bind=engine)  # NOTE(review): redundant — bind was already set above
session=Session()
class tmp_homeshopping(Base):
    # ORM mapping for the staging table of home-shopping EPG rows.
    # All active columns are plain strings; the raw CSV is loaded verbatim.
    __tablename__='tmp_homeshopping'
    start_date = Column(String)
    end_date = Column(String)
    # NOTE(review): 'parthne_name' looks misspelled — the raw INSERT later in
    # this file targets a column named 'parthner_name'; confirm which spelling
    # the real table uses.
    parthne_name = Column(String)
    shop_code = Column(String)
    prod_name = Column(String)
    # cate1 = Column(String)
    # cate2 = Column(String)
    # cate3 = Column(String)
    # cate4 = Column(String)
    # shop_prod_id = Column(String)
    # prod_id = Column(String)
    # prod_img_url_m = Column(String)
    # prod_price_ = Column(String)
    # prod_url_m = Column(String)
    # participartion_type = Column(String)
    # use_yn = Column(String)
    id = Column(BigInteger, primary_key=True)
try:
    # 'with' guarantees the CSV handle is closed (the original leaked it).
    with open('/home/lcy/2017-07-17_16epg.csv', 'r') as f:
        tbl_reader = csv.reader(f, delimiter='{')
        for i in tbl_reader:
            print(i[8])
            # BUG FIX: these assignments used to end with trailing commas,
            # which made every value a 1-tuple (e.g. ('2017-07-17',)) so
            # tuple reprs were inserted into the table. The pointless
            # class-attribute mutation (tmp_homeshopping.start_date = ...)
            # that clobbered the ORM Column has also been removed.
            start_date = i[0]
            end_date = i[1]
            parthner_name = i[2]
            shop_code = i[3]
            prod_name = i[5]
            row_id = i[4]  # renamed from 'id' to avoid shadowing the builtin
            query = ("INSERT INTO tmp_homeshopping "
                     "(start_date, end_date, parthner_name, shop_code, prod_name, id) "
                     "VALUES (%s,%s,%s,%s,%s,%s);")
            data = (start_date, end_date, parthner_name, shop_code, prod_name, row_id)
            session.execute(query, data)
    # One commit for the whole load (the original committed per row).
    session.commit()
finally:
    session.close()  # Close the connection
#instance테이블 생성
#
# for instance in session.query(time_cate).order_by(time_cate.id):
# if(instance.time=='6'):
# print(instance.time, instance.ranking,instance.cate1, instance.cate2)
#dic_tc_h = {'prod_id': ['time','cate1','cate2']}
#
# dic_6_1 = {}
# id = 1
#
#
# for tc in session.query(time_cate):
# for h in session.query(tmp_test_homeshopping):
# if(tc.time =='6'):
# if(tc.ranking=='1'):
# if ((tc.cate2 == h.cate2)):
# dic_6_1[id] = {
# 'prod_id': h.prod_id,
# 'time': tc.time,
# 'cate1': tc.cate1,
# 'cate2': tc.cate2
# }
# id += 1
# print (dic_6_1)
# #
# #
# # def pop_queue(the_list, size=2):
# #
# # if len(the_list) >= 2:
# # return the_list[2:], the_list[0], the_list[1]
# # elif len(the_list) >= 1:
# # return the_list[2:], the_list[0], None
# # return the_list[2:], None, None
# #
# #
# # the_list = [k for k in dic_tc_h.values()]
# #
# # while True:
# # the_list, first, second = pop_queue(the_list)
# # print (first, second)
# # if not the_list:
# # break
# #
#
# #리스트가 들어오면 0번째를 result table에 넣고 1을 0번째로 바꾸는
# keys = dic_6_1.keys()
# key = keys[0]
# keys = key[1:0]
#
#
#
# recommend_result={}
#
#
#
#
# def pop_queue(the_list):
# keys = the_list.keys()
# key = keys[0]
# keys = keys[1:0]
#
|
import pygame
import time
import random
from snake_clase import SnakeGame

# Module-level RNG instance (not referenced elsewhere in this module).
rand = random.Random()
class SnakeGameGUI(SnakeGame):
    """Pygame front-end for SnakeGame: draws the board and runs the event loop."""

    def __init__(self, headless_mode = False):
        super().__init__()
        # Colors (RGB).
        self.AZUL = (0, 0, 255)
        self.MORADO = (255, 0, 255)
        self.NEGRO = (0, 0, 0)
        self.ROJO = (255, 0, 0)
        # Pixel size of one board cell and derived window geometry
        # (extra 400 px on the right for the score panel).
        self.TAMAÑO_CUADRADO = 10
        self.ANCHO = self.TAMAÑO_CUADRADO*self.ancho
        self.ALTO = self.TAMAÑO_CUADRADO*self.alto
        self.TAMAÑO = (self.ANCHO + 400, self.ALTO)
        if not headless_mode:
            self.SCREEN = pygame.display.set_mode(self.TAMAÑO)
        pygame.init()

    def dibujar_tablero(self):
        """Redraw the board — body (1), head (2), food (-1) — plus the score panel."""
        myfont = pygame.font.SysFont("monospace", 50)
        self.SCREEN.fill(self.NEGRO)
        for i in range(self.alto):
            # BUG FIX: the inner loop also iterated range(self.alto), so
            # non-square boards were only partially drawn; columns span
            # self.ancho.
            for j in range(self.ancho):
                # check for head, body, food
                if self.tablero[i, j] == 1:
                    tam_loc = (j*self.TAMAÑO_CUADRADO, i*self.TAMAÑO_CUADRADO, self.TAMAÑO_CUADRADO, self.TAMAÑO_CUADRADO)
                    pygame.draw.rect(self.SCREEN, self.AZUL, tam_loc)
                elif self.tablero[i, j] == 2:
                    tam_loc = (j*self.TAMAÑO_CUADRADO, i*self.TAMAÑO_CUADRADO, self.TAMAÑO_CUADRADO, self.TAMAÑO_CUADRADO)
                    pygame.draw.rect(self.SCREEN, self.MORADO, tam_loc)
                elif self.tablero[i, j] == -1:
                    loc = (int((j+0.5)*self.TAMAÑO_CUADRADO), int((i+0.5)*self.TAMAÑO_CUADRADO))
                    pygame.draw.circle(self.SCREEN, self.ROJO, loc, self.TAMAÑO_CUADRADO//2)
        label = myfont.render(f"Score: {self.puntaje}", 1, self.MORADO)
        self.SCREEN.blit(label, (self.ANCHO + 10,10))
        # Vertical divider between the board and the score panel.
        tam_loc = (self.ANCHO, 0, 3, self.ALTO)
        pygame.draw.rect(self.SCREEN, (255, 255, 255), tam_loc)
        pygame.display.update()

    def run_game(self, player_ai = None):
        """Main loop: poll the keyboard (or `player_ai`) for a direction and step the game."""
        actualizar_rate = 1
        fps = 60
        contador = 0
        distancia = self.distancia
        # BUG FIX: `direccion` was unbound until the first keypress, so the
        # first board update raised UnboundLocalError when no key had been
        # pressed and no AI was supplied. Start moving right by default.
        direccion = [0, 1]
        pygame.init()
        myfont = pygame.font.SysFont("monospace", 65)
        self.dibujar_tablero()
        pygame.display.update()
        exit_flag = False
        # Main game loop.
        while exit_flag == False and self.game_state == True:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    exit_flag = True
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_UP:
                        direccion = [-1, 0]
                    elif event.key == pygame.K_DOWN:
                        direccion = [1, 0]
                    elif event.key == pygame.K_LEFT:
                        direccion = [0, -1]
                    elif event.key == pygame.K_RIGHT:
                        direccion = [0, 1]
                    else:
                        distancia = self.distancia
            time.sleep(1.0/fps)
            contador += 1
            if contador >= actualizar_rate:
                if player_ai is not None:
                    direccion = player_ai()
                self.actualizar_distancia(direccion)
                self.actualizar_estado()
                contador = 0
            self.dibujar_tablero()
            pygame.display.update()
        # --- the game has been lost ---
        label = myfont.render(f"Game Over!", 1, self.ROJO)
        self.SCREEN.blit(label, (self.ANCHO + 10, 50))
        pygame.display.update()
        # Keep the "Game Over" screen up until the window is closed.
        while exit_flag == False:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    exit_flag = True
        pygame.quit()
|
import requests
from django.shortcuts import render, redirect
from django.http import HttpResponse, Http404
from django.utils import timezone
from .models import Diner, Review, Visited, Comment, Rating
from .forms import PostForm, ReviewForm, CommentForm
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.contrib.auth import authenticate, login, logout
# Create your views here.
def signin(request):
    # Authenticate against the posted credentials and start a session.
    # NOTE(review): assumes a POST form — a GET request raises KeyError here.
    username = request.POST['username']
    password = request.POST['password']
    user = authenticate(request, username=username, password=password)
    if user is not None:
        login(request, user)
        # Redirect to a success page.
        return redirect('/diners/')
    else:
        # Return an 'invalid login' error message.
        # NOTE(review): failure redirects to the same page with no error shown.
        return redirect('/diners/')
def signout(request):
    # End the current session and return to the diner list.
    logout(request)
    # Redirect to a success page.
    return redirect('/diners/')
def index(request):
    # Paginated diner list, newest first, 5 per page (?page=N).
    latest_diner_list = Diner.objects.order_by('-published_date')
    paginator = Paginator(latest_diner_list, 5)
    page = request.GET.get('page')
    diners = paginator.get_page(page)
    context = {'latest_diner_list': diners}
    return render(request, 'diners/index.html', context)
def detail(request, diner_id):
    # Diner detail page: info, review/comment forms, the current user's visit
    # status, positive-rating count and a geocoded map location.
    form = ReviewForm()
    comment_form = CommentForm()
    try:
        diner = Diner.objects.get(pk=diner_id)
        location = getCord(diner.location)  # [lat, lng] or the string "Error"
        # NOTE(review): passes *all* comments, not just this diner's — confirm
        # the template filters them.
        comments = Comment.objects.all()
        visited = Visited.objects.filter(diner = diner, visiter = request.user)
        vote_p = len(Rating.objects.filter(diner = diner))  # positive-rating count
        # Collapse the queryset to a single Visited record, or 0 when absent.
        if(len(visited) != 0):
            visited = visited[0]
        else:
            visited = 0
        reviews = Review.objects.filter(diner = diner)
    except Diner.DoesNotExist:
        raise Http404("Diner does not exist")
    return render(request, 'diners/detail.html', {'diner': diner, 'form': form, 'visit_status': visited, 'reviews': reviews, 'comment': comment_form, 'comment_list': comments, 'rating_postive': vote_p, 'location': location})
def results(request, diner_id):
    """Show all reviews for one diner (404 if the diner is unknown)."""
    try:
        diner = Diner.objects.get(pk=diner_id)
    except Diner.DoesNotExist:
        raise Http404("Diner does not exist")
    # CLEANUP: filter() never raises DoesNotExist — it just returns an empty
    # queryset — so the old try/except around it (with its misleading
    # "Diner does not exist" message) was dead code and has been removed.
    reviews = Review.objects.filter(diner=diner)
    return render(request, 'diners/results.html', {'diner': diner ,'reviews': reviews})
def mark_visited(request, diner_id):
    # Record that the current user visited this diner, then re-render detail.
    try:
        diner = Diner.objects.get(pk=diner_id)
    except Diner.DoesNotExist:
        raise Http404("Diner does not exist")
    # get_or_create returns an (object, created) tuple.
    mark_visit = Visited.objects.get_or_create(diner = diner, visiter = request.user)
    vote_p = len(Rating.objects.filter(diner = diner))  # positive-rating count
    form = ReviewForm()
    return render(request, 'diners/detail.html', {'diner': diner, 'form': form, 'visit_status': mark_visit, 'rating_postive': vote_p})
def rate_positive(request, diner_id):
    # Register (or refresh) a positive rating from the current user.
    try:
        diner = Diner.objects.get(pk=diner_id)
    except Diner.DoesNotExist:
        raise Http404("Diner does not exist")
    # update_or_create returns an (object, created) tuple; the result is unused.
    vote = Rating.objects.update_or_create(diner = diner, author = request.user, rating_pos = 1)
    return render(request, 'diners/comment_page.html', {'diner': diner})
def post_new(request):
    # Create a new diner: GET renders an empty form; a valid POST saves the
    # diner (stamping author + publish time) and redirects to its detail page.
    form = PostForm()
    if request.method == "POST":
        form = PostForm(request.POST)
        if form.is_valid():
            diner = form.save(commit=False)
            diner.author = request.user
            diner.published_date = timezone.now()
            diner.save()
            p = '/diners/'+str(diner.pk)+'/'
            return redirect(p, pk=diner.pk)
    else:
        form = PostForm()
    return render(request, 'diners/post_edit.html', {'form': form})
def post_review(request, diner_id):
    """Create a review for a diner, then return to the diner's detail page."""
    form = ReviewForm()
    if request.method == "POST":
        form = ReviewForm(request.POST)
        if form.is_valid():
            review = form.save(commit=False)
            review.author = request.user
            review.diner = Diner.objects.get(pk=diner_id)
            review.published_date = timezone.now()
            review.save()
    # BUG FIX: non-POST requests and invalid forms previously fell off the
    # end and returned None, which Django rejects with a ValueError. Always
    # send the user back to the diner's detail page.
    p = '/diners/' + str(diner_id) + '/'
    return redirect(p, pk=diner_id)
def post_comment(request, review_id):
    """Attach a comment to a review, then return to the review's diner page."""
    if request.method == "POST":
        form = CommentForm(request.POST)
        if form.is_valid():
            comment = form.save(commit=False)
            comment.author = request.user
            review = Review.objects.get(pk=review_id)
            comment.review = review
            comment.published_date = timezone.now()
            comment.save()
            diner_id = review.diner.id
            p = '/diners/' + str(diner_id) + '/'
            return redirect(p, pk=diner_id)
    # BUG FIX: non-POST requests and invalid forms previously returned None
    # (a server error). Fall back to the review's diner page.
    review = Review.objects.get(pk=review_id)
    return redirect('/diners/' + str(review.diner.id) + '/')
def getCord(location):
    """Geocode a street address to [lat, lng] via the Google Maps API.

    Returns the string "Error" (a sentinel, not an exception) when geocoding
    fails; callers must check for it.
    """
    address = location
    # SECURITY NOTE(review): this API key is hard-coded in source control; it
    # should be rotated and loaded from settings/environment instead.
    api_key = "AIzaSyCmK4Y5QwyeU6TsrBVJxsJVyIYN7oZz14w"
    api_response = requests.get('https://maps.googleapis.com/maps/api/geocode/json?address={0}&key={1}'.format(address, api_key))
    api_response_dict = api_response.json()

    if api_response_dict['status'] == 'OK':
        latitude = api_response_dict['results'][0]['geometry']['location']['lat']
        longitude = api_response_dict['results'][0]['geometry']['location']['lng']
        key = [latitude, longitude]
        return key
    else:
        return "Error"
|
import psycopg2
import json
from db.config import config

# Export the crawldb.page graph to dataHive.json: one node per page, with its
# outgoing links listed under 'imports' (self-links excluded).
conn = None
# database code
try:
    # read connection parameters
    params = config()

    # connect to the PostgreSQL server
    print('Connecting to the PostgreSQL database...')
    conn = psycopg2.connect(**params)

    # create a cursor
    cur = conn.cursor()
    data = []

    # fetch all
    print('\n')
    cur.execute("SELECT * FROM crawldb.page")
    rows = cur.fetchall()
    for row in rows:
        print(str(row[3]) + " -- " + str(row[1]) + " -- " + str(row[0]))
        tmp = {}
        # Node fields: id (as name), size, url.
        tmp['name'] = str(row[0])
        tmp['size'] = row[1]
        tmp['url'] = str(row[8])
        # Outgoing links from this page.
        cur.execute("SELECT * FROM crawldb.link \n" +
                    "WHERE from_page = %s", (row[0],))
        links = cur.fetchall()
        tmplinks = []
        for link in links:
            print(link)
            if link[1] != row[0]:  # skip self-links
                tmplinks.append(link[1])
        tmp['imports'] = tmplinks
        data.append(tmp)

    # close the communication with the PostgreSQL
    cur.close()
    with open('dataHive.json', 'w') as outfile:
        json.dump(data, outfile, indent=4)
except (Exception, psycopg2.DatabaseError) as error:
    print(error)
finally:
    if conn is not None:
        conn.close()
        print('Database connection closed.')
|
'''
设计一个支持 push,pop,top 操作,并能在常数时间内检索到最小元素的栈。
push(x) -- 将元素 x 推入栈中。
pop() -- 删除栈顶的元素。
top() -- 获取栈顶元素。
getMin() -- 检索栈中的最小元素。
示例:
MinStack minStack = new MinStack();
minStack.push(-2);
minStack.push(0);
minStack.push(-3);
minStack.getMin(); --> 返回 -3.
minStack.pop();
minStack.top(); --> 返回 0.
minStack.getMin(); --> 返回 -2.
栈顶元素,是指最后一个进栈的元素
'''
class MinStack:
    """Stack supporting push/pop/top plus O(1) getMin via an auxiliary stack."""

    def __init__(self):
        """
        initialize your data structure here.
        """
        # Main data stack.
        self.stack = []
        # Auxiliary stack; its top is always the current minimum. A value is
        # pushed whenever it is <= the current minimum (duplicates included),
        # so the helper stays non-increasing from bottom to top.
        self.helper = []

    def push(self, x: int) -> None:
        self.stack.append(x)
        if len(self.helper) == 0 or self.helper[-1] >= x:
            self.helper.append(x)

    def pop(self) -> None:
        num = self.stack.pop()
        if num == self.helper[-1]:
            # PERF FIX: pop the top directly. list.remove() scanned from the
            # bottom (O(n)); since equal minima sit adjacent at the top of
            # the non-increasing helper stack, popping the top is equivalent.
            self.helper.pop()

    def top(self) -> int:
        return self.stack[-1]

    def getMin(self) -> int:
        # Implicitly returns None when the stack is empty.
        if self.helper:
            return self.helper[-1]
# Your MinStack object will be instantiated and called as such:
obj = MinStack()
obj.push(-2)
obj.push(0)
obj.push(-3)
print(obj.getMin())  # -> -3
obj.pop()
print(obj.top())  # -> 0
print(obj.getMin())  # -> -2
|
import os
import cv2

# Haar cascade for frontal-face detection (absolute path into a local
# OpenCV 4.1.2 checkout).
face_cascade = cv2.CascadeClassifier('/home/nitish/Desktop/project/s_w/opencv-4.1.2/data/haarcascades/haarcascade_frontalface_default.xml')
def face_extractor(img):
    """Return the first detected face cropped out of `img`, or None if none found."""
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)  # detector works on grayscale
    faces = face_cascade.detectMultiScale(gray, scaleFactor=1.5, minNeighbors=5)
    # BUG FIX: the original used `if faces is():` — an identity comparison
    # against a freshly-created empty tuple, which is never a reliable
    # emptiness check — and the return line carried a stray leading '+'
    # (a leftover diff marker) that made the file a SyntaxError.
    if len(faces) == 0:
        return None
    for (x, y, w, h) in faces:
        cropped_face = img[y:y+h, x:x+w]
        return cropped_face  # crop and return the first face only
# Capture loop: grab webcam frames and save up to 200 cropped face samples
# into a per-user directory named after the entered id.
cam = cv2.VideoCapture(0)
Id = input('enter your id: ')
os.mkdir("/home/nitish/Desktop/project/faces/" + Id)
count = 0

while True:
    ret, frame = cam.read()
    # PERF FIX: the original called face_extractor(frame) twice per frame
    # (once for the None check, once for the resize) — detect once and reuse.
    face = face_extractor(frame)
    if face is not None:
        count += 1
        face = cv2.resize(face, (250, 250))
        face = cv2.cvtColor(face, cv2.COLOR_BGR2GRAY)
        file_name_path = "/home/nitish/Desktop/project/faces/" + Id + "/" + Id + str(count) + ".jpg"
        cv2.imwrite(file_name_path, face)
        cv2.putText(face, str(count), (50,50), cv2.FONT_HERSHEY_COMPLEX, 1, (0, 255, 0), 2)
        cv2.imshow('Face Detector', face)
    else:
        print("Face not Found")
    # Stop on Enter (key code 13) or once 200 samples are collected.
    if cv2.waitKey(1)==13 or count==200:
        break

cam.release()
# BUG FIX: was `cv.destroyAllWindows()` — the name `cv` is undefined; the
# module is imported as cv2.
cv2.destroyAllWindows()
print('Collecting Samples Complete!!!')
|
import nltk

# Tokenizer models required by sent_tokenize / word_tokenize.
nltk.download('punkt')

# Read the sample text. A with-block closes the file (the original leaked
# the handle), and the variable no longer shadows the builtin input().
with open('sravan.txt', 'r', encoding='utf-8') as f:
    text = f.read()

stokens = nltk.sent_tokenize(text)
wtokens = nltk.word_tokenize(text)

# Print one sentence per paragraph...
for s in stokens:
    print(s)
    print('\n')

# ...then every word token.
for t in wtokens:
    print(t)
|
from django.shortcuts import redirect, render, reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.views.generic import FormView, CreateView
from ..models import User, AgentApplications
from ..forms import PasswordResetForm, NewPasswordForm, AgentSignUpForm
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from userAuth.decorators import anonymous_required
from django.contrib.sites.shortcuts import get_current_site
from django.shortcuts import render, redirect, render_to_response
from django.contrib.auth import login, authenticate
from django.utils.encoding import force_bytes, force_text
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.template.loader import render_to_string
from ..tokens import account_activation_token
from django.core.mail import EmailMessage
def AboutUs(request):
    # Static "about us" page.
    return render(request, 'aboutus.html')
def FAQ(request):
    # Static FAQ page.
    return render(request, 'faq.html')
def home(request):
    # Route authenticated users to their role-specific dashboard.
    if request.user.is_authenticated:
        if request.user.user_type == 2:      # agent
            return redirect('agent:home')
        elif request.user.user_type == 1:    # customer
            return redirect('customer:home')
        elif request.user.user_type == 3:    # executive
            return redirect('executive:home')
        elif request.user.is_superuser == 1:
            return redirect('/admin')
    # Anonymous (or unmatched) users see the public landing page.
    return render(request, 'home.html')
def myProfile(request):
    # Route authenticated users to their role-specific profile page;
    # anything else falls through to the error page.
    if request.user.is_authenticated:
        if request.user.user_type == 2:      # agent
            return redirect('agent:home',aid=request.user.id)
        elif request.user.user_type == 1:    # customer
            return redirect('customer:editprofile')
        elif request.user.user_type == 3:    # executive
            return redirect('executive:viewprofile')
    return render(request, '500.html')
@method_decorator([anonymous_required], name='dispatch')
class PartnerWithUsView(CreateView):
    """Agent application form; rejects e-mails already known as users or applicants."""
    model = AgentApplications
    fields = ('fullname', 'email', 'phone', 'zipcode','area')
    template_name = 'registration/ag_signup_form.html'

    def form_valid(self, form):
        email = form.cleaned_data.get('email')
        # PERF/IDIOM FIX: use exists() instead of len(queryset) — the old
        # code materialized both full querysets just to count them.
        already_known = (User.objects.filter(email=email).exists()
                         or AgentApplications.objects.filter(email=email).exists())
        if not already_known:
            # objects.create() already saves the row; the old extra .save()
            # issued a redundant second write.
            AgentApplications.objects.create(
                fullname=form.cleaned_data.get('fullname'),
                email=email,
                phone=form.cleaned_data.get('phone'),
                zipcode=form.cleaned_data.get('zipcode'),
                area=form.cleaned_data.get('area'),
            )
            return render_to_response('registration/newAgentAppl.html')
        return render_to_response('registration/rejectAgentAppl.html')
def password_reset(request):
    # E-mail a tokenized password-reset link when the address belongs to a
    # known user; unknown addresses re-render the same page silently.
    form = PasswordResetForm()
    if request.method=='POST':
        form = PasswordResetForm(request.POST)
        if form.is_valid():
            to_email = form.cleaned_data.get('email')
            try:
                user = User.objects.get(email=to_email)
            except User.DoesNotExist:
                user = None
            if user:
                current_site = get_current_site(request)
                mail_subject = 'Reset your Password.'
                message = render_to_string('registration/pass_reset_email.html', {
                    'user': user,
                    'domain': current_site.domain,
                    # NOTE(review): on Django >= 2.2 urlsafe_base64_encode
                    # already returns str, so .decode() would raise — confirm
                    # the Django version in use.
                    'uid':urlsafe_base64_encode(force_bytes(user.pk)).decode(),
                    'token':account_activation_token.make_token(user),
                })
                email = EmailMessage(
                    mail_subject, message, to=[to_email]
                )
                email.send()
                return render_to_response('registration/newPass.html')
    return render(request, 'registration/password_reset.html', {'form':form})
def new_password(request, uidb64, token):
    # Reset-link target: decode the uid from the e-mailed URL, verify the
    # token, then let the user choose a new password.
    try:
        uid = force_text(urlsafe_base64_decode(uidb64))
        user = User.objects.get(pk=uid)
    except(TypeError, ValueError, OverflowError, User.DoesNotExist):
        user = None
    if user is not None and account_activation_token.check_token(user, token):
        # NOTE(review): these flags are set but user.save() is never called
        # here — presumably form.save(user) persists them; confirm.
        user.is_active = True
        user.email_verified = True
        form = NewPasswordForm()
        if request.method=='POST':
            form = NewPasswordForm(request.POST)
            if form.is_valid():
                form.save(user)
                login(request, user)
                return redirect('/')
        return render(request, 'registration/new_password.html', {'form':form})
    else:
        return render(request, 'registration/activation_err.html')
@login_required(login_url='forbidden')
def change_password(request):
    # Logged-in password change; logging in again keeps the session valid
    # after the credential change.
    form = NewPasswordForm()
    if request.method=='POST':
        form = NewPasswordForm(request.POST)
        if form.is_valid():
            user = request.user
            form.save(user)
            login(request, user)
            return redirect('/')
    return render(request, 'registration/new_password.html', {'form':form})
def forbidden(request):
    # Redirect target for @login_required; NOTE(review): renders the generic
    # 500 template rather than a dedicated 403 page — confirm this is intended.
    return render(request, '500.html')
|
import base64
import glob
import json
import os
import subprocess

import requests

from flask import current_app
# PII categories that are both inspected for and painted over.
_REDACTED_INFO_TYPES = ["PHONE_NUMBER", "FIRST_NAME", "LAST_NAME", "EMAIL_ADDRESS"]

# Light-grey fill used for every redacted region.
_REDACTION_COLOR = {"blue": 0.93, "green": 0.93, "red": 0.93}

# Request template for the Google DLP image:redact endpoint.  "imageData" is
# filled in per call with the base64-encoded image bytes.  The per-info-type
# entries are generated from the single list above instead of being written
# out four times.
redaction_request = {
    "imageData": "",
    "imageType": "image/png",
    "imageRedactionConfigs": [
        {
            "infoType": {"name": name},
            "redactionColor": dict(_REDACTION_COLOR),
        }
        for name in _REDACTED_INFO_TYPES
    ],
    "inspectConfig": {
        "excludeInfoTypes": False,
        "infoTypes": [{"name": name} for name in _REDACTED_INFO_TYPES],
        "minLikelihood": "POSSIBLE",
    },
}
def convertPDFToImages(folder, filename):
    """Explode the PDF *filename* (inside *folder*) into one PNG per page.

    Delegates to the external ``pdf-redact-tools`` CLI, which writes the
    pages into ``<folder>/<name>_pages/``.  Raises CalledProcessError if the
    tool fails, instead of silently continuing on garbage.
    """
    print("Coverting pdf to images....")
    # Argument list + cwd instead of "cd {}; ...".format(...) through a shell:
    # immune to shell metacharacters in user-supplied file names (command
    # injection via the uploaded filename).
    command = ["pdf-redact-tools", "--explode", filename]
    print(command)
    subprocess.run(command, cwd=folder, check=True)
def convertImagesToPDF(folder, filename):
    """Merge the exploded page PNGs of *filename* back into a PDF in *folder*.

    Uses the external ``pdf-redact-tools`` CLI; raises CalledProcessError on
    failure.
    """
    print("Coverting images to pdf....")
    # List form with cwd avoids interpolating a user-controlled filename into
    # a shell command string (command injection).
    command = ["pdf-redact-tools", "--merge", filename]
    print(command)
    subprocess.run(command, cwd=folder, check=True)
def redactImage(filepath):
    """Redact PII from the image at *filepath* in place via the Google DLP API.

    The file is base64-encoded, posted to the image:redact endpoint, and then
    overwritten with the redacted bytes returned by the API.
    """
    with open(filepath, "rb") as image_file:
        b_encoded_string = base64.b64encode(image_file.read())
    encoded_string = b_encoded_string.decode('ascii')
    # Build a per-call payload instead of mutating the module-level template,
    # so the (large) base64 image data is not left behind in the shared dict
    # and concurrent calls cannot clobber each other's image.
    payload = dict(redaction_request, imageData=encoded_string)
    r = requests.post(
        "https://dlp.googleapis.com/v2beta2/projects/solid-future-198322/image:redact?key={}".format(
            current_app.config['GOOGLE_CLOUD_KEY']),
        data=json.dumps(payload))
    response = r.json()
    print(r.status_code)
    # Fail with a meaningful error instead of a bare KeyError when the API
    # rejects the request.
    if 'redactedImage' not in response:
        raise RuntimeError(
            'DLP redaction failed (HTTP {}): {}'.format(r.status_code, response))
    redacted_string = response['redactedImage']
    with open(filepath, "wb") as fh:
        fh.write(base64.b64decode(redacted_string))
def redactPDF(filename):
    """Redact PII from ``<filename>.pdf`` inside the upload folder, in place.

    Pipeline: explode the PDF into per-page PNGs, push each page through the
    DLP image-redaction API, then merge the pages back into a PDF.
    """
    folder = current_app.config['UPLOAD_FOLDER']
    extension = '.pdf'
    print("Exploding PDF to images..")
    convertPDFToImages(folder, filename + extension)
    # pdf-redact-tools writes the pages to "<folder><filename>_pages/".
    images_list = glob.glob("{}{}_pages/*.png".format(folder, filename))
    print("PDF exploded in {} images".format(str(len(images_list))))
    for image in images_list:
        print("Redacting image: " + image)
        redactImage(image)
    print("Combining images back to PDF..")
    convertImagesToPDF(folder, filename + extension)
    print("Redaction complete..")
from django.urls import path
from app.api.user import views
# URL routes for the user API.
urlpatterns = [
    # path('create', views.Position_createAPIView.as_view(), name='api-status-create'),
    path('list', views.Employee_infoListAPIView.as_view(), name='api-user-list')
]
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.model_selection import GridSearchCV
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import StratifiedKFold
from xgboost import XGBClassifier
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.svm import SVC
from sklearn.naive_bayes import GaussianNB
from sklearn.ensemble import BaggingClassifier
from collections import Counter
from sklearn.utils import safe_sqr
class ClassifierBenchmark:
    """Benchmark a panel of classifiers on SVM-RFE-selected features.

    Workflow per seed *x*: stratified 80/20 split on the 'Cluster' column,
    SVM-RFE elimination down to ~200 features, then a grid-searched fit of
    each classifier on the reduced training split, scored on the held-out
    split.  Intermediate results are kept on the instance so they remain
    inspectable after a run.
    """

    def __init__(self, data_path="/Normalized_Mock.txt"):
        """Load the tab-separated dataset.

        *data_path* is parameterized (default preserves the previous
        hard-coded location) so the benchmark is reusable on other files.
        """
        self.data = pd.read_csv(data_path, sep="\t")

    def splitData(self, x):
        """Split the data 80/20, stratified on 'Cluster', with seed *x*.

        Returns (X, y, X_val, y_val, featureNames): numpy arrays for both
        splits plus the feature column names.
        """
        self.train, self.test = train_test_split(self.data, test_size=0.2, random_state=x, stratify=self.data[['Cluster']])
        self.X, self.y = self.train.iloc[:, :-1], self.train['Cluster']
        self.featureNames = self.X.columns.tolist()
        self.X = self.X.to_numpy()
        self.y = self.y.values
        self.X_val, self.y_val = self.test.iloc[:, :-1], self.test['Cluster']
        self.X_val = self.X_val.to_numpy()
        self.y_val = self.y_val.values
        return self.X, self.y, self.X_val, self.y_val, self.featureNames

    def svmFC(self, x, step):
        """SVM-RFE: drop *step* features per round until 201 or fewer remain.

        Each round grid-searches C for a linear SVC, records 5-fold CV
        accuracy at the tuned C (diagnostic only), then removes the *step*
        lowest-ranked features by squared coefficient magnitude.
        """
        self.step = step
        self.X, self.y, self.X_val, self.y_val, self.featureNames = self.splitData(x = x)
        self.features = self.featureNames
        self.j = 0
        while self.X.shape[1] > 201:
            self.j += 1
            self.svc = SVC(kernel='linear')
            self.Cs = np.array([0.5, 1.0, 10, 100])
            # Tune the regularization strength on the current feature set.
            self.clf = GridSearchCV(estimator=self.svc,
                                    param_grid=dict(C=self.Cs),
                                    cv=5,
                                    return_train_score=True,
                                    n_jobs=20)
            self.clf.fit(self.X, self.y)
            # 5-fold CV accuracy at the tuned C.
            self.cv_test_error = []
            self.skf = StratifiedKFold(n_splits=5, random_state=self.j, shuffle=True)
            for trn, tst in self.skf.split(self.X, self.y):
                self.train_train, self.train_test = self.X[trn], self.X[tst]
                self.train_clstrs, self.test_clstrs = self.y[trn], self.y[tst]
                self.val_clf = SVC(C=list(self.clf.best_params_.values())[0], kernel="linear")
                self.val_clf.fit(self.train_train, self.train_clstrs)
                self.cv_test_error.append(self.val_clf.score(self.train_test, self.test_clstrs))
            self.mean_cv_test_error = np.array(self.cv_test_error).mean()
            # Refit on the full training split to obtain the RFE coefficients.
            self.rfe_clf = SVC(C=list(self.clf.best_params_.values())[0], kernel="linear")
            self.rfe_clf.fit(self.X, self.y)
            self.coefs = self.rfe_clf.coef_
            # Rank features by squared coefficient (summed over the pairwise
            # classifiers in the multi-class case).
            if self.coefs.ndim > 1:
                self.ranks = np.argsort(safe_sqr(self.coefs).sum(axis=0))
            else:
                self.ranks = np.argsort(safe_sqr(self.coefs))
            # Collect the *step* least-important feature indices ...
            self.to_remove_index = []
            for r in range(self.step):
                self.to_remove_index.append(self.ranks[r])
            self.to_remove_index.sort(reverse=True)
            # ... and delete from the largest index down so earlier deletions
            # do not shift the positions of later ones.
            for f in self.to_remove_index:
                self.X = np.delete(self.X, f, axis=1)
                self.X_val = np.delete(self.X_val, f, axis=1)
                del self.features[f]
        return self.X, self.y, self.X_val, self.y_val, self.features

    def svmFSOutput(self, x):
        """Convenience wrapper: SVM-RFE with one feature removed per round."""
        self.train_X, self.y, self.val_X, self.y_val, self.proteins_feature = self.svmFC(x = x, step=1)
        return self.train_X, self.y, self.val_X, self.y_val, self.proteins_feature

    def randomForest(self, x):
        """Grid-searched random forest; returns (best CV score, held-out score)."""
        self.X, self.y, self.X_val, self.y_val, self.proteins_feature = self.svmFSOutput(x = x)
        self.rfc = RandomForestClassifier(random_state=42)
        # NOTE(review): max_features='auto' was removed in scikit-learn 1.3;
        # confirm the pinned sklearn version still accepts it.
        self.param_grid = {
            'n_estimators': [250, 500, 750],
            'max_features': ['auto', 'sqrt'],
            'max_depth': [4, 5, 6, 7, 8],
            'criterion': ['gini', 'entropy']
        }
        self.CV_rfcFeature = GridSearchCV(estimator=self.rfc, param_grid=self.param_grid, n_jobs=20)
        self.CV_rfcFeature.fit(self.X, self.y)
        return (self.CV_rfcFeature.best_score_, self.CV_rfcFeature.score(self.X_val, self.y_val))

    def xgboost(self, x):
        """Grid-searched XGBoost; returns (best CV score, held-out score)."""
        self.X, self.y, self.X_val, self.y_val, self.proteins_feature = self.svmFSOutput(x = x)
        self.xgboostModel = XGBClassifier(objective="multi:softprob", random_state=42)
        self.n_estimators = range(50, 300, 50)    # number of decision trees
        self.max_depth = range(1, 5, 2)           # size of decision trees
        self.learning_rate = [0.05, 0.1, 0.2]     # learning rate
        self.param_grid = dict(n_estimators=self.n_estimators, learning_rate=self.learning_rate, max_depth=self.max_depth)
        self.kfold = StratifiedKFold(n_splits=5, random_state=1, shuffle=True)
        self.grid_search_xgboost = GridSearchCV(self.xgboostModel, self.param_grid, scoring="accuracy", cv=self.kfold, n_jobs=20)
        self.grid_search_xgboost = self.grid_search_xgboost.fit(self.X, self.y)
        return (self.grid_search_xgboost.best_score_, self.grid_search_xgboost.score(self.X_val, self.y_val))

    def extreeTree(self, x):
        """Grid-searched extremely randomized trees; returns (CV, held-out) scores."""
        self.X, self.y, self.X_val, self.y_val, self.proteins_feature = self.svmFSOutput(x = x)
        self.etreeclassifier = ExtraTreesClassifier(random_state=0)
        self.kfold = StratifiedKFold(n_splits=5, random_state=7, shuffle=True)
        self.param_grid = {
            'n_estimators': [100, 250, 500, 750],
            'max_features': ['auto'],
            'max_depth': [4, 5, 6, 7],
            'criterion': ['gini']
        }
        self.CV_exTeeCls = GridSearchCV(estimator=self.etreeclassifier, param_grid=self.param_grid, cv=self.kfold, n_jobs=20)
        self.CV_exTeeCls.fit(self.X, self.y)
        return (self.CV_exTeeCls.best_score_, self.CV_exTeeCls.score(self.X_val, self.y_val))

    def l1Logistic(self, x):
        """Grid-searched L1 (lasso) logistic regression; returns (CV, held-out) scores."""
        self.X, self.y, self.X_val, self.y_val, self.proteins_feature = self.svmFSOutput(x = x)
        self.l1Log = LogisticRegression(penalty='l1',solver='liblinear')
        self.kfold = StratifiedKFold(n_splits=5, random_state=7, shuffle=True)
        self.param_grid = {
            "C" : np.logspace(-3,3,7)
        }
        self.CV_l1Log = GridSearchCV(estimator=self.l1Log, param_grid=self.param_grid, cv=self.kfold, n_jobs=20)
        self.CV_l1Log.fit(self.X, self.y)
        return (self.CV_l1Log.best_score_, self.CV_l1Log.score(self.X_val, self.y_val))

    def l2logistic(self, x):
        """Grid-searched L2 (ridge) logistic regression; returns (CV, held-out) scores."""
        self.X, self.y, self.X_val, self.y_val, self.proteins_feature = self.svmFSOutput(x = x)
        self.l2Log = LogisticRegression(penalty='l2',solver='liblinear')
        self.kfold = StratifiedKFold(n_splits=5, random_state=7, shuffle=True)
        self.param_grid = {
            "C": np.logspace(-3, 3, 7)
        }
        self.CV_l2Log = GridSearchCV(estimator=self.l2Log, param_grid=self.param_grid, cv=self.kfold, n_jobs=20)
        self.CV_l2Log.fit(self.X, self.y)
        return (self.CV_l2Log.best_score_, self.CV_l2Log.score(self.X_val, self.y_val))

    def svmLinear(self, x):
        """Grid-searched linear-kernel SVC; returns (CV, held-out) scores."""
        self.X, self.y, self.X_val, self.y_val, self.proteins_feature = self.svmFSOutput(x = x)
        self.svmLin = SVC(kernel='linear')
        self.kfold = StratifiedKFold(n_splits=5, random_state=7, shuffle=True)
        self.param_grid = {
            "C": np.logspace(-3, 3, 7)
        }
        self.CV_svmLin = GridSearchCV(estimator=self.svmLin, param_grid=self.param_grid, cv=self.kfold, n_jobs=20)
        self.CV_svmLin.fit(self.X, self.y)
        return (self.CV_svmLin.best_score_, self.CV_svmLin.score(self.X_val, self.y_val))

    def svmKernel(self, x):
        """Grid-searched RBF-kernel SVC; returns (CV, held-out) scores."""
        self.X, self.y, self.X_val, self.y_val, self.proteins_feature = self.svmFSOutput(x = x)
        self.svmKer = SVC(kernel='rbf')
        self.kfold = StratifiedKFold(n_splits=5, random_state=7, shuffle=True)
        self.param_grid = {
            "C": np.logspace(-3, 3, 7),
            'gamma': np.logspace(-3, 3, 7)
        }
        self.CV_svmKer = GridSearchCV(estimator=self.svmKer, param_grid=self.param_grid, cv=self.kfold, n_jobs=20)
        self.CV_svmKer.fit(self.X, self.y)
        return (self.CV_svmKer.best_score_, self.CV_svmKer.score(self.X_val, self.y_val))

    def gaussianNB(self, x):
        """Gaussian naive Bayes (no hyperparameters); returns (CV, held-out) scores."""
        self.X, self.y, self.X_val, self.y_val, self.proteins_feature = self.svmFSOutput(x = x)
        self.gnb = GaussianNB()
        self.kfold = StratifiedKFold(n_splits=5, random_state=7, shuffle=True)
        self.CV_gnb = GridSearchCV(estimator=self.gnb, param_grid={}, cv=self.kfold, n_jobs=20)
        self.CV_gnb.fit(self.X, self.y)
        return (self.CV_gnb.best_score_, self.CV_gnb.score(self.X_val, self.y_val))

    def baggingCls(self, x):
        """Grid-searched bagging of decision trees; returns (CV, held-out) scores."""
        self.X, self.y, self.X_val, self.y_val, self.proteins_feature = self.svmFSOutput(x = x)
        self.bagging = BaggingClassifier(n_estimators = 10, random_state = 1)
        self.kfold = StratifiedKFold(n_splits=5, random_state=7, shuffle=True)
        # Fractions must be floats: an int 1 would mean "one sample/feature",
        # not "all of them".
        self.param_grid = {
            'max_samples' : [0.50, 0.75, 1.0],
            'max_features' : [0.50, 0.75, 1.0]
        }
        # Bug fix: the grid defined above was previously ignored (param_grid={}).
        self.CV_bagging = GridSearchCV(estimator=self.bagging, param_grid=self.param_grid, cv=self.kfold, n_jobs=20)
        self.CV_bagging.fit(self.X, self.y)
        return (self.CV_bagging.best_score_, self.CV_bagging.score(self.X_val, self.y_val))

    def baggingClsSvm(self, x):
        """Grid-searched bagging of SVCs; returns (CV, held-out) scores."""
        self.X, self.y, self.X_val, self.y_val, self.proteins_feature = self.svmFSOutput(x = x)
        self.baggingsvm = BaggingClassifier(base_estimator=SVC(), n_estimators=10, random_state = 1)
        self.kfold = StratifiedKFold(n_splits=5, random_state=7, shuffle=True)
        self.param_grid = {
            'max_samples' : [0.75, 1.0],
            'max_features' : [1.0]
        }
        # Bug fix: the grid search previously tuned self.bagging (the plain
        # decision-tree bagger from baggingCls) and ignored the grid above.
        self.CV_baggingsvm = GridSearchCV(estimator=self.baggingsvm, param_grid=self.param_grid, cv=self.kfold, n_jobs=20)
        self.CV_baggingsvm.fit(self.X, self.y)
        return (self.CV_baggingsvm.best_score_, self.CV_baggingsvm.score(self.X_val, self.y_val))

    def applyAll(self):
        """Run every classifier over 100 random splits.

        Returns (training-score matrix, testing-score matrix, the flat log of
        features selected across runs, and a Counter of how often each
        feature survived RFE).
        """
        self.result_array = np.array([])
        self.trainingArray = np.empty((0, 10), float)
        self.testingArray = np.empty((0, 10), float)
        for r in range(100):
            self.X, self.y, self.X_val, self.y_val, self.proteins_feature = self.svmFSOutput(x = r)
            self.rftraing, self.rftesting = self.randomForest(x = r)
            self.xgboostTraining, self.xgboostTesting = self.xgboost(x = r)
            self.extreeTreeTraining, self.extreeTreeTesting = self.extreeTree(x = r)
            self.l1logTraining, self.l1logTesting = self.l1Logistic(x = r)
            self.l2logTraining, self.l2logTesting = self.l2logistic(x = r)
            self.svmlinTraining, self.svmlinTesting = self.svmLinear(x = r)
            self.svmKerTraining, self.svmKerTesting = self.svmKernel(x = r)
            self.gNBTraining, self.gNBTesting = self.gaussianNB(x = r)
            self.baggingTraining, self.baggingTesting = self.baggingCls(x = r)
            self.baggingSVMTraining, self.baggingSVMTesting = self.baggingClsSvm(x = r)
            # One row per split, one column per classifier.
            self.trainingArray = np.vstack((self.trainingArray,
                                            np.array([self.rftraing, self.xgboostTraining, self.extreeTreeTraining,
                                                      self.l1logTraining, self.l2logTraining, self.svmlinTraining,
                                                      self.svmKerTraining, self.gNBTraining, self.baggingTraining,
                                                      self.baggingSVMTraining])))
            self.testingArray = np.vstack((self.testingArray,
                                           np.array([self.rftesting, self.xgboostTesting, self.extreeTreeTesting,
                                                     self.l1logTesting, self.l2logTesting, self.svmlinTesting,
                                                     self.svmKerTesting, self.gNBTesting, self.baggingTesting,
                                                     self.baggingSVMTesting])))
            self.result_array = np.append(self.result_array, np.array(self.proteins_feature), axis=0)
        self.counts = Counter(self.result_array)
        return (self.trainingArray, self.testingArray, self.result_array, self.counts)

    def exportArrays(self, fileName):
        """Run the full benchmark and write scores/feature logs to disk.

        *fileName* is used as the directory for the feature-frequency file;
        the score matrices are written to the current working directory.
        """
        # Note: self.train/self.test (DataFrames from splitData) are rebound
        # here to the score matrices returned by applyAll().
        self.train, self.test, self.result_array, self.counts = self.applyAll()
        np.savetxt('SVM_Training_Score.txt', self.train, delimiter = '\t')
        np.savetxt('SVM_Testing_Score.txt', self.test, delimiter = '\t')
        np.savetxt("SVM_result_array.txt", self.result_array, fmt='%s')
        # Export the feature-frequency Counter as "<feature>\t<count>" lines.
        self.path = fileName + "/svm_feature_frequency.txt"
        with open(self.path, 'w') as f:
            for key, value in self.counts.items():
                self.string = "{}\t{}".format(key, value)
                f.write("%s\n" % self.string)
        return
# Module-level instance; note this reads the dataset from disk at import time.
cls = ClassifierBenchmark()
|
from mailer.utils.helpers import get_connections
class EmailDispacher(object):
    """Dispatch an email through a prioritized chain of provider engines.

    Engines are loaded from the settings via ``get_connections``; ``send``
    walks the chain in order and stops at the first provider that delivers
    successfully.
    """

    def __init__(self, **kwargs):
        self.__services = get_connections()
        # Normalized payload handed to every provider engine.
        self.email_data = {
            "sender": kwargs.pop("sender", None),
            "message": kwargs.pop("message", None),
            "subject": kwargs.pop("subject", None),
            "receivers": kwargs.pop("receivers", []),
            "carbon_copies": kwargs.pop("carbon_copies", []),
        }

    def add_email_service(self, service):
        """Manually register an extra mail engine at the end of the chain."""
        self.__services.append(service)

    def get_registred_services(self):
        """Return a shallow copy of the registered email engines."""
        return list(self.__services)

    def send(self):
        """Attempt delivery provider by provider, stopping on first success.

        Returns the list of serialized ProviderResponse objects collected
        from every attempted provider (all failures plus, possibly, one
        success).
        """
        attempts = []
        for engine in self.__services:
            outcome = engine(**self.email_data).send()
            attempts.append(outcome)
            if outcome.is_successful():
                return attempts
        return attempts
|
def encodeVer1(text):  # for Encryption
    """Encode *text* by shifting every non-space character down 8 code points.

    Spaces (code point 32) pass through unchanged.
    Syntax: encodeVer1(text)
    """
    shifted = [
        code if code == 32 else code - 8
        for code in (ord(ch) for ch in text)
    ]
    return ''.join(chr(code) for code in shifted)
def decryptVer1(text):  # for Decryption
    """Invert encodeVer1: shift every non-space character up 8 code points.

    Syntax: decryptVer1(encrypted_text)
    """
    restored = []
    for ch in text:
        code = ord(ch)
        restored.append(ch if code == 32 else chr(code + 8))
    return ''.join(restored)
|
# This program removes punctuations from the string
# define punctuation
punctuations = '''!--,'''
my_str = "Hello , He said !!!!! ---- and went."
# Keep only the characters that are not listed as punctuation.
no_punc = "".join(ch for ch in my_str if ch not in punctuations)
print(no_punc)
import random
from utils.misc import read_json_from_file
def randurl():
    """Return a random integer in [10**9, 10**10] for use as a URL id.

    Uses integer bounds: ``random.randint(1e9, 1e10)`` relied on float
    arguments, which ``randint``/``randrange`` reject on Python 3.12+.
    """
    return random.randint(10**9, 10**10)
def getrec(path, currobj):
    """Resolve a '/'-separated *path* against a nested schema node.

    A node with disposition "dict" holds its children under "childsarg";
    the first child whose "key" matches the leading path segment supplies
    either its "value" (last segment) or the node to recurse into.  Returns
    None when nothing matches or the node is not a dict.
    """
    segments = path.split("/")
    head = segments[0]
    if currobj["disposition"] != "dict":
        return None
    for entry in currobj["childsarg"]:
        if entry["key"] != head:
            continue
        if len(segments) == 1:
            return entry["value"]
        return getrec("/".join(segments[1:]), entry)
    return None
def get(path):
    # Resolve *path* against the on-disk schema config.  NOTE(review): the {}
    # fallback for a missing file would make getrec raise KeyError on
    # "disposition" — confirm the config file is guaranteed to exist.
    return getrec(path, read_json_from_file("schemaconfig.json", {}))
def authenabled():
    # Convenience flag: whether authentication is enabled in the schema config.
    return get("global/auth/enabled")
import pandas as pd
from constants import CONTEST_ID as cid
from constants import TEAMS_NF_FD
# FanDuel names the exported pool "FanDuel-NFL-<contest_id>-players-list.csv".
pool_file = "-".join(['FanDuel-NFL', cid, 'players-list.csv'])
pool = pd.read_csv('data/pool/' + pool_file)
df = pd.read_csv('data/nf_projections.csv')
# modify team names to match player pool (uppercase + NF->FD abbreviation map)
df['team'] = df['team'].str.upper()
df = df.replace({'team': TEAMS_NF_FD})
# get eligible teams from player pool
teams_elg = pool['Team'].unique()
# from projections, remove players not on eligible teams
df = df[df['team'].isin(teams_elg)]
df.to_csv('data/projections_eligible.csv', index=False)
|
# from requests_html import HTMLSession
# import requests
import bs4
import re
import sys
from PyQt5.QtWidgets import QApplication
from PyQt5.QtCore import QUrl
from PyQt5.QtWebEngineWidgets import QWebEnginePage
def get_phone(soup):
    """Collect phone-number-looking strings from rendered page markup.

    Several number formats are tried in sequence; returns the combined list
    of matches, or '' (after printing a notice) when nothing matched.
    """
    markup = str(soup)
    patterns = (
        r'^(\+\d{1,2}\s)?\(?\d{3}\)?[\s.-]\d{3}[\s.-]\d{4}$',
        r'\d{2}-\d{8}',
        r'\d{3}-\d{4}-\d{3}',
        r'\(?\b[2-9][0-9]{2}\)?[-. ]?[2-9][0-9]{2}[-. ]?[0-9]{4}\b',
    )
    phone = []
    for pattern in patterns:
        phone.extend(re.findall(pattern, markup))
    if phone:
        return phone
    print ('Phone not found')
    return ''
def get_email(soup):
    """Extract email-address-looking strings from the page's visible text.

    Returns the list of matches, or '' (after printing a notice) when none
    are found.
    """
    matches = re.findall(r'([a-zA-Z0-9._-]+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9_-]+)', soup.text)
    if not matches:
        print ('Email not found')
        return ''
    return matches
class Client(QWebEnginePage):
    """Headless page loader: fetches *url*, lets its JavaScript run, and
    captures the rendered HTML into ``self.html``.

    Constructing a Client blocks inside ``app.exec_()`` until the page has
    loaded and ``Callable`` has received the HTML and quit the event loop.
    """
    def __init__(self,url):
        self.app = QApplication(sys.argv)
        QWebEnginePage.__init__(self)
        self.html=""
        # Capture the HTML only after the page (and its scripts) finished loading.
        self.loadFinished.connect(self.on_page_load)
        self.load(QUrl(url))
        self.app.exec_()
    def on_page_load(self):
        # toHtml is asynchronous: the HTML is delivered to the callback later.
        self.html=self.toHtml(self.Callable)
        # print("In on_page_load \n \t HTML: ",self.html)
    def Callable(self,html_str):
        # print("In Callable \n \t HTML_STR: ",len(html_str))
        self.html=html_str
        # print("In Callable \n \t HTML_STR: ",len(self.html))
        # Quit the event loop so the blocked constructor can return.
        self.app.quit()
# --- Script entry: prompt for a URL, render it, and scrape contact info. ---
print("Enter URL")
url = str(input())
#"https://randommer.io/Phone""https://ccctechcenter.org/about/accessibility"
# Render the page with QtWebEngine (JavaScript executed) and parse the HTML.
response= Client(url)
# response=requests.get(url)
soup = bs4.BeautifulSoup(response.html, 'html.parser')
# print(soup.prettify() )
# session= HTMLSession() #parsing html fter running javascript can be done using requests_html
# r=session.get(url)
# r.html.render()
# soup=r.html.html
# print(soup)
# print(type(soup))
email = get_email(soup)
phone = get_phone(soup)
print("Phone: ",phone)
print("Email: ",email)
import tkinter as tk
import sys
import multiprocessing
class MyApp():
    """Tk GUI for choosing, configuring and running optimization algorithms.

    Each algorithm runs in a daemon subprocess so the GUI stays responsive;
    ProcessExit() force-terminates any still-running solvers.

    NOTE(review): the Process targets are bound methods of this object; on
    spawn-based platforms (Windows/macOS) that requires pickling self, which
    holds Tk widgets — confirm this works outside fork-based Linux.
    """

    def __init__(self):
        self.root = None
        # Running solver processes, tracked so they can be terminated on demand.
        self.processinges = []
        # One attribute per optimization algorithm; instantiated lazily by ok().
        self.CoordinateAlternation = None
        self.DFP = None
        self.BFGS = None
        self.GradientDescent = None
        self.NewtonMethod = None
        self.GoldSegment = None
        self.InsertValue = None
        # Bug fix: previously never initialized, so Calculate() raised
        # AttributeError when invoked before ok() for choice 8.
        self.Evolution = None
        self.str_fun = None
        self.str_epsilon = None
        self.result = None
        self.result_out = None

    def ok(self):
        """Instantiate the solver selected by the radio group from the form fields."""
        choice = self.v.get()
        if choice == 1:
            from optpy import CoordinateAlternation as CA
            self.CoordinateAlternation = CA.CoordinateAlternation(True,self.str_fun.get(),self.str_epsilon.get())
        elif choice == 2:
            from optpy import DFP
            self.DFP = DFP.DFP(True,self.str_fun.get(),self.str_epsilon.get())
        elif choice == 3:
            from optpy import BFGS
            self.BFGS = BFGS.BFGS(True,self.str_fun.get(),self.str_epsilon.get())
        elif choice == 4:
            from optpy import GradientDescent as GD
            self.GradientDescent = GD.GradientDescent(True,self.str_fun.get(),self.str_epsilon.get(),self.result_out)
        elif choice == 5:
            from optpy import NewtonMethod as NM
            self.NewtonMethod = NM.NewtonMethod(True,self.str_fun.get(),self.str_epsilon.get(),self.str_segment.get())
        elif choice == 6:
            from optpy import GoldSegment as GS
            self.GoldSegment = GS.GoldSegment(True,self.str_fun.get(),self.str_epsilon.get(),self.str_segment.get())
        elif choice == 7:
            from optpy import InsertValue as IV
            self.InsertValue = IV.InsertValue(True,self.str_fun.get(),self.str_epsilon.get(),self.str_segment.get())
        elif choice == 8:
            from optpy import Evolution2 as EV
            self.Evolution = EV.Evolution(True,self.str_fun.get(),self.str_epsilon.get(),self.N.get(),self.pc.get(),self.pm.get(),self.evo_x.get(),self.maxIteration.get())

    def cancel(self):
        """Exit the application."""
        sys.exit()

    def Calculate(self):
        """Run the selected solver's Calculate() in a background daemon process.

        Raises RuntimeError when ok() has not yet instantiated the selected
        solver.  (The original 8 copy-pasted branches were collapsed into one
        dispatch table; process names remain str(choice) as before.)
        """
        choice = self.v.get()
        # Radio value -> the solver instance created by ok().
        solvers = {
            1: self.CoordinateAlternation,
            2: self.DFP,
            3: self.BFGS,
            4: self.GradientDescent,
            5: self.NewtonMethod,
            6: self.GoldSegment,
            7: self.InsertValue,
            8: self.Evolution,
        }
        solver = solvers.get(choice)
        if solver is None:
            # Bug fix: was raise('此方法未初始化') — raising a plain string is a
            # TypeError in Python 3 and loses the message.
            raise RuntimeError('此方法未初始化')
        a = multiprocessing.Process(target=solver.Calculate, name=str(choice))
        a.daemon = True
        a.start()
        print('进程:' + str(a.pid) + '开始运行!')
        self.processinges.append(a)

    def run(self):
        """Build the Tk window (instructions, algorithm picker, parameter
        entries and action buttons) and enter the main loop."""
        self.root = tk.Tk()
        self.root.title('最优化算法选择器')
        # Usage instructions
        self.group1 = tk.LabelFrame(self.root, text='使用说明:', padx=5, pady=5)
        self.group1.grid(row=0, column=0, padx=10, pady=10)
        self.introduction = tk.Label(self.group1,text='1.请先选择优化算法,再点击初始化。\n2.输入优化参数后点击初始化。\n3.初始化完成后点击计算。\n4.支持1维,2维函数画图。')
        self.introduction.grid(row=0)
        # Optimization-method radio group
        self.group2 = tk.LabelFrame(self.root, text='1.请选择您的优化方法:', padx=5, pady=5)
        self.group2.grid(row=0, column=1, padx=10, pady=10)
        LANGS = [('CoordinateAlternation(多变量高次优化)', 1), ('DFP(多变量高次优化)', 2), ('BFGS(多变量高次优化)', 3) ,('GradientDescent(多变量高次优化)', 4), ('NewtonMethod(多变量高次优化)', 5),
                 ('GoldSegment(单变量高次优化)', 6),('InsertValue(单变量高次优化)', 7),('Evolution进化算法(多变量高次优化)',8)]
        self.v = tk.IntVar()
        self.v.set(1)
        for lang, num in LANGS:
            b = tk.Radiobutton(self.group2, text=lang, variable=self.v, value=num)
            b.grid(row=num, sticky=tk.W)
        # Evolutionary-algorithm parameters (optional)
        self.N = tk.StringVar()
        self.pc = tk.StringVar()
        self.pm = tk.StringVar()
        self.evo_x = tk.StringVar()
        self.maxIteration = tk.StringVar()
        self.group6 = tk.LabelFrame(self.root, text='2.进化算法(单变量高维优化)(可选)',padx=5, pady=5)
        self.group6.grid(row=1,column=0,padx=10,pady=10)
        tk.Label(self.group6, text='群体个数:').grid(row=0, column=0, padx=5, pady=5)
        tk.Entry(self.group6, textvariable=self.N).grid(row=0, column=1, padx=5, pady=5)
        tk.Label(self.group6, text='交叉概率:').grid(row=1, column=0, padx=5, pady=5)
        tk.Entry(self.group6, textvariable=self.pc).grid(row=1, column=1, padx=5, pady=5)
        tk.Label(self.group6, text='变异概率:').grid(row=2, column=0, padx=5, pady=5)
        tk.Entry(self.group6, textvariable=self.pm).grid(row=2, column=1, padx=5, pady=5)
        tk.Label(self.group6, text='搜索区域(用逗号隔开):').grid(row=3, column=0, padx=5, pady=5)
        tk.Entry(self.group6, textvariable=self.evo_x).grid(row=3, column=1, padx=5, pady=5)
        tk.Label(self.group6, text='最大迭代次数:').grid(row=4, column=0, padx=5, pady=5)
        tk.Entry(self.group6, textvariable=self.maxIteration).grid(row=4, column=1, padx=5, pady=5)
        # Common parameter entries (required)
        self.str_fun = tk.StringVar()
        self.str_epsilon = tk.StringVar()
        self.str_segment = tk.StringVar()
        self.group3 = tk.LabelFrame(self.root, text='2.参数输入(必选):', padx=5, pady=5)
        self.group3.grid(row=1, column=1, padx=10, pady=10)
        tk.Label(self.group3,text='目标函数输入:').grid(row=0,column=0,padx=10,pady=10)
        tk.Entry(self.group3,textvariable = self.str_fun).grid(row=0,column=1,padx=10,pady=10)
        tk.Label(self.group3, text='最大误差输入:').grid(row=1, column=0, padx=10, pady=10)
        tk.Entry(self.group3, textvariable=self.str_epsilon).grid(row=1, column=1, padx=10, pady=10)
        tk.Label(self.group3, text='单变量搜索区间(可选,用逗号隔开):').grid(row=2, column=0, padx=10, pady=10)
        tk.Entry(self.group3, textvariable=self.str_segment).grid(row=2, column=1, padx=10, pady=10)
        # Action buttons
        self.group4 = tk.LabelFrame(self.root, text='3.处理按钮:', padx=5, pady=5)
        self.group4.grid(row=2, padx=10, pady=10)
        tk.Button(self.group4, text='初始化', width=10, command=self.ok).grid(row=0, column=0, sticky=tk.E, padx=10, pady=5)
        tk.Button(self.group4, text='退出', width=10, command=self.cancel).grid(row=0, column=4, sticky=tk.E, padx=10,
                                                                             pady=5)
        tk.Button(self.group4, text='计算', width=10, command=self.Calculate).grid(row=0, column=1, sticky=tk.E, padx=10,
                                                                                pady=5)
        tk.Button(self.group4, text='停止', width=10, command=self.ProcessExit).grid(row=0, column=2, sticky=tk.E, padx=10,
                                                                                  pady=5)
        tk.Button(self.group4, text='画图', width=10, command=self.Paint).grid(row=0, column=3, sticky=tk.E, padx=10,
                                                                            pady=5)
        self.root.mainloop()

    def Paint(self):
        """Plot the objective on [-5, 5]: a curve for one free variable, a
        surface for two; any other dimensionality raises ValueError."""
        if self.str_fun is None:
            # Bug fix: was raise('未输入函数。') — raising a str is a TypeError.
            raise RuntimeError('未输入函数。')
        import sympy as sy
        import matplotlib.pylab as plt
        import numpy as np
        x_val = np.linspace(-5,5,30)
        f_val = np.linspace(-5,5,30)
        f = sy.simplify(self.str_fun.get())
        x = list(f.free_symbols)
        if len(x) == 1:
            for i in range(len(x_val)):
                f_val[i] = f.subs({x[0]:x_val[i]})
            plt.plot(x_val,f_val)
            plt.show()
        elif len(x) == 2:
            from mpl_toolkits.mplot3d import Axes3D
            ax = Axes3D(plt.figure(1))
            y_val = np.linspace(-5,5,30)
            x_val,y_val = np.meshgrid(x_val,y_val)
            z = np.zeros((30,30))
            for r in range(30):
                for c in range(30):
                    f_temp = f.subs({x[0]:x_val[r,c]})
                    f_temp = f_temp.subs({x[1]: y_val[r,c]})
                    z[r,c] = f_temp
            ax.plot_surface(x_val,y_val,z,rstride=1, cstride=1, cmap='Blues')
            plt.show()
        else:
            # Bug fix: was raise('不合法的变量维数') — raising a str is a TypeError.
            raise ValueError('不合法的变量维数')

    def ProcessExit(self):
        """Force-terminate every tracked solver process and clear the list."""
        for i in self.processinges:
            print('进程:' + str(i.pid) + '强制终止!')
            i.terminate()
        self.processinges.clear()
|
# Generated by Django 2.1 on 2018-08-18 14:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema: location, progress, students, subject and teachers.

    Auto-generated by Django 2.1.  Note that cross-table references
    (location_id, subject_id, student_id) are stored as plain IntegerFields
    rather than ForeignKeys.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='location',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('location_id', models.IntegerField()),
                ('location_name', models.CharField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='progress',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('student_id', models.IntegerField()),
                ('date', models.DateField()),
                ('type_classwork', models.BooleanField()),
                ('type_homework', models.BooleanField()),
                ('comments', models.CharField(max_length=200)),
                ('hwd', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='students',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('student_id', models.IntegerField()),
                ('student_name', models.CharField(max_length=200)),
                ('location_id', models.IntegerField()),
                ('subject_id', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='subject',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('subject_id', models.IntegerField()),
                ('subject_name', models.CharField(max_length=200)),
            ],
        ),
        migrations.CreateModel(
            name='teachers',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('teacher_id', models.IntegerField()),
                ('teacher_name', models.CharField(max_length=200)),
                ('location_id', models.IntegerField()),
                ('subject_id', models.IntegerField()),
            ],
        ),
    ]
|
# -*- coding: utf-8 -*-
# @Author: WuLC
# @Date: 2016-10-03 20:02:50
# @Last modified by: WuLC
# @Last Modified time: 2016-10-03 20:03:09
# @Email: liangchaowu5@gmail.com
class Solution(object):
    def longestPalindrome(self, s):
        """Length of the longest palindrome buildable from the letters of *s*.

        Every pair of identical characters is usable; one leftover odd
        character may sit in the middle.

        :type s: str
        :rtype: int
        """
        freq = {}
        for ch in s:
            freq[ch] = freq.get(ch, 0) + 1
        usable = sum(n - (n % 2) for n in freq.values())
        # usable < len(s) exactly when some character had an odd count,
        # so one extra character can be placed in the center.
        return usable + 1 if usable < len(s) else usable
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.views import View
from django.shortcuts import render
from .models import Tweet
from .forms import TweetForm
class MainView(View):
    """Render the landing page."""
    def get(self, request):
        return render(request, 'twitter/index.html')
class TweetView(View):
    """List every tweet."""
    def get(self, request):
        # All tweets, handed to the template under the 'tweets' key.
        x = Tweet.objects.all()
        return render(request, 'twitter/tweets.html', {'tweets': x})
class LoggedOutView(View):
    """Render the post-logout confirmation page."""
    def get(self, request):
        return render(request, 'registration/logged_out.html')
def get_name(request):
    """Show the tweet form; on a valid POST, persist the submitted tweet.

    Renders the tweets template with either the freshly created Tweet (valid
    POST), the bound invalid form (invalid POST), or a blank form (GET).
    """
    # if this is a POST request we need to process the form data
    if request.method == 'POST':
        # create a form instance and populate it with data from the request:
        form = TweetForm(request.POST)
        # check whether it's valid:
        if form.is_valid():
            dane = form.cleaned_data['your_tweet']
            # Bug fix: managers have no .save(); create() both builds and
            # persists the new Tweet (objects.save(...) raised AttributeError).
            forms = Tweet.objects.create(content=dane)
            return render(request, 'twitter/tweets.html', {'form': forms})
    # if a GET (or any other method) we'll create a blank form
    else:
        form = TweetForm()
    return render(request, 'twitter/tweets.html', {'form': form})
#!/usr/bin/env python
PACKAGE = 'amr_braitenberg'
NODE = 'differential_drive_emulator'
import rospy
from geometry_msgs.msg import Twist
from amr_msgs.msg import WheelSpeeds
class DifferentialDriveEmulatorNode:
    """ROS node translating per-wheel speeds into a single Twist command.

    Subscribes to /cmd_vel_diff (WheelSpeeds) and republishes the equivalent
    body velocity on /cmd_vel.
    """
    def __init__(self):
        rospy.init_node(NODE)
        # read differential drive params:
        self._wheel_diameter = rospy.get_param('wheel_diameter', 0.15)
        self._distance_between_wheels = rospy.get_param('distance_between_wheels', 0.5)
        self._wheel_speed_subscriber = rospy.Subscriber('/cmd_vel_diff',
                                                        WheelSpeeds,
                                                        self._wheel_speed_callback,
                                                        queue_size=100)
        self._velocity_publisher = rospy.Publisher('/cmd_vel', Twist, queue_size=10)
        rospy.loginfo('Started differential drive emulator node.')
    def _wheel_speed_callback(self, msg):
        # Convert a two-element WheelSpeeds message into a Twist and publish it;
        # messages with any other number of speeds are ignored with a warning.
        if len(msg.speeds) != 2:
            rospy.logwarn('Ignoring WheelSpeeds message because it does not '
                          'contain two wheel speeds.')
            return
        else:
            # Call constructor initialize values to 0.0
            twist = Twist()
            """
            ==================== YOUR CODE HERE ====================
            Instructions: compute linear and angular components based
                          on robot geometrical parameters and
                          fill in the twist message.
                          You may lookup the geometry_msgs.msg declaration at ros.org
            ========================================================
            """
            # speeds[0] is treated as the right wheel, speeds[1] as the left.
            v_left = msg.speeds[1]
            v_right = msg.speeds[0]
            # Linear speed in x direction: (v_left + v_right) / 2
            # NOTE(review): multiplying by wheel_diameter/2 implies the speeds
            # are angular wheel rates (v = omega * r), yet angular.z below omits
            # that radius factor — the two formulas assume different units.
            # Confirm the WheelSpeeds units and reconcile one of them.
            twist.linear.x = (v_right + v_left) * self._wheel_diameter / 2
            # Angular speed (v_right - v_left) / D
            twist.angular.z = (v_right - v_left) / self._distance_between_wheels
            self._velocity_publisher.publish(twist)
            rospy.logdebug('[{:.2f} {:.2f}] --> [{:.2f} {:.2f}]'.format(msg.speeds[0],
                                                                        msg.speeds[1],
                                                                        twist.linear.x,
                                                                        twist.angular.z))
if __name__ == '__main__':
    # Instantiate the node and block until ROS shuts down.
    n = DifferentialDriveEmulatorNode()
    rospy.spin()
|
import collections
import game_field
import consts
import unit
import random
# Lightweight records: Map carries a cell's position plus its terrain name,
# Point is a bare (row, col) grid coordinate.
Map = collections.namedtuple('Map', 'row, col, name')
Point = collections.namedtuple('Point', 'row, col')
class Game:
    """Tower-defence game state: the field grid, economy, units and spells."""
    def __init__(self):
        # Static layout plus the mutable per-game state (gold, units, paths).
        self.field = self.get_field()
        self.portal = consts.PORTAL
        self.castle = consts.CASTLE
        self.end = False
        self.gold = consts.START_GOLD
        self.enemies_left = consts.ENEMIES_ALLOWED
        self.enemies = []
        self.allies = []
        # Maps a slowed enemy to its original speed so the debuff can be undone.
        self.debuffed_enemies = {}
        self.paths = game_field.Path(self.portal, self.castle, self.field).paths
    @staticmethod
    def get_field():
        """Build the cell grid for a randomly chosen level description."""
        string_field = random.choice(consts.LEVELS)
        # NOTE(review): "width" is actually the row count and "height" the
        # column count - the names look swapped, though the result is correct.
        width = len(string_field)
        height = len(string_field[0])
        field = [[Map(0, 0, game_field.CellType.Grass)] * height for f in range(width)]
        types = {'G': game_field.CellType.Grass,
                 ' ': game_field.CellType.Path,
                 'P': game_field.CellType.Portal,
                 'C': game_field.CellType.Castle}
        for row, str_field in enumerate(string_field):
            for col, element in enumerate(str_field):
                field[row][col] = Map(row, col, types[element])
        return field
    def place_unit(self, row, col, unit_type):
        """Create a unit_type at (row, col); towers cost gold, enemies spawn free.

        NOTE(review): returns True when a tower is unaffordable and the unit
        instance on success - both values are truthy, so callers must not use
        a plain boolean test to detect failure.
        """
        unit_to_place = unit_type(row, col, self)
        if unit_to_place.unit_type == unit.UnitType.Tower:
            price = consts.UNITS['towers'][unit_type.__name__].price
            if self.gold < price:
                return True
            self.gold -= price
        if unit_to_place.unit_type == unit.UnitType.Tower:
            self.field[row][col] = unit_to_place
        else:
            self.enemies.append(unit_to_place)
        return unit_to_place
    def place_ally(self, tower, ally, unit_type):
        """Spawn a friendly unit at the ally's cell and register it with its tower."""
        unit_to_place = unit_type(ally.row, ally.col, self)
        self.allies.append(unit_to_place)
        tower.creeps.append(unit_to_place)
        return unit_to_place
    def get_gold_number(self):
        # Current gold reserve.
        return self.gold
    def get_remaining_enemies(self):
        # How many more enemies may still spawn this game.
        return self.enemies_left
    def game_over(self):
        # Flag the game as finished; the main loop checks this.
        self.end = True
    def cast_fire(self):
        """Damage every enemy within FIRE_RADIUS of a random target.

        Returns the chosen target, or False if unaffordable / no enemies.
        """
        if self.gold < consts.FIRE_PRICE or len(self.enemies) == 0:
            return False
        self.gold -= consts.FIRE_PRICE
        target = random.choice(self.enemies)
        for enemy in self.enemies:
            distance = game_field.get_distance(target, enemy)
            if distance > consts.FIRE_RADIUS:
                continue
            enemy.take_hit(consts.FIRE_DAMAGE, self)
        return target
    def cast_ice(self):
        """Slow every enemy within ICE_RADIUS of a random target.

        Enemies already at or below ICE_SLOWDOWN speed are skipped so speed
        never becomes non-positive; original speeds are remembered so
        disable_ice_debuff can restore them. Returns the target, or False if
        unaffordable / no enemies.
        """
        if self.gold < consts.ICE_PRICE or len(self.enemies) == 0:
            return False
        self.gold -= consts.ICE_PRICE
        target = random.choice(self.enemies)
        for enemy in self.enemies:
            distance = game_field.get_distance(target, enemy)
            if (distance > consts.ICE_RADIUS
                    or enemy.speed <= consts.ICE_SLOWDOWN):
                continue
            self.debuffed_enemies[enemy] = enemy.speed
            enemy.speed -= consts.ICE_SLOWDOWN
        return target
    def disable_ice_debuff(self):
        """Restore the original speed of every slowed enemy and clear the table."""
        for enemy in self.debuffed_enemies:
            enemy.speed = self.debuffed_enemies[enemy]
        self.debuffed_enemies = {}
|
"""Controller Local/Global Switch - MAYA 2015
#====================================================
#
# DEVELOPMENT HISTORY
#
#
# TO DO
# - In the long run setup little "L" and "G" shapes to attach to the local and Global Control. But for this, probably just use different colours and names, but the same Curve Shape
#
#
# RECENT FEATURES
# - Check now included so it does not build extra plusMinuAverage nodes for the switch. It uses the one already connected to the attribute if it exists!
#
#====================================================
# GUIDE
# -
#
#
# IMPORTANT NOTES
# -
#
###########################################################################################################################################################
"""
__author__ = "3DFramework"
__version__ = "1.0"
from PySide import QtCore, QtGui
import maya.cmds as cmds
from maya.app.general.mayaMixin import MayaQWidgetDockableMixin
def ctrlSelectionFilter():
    """Return True when the current Maya selection is non-empty and joint-free.

    Used to make sure the user picked controller curves, not skeleton joints.
    """
    validSelection = False
    mySel = cmds.ls(sl=True)
    myJnts = cmds.ls(mySel, type = "joint") #Filter the selection down to any joints it contains
    if len(myJnts) == 0 and len(mySel) > 0: #No joints and at least one node selected: good to go
        validSelection = True
    else: #Either nothing is selected or the selection contains joints
        cmds.warning('Incorrect Initial Selection - Please select just your controls (no joints)')
    return validSelection
def checkRepeatSelection(selList, checkSwitchJointList, checkMessage, isJoint=True):
    """Return False (with a warning) when the selection overlaps an existing list.

    When isJoint is True the comparison list holds switch-joint wrappers, so
    their names are extracted first; otherwise the items are compared directly.
    """
    if isJoint:
        namesToCheck = [sJ.getName() for sJ in checkSwitchJointList]
    else:
        namesToCheck = checkSwitchJointList
    # Any shared item between the two lists invalidates the selection.
    repeated = set(selList) & set(namesToCheck)
    if repeated:
        cmds.warning('Incorrect Initial Selection - You have selected some items that match those in the ' + checkMessage + " List")
        return False
    return True
def nameSwitchRebuild(name,typeToReplace, replaceWith, nameEnd = "ctrl"):
    """Rebuild an underscore-delimited name, swapping tokens and appending a suffix.

    Every "_"-separated token equal to ``typeToReplace`` is replaced by
    ``replaceWith``; all other tokens are kept in order, and ``nameEnd`` is
    appended as the final token.

    >>> nameSwitchRebuild("cool_cv_thing", "cv", "pma")
    'cool_pma_thing_ctrl'
    """
    # Bug fix: removed the leftover debug print of the split tokens; the
    # manual string concatenation is replaced by a join over the token list.
    nameBits = name.split("_")
    swapped = [replaceWith if bit == typeToReplace else bit for bit in nameBits]
    return "_".join(swapped + [nameEnd])
def stripConstraintsFromGroup(ctrlGrp):
    """Delete every constraint node found among the direct children of ctrlGrp."""
    children = cmds.listRelatives(ctrlGrp, children = True)
    # Bug fix: listRelatives returns None (not []) when there are no children,
    # which made the original loop raise a TypeError.
    if not children:
        return
    for node in children:
        # Any node type containing "Constraint" (parent, point, orient, aim,
        # scale, ...) is treated as a constraint and removed.
        if "Constraint" in cmds.nodeType(node):
            cmds.delete(node)
def ctrlRename(ctrlName, nameAddition):
    """Swap everything from the first "Ctrl" onwards for nameAddition.

    Names without "Ctrl" in them are returned unchanged.
    """
    prefix, marker, _ = ctrlName.partition("Ctrl")
    if marker:
        return prefix + nameAddition
    return ctrlName
def shapeRecolour(obj, colourIndex):
    """Enable drawing overrides on every shape under obj and set its colour.

    Handy indices: 2-dark grey, 13-red, 17-yellow, 18-cyan, 20-cream.
    """
    shapeNodes = cmds.listRelatives(obj, children = True, shapes =True)
    for shapeNode in shapeNodes:
        cmds.setAttr(shapeNode + '.overrideEnabled', 1)
        # The colour index drives Maya's wireframe display colour.
        cmds.setAttr(shapeNode + '.overrideColor', colourIndex)
def checkForPlusMinusAverage(switchNode, switchAtt):
    """Return the first generated plusMinusAverage node wired to switchNode.switchAtt.

    Our generated nodes are recognised by node type plus both "pma" and
    "switch" appearing in their name. Returns None when nothing matches; warns
    when several match (the first one is used).
    """
    connections = cmds.listConnections(switchNode + "." + switchAtt, destination=True) or []
    candidates = [node for node in connections
                  if cmds.nodeType(node) == "plusMinusAverage"
                  and "pma" in node and "switch" in node]
    if not candidates:
        return None
    if len(candidates) > 1:
        cmds.warning("More than one \"plusMinusAverage\" has been found, connecting to the first one found in the destination connection list")
    return candidates[0]
#====================================================
# Create Global Local Switch Setup for the Control - UI
#====================================================
class TDFR_ControllerGlobalLocalSwitch_Ui(MayaQWidgetDockableMixin, QtGui.QDialog):
    """Dockable dialog that builds a Global/Local switch setup for one control.

    The user adds a single controller, picks a switch control plus one of its
    user-defined attributes, then presses the big green button: the control is
    duplicated into Global/Local copies, the original becomes a "Ghost" driven
    by a parent constraint whose weights follow the switch attribute.
    """
    def __init__(self):
        super(TDFR_ControllerGlobalLocalSwitch_Ui, self).__init__()
        # Internal state filled in by the two "Add Selected" buttons.
        self.ctrlList = []
        self.switchCtrl = None
        self.switchCtrlAtts = []
        self.switchCtrlAtt = None
        self.pmaSwitchNode = None
        # --- Left column: the control that will receive the switch ---
        self.ctrlsTw = QtGui.QTreeWidget()
        self.ctrlsTw.setToolTip("Please select the controller that you wish to create a local/Global Switch for and then click the button below.\nThis tool will only run on one control at a time")
        self.ctrlsTw.setHeaderLabel("")
        self.ctrlsLbl = QtGui.QLabel("Specify Controller to add switch to")
        self.ctrlsBtn = QtGui.QPushButton("Add Selected Control", self)
        self.ctrlsBtn.clicked.connect(self.ctrlBtnPress)
        self.ctrlsClearBtn = QtGui.QPushButton("Clear", self)
        self.ctrlsClearBtn.setMaximumWidth(40)
        self.ctrlsClearBtn.clicked.connect(self.clearCtrls)
        # --- Right column: the switch control and its attributes ---
        self.switchCtrlTw = QtGui.QTreeWidget()
        self.switchCtrlTw.setToolTip("Please select the switch Control that will control the switch system, and then highlight the attribute that you want to use to control the switch")
        self.switchCtrlTw.setHeaderLabel("")
        self.switchCtrlLbl = QtGui.QLabel("Please choose the switch Control Switch Attribute")
        self.switchCtrlBtn = QtGui.QPushButton("Add Selected switch Control", self)
        self.switchCtrlBtn.clicked.connect(self.switchCtrlBtnPress)
        self.switchCtrlClearBtn = QtGui.QPushButton("Clear", self)
        self.switchCtrlClearBtn.setMaximumWidth(40)
        self.switchCtrlClearBtn.clicked.connect(self.clearSwitchCtrl)
        # --- Layout assembly ---
        masterSwitchFrame = QtGui.QFrame(self) # Create frame and Layout to hold all the User List View
        masterSwitchFrame.setFrameShape(QtGui.QFrame.StyledPanel)
        uiTopHLayout = QtGui.QHBoxLayout()
        ctrlLayout = QtGui.QVBoxLayout()
        ctrlButtonLayout = QtGui.QHBoxLayout()
        ctrlButtonLayout.addWidget(self.ctrlsBtn)
        ctrlButtonLayout.addWidget(self.ctrlsClearBtn)
        ctrlLayout.addWidget(self.ctrlsLbl)
        ctrlLayout.addWidget(self.ctrlsTw)
        ctrlLayout.addLayout(ctrlButtonLayout)
        switchLayout = QtGui.QVBoxLayout()
        switchButtonLayout = QtGui.QHBoxLayout()
        switchButtonLayout.addWidget(self.switchCtrlBtn)
        switchButtonLayout.addWidget(self.switchCtrlClearBtn)
        switchLayout.addWidget(self.switchCtrlLbl)
        switchLayout.addWidget(self.switchCtrlTw)
        switchLayout.addLayout(switchButtonLayout)
        uiTopHLayout.addLayout(ctrlLayout)
        uiTopHLayout.addLayout(switchLayout)
        uiTotalLayout = QtGui.QVBoxLayout(masterSwitchFrame)
        uiTotalLayout.addLayout(uiTopHLayout)
        # --- Big green "build" button ---
        self.executeSwitchBtn = QtGui.QPushButton('Build Global/Local Switch for the Control/s')
        self.executeSwitchBtn.clicked.connect(self.switchBuild)
        self.executeSwitchBtn.setToolTip("Please hit the big green button when all the rest of the options are filled out, and it will hopefully build you a Global/Local switch system\nfor each of the controls in the control list.")
        self.executeSwitchBtn.setMinimumHeight(80)
        self.executeSwitchBtn.setMinimumWidth(470)
        self.executeSwitchBtn.setStyleSheet("background-color: green")
        uiTotalLayout.addWidget(self.executeSwitchBtn)
        self.setGeometry(300, 300, 550, 360)
        self.setWindowTitle('Control Global to Local Switch')
        self.show()
    def ctrlBtnPress(self):
        """Validate the current selection and load the single control into the list."""
        self.clearCtrls()
        mySel = cmds.ls(sl=True)
        if ctrlSelectionFilter() and self.checkAllSelections(mySel):
            mySel = cmds.ls(sl=True)
            if len(mySel) == 1:
                for ctrl in mySel:
                    # NOTE(review): QTreeWidgetItem normally takes a parent or a
                    # list of column strings; passing the name string relies on
                    # PySide's lenient overloads - setText below fixes column 0.
                    treeItem = QtGui.QTreeWidgetItem(ctrl)
                    treeItem.setText(0, ctrl)
                    treeItem.setFlags(QtCore.Qt.ItemIsEnabled) #Set the Item so it cannot be selected
                    self.ctrlsTw.addTopLevelItem(treeItem)
                    self.ctrlList.append(ctrl)
            else:
                cmds.warning("Incorrect Controller Selection, please only select a single controller")
    def switchCtrlBtnPress(self):
        """Load the selected switch control and list its user-defined attributes."""
        self.clearSwitchCtrl()
        mySel = cmds.ls(sl=True)
        if ctrlSelectionFilter() and self.checkAllSelections(mySel):
            if len(mySel) == 1:
                self.switchCtrl = mySel[0]
                masterAtts = cmds.listAttr(mySel[0], s=True, r=True, w=True, c=True, userDefined=True) #This will give a list of readable, keyable, scalar, user defined attributes
                for att in masterAtts:
                    treeItem = QtGui.QTreeWidgetItem(att)
                    treeItem.setText(0, att)
                    self.switchCtrlTw.addTopLevelItem(treeItem)
                    self.switchCtrlAtts.append(att)
            else: cmds.warning("Please select a single node that has user defined attributes added, to act as the Master Control")
    def checkAllSelections(self,mySel):
        """Return True when the selection overlaps neither stored list."""
        validSelection = True
        if not checkRepeatSelection(mySel, self.ctrlList, "Controls", isJoint=False) : validSelection = False
        if not checkRepeatSelection(mySel, [self.switchCtrl], "Switch Control", isJoint=False) : validSelection = False
        return validSelection
    def clearCtrls(self):
        """Reset the control list UI and state."""
        self.ctrlsTw.clear()
        self.ctrlList = []
    def clearSwitchCtrl(self):
        """Reset the switch-control UI and state."""
        self.switchCtrlTw.clear()
        self.switchCtrl = None
        self.switchCtrlAtts = []
        self.switchCtrlAtt = None
    def checkSwitchCtrlAtt(self):
        # Record which attribute the user highlighted in the tree (if any).
        self.switchCtrlAtt = None
        if self.switchCtrlTw.currentItem():
            self.switchCtrlAtt = self.switchCtrlTw.currentItem().text(0)
    def switchBuild(self):
        """Duplicate the control into Global/Local copies and wire the switch.

        Creates (or reuses) a plusMinusAverage node computing 1 - switchAttr,
        drives the copies' visibility with it, turns the original control into
        a constrained "Ghost", and connects the constraint weights so the
        attribute fades between Global and Local.
        """
        cmds.undoInfo(openChunk=True)
        self.checkSwitchCtrlAtt()
        #Setup plusMinusAverage Reversing Node
        pMAName = nameSwitchRebuild(self.switchCtrl, "cv", "pma", nameEnd = self.switchCtrlAtt + "Switch")
        #Check to see if a plusMinusAverage Node already exists
        testForSwitch = checkForPlusMinusAverage(self.switchCtrl, self.switchCtrlAtt)
        if testForSwitch != None:
            self.pmaSwitchNode = testForSwitch
            cmds.warning("Switch PlusMinusAverage Node has been found for this attribute - Connecting to this node.")
        else:
            #PlusMinusAverage node only needs to be created and connected if it does not already exist!
            myRigPm = cmds.shadingNode('plusMinusAverage', asUtility=True, name= pMAName)
            self.pmaSwitchNode = myRigPm
            # operation 2 is "subtract": output1D = input1D[0] - input1D[1] = 1 - attr
            cmds.setAttr(self.pmaSwitchNode + ".operation", 2)
            cmds.setAttr(self.pmaSwitchNode + ".input1D[0]", 1)
            cmds.connectAttr(self.switchCtrl + "." + self.switchCtrlAtt, self.pmaSwitchNode + ".input1D[1]")
        #Now we need to duplicate the control twice. Rename the orginal to a "Ghost Control" and label up the others as local and global Controls, first of all find the Control Group
        currentCtrlGroup = cmds.listRelatives(self.ctrlList[0], parent = True)
        newGlobalCtrlPack = (cmds.duplicate(currentCtrlGroup, renameChildren=True))
        stripConstraintsFromGroup(newGlobalCtrlPack) #Delete out any constraint Nodes in there
        newGlobalGrpName = ctrlRename(newGlobalCtrlPack[0], "GlobalCtrl")
        newGlobalCtrlName = ctrlRename(newGlobalCtrlPack[1], "GlobalCtrl")
        newGlobalGrpName = cmds.rename(newGlobalCtrlPack[0], newGlobalGrpName)
        newGlobalCtrlName =cmds.rename(newGlobalCtrlPack[1], newGlobalCtrlName)
        shapeRecolour(newGlobalCtrlName, 13) # Colour Global Control Red
        newLocalCtrlPack = cmds.duplicate(currentCtrlGroup, renameChildren=True)
        stripConstraintsFromGroup(newLocalCtrlPack) #Delete out any constraint Nodes in there
        newLocalGrpName = ctrlRename(newLocalCtrlPack[0], "LocalCtrl")
        newLocalCtrlName = ctrlRename(newLocalCtrlPack[1], "LocalCtrl")
        newLocalGrpName = cmds.rename(newLocalCtrlPack[0], newLocalGrpName)
        newLocalCtrlName = cmds.rename(newLocalCtrlPack[1], newLocalCtrlName)
        shapeRecolour(newLocalCtrlName, 17) # Colour Local Control Yellow
        #Rename the original Controls to be Ghosts
        currentGroupGhostName = ctrlRename(currentCtrlGroup[0], "Ghost")
        currentCtrlGhostName = ctrlRename(self.ctrlList[0], "Ghost")
        currentGroupGhostName = cmds.rename(currentCtrlGroup, currentGroupGhostName) #reassigning variable to insure unique Name
        currentCtrlGhostName =cmds.rename(self.ctrlList[0], currentCtrlGhostName)
        shapeRecolour(currentCtrlGhostName, 2) #2 is dark grey
        #Now connect up the visibility of the new controls
        cmds.connectAttr(self.pmaSwitchNode + ".output1D", newLocalCtrlName + ".visibility")
        cmds.connectAttr(self.switchCtrl + "." + self.switchCtrlAtt, newGlobalCtrlName + ".visibility")
        #Now we need to add the parent constraint, and sort out all aspects like naming etc.
        parConstName = nameSwitchRebuild(self.switchCtrl, "cv", "parC", nameEnd = self.switchCtrlAtt + "Switch")
        parConst = (cmds.parentConstraint(newLocalCtrlName, newGlobalCtrlName, currentCtrlGhostName, name = parConstName))[0]
        weightAtts = cmds.parentConstraint(parConst, q=1, weightAliasList = True)
        # Give the auto-generated weight aliases meaningful names.
        for i, att in enumerate(weightAtts):
            if i == 0: #Set first Local Weight
                print "moo"
                cmds.renameAttr(parConst + "." + att , "localWeight")
            elif i == 1: #Set Second Global Weight
                print "bob"
                cmds.renameAttr(parConst + "." + att , "globalWeight")
        #Now we make the correct connections to control the weights
        cmds.connectAttr(self.switchCtrl + "." + self.switchCtrlAtt, parConst + ".globalWeight")
        cmds.connectAttr(self.pmaSwitchNode + ".output1D", parConst + ".localWeight")
if __name__ == "__main__":
    # Launch the dockable switch-builder UI (run from within Maya).
    TDFR_ControllerGlobalLocalSwitch_Ui()
|
#!flask/bin/python
from flask import Flask, request
from flask_prometheus import monitor
from opentracing.ext import tags
from opentracing.propagation import Format
from jaeger_client import Config
import random
import logging
import sys
app = Flask(__name__)
def init_tracer(service):
    """Build and return a Jaeger tracer for *service* (const-sampling every trace)."""
    # Drop any existing root handlers so basicConfig installs a fresh one.
    logging.getLogger('').handlers = []
    logging.basicConfig(format='%(message)s', level=logging.DEBUG)
    tracer_settings = {
        'sampler': {
            'type': 'const',
            'param': 1,
        },
        'logging': True,
    }
    config = Config(config=tracer_settings, service_name=service)
    return config.initialize_tracer()
# Module-level singletons: one tracer for the process plus the pool of names
# served by the endpoint.
tracer = init_tracer('greeting-service')
# Bug fix: use a context manager so the file handle is closed even if
# readlines() raises, instead of the manual open()/close() pair.
with open('names.txt', 'r') as f:
    names = f.readlines()
@app.route('/')
def index():
    """Return a random name, continuing the caller's trace if headers carry one."""
    # Rebuild the parent span context propagated via HTTP headers (if any).
    span_ctx = tracer.extract(Format.HTTP_HEADERS, request.headers)
    span_tags = {tags.SPAN_KIND: tags.SPAN_KIND_RPC_SERVER}
    with tracer.start_span('get-name', child_of=span_ctx, tags=span_tags):
        name = random.choice(names).strip()
        app.logger.debug('NAME: ' + name)
        return name
if __name__ == '__main__':
    # Expose Prometheus metrics on :8000, then serve the app on :8080.
    monitor(app, port=8000)
    app.run(host='0.0.0.0', port=8080)
|
#!/usr/bin/python
#coding:utf-8
from django.shortcuts import render
from django.http import HttpResponse,HttpResponseRedirect,HttpResponseNotFound
import datetime
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from nmaptoolbackground.control import usercontrol,jobcontrol,ipcontrol,portcontrol,taskcontrol
from django.views import generic
from spidertool import webtool
import json
def indexpage(request):
    """Render the empty search page, echoing the username cookie if present."""
    username = request.COOKIES.get('username','')
    context = {'data': '', 'username': username}
    return render_to_response('fontsearchview/search.html', context)
def mainpage(request):
    """Render the result page for the query string and page number in the URL."""
    context = {
        'data': request.GET.get('searchcontent', ''),
        'page': request.GET.get('page', '0'),
        'username': request.COOKIES.get('username', ''),
    }
    return render_to_response('fontsearchview/searchdetail.html', context)
def detailpage(request):
    """AJAX search endpoint (Python 2).

    POST body carries ``content`` (query) and ``page``. If ``content`` wraps
    into valid JSON it is treated as a structured query (field->value dict,
    with special keys ``all``/``use``/``city``); otherwise it is a free-text
    Elasticsearch query. Responds with a JSON payload where result "1" means
    hits were produced and "0" means nothing was searched.
    """
    content=request.POST.get('content','')
    page=request.POST.get('page','0')
    username = request.COOKIES.get('username','')
    response_data = {}
    response_data['result'] = '0'
    jsoncontent=None
    import json
    # Try to interpret the raw content as the inside of a JSON object;
    # failure simply leaves jsoncontent as None (free-text path).
    try:
        jsonmsg='{'+content+'}'
        jsoncontent=json.loads(jsonmsg)
    except Exception,e:
        print e
        pass
    if jsoncontent is None:
        # Free-text path: run a full-text Elasticsearch search.
        if content!='' and len(content)>0:
            print '存在内容,进入elasticsearch 检索'
            import sys
            sys.path.append("..")
            from elasticsearchmanage import elastictool
            ports,portcount,portpagecount=elastictool.search(page=page,dic=None,content=content)
            print '检索完毕'
            response_data['result'] = '1'
            response_data['ports']=ports
            response_data['portslength']=portcount
            response_data['portspagecount']=portpagecount
            response_data['portspage']=page
            response_data['username']=username
        else:
            # Structured path: dispatch the parsed dict to portcontrol.
            action=jsoncontent.keys()
            if 'use' in action or 'city' in action:
                del jsoncontent['use']
            jsoncontent['page']=page
            if 'all' in action:
                # Full-text match across the indexed columns via MySQL MATCH().
                extra=' where match(version,product,head,detail,script,hackinfo,disclosure,keywords) against(\''+jsoncontent['all']+'\' in Boolean mode) '
                ports,portcount,portpagecount=portcontrol.portabstractshow(page=page,extra=extra,command='or')
            else:
                ports,portcount,portpagecount=getattr(portcontrol, 'portabstractshow','portabstractshow')(**jsoncontent)
            response_data['result'] = '1'
            response_data['ports']=ports
            response_data['portslength']=portcount
            response_data['portspagecount']=portpagecount
            response_data['portspage']=page
            response_data['username']=username
    else:
        if len(content)==0:
            return HttpResponse(json.dumps(response_data,skipkeys=True,default=webtool.object2dict), content_type="application/json")
        print '进入elasticsearch 具体关键词匹配'
        import sys
        sys.path.append("..")
        from elasticsearchmanage import elastictool
        # Keyword-targeted Elasticsearch search using the parsed dict.
        ports,portcount,portpagecount=elastictool.search(page=page,dic=jsoncontent,content=None)
        response_data['result'] = '1'
        response_data['ports']=ports
        response_data['portslength']=portcount
        response_data['portspagecount']=portpagecount
        response_data['portspage']=page
        response_data['username']=username
    # Some records contain bytes that the default serializer rejects; retry
    # with latin-1 as a fallback.
    try:
        return HttpResponse(json.dumps(response_data,skipkeys=True,default=webtool.object2dict), content_type="application/json")
    except Exception,e:
        print e
        return HttpResponse(json.dumps(response_data,skipkeys=True,default=webtool.object2dict,encoding='latin-1'), content_type="application/json")
|
# Toy HTTP form brute-forcer (Python 2): tries every 4-letter lowercase
# password for user "admin" against a local lab login page, logging attempts.
# NOTE(review): only for systems you own - this is a pentest lab exercise.
import itertools
import mechanize
from bs4 import BeautifulSoup
br = mechanize.Browser()
# All candidate passwords: aaaa, aaab, ... zzzz (26**4 combinations).
comb = itertools.product(list("abcdefghijklmnopqrstuvwxyz"),repeat=4)
# NOTE(review): this log file handle is never closed/flushed explicitly.
f = open("testfile.txt","w")
for i in comb:
    # A fresh browser per attempt avoids any carried-over session state.
    br = mechanize.Browser()
    response = br.open('http://localhost/sample/log.php')
    br.select_form(nr=0)
    br.form['user'] = "admin"
    br.form['pass'] = ''.join(i)
    req = br.submit()
    req
    soup = BeautifulSoup(req.read(),'lxml')
    print ''.join(i)
    f.write(''.join(i))
    f.write('\n')
    # The page's first <h2> reads "hello" only after a successful login.
    if soup.findAll('h2')[0].text == "hello":
        print "Password found " + ''.join(i)
        break
    else:
        continue
|
import pprint
message = 'We on an ultra light beam. This is a god dream.'
# Tally how often each character occurs, case-insensitively.
count = {}
for symbol in message.upper():
    count[symbol] = count.get(symbol, 0) + 1
pprint.pprint(count)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.errors import AnsibleFilterError
from ansible.module_utils.six import iteritems, string_types
from ansible.plugins.filter.core import flatten
from numbers import Number
def config(parameters, exclude=[]):
    """Render a dict of vim settings into newline-separated ``set`` lines.

    Keys listed in *exclude* are skipped; remaining keys are emitted in sorted
    order. Raises AnsibleFilterError when *parameters* is not a dict.
    """
    if not isinstance(parameters, dict):
        raise AnsibleFilterError('manala_vim_config expects a dict but was given a %s' % type(parameters))
    # Bug fix: work on a pruned copy instead of pop()-ing keys out of the
    # caller's dict, which mutated the template variable between filter uses.
    remaining = {key: value for key, value in iteritems(parameters) if key not in exclude}
    lines = []
    for key in sorted(remaining):
        parameter = config_parameter(remaining, key)
        if parameter:
            lines.append(parameter)
    return '\n'.join(lines)
def config_parameter(parameters, key, required=False, default=None, comment=False):
    """Render a single vim ``set`` line for *key*.

    - value None or True   -> "set <key>"
    - string or number     -> "set <key>=<value>"
    Keys missing from *parameters* fall back to *default*; for such defaulted
    keys, comment=True comments the line out and a string comment replaces the
    line entirely. Raises AnsibleFilterError on bad argument types or a
    missing required key.
    """
    if not isinstance(parameters, dict):
        raise AnsibleFilterError('manala_vim_config_parameter parameters expects a dict but was given a %s' % type(parameters))
    if not isinstance(key, string_types):
        raise AnsibleFilterError('manala_vim_config_parameter key expects a string but was given a %s' % type(key))
    if required and key not in parameters:
        raise AnsibleFilterError('manala_vim_config_parameter requires a value for key %s' % key)
    value = parameters.get(key, default)
    if value is None or value is True:
        result = 'set %s' % key
    else:
        # Bug fix: the unknown-type error was constructed but never raised,
        # silently accepting lists/dicts and rendering their repr.
        if not isinstance(value, (string_types, Number)):
            raise AnsibleFilterError('manala_vim_config_parameter value of an unknown type %s' % type(value))
        result = 'set %s=%s' % (key, value)
    if key not in parameters:
        # NOTE(review): vim comments normally start with '"', not ';' -
        # confirm the intended comment leader before changing it.
        if comment is True:
            result = ';' + result.replace('\n', '\n;')
        elif isinstance(comment, string_types):
            result = comment
    return result
class FilterModule(object):
    """Expose the manala vim config helpers as Jinja2 filters."""

    def filters(self):
        # Filter name -> implementing callable.
        return {
            'manala_vim_config': config,
            'manala_vim_config_parameter': config_parameter,
        }
|
# Load one scraped-movie JSON file and insert a row into the movies table.
tot_name = os.path.join(os.path.dirname(__file__), 'src/data', file_name)
# open the json datafile and read it in (context manager closes the handle)
with open(tot_name, 'r') as inputfile:
    doc = json.load(inputfile)
# Coerce the fields we persist to their column types.
# NOTE(review): the nested paths below assume dict nesting (e.g.
# doc['production_companies']['production_countries']['release date']);
# TMDB-style payloads usually hold *lists* here - confirm with a real file.
id_movie = int(doc['id'])
movie_name = str(doc['original_title'])
year = str(doc['production_companies']['production_countries']['release date'])
country_origin = str(doc['production_companies']['origin_country'])
category_1 = str(doc['genres']['name'])
category_2 = str(doc['genres']['name'])  # NOTE(review): duplicates category_1 - intended?
movie_rating = float(doc['popularity'])
avg_rating = float(doc['production_companies']['production_countries']['vote_average'])
total_clicks = float(doc['production_companies']['production_countries']['vote_count'])
valid_data = True
row = (id_movie, movie_name, year, country_origin, category_1, category_2, movie_rating,
       avg_rating, total_clicks)
# Bug fix: the VALUES clause listed ten %s placeholders for nine columns and a
# nine-item row, which made every insert fail; it now matches the tuple above.
insert_cmd = """INSERT INTO movies
                (id_movie, movie_name, year,
                country_origin, category_1, category_2,
                movie_rating, avg_rating, total_clicks)
                VALUES
                (%s, %s, %s, %s, %s, %s, %s, %s, %s);"""
print(insert_cmd, row)
if valid_data is True:
    pg_hook.run(insert_cmd, parameters=row)
|
import json
from django.http import JsonResponse
from Management.controller import Controller, Service
def request_on(request):
    """Start air-conditioning service for a room.

    Reads roomId/mode/currentRoomTemp from the JSON body, records the room's
    current temperature and asks the controller to start a Service at the
    system-wide default target temperature. Responds with the resulting
    service parameters and the room's fee.
    """
    controller = Controller()
    data = json.loads(request.body)
    room_id = data.get('roomId')
    mode = data.get('mode')
    # Every new service starts at the default target temperature.
    target_temp = controller.settings['defaultTargetTemp']
    controller.update_room_temp(room_id, data.get('currentRoomTemp'))
    service = Service(room_id=room_id, mode=mode, target_temp=target_temp)
    result = controller.request_on(service)
    # NOTE(review): 'statue' is presumably a typo for 'status', but the client
    # depends on this key name, so it is kept as-is.
    response = {
        'statue': result,
        'message': {
            'mode': service.mode,
            'targetTemp': service.target_temp,
            'fan': service.fan_speed,
            'fee': controller.get_room_fee(room_id),
            'feeRate': service.fee_rate,
        }
    }
    controller.print_log()
    return JsonResponse(response)
def change_target_temp(request):
    """Set a new target temperature for the room named in the request body."""
    controller = Controller()
    payload = json.loads(request.body)
    controller.change_target_temp(payload.get('roomId'), payload.get('targetTemp'))
    controller.print_log()
    return JsonResponse({'msg': 'OK'})
def change_fan_speed(request):
    """Update a room's fan speed and report the fee rate that speed bills at.

    Raises RuntimeError for any speed outside 1 (low) / 2 (medium) / 3 (high).
    """
    controller = Controller()
    payload = json.loads(request.body)
    room_id = payload.get('roomId')
    speed = payload.get("fanSpeed")
    controller.change_fan_speed(room_id, speed)
    # Map each legal speed to the settings key that holds its fee rate.
    rate_keys = {1: 'FeeRateL', 2: 'FeeRateM', 3: 'FeeRateH'}
    if speed not in rate_keys:
        raise RuntimeError('No such speed')
    response = {"feeRate": controller.settings[rate_keys[speed]]}
    controller.print_log()
    return JsonResponse(response)
def request_off(request):
    """Stop service for a room and return its accumulated fee."""
    controller = Controller()
    payload = json.loads(request.body)
    target_room = payload.get('roomId')
    controller.request_off(target_room)
    controller.print_log()
    return JsonResponse({'fee': controller.get_room_fee(target_room)})
def request_info(request):
    """Receive a room status report.

    1. Update the room's current temperature.
    2. Reply with the room's air-conditioner state and accumulated fee.

    Reported ``statue`` codes from the room unit:
      - 3: target temperature reached -> pause the service
      - 4: temperature threshold reached -> resume the service
      - >4: room temperature drifting back (no state change here)
    """
    controller = Controller()
    data = json.loads(request.body)
    room_id = data.get('roomId')
    statue = data.get('statue')
    current_temp = data.get('currentTemp')
    # NOTE(review): print_log is called both before and after the update -
    # confirm whether the first call is an intentional before/after snapshot.
    controller.print_log()
    controller.update_room_temp(room_id, current_temp)
    if statue == 3:
        controller.pause_service(room_id)
    elif statue == 4:
        controller.resume_service(room_id)
    controller.print_log()
    return JsonResponse({
        'statue': controller.room_state[room_id]['AC_status'],
        'fee': controller.get_room_fee(room_id)
    })
|
#encoding: utf-8
from .uralicApi import analyze as uralic_api_analyze
from .uralicApi import __where_models as where_models
import os, sys
from subprocess import Popen, PIPE
from mikatools import open_write
def _Cg3__parse_sentence(words, language, morphology_ignore_after=None, descrpitive=True,remove_symbols=True):
    """Analyse every word and join the per-word HFST records into one string.

    The leading ``_Cg3`` prefix makes this module-level function reachable
    from inside the Cg3 class through Python's private-name mangling.
    """
    records = []
    for token in words:
        records.extend(__hfst_format(token, language, morphology_ignore_after, descrpitive=descrpitive, remove_symbols=remove_symbols))
    return "\n".join(records)
def __hfst_format(word, language, morphology_ignore_after=None, descrpitive=True,remove_symbols=True):
    """Format one word's analyses as tab-separated HFST lines plus a blank line.

    Unanalysable words are emitted as "<word>\\t<word>+?\\tinf". When
    morphology_ignore_after is set, each analysis string is truncated at its
    first occurrence.
    """
    analyses = uralic_api_analyze(word, language,descrpitive=descrpitive,remove_symbols=remove_symbols)
    lines = []
    if not analyses:
        lines.append(word + "\t" + word + "+?\tinf")
    for entry in analyses:
        reading = entry[0]
        if morphology_ignore_after is not None:
            reading = reading.split(morphology_ignore_after)[0]
        lines.append(word + "\t" + reading + "\t" + str(entry[1]))
    lines.append("")
    return lines
class Cg3():
    """Wrapper around the vislcg3 command-line tool for one language's grammar."""
    def __init__(self, language):
        # Locate the installed model directory and the constraint grammar in it.
        model_path = where_models(language)
        cg_path = os.path.join(model_path, "cg")
        self.cg_path = cg_path
        self.language = language
    def disambiguate(self, words, morphology_ignore_after=None,descrpitive=True,remove_symbols=True, temp_file=None):
        """Run morphological analysis plus CG disambiguation over *words*.

        Pipeline: HFST-style analyses -> cg-conv -> vislcg3. A trailing empty
        token is appended so the last word's record is terminated. When
        temp_file is given, the analyses are staged on disk and piped via cat
        (useful for input too long for an echo argument).

        Note: __parse_sentence is mangled by Python to _Cg3__parse_sentence,
        which is exactly the name of the module-level helper defined above.
        """
        hfst_output = __parse_sentence(words + [""], self.language, morphology_ignore_after, descrpitive=descrpitive,remove_symbols=remove_symbols)
        if temp_file is None:
            p1 = Popen(["echo", hfst_output], stdout=PIPE)
        else:
            f = open_write(temp_file)
            f.write(hfst_output)
            f.close()
            p1 = Popen(["cat", temp_file], stdout=PIPE)
        cg_conv = Popen(["cg-conv" ,"-f"], stdout=PIPE, stdin=p1.stdout)
        vislcg3 = Popen(['vislcg3', '--grammar', self.cg_path], stdout=PIPE, stdin=cg_conv.stdout)
        cg_results, error = vislcg3.communicate()
        return self.__parse_cg_results(cg_results)
    def __parse_cg_results(self, cg_results):
        """Parse vislcg3 output into [(surface_form, [Cg3Word, ...]), ...].

        NOTE(review): the cohort in progress is only flushed when the *next*
        cohort starts, so the final word's readings appear to be dropped after
        the loop - confirm whether vislcg3 output guarantees a trailing
        terminator that compensates.
        """
        if type(cg_results) is bytes:
            cg_results = cg_results.decode(sys.stdout.encoding)
        lines = cg_results.split("\n")
        results = []
        current_word = None
        current_list = []
        for line in lines:
            if line.startswith("\"<"):
                # A new cohort ("<form>") starts; flush the previous one.
                if current_word is not None:
                    results.append((current_word, current_list))
                current_word = line[2:-2]
                current_list = []
            elif line.startswith("\t"):
                # An indented line is one reading: "lemma" TAG TAG ...
                line = line[2:]
                parts = line.split("\" ", 1)
                if len(parts) < 2:
                    continue
                w = Cg3Word(current_word, parts[0], parts[1].split(" "))
                current_list.append(w)
        return results
class Cg3Word():
    """One CG-3 reading: the surface form, its lemma and morphology tags."""

    def __init__(self, form, lemma, morphology):
        self.form = form
        self.lemma = lemma
        self.morphology = morphology

    def __repr__(self):
        tags = ", ".join(self.morphology)
        text = "<" + self.lemma + " - " + tags + ">"
        # Python 2 leftover: repr had to be a byte string there; on Python 3
        # the first branch always wins.
        if type(text) is str:
            return text
        return text.encode("utf-8")
class Cg3Disambiguation():
    """Thin holder for a disambiguation argument.

    BUG FIX: the original ``__init__(self)`` executed ``self.arg = arg`` with
    ``arg`` never defined, so every instantiation raised NameError. The value
    is now an optional constructor parameter; the zero-argument call form
    keeps working (``arg`` defaults to None).
    """

    def __init__(self, arg=None):
        self.arg = arg
|
#!/usr/bin/env python
# encoding: utf-8
# @author: ZhouYang
try:
import cPickle as pickle
except ImportError as e:
print(e)
import pickle
import logging
from addict import Dict
from core.database import User
from pony.orm import db_session
logger = logging.getLogger(__name__)
class Session(object):
    """A user session: the session id plus (possibly empty) user info.

    *userinfo* is wrapped in an addict ``Dict`` so attribute access
    (``self.userinfo.id``) works on plain dicts too. The backing ``User``
    row is resolved eagerly so consumers can read ``session.user`` directly.
    """

    def __init__(self, sessionid, userinfo):
        self.sessionid = sessionid
        self.userinfo = Dict(userinfo)
        self.user = self.get_user()

    @db_session
    def get_user(self):
        """Return the ``User`` for ``userinfo.id``, or an empty Dict.

        NOTE(review): ``User.get`` may return ``None`` when the id does not
        exist — callers should tolerate a falsy ``session.user``; confirm.
        """
        if self.userinfo:
            _user = User.get(id=self.userinfo.id)
        else:
            _user = Dict()
        # Defer interpolation to the logging framework (was an eager
        # '%'-format, which paid the formatting cost even when INFO is off).
        logger.info('_user: %s', _user)
        return _user
class SessionFactory(object):
    """Creates, loads and expires redis-backed sessions.

    NOTE(review): ``_redis_client`` is None here and is presumably assigned
    elsewhere before use; hgetall results are assumed to be str-keyed
    (i.e. a client with decode_responses) — confirm against the redis setup.
    """

    _prefix = 'homegate-session'
    _redis_client = None

    def get_key(self, sessionid):
        """Namespaced redis key for *sessionid*."""
        return '{prefix}-{key}'.format(prefix=self._prefix, key=sessionid)

    def get_session(self, sessionid, expire_sec=None):
        """Load the session from redis, or None; optionally refresh its TTL."""
        key = self.get_key(sessionid)
        stored = self._redis_client.hgetall(key)
        if not stored:
            return None
        if expire_sec:
            self._redis_client.expire(key, expire_sec)
        return Session(
            sessionid=stored['sessionid'],
            userinfo=pickle.loads(stored['userinfo'])
        )

    def expire(self, sessionid, expire_sec=3600):
        """Set the session's TTL in seconds."""
        self._redis_client.expire(self.get_key(sessionid), expire_sec)

    def generate(self, sessionid, userinfo=None):
        """Persist a new session hash in redis and return the Session."""
        info = userinfo or {}
        payload = {
            'userinfo': pickle.dumps(dict(info)),
            'sessionid': sessionid
        }
        self._redis_client.hmset(self.get_key(sessionid), payload)
        return Session(sessionid=sessionid, userinfo=info)
|
# Demo: a triple-quoted multi-line literal vs an explicit '\n' literal,
# printed with str() and repr().
text = """Начало
КОНТЕНТ
Конец"""
# NOTE(review): this assignment immediately overwrites the value above, and
# the two literals are NOT equal (this one has blank lines between the parts).
text = 'Начало\n\nКОНТЕНТ\n\nКонец'
print('-' * 60)
# str(): newlines are rendered as real line breaks.
print(str(text))
print('-' * 60)
print('*' * 60)
print('-'*60)
# repr(): newlines shown escaped (\n), value wrapped in quotes.
print(repr(text))
print('-'*60)
|
# Parking: |
# def can_enter_the_cave(data):
# for line in data:
# if all(cell == 0 for cell in line):
# return True
# return False
def can_enter_the_cave(data, x=0, y=0):
    """Return True iff a 4-connected path of 0-cells links the left edge
    (column 0) to the right edge (last column) of *data*.

    *x* and *y* are retained for backward compatibility with the old
    recursive signature; the iterative search ignores them.

    BUG FIX: the previous recursive walk kept no visited set, so vertically
    adjacent free cells recursed into each other forever (RecursionError on
    the 'complex_one_not_walkable' grid below), and it left a debug print()
    behind. Replaced with an explicit-stack flood fill from every open cell
    in column 0; all nine tests in this file pass.
    """
    if not data or not data[0]:
        return False
    last_col = len(data[0]) - 1
    # Every open cell on the left edge is a valid entrance.
    stack = [(r, 0) for r in range(len(data)) if data[r][0] == 0]
    seen = set(stack)
    while stack:
        r, c = stack.pop()
        if c == last_col:
            return True
        for nr, nc in ((r - 1, c), (r + 1, c), (r, c - 1), (r, c + 1)):
            if (0 <= nr < len(data) and 0 <= nc < len(data[nr])
                    and data[nr][nc] == 0 and (nr, nc) not in seen):
                seen.add((nr, nc))
                stack.append((nr, nc))
    return False
def test_complex_one_not_walkable():
    """A diagonal wall of 1s blocks every route to the right edge."""
    grid = [
        [0, 0, 0, 1, 0, 0, 0, 0],
        [0, 0, 0, 1, 1, 0, 0, 0],
        [0, 0, 0, 0, 1, 1, 0, 0],
        [0, 0, 0, 1, 1, 1, 1, 0],
    ]
    assert not can_enter_the_cave(grid)
def test_complex_one():
    """A winding corridor of 0s reaches the right edge."""
    grid = [
        [0, 0, 1, 1, 1, 0, 0, 0],
        [0, 0, 0, 0, 1, 0, 0, 0],
        [0, 0, 1, 0, 0, 0, 0, 0],
        [0, 0, 1, 1, 1, 1, 1, 0],
    ]
    assert can_enter_the_cave(grid)
def test_can_enter_the_cave_line_with_second_lane_walkable():
    """Blocked first row, but the second row offers a detour."""
    assert can_enter_the_cave([[0, 1, 0, 0], [0, 0, 0, 1]])
def test_can_enter_the_cave_line_with_zeroes_but_not_walkable():
    """A single row with a wall in it is not walkable."""
    assert not can_enter_the_cave([[0, 1, 0, 0]])
def test_can_enter_the_cave_only_with_zero():
    """A single open cell is trivially walkable."""
    assert can_enter_the_cave([[0]])
def test_can_enter_the_cave_only_with_one():
    """A single blocked cell cannot be entered."""
    assert not can_enter_the_cave([[1]])
def test_can_enter_the_cave_line_with_zeroes():
    """A fully open single row is walkable."""
    assert can_enter_the_cave([[0, 0, 0, 0]])
def test_can_enter_the_cave_line_with_ones():
    """A fully blocked single row is not walkable."""
    assert not can_enter_the_cave([[1, 1, 1, 1]])
def test_can_enter_the_cave_line_with_zeroes_and_ones():
    """A blocked first row with an open second row is walkable."""
    assert can_enter_the_cave([[1, 1, 1, 1], [0, 0, 0, 0]])
|
import collections
from typing import List
import tlm.data_gen.bert_data_gen as btd
def pad0(seq, max_len):
    """Pad *seq* in place with zeros up to *max_len* and return it."""
    assert len(seq) <= max_len
    seq.extend([0] * (max_len - len(seq)))
    return seq
def truncate_seq(tokens_a, max_num_tokens, rng):
    """Truncate *tokens_a* in place to at most *max_num_tokens* tokens.

    Each removal is randomly from the front or the back so that neither end
    is systematically favored. Returns the (mutated) list.
    """
    while len(tokens_a) > max_num_tokens:
        # Coin flip: drop from the front or from the back.
        if rng.random() < 0.5:
            del tokens_a[0]
        else:
            tokens_a.pop()
    return tokens_a
def truncate_seq_pair(tokens_a, tokens_b, max_num_tokens, rng):
    """Truncate the pair in place until len(a) + len(b) <= max_num_tokens.

    Each step removes one token from the currently longer sequence, randomly
    from the front or the back (mirrors BERT's create_pretraining_data).
    Mutates the lists and returns None.

    FIX: removed the leftover "Infinited loop" debug counter/prints — the
    loop provably terminates because the total length strictly decreases each
    iteration (the assert fires first if a sequence would go below empty).
    """
    while len(tokens_a) + len(tokens_b) > max_num_tokens:
        # Always shrink the longer side; ties shrink tokens_b.
        trunc_tokens = tokens_a if len(tokens_a) > len(tokens_b) else tokens_b
        assert len(trunc_tokens) >= 1
        # Coin flip: drop from the front or from the back.
        if rng.random() < 0.5:
            del trunc_tokens[0]
        else:
            trunc_tokens.pop()
def format_tokens_n_segid(raw_tokens):
    """Wrap *raw_tokens* as "[CLS]" ... "[SEP]" with all-zero segment ids."""
    tokens = ["[CLS]"] + list(raw_tokens) + ["[SEP]"]
    segment_ids = [0] * len(tokens)
    return tokens, segment_ids
def format_tokens_pair_n_segid(tokens_a, tokens_b):
    """Build "[CLS] a... [SEP] b... [SEP]" with 0/1 segment ids.

    Segment 0 covers [CLS], tokens_a and the first [SEP]; segment 1 covers
    tokens_b and the final [SEP].
    """
    first_part = ["[CLS]"] + list(tokens_a) + ["[SEP]"]
    second_part = list(tokens_b) + ["[SEP]"]
    tokens = first_part + second_part
    segment_ids = [0] * len(first_part) + [1] * len(second_part)
    return tokens, segment_ids
def get_basic_input_feature_as_list(tokenizer, max_seq_length, input_tokens, segment_ids):
    """Convert tokens to ids, then pad via the all-ids variant."""
    token_ids = tokenizer.convert_tokens_to_ids(input_tokens)
    return get_basic_input_feature_as_list_all_ids(token_ids, segment_ids, max_seq_length)
def get_basic_input_feature_as_list_all_ids(input_ids, segment_ids, max_seq_length):
    """Zero-pad ids/mask/segments to *max_seq_length*.

    Returns (input_ids, input_mask, segment_ids), each exactly
    *max_seq_length* long. *input_ids* is padded in place; *segment_ids*
    is copied first, so the caller's list is untouched.

    FIX: removed a no-op self-assignment (``max_seq_length = max_seq_length``)
    and replaced the per-element append loop with one extend per list.
    """
    input_mask = [1] * len(input_ids)
    segment_ids = list(segment_ids)
    assert len(input_ids) <= max_seq_length
    pad_len = max_seq_length - len(input_ids)
    input_ids.extend([0] * pad_len)
    input_mask.extend([0] * pad_len)
    segment_ids.extend([0] * pad_len)
    assert len(input_ids) == max_seq_length
    assert len(input_mask) == max_seq_length
    assert len(segment_ids) == max_seq_length
    return input_ids, input_mask, segment_ids
def get_basic_input_feature(tokenizer, max_seq_length, input_tokens, segment_ids):
    """Tokenize, pad, and wrap the triplet as a TF feature OrderedDict."""
    triplet = get_basic_input_feature_as_list(tokenizer, max_seq_length, input_tokens, segment_ids)
    return ordered_dict_from_input_segment_mask_ids(*triplet)
def ordered_dict_from_input_segment_mask_ids(input_ids, input_mask, segment_ids):
    """Pack the three id lists into an OrderedDict of TF int features.

    Key order (input_ids, input_mask, segment_ids) is preserved.
    """
    named = [
        ("input_ids", input_ids),
        ("input_mask", input_mask),
        ("segment_ids", segment_ids),
    ]
    return collections.OrderedDict(
        (name, btd.create_int_feature(values)) for name, values in named
    )
def combine_with_sep_cls(max_seq_length, tokens1, tokens2):
    """Join the pair as "[CLS] t1 [SEP] t2 [SEP]", trimming *tokens2* to fit.

    The "- 3" budgets for the three special tokens; the final slices guard
    against tokens1 alone overflowing max_seq_length.
    """
    room_for_seg2 = max_seq_length - 3 - len(tokens1)
    seg2 = tokens2[:room_for_seg2]
    tokens = ["[CLS]"] + tokens1 + ["[SEP]"] + seg2 + ["[SEP]"]
    segment_ids = [0] * (len(tokens1) + 2) + [1] * (len(seg2) + 1)
    return tokens[:max_seq_length], segment_ids[:max_seq_length]
def combine_with_sep_cls2(max_seq_length, tokens1, tokens2):
    """Like combine_with_sep_cls, but trims *tokens1* (not tokens2) to fit."""
    room_for_seg1 = max_seq_length - 3 - len(tokens2)
    seg1 = tokens1[:room_for_seg1]
    tokens = ["[CLS]"] + seg1 + ["[SEP]"] + tokens2 + ["[SEP]"]
    segment_ids = [0] * (len(seg1) + 2) + [1] * (len(tokens2) + 1)
    return tokens[:max_seq_length], segment_ids[:max_seq_length]
def concat_triplet_windows(triplet_iterator, window_length=None):
    """Concatenate (input_ids, input_mask, segment_ids) windows end to end.

    When *window_length* is given, every window is asserted to have exactly
    that many elements in each of its three lists.
    """
    ids_acc: List[int] = []
    mask_acc: List[int] = []
    seg_acc: List[int] = []
    for win_ids, win_mask, win_seg in triplet_iterator:
        ids_acc += win_ids
        mask_acc += win_mask
        seg_acc += win_seg
        if window_length is not None:
            assert len(win_ids) == window_length
            assert len(win_mask) == window_length
            assert len(win_seg) == window_length
    return ids_acc, mask_acc, seg_acc
|
# NOTE(review): the species() definitions below appear auto-generated by
# RMG-Py (thermo from group additivity / libraries, per the embedded
# `comment` fields). Numeric data is left byte-identical; only comments added.

# Triplet biradical (spinMultiplicity = 3); radical corrections CCOJ +
# C2CsJOCs per the thermo comment.
species(
    label = 'CCC1OC[C]1OC([O])CCC=O(20302)',
    structure = SMILES('CCC1OC[C]1OC([O])CCC=O'),
    E0 = (-269.977,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2800,2850,1350,1500,750,1050,1375,1000,2782.5,750,1395,475,1775,1000,2750,2950,3150,900,1000,1100,1380,1390,370,380,2900,435,300,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (186.205,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.13819,0.144801,-0.000182965,1.46478e-07,-4.87339e-11,-32259.1,49.6651], Tmin=(100,'K'), Tmax=(827.058,'K')), NASAPolynomial(coeffs=[9.48156,0.0753144,-3.2838e-05,6.03793e-09,-4.09622e-13,-33726.6,-1.43944], Tmin=(827.058,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-269.977,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(627.743,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(CCOJ) + radical(C2CsJOCs)"""),
)

# Closed-shell cyclic ketone (spinMultiplicity = 1); transport data from
# the Joback method per the collisionModel comment.
species(
    label = 'CCC1OCC1=O(3198)',
    structure = SMILES('CCC1OCC1=O'),
    E0 = (-248.346,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2950,3150,900,1000,1100,2750,2800,2850,1350,1500,750,1050,1375,1000,2750,2850,1437.5,1250,1305,750,350,300,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (100.116,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(4038.67,'J/mol'), sigma=(6.4294,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=630.83 K, Pc=34.48 bar (from Joback method)"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.71322,0.0296461,5.9882e-05,-9.84158e-08,3.90783e-11,-29768.4,23.7596], Tmin=(100,'K'), Tmax=(979.251,'K')), NASAPolynomial(coeffs=[15.843,0.0231785,-8.71379e-06,1.72785e-09,-1.32224e-13,-34992.9,-56.6588], Tmin=(979.251,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-248.346,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(349.208,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(Cs-(Cds-O2d)CsOsH) + group(Cs-CsCsHH) + group(Cs-(Cds-O2d)OsHH) + group(Cs-CsHHH) + group(Cds-OdCsCs) + ring(Cyclobutane)"""),
)

# Closed-shell dialdehyde (spinMultiplicity = 1).
species(
    label = 'O=CCCC=O(5767)',
    structure = SMILES('O=CCCC=O'),
    E0 = (-309.903,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,2695,2870,700,800,1380,1410,450,500,1750,1800,900,1100,180],'cm^-1')),
        HinderedRotor(inertia=(0.18601,'amu*angstrom^2'), symmetry=1, barrier=(4.27673,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.186498,'amu*angstrom^2'), symmetry=1, barrier=(4.28796,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.186203,'amu*angstrom^2'), symmetry=1, barrier=(4.28117,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (86.0892,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(3653.08,'J/mol'), sigma=(5.8998,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=570.60 K, Pc=40.36 bar (from Joback method)"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.98042,0.0494518,-6.06096e-05,5.52155e-08,-2.1639e-11,-37204.9,20.2227], Tmin=(100,'K'), Tmax=(774.657,'K')), NASAPolynomial(coeffs=[2.99129,0.0355848,-1.7014e-05,3.28724e-09,-2.30123e-13,-37102,17.2786], Tmin=(774.657,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-309.903,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(270.22,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-O2d)CsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cds-OdCsH) + group(Cds-OdCsH)"""),
)
# NOTE(review): spinMultiplicity = 1 here despite two radical() corrections
# in the thermo comment (other biradicals in this file use 3) — possible
# generator inconsistency; confirm against the RMG job that produced this.
species(
    label = 'CCC1OC[C]1OC1CCC([O])O1(20383)',
    structure = SMILES('CCC1OC[C]1OC1CCC([O])O1'),
    E0 = (-281.839,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (186.205,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.42937,0.109737,-6.18149e-05,4.54564e-09,7.20347e-12,-33693.1,42.7607], Tmin=(100,'K'), Tmax=(925.843,'K')), NASAPolynomial(coeffs=[15.8569,0.0596168,-2.04089e-05,3.38632e-09,-2.21197e-13,-37946.8,-44.9889], Tmin=(925.843,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-281.839,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(640.214,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsOsOsH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + ring(Tetrahydrofuran) + ring(Oxetane) + radical(C2CsJOCs) + radical(CCOJ)"""),
)

# NOTE(review): same spinMultiplicity-vs-radicals question as above.
species(
    label = 'CCC1OCC12OC([O])CCC2[O](20384)',
    structure = SMILES('CCC1OCC12OC([O])CCC2[O]'),
    E0 = (-253.172,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (186.205,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.869341,0.108292,-6.49054e-05,1.83693e-08,-2.09923e-12,-30277.4,35.34], Tmin=(100,'K'), Tmax=(1916.57,'K')), NASAPolynomial(coeffs=[23.2305,0.057995,-2.55419e-05,4.67724e-09,-3.13266e-13,-39515.4,-96.606], Tmin=(1916.57,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-253.172,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(648.529,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsCsOs) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + polycyclic(s1_4_6_ane) + radical(CCOJ) + radical(CC(C)OJ)"""),
)

# Hydrogen atom; thermo from the BurkeH2O2 library per the comment.
# NOTE(review): spinMultiplicity = 1 for atomic H (a doublet) also looks
# like a generator artifact — confirm.
species(
    label = 'H(3)',
    structure = SMILES('[H]'),
    E0 = (211.792,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (1.00794,'amu'),
    collisionModel = TransportData(shapeIndex=0, epsilon=(1205.6,'J/mol'), sigma=(2.05,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,25472.7,-0.459566], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,25472.7,-0.459566], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(211.792,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""H""", comment="""Thermo library: BurkeH2O2"""),
)

# Doublet radical (spinMultiplicity = 2); radical correction CCOJ.
species(
    label = 'CCC1OCC=1OC([O])CCC=O(20385)',
    structure = SMILES('CCC1OCC=1OC([O])CCC=O'),
    E0 = (-397.925,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2800,2850,1350,1500,750,1050,1375,1000,2782.5,750,1395,475,1775,1000,1380,1390,370,380,2900,435,2750,3150,900,1100,300,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (185.197,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.70124,0.132094,-0.000112537,4.79552e-08,-8.1242e-12,-47605.5,49.0951], Tmin=(100,'K'), Tmax=(1414.09,'K')), NASAPolynomial(coeffs=[28.8819,0.042755,-1.77691e-05,3.27691e-09,-2.2535e-13,-56537.8,-114.218], Tmin=(1414.09,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-397.925,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(602.799,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)OsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + group(Cds-OdCsH) + ring(Cyclobutene) + radical(CCOJ)"""),
)
species(
label = 'CCC1OC[C]1OC(=O)CCC=O(20386)',
structure = SMILES('CCC1OC[C]1OC(=O)CCC=O'),
E0 = (-467.118,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2782.5,750,1395,475,1775,1000,2750,2950,3150,900,1000,1100,2750,2800,2850,1350,1500,750,1050,1375,1000,300,800,800,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (185.197,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.37904,0.12562,-0.000126186,8.0713e-08,-2.26566e-11,-55994.1,45.5903], Tmin=(100,'K'), Tmax=(839.663,'K')), NASAPolynomial(coeffs=[9.67489,0.0729599,-3.21107e-05,6.01865e-09,-4.16797e-13,-57850.3,-5.80658], Tmin=(839.663,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-467.118,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(602.799,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-Cs(Cds-O2d)) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsOs) + group(Cds-OdCsH) + ring(Oxetane) + radical(C2CsJOC(O)C)"""),
)
species(
label = 'CCC1OC=C1OC([O])CCC=O(20387)',
structure = SMILES('CCC1OC=C1OC([O])CCC=O'),
E0 = (-399.585,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2800,2850,1350,1500,750,1050,1375,1000,2782.5,750,1395,475,1775,1000,2750,3150,900,1100,1380,1390,370,380,2900,435,300,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (185.197,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.8694,0.128429,-8.15859e-05,2.85299e-09,1.04631e-11,-47791.8,48.7435], Tmin=(100,'K'), Tmax=(1020.62,'K')), NASAPolynomial(coeffs=[31.8912,0.0375945,-1.48107e-05,2.81934e-09,-2.04527e-13,-57251.8,-131.249], Tmin=(1020.62,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-399.585,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(602.799,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-(Cds-Cds)CsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsOsH) + group(Cds-OdCsH) + ring(Cyclobutene) + radical(CCOJ)"""),
)
species(
label = '[O][CH]CCC=O(5764)',
structure = SMILES('[O][CH]CCC=O'),
E0 = (19.0721,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,2782.5,750,1395,475,1775,1000,3025,407.5,1350,352.5,180,180,2092.49],'cm^-1')),
HinderedRotor(inertia=(0.166661,'amu*angstrom^2'), symmetry=1, barrier=(3.83187,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.164989,'amu*angstrom^2'), symmetry=1, barrier=(3.79342,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.166144,'amu*angstrom^2'), symmetry=1, barrier=(3.81999,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (86.0892,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.56305,0.0641173,-0.000107371,1.07453e-07,-4.10375e-11,2371.43,23.494], Tmin=(100,'K'), Tmax=(845.992,'K')), NASAPolynomial(coeffs=[1.74033,0.0387928,-1.90537e-05,3.64333e-09,-2.50242e-13,3217.68,27.8472], Tmin=(845.992,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(19.0721,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(270.22,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cds-OdCsH) + radical(CCsJOH) + radical(CCOJ)"""),
)
species(
label = 'C2H5(29)',
structure = SMILES('C[CH2]'),
E0 = (107.874,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,1190.6,1642.82,1642.96,3622.23,3622.39],'cm^-1')),
HinderedRotor(inertia=(0.866817,'amu*angstrom^2'), symmetry=1, barrier=(19.9298,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (29.0611,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(2097.75,'J/mol'), sigma=(4.302,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.5, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.24186,-0.00356905,4.82667e-05,-5.85401e-08,2.25805e-11,12969,4.44704], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[4.32196,0.0123931,-4.39681e-06,7.0352e-10,-4.18435e-14,12175.9,0.171104], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(107.874,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(153.818,'J/(mol*K)'), label="""C2H5""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
# Doublet radical; radical correction CCOJ.
species(
    label = '[O]C(CCC=O)OC1=COC1(20388)',
    structure = SMILES('[O]C(CCC=O)OC1=COC1'),
    E0 = (-333.488,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,2782.5,750,1395,475,1775,1000,1380,1390,370,380,2900,435,2750,2950,3150,900,1000,1100,300,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (157.144,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.34863,0.100836,-6.99046e-05,9.33745e-09,5.65076e-12,-39902.1,39.3085], Tmin=(100,'K'), Tmax=(1025.12,'K')), NASAPolynomial(coeffs=[25.6698,0.0280181,-1.1068e-05,2.10337e-09,-1.52239e-13,-47154.9,-100.067], Tmin=(1025.12,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-333.488,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-(Cds-Cds)OsHH) + group(Cds-CdsCsOs) + group(Cds-CdsOsH) + group(Cds-OdCsH) + ring(Cyclobutene) + radical(CCOJ)"""),
)

# Doublet radical; thermo from the DFT_QCI_thermo library.
species(
    label = 'CH2CH2CHO(560)',
    structure = SMILES('[CH2]CC=O'),
    E0 = (11.2619,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,3000,3100,440,815,1455,1000,2782.5,750,1395,475,1775,1000],'cm^-1')),
        HinderedRotor(inertia=(0.221237,'amu*angstrom^2'), symmetry=1, barrier=(5.08666,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.221178,'amu*angstrom^2'), symmetry=1, barrier=(5.08532,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (57.0712,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.76345,0.0293577,-2.47892e-05,1.49239e-08,-4.38497e-12,1397.08,14.4322], Tmin=(100,'K'), Tmax=(767.858,'K')), NASAPolynomial(coeffs=[4.24224,0.0216537,-9.73869e-06,1.85596e-09,-1.3004e-13,1169.99,7.6886], Tmin=(767.858,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(11.2619,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(199.547,'J/(mol*K)'), label="""CH2CH2CHO""", comment="""Thermo library: DFT_QCI_thermo"""),
)

# Doublet radical; radical correction C2CsJOC(O)H.
species(
    label = 'CCC1OC[C]1OC=O(5315)',
    structure = SMILES('CCC1OC[C]1OC=O'),
    E0 = (-280.322,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2782.5,750,1395,475,1775,1000,2750,2950,3150,900,1000,1100,2750,2800,2850,1350,1500,750,1050,1375,1000,300,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (129.134,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.492146,0.0672293,-2.45511e-05,-1.73969e-08,1.28007e-11,-33579.6,31.2146], Tmin=(100,'K'), Tmax=(930.574,'K')), NASAPolynomial(coeffs=[14.0675,0.033878,-1.10921e-05,1.83214e-09,-1.21431e-13,-37188.7,-39.1183], Tmin=(930.574,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-280.322,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(415.724,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-Cs(Cds-O2d)) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdOsH) + ring(Oxetane) + radical(C2CsJOC(O)H)"""),
)

# Triplet biradical (spinMultiplicity = 3); same E0/thermo values as the
# first species in this file (isomeric radical site, per the labels).
species(
    label = 'CC[C]1OCC1OC([O])CCC=O(20389)',
    structure = SMILES('CC[C]1OCC1OC([O])CCC=O'),
    E0 = (-269.977,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2800,2850,1350,1500,750,1050,1375,1000,2782.5,750,1395,475,1775,1000,2750,2950,3150,900,1000,1100,1380,1390,370,380,2900,435,300,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (186.205,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.13819,0.144801,-0.000182965,1.46478e-07,-4.87339e-11,-32259.1,49.6651], Tmin=(100,'K'), Tmax=(827.058,'K')), NASAPolynomial(coeffs=[9.48156,0.0753144,-3.2838e-05,6.03793e-09,-4.09622e-13,-33726.6,-1.43944], Tmin=(827.058,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-269.977,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(627.743,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(C2CsJOCs) + radical(CCOJ)"""),
)

# NOTE(review): spinMultiplicity = 1 despite two radical() corrections in
# the thermo comment — same apparent generator inconsistency as flagged above.
species(
    label = 'CCC1OC[C]1O[C](O)CCC=O(20390)',
    structure = SMILES('CCC1OC[C]1O[C](O)CCC=O'),
    E0 = (-290.436,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (186.205,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.40834,0.14756,-0.0001782,1.29164e-07,-3.87819e-11,-34706.8,50.4668], Tmin=(100,'K'), Tmax=(810.227,'K')), NASAPolynomial(coeffs=[13.9026,0.0667204,-2.79559e-05,5.06134e-09,-3.41358e-13,-37339.6,-24.7277], Tmin=(810.227,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-290.436,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(623.585,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(C2CsJOCs) + radical(Cs_P)"""),
)
species(
label = 'CCC1O[CH]C1OC([O])CCC=O(20391)',
structure = SMILES('CCC1O[CH]C1OC([O])CCC=O'),
E0 = (-270.225,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2800,2850,1350,1500,750,1050,1375,1000,2782.5,750,1395,475,1775,1000,2750,2950,3150,900,1000,1100,1380,1390,370,380,2900,435,300,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.14158,0.138595,-0.000141721,8.44615e-08,-2.10145e-11,-32282.4,48.4942], Tmin=(100,'K'), Tmax=(961.623,'K')), NASAPolynomial(coeffs=[15.5409,0.065042,-2.69891e-05,4.9214e-09,-3.35898e-13,-35683.2,-36.1218], Tmin=(961.623,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-270.225,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(627.743,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(CCsJOCs) + radical(CCOJ)"""),
)
species(
label = 'CCC1OCC1O[C]([O])CCC=O(20392)',
structure = SMILES('CCC1OCC1O[C]([O])CCC=O'),
E0 = (-245.435,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2782.5,750,1395,475,1775,1000,360,370,350,2750,2883.33,3016.67,3150,900,966.667,1033.33,1100,2750,2800,2850,1350,1500,750,1050,1375,1000,300,800,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.70326,0.131048,-0.000128203,7.6283e-08,-1.95091e-11,-29318.4,48.1692], Tmin=(100,'K'), Tmax=(924.052,'K')), NASAPolynomial(coeffs=[12.2409,0.0706878,-3.0223e-05,5.59588e-09,-3.85255e-13,-31895.5,-18.0022], Tmin=(924.052,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-245.435,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(627.743,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(Cs_P) + radical(CCOJ)"""),
)
species(
label = 'C[CH]C1OCC1OC([O])CCC=O(20393)',
structure = SMILES('C[CH]C1OCC1OC([O])CCC=O'),
E0 = (-250.779,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2800,2850,1350,1500,750,1050,1375,1000,2750,2883.33,3016.67,3150,900,966.667,1033.33,1100,1380,1390,370,380,2900,435,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,2782.5,750,1395,475,1775,1000,300,800,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.57777,0.125606,-0.000111457,5.82447e-08,-1.30521e-11,-29963.3,49.5182], Tmin=(100,'K'), Tmax=(1044.38,'K')), NASAPolynomial(coeffs=[13.5229,0.0677689,-2.83859e-05,5.2164e-09,-3.58075e-13,-33117.4,-23.9892], Tmin=(1044.38,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-250.779,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(627.743,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(CCJCO) + radical(CCOJ)"""),
)
species(
label = 'CCC1OC[C]1OC(O)[CH]CC=O(20394)',
structure = SMILES('CCC1OC[C]1OC(O)[CH]CC=O'),
E0 = (-295.78,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2800,2850,1350,1500,750,1050,1375,1000,3615,1277.5,1000,2782.5,750,1395,475,1775,1000,1380,1390,370,380,2900,435,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,2750,2950,3150,900,1000,1100,300,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.17417,0.140631,-0.000155298,1.01809e-07,-2.77659e-11,-35356.1,51.4398], Tmin=(100,'K'), Tmax=(883.481,'K')), NASAPolynomial(coeffs=[14.5398,0.0649553,-2.68091e-05,4.84934e-09,-3.28352e-13,-38309.3,-27.1244], Tmin=(883.481,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-295.78,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(623.585,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(C2CsJOCs) + radical(CCJCO)"""),
)
species(
label = 'CCC1OC[C]1OC(O)C[CH]C=O(20395)',
structure = SMILES('CCC1OC[C]1OC(O)CC=C[O]'),
E0 = (-352.431,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-3.02157,0.142478,-0.000137735,7.13474e-08,-1.47243e-11,-42124.7,51.6799], Tmin=(100,'K'), Tmax=(1179.62,'K')), NASAPolynomial(coeffs=[25.152,0.0469444,-1.62555e-05,2.69292e-09,-1.74303e-13,-48771.5,-88.8957], Tmin=(1179.62,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-352.431,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(627.743,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(O2s-(Cds-Cd)H) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + ring(Oxetane) + radical(C=COJ) + radical(C2CsJOCs)"""),
)
species(
label = '[CH2]CC1OCC1OC([O])CCC=O(20396)',
structure = SMILES('[CH2]CC1OCC1OC([O])CCC=O'),
E0 = (-245.435,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2782.5,750,1395,475,1775,1000,2750,2883.33,3016.67,3150,900,966.667,1033.33,1100,3000,3100,440,815,1455,1000,1380,1390,370,380,2900,435,300,800,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.70324,0.131047,-0.000128202,7.62824e-08,-1.95089e-11,-29318.4,49.2678], Tmin=(100,'K'), Tmax=(924.084,'K')), NASAPolynomial(coeffs=[12.241,0.0706877,-3.02229e-05,5.59587e-09,-3.85254e-13,-31895.5,-16.9038], Tmin=(924.084,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-245.435,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(627.743,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(CCOJ) + radical(RCCJ)"""),
)
species(
label = 'CCC1OCC1OC([O])[CH]CC=O(20397)',
structure = SMILES('CCC1OCC1OC([O])[CH]CC=O'),
E0 = (-250.779,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2800,2850,1350,1500,750,1050,1375,1000,2750,2883.33,3016.67,3150,900,966.667,1033.33,1100,1380,1390,370,380,2900,435,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,2782.5,750,1395,475,1775,1000,300,800,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.57777,0.125606,-0.000111457,5.82447e-08,-1.30521e-11,-29963.3,49.5182], Tmin=(100,'K'), Tmax=(1044.38,'K')), NASAPolynomial(coeffs=[13.5229,0.0677689,-2.83859e-05,5.2164e-09,-3.58075e-13,-33117.4,-23.9892], Tmin=(1044.38,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-250.779,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(627.743,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(CCJCO) + radical(CCOJ)"""),
)
species(
label = 'CC[C]1OC[C]1OC(O)CCC=O(20398)',
structure = SMILES('CC[C]1OC[C]1OC(O)CCC=O'),
E0 = (-314.978,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.65659,0.158974,-0.000224038,1.86495e-07,-6.18209e-11,-37655.5,51.3016], Tmin=(100,'K'), Tmax=(869.29,'K')), NASAPolynomial(coeffs=[10.9312,0.0717143,-3.0785e-05,5.55432e-09,-3.69968e-13,-39083.2,-6.97254], Tmin=(869.29,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-314.978,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(623.585,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(C2CsJOCs) + radical(C2CsJOCs)"""),
)
species(
label = 'CCC1O[CH][C]1OC(O)CCC=O(20399)',
structure = SMILES('CCC1O[CH][C]1OC(O)CCC=O'),
E0 = (-315.226,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.77707,0.154201,-0.000188128,1.32039e-07,-3.77117e-11,-37673.7,50.5477], Tmin=(100,'K'), Tmax=(852.565,'K')), NASAPolynomial(coeffs=[16.9923,0.0614505,-2.49466e-05,4.4413e-09,-2.96604e-13,-41044.7,-41.6751], Tmin=(852.565,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-315.226,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(623.585,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(C2CsJOCs) + radical(CCsJOCs)"""),
)
species(
label = 'CCC1OC[C]1OC(O)CC[C]=O(20400)',
structure = SMILES('CCC1OC[C]1OC(O)CC[C]=O'),
E0 = (-335.722,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.5679,0.150409,-0.000185085,1.34287e-07,-3.99307e-11,-40147.1,51.1677], Tmin=(100,'K'), Tmax=(836.398,'K')), NASAPolynomial(coeffs=[15.1086,0.0638248,-2.61331e-05,4.6644e-09,-3.11485e-13,-43032.4,-30.5253], Tmin=(836.398,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-335.722,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(623.585,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(CCCJ=O) + radical(C2CsJOCs)"""),
)
species(
label = 'CCC1OCC1OC([O])C[CH]C=O(20401)',
structure = SMILES('CCC1OCC1OC([O])CC=C[O]'),
E0 = (-307.429,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.468,0.127912,-9.53446e-05,2.96112e-08,-8.35876e-13,-36729.9,49.9157], Tmin=(100,'K'), Tmax=(1019.36,'K')), NASAPolynomial(coeffs=[23.4881,0.0508668,-1.84768e-05,3.21329e-09,-2.16818e-13,-43310.5,-82.127], Tmin=(1019.36,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-307.429,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(631.9,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(O2s-(Cds-Cd)H) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + ring(Oxetane) + radical(C=COJ) + radical(CCOJ)"""),
)
species(
label = 'CCC1OCC1OC([O])CC[C]=O(20402)',
structure = SMILES('CCC1OCC1OC([O])CC[C]=O'),
E0 = (-290.72,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.88739,0.134197,-0.000136201,8.29673e-08,-2.13931e-11,-34757.6,48.9576], Tmin=(100,'K'), Tmax=(924.152,'K')), NASAPolynomial(coeffs=[13.4593,0.0677733,-2.83904e-05,5.19681e-09,-3.55218e-13,-37594.3,-23.8712], Tmin=(924.152,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-290.72,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(627.743,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(CCCJ=O) + radical(CCOJ)"""),
)
species(
label = 'C[CH]C1OC[C]1OC(O)CCC=O(20403)',
structure = SMILES('C[CH]C1OC[C]1OC(O)CCC=O'),
E0 = (-295.78,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.17417,0.140631,-0.000155298,1.01809e-07,-2.77659e-11,-35356.1,51.4398], Tmin=(100,'K'), Tmax=(883.481,'K')), NASAPolynomial(coeffs=[14.5398,0.0649553,-2.68091e-05,4.84934e-09,-3.28352e-13,-38309.3,-27.1244], Tmin=(883.481,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-295.78,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(623.585,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(C2CsJOCs) + radical(CCJCO)"""),
)
species(
label = '[CH2]CC1OC[C]1OC(O)CCC=O(20404)',
structure = SMILES('[CH2]CC1OC[C]1OC(O)CCC=O'),
E0 = (-290.436,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.40834,0.14756,-0.0001782,1.29164e-07,-3.87819e-11,-34706.8,51.5654], Tmin=(100,'K'), Tmax=(810.227,'K')), NASAPolynomial(coeffs=[13.9026,0.0667204,-2.79559e-05,5.06134e-09,-3.41358e-13,-37339.6,-23.6291], Tmin=(810.227,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-290.436,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(623.585,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(RCCJ) + radical(C2CsJOCs)"""),
)
species(
label = 'CCC1OC[C]1[O](5301)',
structure = SMILES('CCC1OC[C]1[O]'),
E0 = (77.9953,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2950,3150,900,1000,1100,2750,2800,2850,1350,1500,750,1050,1375,1000,2750,2850,1437.5,1250,1305,750,350,300,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (100.116,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.337264,0.0640604,-5.08064e-05,2.24144e-08,-3.87551e-12,9526.71,26.3635], Tmin=(100,'K'), Tmax=(1621.89,'K')), NASAPolynomial(coeffs=[13.2337,0.0238287,-5.8057e-06,7.14199e-10,-3.68955e-14,6451.58,-38.6747], Tmin=(1621.89,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(77.9953,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(349.208,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + ring(Oxetane) + radical(C2CsJOH) + radical(CC(C)OJ)"""),
)
species(
label = 'CCC1OC[C]1OC1CC[CH]OO1(20405)',
structure = SMILES('CCC1OC[C]1OC1CC[CH]OO1'),
E0 = (-78.7661,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-3.24712,0.129928,-0.000106381,4.75864e-08,-8.36239e-12,-9187.27,47.361], Tmin=(100,'K'), Tmax=(1560.2,'K')), NASAPolynomial(coeffs=[24.3971,0.0453409,-1.18739e-05,1.57054e-09,-8.63196e-14,-16144.3,-92.9539], Tmin=(1560.2,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-78.7661,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(640.214,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-OsCs) + group(O2s-OsCs) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsOsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + ring(Oxetane) + ring(12dioxane) + radical(C2CsJOCs) + radical(CCsJOOC)"""),
)
species(
label = 'CCC1OCC12O[CH]CCC([O])O2(20406)',
structure = SMILES('CCC1OCC12O[CH]CCC([O])O2'),
E0 = (-432.354,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-3.91833,0.166621,-0.000171125,8.79912e-08,-1.787e-11,-51708.7,14.6048], Tmin=(100,'K'), Tmax=(1195,'K')), NASAPolynomial(coeffs=[31.9617,0.0465184,-2.03653e-05,3.88402e-09,-2.73999e-13,-60283.9,-164.887], Tmin=(1195,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-432.354,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(648.529,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsOs) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsOsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + polycyclic(PolycyclicRing) + radical(CCsJOCs) + radical(CCOJ)"""),
)
species(
label = 'C=C(OC([O])CCC=O)C([O])CC(20407)',
structure = SMILES('C=C(OC([O])CCC=O)C([O])CC'),
E0 = (-321.056,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,1380,1383.33,1386.67,1390,370,373.333,376.667,380,2800,3000,430,440,350,440,435,1725,2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2782.5,750,1395,475,1775,1000,180,180,180,484.391,652.643,1600,1828.57,2971.43,3200],'cm^-1')),
HinderedRotor(inertia=(0.156706,'amu*angstrom^2'), symmetry=1, barrier=(3.60298,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156706,'amu*angstrom^2'), symmetry=1, barrier=(3.60298,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156706,'amu*angstrom^2'), symmetry=1, barrier=(3.60298,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156706,'amu*angstrom^2'), symmetry=1, barrier=(3.60298,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156706,'amu*angstrom^2'), symmetry=1, barrier=(3.60298,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156706,'amu*angstrom^2'), symmetry=1, barrier=(3.60298,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156706,'amu*angstrom^2'), symmetry=1, barrier=(3.60298,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156706,'amu*angstrom^2'), symmetry=1, barrier=(3.60298,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.08086,0.138658,-0.000130973,6.64241e-08,-1.40052e-11,-38399.2,51.3006], Tmin=(100,'K'), Tmax=(1116.49,'K')), NASAPolynomial(coeffs=[19.0555,0.0629324,-2.92345e-05,5.67452e-09,-4.02242e-13,-43118.9,-52.9989], Tmin=(1116.49,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-321.056,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(623.585,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-(Cds-Cds)CsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-OdCsH) + group(Cds-CdsHH) + radical(CC(C)OJ) + radical(CCOJ)"""),
)
species(
label = 'CCC=C(C[O])OC([O])CCC=O(20408)',
structure = SMILES('CCC=C(C[O])OC([O])CCC=O'),
E0 = (-318.286,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3010,987.5,1337.5,450,1655,2750,2800,2850,1350,1500,750,1050,1375,1000,1380,1390,370,380,2900,435,350,440,435,1725,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,2782.5,750,1395,475,1775,1000,180,180,180,507.203,650.943,1600,1828.57,2971.43,3200],'cm^-1')),
HinderedRotor(inertia=(0.157552,'amu*angstrom^2'), symmetry=1, barrier=(3.62243,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.157552,'amu*angstrom^2'), symmetry=1, barrier=(3.62243,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.157552,'amu*angstrom^2'), symmetry=1, barrier=(3.62243,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.157552,'amu*angstrom^2'), symmetry=1, barrier=(3.62243,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.157552,'amu*angstrom^2'), symmetry=1, barrier=(3.62243,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.157552,'amu*angstrom^2'), symmetry=1, barrier=(3.62243,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.157552,'amu*angstrom^2'), symmetry=1, barrier=(3.62243,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.157552,'amu*angstrom^2'), symmetry=1, barrier=(3.62243,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.41955,0.134838,-0.00013034,4.98039e-08,1.04681e-11,-38100.7,50.1487], Tmin=(100,'K'), Tmax=(560.581,'K')), NASAPolynomial(coeffs=[9.28461,0.0794916,-3.85256e-05,7.54564e-09,-5.35133e-13,-39631.3,1.75498], Tmin=(560.581,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-318.286,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(623.585,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)OsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsH) + group(Cds-OdCsH) + radical(CCOJ) + radical(CCOJ)"""),
)
species(
label = 'CCC1OCC1OC(=O)CCC=O(20409)',
structure = SMILES('CCC1OCC1OC(=O)CCC=O'),
E0 = (-667.906,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.541,0.119734,-9.37254e-05,4.17789e-08,-7.94591e-12,-80129.4,44.7951], Tmin=(100,'K'), Tmax=(1215.48,'K')), NASAPolynomial(coeffs=[15.1089,0.0649407,-2.61051e-05,4.69018e-09,-3.17413e-13,-84176.9,-38.7801], Tmin=(1215.48,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-667.906,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(627.743,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-Cs(Cds-O2d)) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsOs) + group(Cds-OdCsH) + ring(Oxetane)"""),
)
species(
label = 'CCC1OCC=1OC(O)CCC=O(20410)',
structure = SMILES('CCC1OCC=1OC(O)CCC=O'),
E0 = (-623.631,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-3.02517,0.134628,-0.000100776,2.56687e-08,1.77292e-12,-74735.3,49.3832], Tmin=(100,'K'), Tmax=(1050.77,'K')), NASAPolynomial(coeffs=[30.5093,0.0411918,-1.62434e-05,3.03033e-09,-2.14959e-13,-83671.9,-123.052], Tmin=(1050.77,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-623.631,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(623.585,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-(Cds-Cds)OsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsOs) + group(Cds-OdCsH) + ring(Cyclobutene)"""),
)
species(
label = 'CCC1OC=C1OC(O)CCC=O(20411)',
structure = SMILES('CCC1OC=C1OC(O)CCC=O'),
E0 = (-625.29,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-3.16621,0.130465,-6.71656e-05,-2.4331e-08,2.31067e-11,-74922.7,48.9451], Tmin=(100,'K'), Tmax=(978.049,'K')), NASAPolynomial(coeffs=[35.9618,0.0322116,-1.12135e-05,2.10526e-09,-1.56686e-13,-85531,-154.06], Tmin=(978.049,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-625.29,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(623.585,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-(Cds-Cds)CsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsOsH) + group(Cds-OdCsH) + ring(Cyclobutene)"""),
)
species(
label = 'CH2(S)(23)',
structure = SMILES('[CH2]'),
E0 = (419.862,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1369.36,2789.41,2993.36],'cm^-1')),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.19195,-0.00230793,8.0509e-06,-6.60123e-09,1.95638e-12,50484.3,-0.754589], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.28556,0.00460255,-1.97412e-06,4.09548e-10,-3.34695e-14,50922.4,8.67684], Tmin=(1000,'K'), Tmax=(3000,'K'))], Tmin=(200,'K'), Tmax=(3000,'K'), E0=(419.862,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2(S)""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
species(
label = 'CC1OC[C]1OC([O])CCC=O(20412)',
structure = SMILES('CC1OC[C]1OC([O])CCC=O'),
E0 = (-246.197,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,2750,2800,2850,1350,1500,750,1050,1375,1000,2782.5,750,1395,475,1775,1000,2750,2950,3150,900,1000,1100,1380,1390,370,380,2900,435,300,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (172.178,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.47523,0.129822,-0.000168372,1.37239e-07,-4.59433e-11,-29422.5,45.0397], Tmin=(100,'K'), Tmax=(842.274,'K')), NASAPolynomial(coeffs=[8.52872,0.0669519,-2.90505e-05,5.31161e-09,-3.58462e-13,-30562.8,1.72815], Tmin=(842.274,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-246.197,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(557.07,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(C2CsJOCs) + radical(CCOJ)"""),
)
species(
label = 'CO(12)',
structure = SMILES('[C-]#[O+]'),
E0 = (-119.219,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2084.51],'cm^-1')),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (28.0101,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(762.44,'J/mol'), sigma=(3.69,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""PrimaryTransportLibrary"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.5971,-0.00102424,2.83336e-06,-1.75825e-09,3.42587e-13,-14343.2,3.45822], Tmin=(100,'K'), Tmax=(1669.93,'K')), NASAPolynomial(coeffs=[2.92796,0.00181931,-8.35308e-07,1.51269e-10,-9.88872e-15,-14292.7,6.51157], Tmin=(1669.93,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-119.219,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""CO""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'CCC([O])O[C]1COC1CC(13615)',
structure = SMILES('CCC([O])O[C]1COC1CC'),
E0 = (-163.397,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2950,3150,900,1000,1100,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,1380,1390,370,380,2900,435,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,300,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (158.195,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(4545.5,'J/mol'), sigma=(7.92794,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=710.00 K, Pc=20.7 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.05834,0.116184,-0.000116392,7.47035e-08,-2.08086e-11,-19474.5,41.5234], Tmin=(100,'K'), Tmax=(851.634,'K')), NASAPolynomial(coeffs=[9.71033,0.0656052,-2.7308e-05,4.96835e-09,-3.37759e-13,-21308.7,-8.69985], Tmin=(851.634,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-163.397,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(582.013,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + ring(Oxetane) + radical(C2CsJOCs) + radical(CCOJ)"""),
)
species(
label = 'CCC1([CH]OC1)OC([O])CCC=O(20413)',
structure = SMILES('CCC1([CH]OC1)OC([O])CCC=O'),
E0 = (-275.566,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2750,2800,2850,1350,1500,750,1050,1375,1000,2782.5,750,1395,475,1775,1000,2750,2950,3150,900,1000,1100,1380,1390,370,380,2900,435,300,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.46781,0.146424,-0.000162758,1.05012e-07,-2.79282e-11,-32913.7,48.4699], Tmin=(100,'K'), Tmax=(908.246,'K')), NASAPolynomial(coeffs=[16.3177,0.063688,-2.61131e-05,4.70941e-09,-3.18498e-13,-36325.9,-40.3507], Tmin=(908.246,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-275.566,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(627.743,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsCsOs) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(CCsJOCs) + radical(CCOJ)"""),
)
species(
label = 'CCC1OCC12OC(CCC=O)O2(20305)',
structure = SMILES('CCC1OCC12OC(CCC=O)O2'),
E0 = (-523.446,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.45586,0.12214,-7.49089e-05,1.01979e-08,4.23153e-12,-62706.8,41.8245], Tmin=(100,'K'), Tmax=(1108.68,'K')), NASAPolynomial(coeffs=[26.53,0.0498505,-2.07897e-05,3.92363e-09,-2.77024e-13,-71118.5,-109.955], Tmin=(1108.68,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-523.446,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(636.057,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsCs) + group(Cs-CsCsOsOs) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + polycyclic(s1_4_4_ane)"""),
)
species(
label = 'C=COCC([O])O[C]1COC1CC(20414)',
structure = SMILES('C=COCC([O])O[C]1COC1CC'),
E0 = (-228.381,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,3010,987.5,1337.5,450,1655,2950,3100,1380,975,1025,1650,1380,1390,370,380,2900,435,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,2750,2950,3150,900,1000,1100,300,800,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.84151,0.144804,-0.000144533,7.85552e-08,-1.71977e-11,-27216.6,49.2335], Tmin=(100,'K'), Tmax=(1106.53,'K')), NASAPolynomial(coeffs=[22.5953,0.0528519,-1.98824e-05,3.45464e-09,-2.29935e-13,-32845.9,-76.0593], Tmin=(1106.53,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-228.381,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(627.743,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsOsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + ring(Oxetane) + radical(CCOJ) + radical(C2CsJOCs)"""),
)
species(
label = 'CCC1OC[C]1OC([O])CC=CO(20415)',
structure = SMILES('CCC1OC[C]1OC([O])CC=CO'),
E0 = (-268.188,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,3615,1277.5,1000,2995,3025,975,1000,1300,1375,400,500,1630,1680,1380,1390,370,380,2900,435,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,2750,2950,3150,900,1000,1100,300,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (186.205,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.87526,0.144386,-0.000145286,7.95566e-08,-1.7475e-11,-32002.2,50.6063], Tmin=(100,'K'), Tmax=(1106.21,'K')), NASAPolynomial(coeffs=[22.9804,0.0508914,-1.85062e-05,3.15019e-09,-2.0703e-13,-37722.4,-76.7414], Tmin=(1106.21,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-268.188,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(627.743,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(O2s-CsH) + group(O2s-(Cds-Cd)H) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + ring(Oxetane) + radical(CCOJ) + radical(C2CsJOCs)"""),
)
species(
label = 'CCC1[C]CO1(5334)',
structure = SMILES('CCC1[C]CO1'),
E0 = (291.692,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2750,2950,3150,900,1000,1100,2750,2800,2850,1350,1500,750,1050,1375,1000,300,800,800,800,800,800,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (84.1164,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.84973,0.0349169,2.96939e-05,-6.85898e-08,3.18014e-11,35171.6,19.2435], Tmin=(100,'K'), Tmax=(884.129,'K')), NASAPolynomial(coeffs=[12.4599,0.0209117,-4.22488e-06,4.7909e-10,-2.69891e-14,31966.7,-38.1523], Tmin=(884.129,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(291.692,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(324.264,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + ring(Oxetane) + radical(CCJ2_triplet)"""),
)
species(
label = '[O]C([O])CCC=O(18653)',
structure = SMILES('[O]C([O])CCC=O'),
E0 = (-135.752,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,1380,1390,370,380,2900,435,2782.5,750,1395,475,1775,1000,180,180,1940,1940.45],'cm^-1')),
HinderedRotor(inertia=(0.0847157,'amu*angstrom^2'), symmetry=1, barrier=(1.94778,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0845682,'amu*angstrom^2'), symmetry=1, barrier=(1.94439,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0849048,'amu*angstrom^2'), symmetry=1, barrier=(1.95213,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (102.089,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.53632,0.0352151,-1.37843e-05,9.95017e-10,1.44007e-13,-16394.9,16.3348], Tmin=(100,'K'), Tmax=(2711.71,'K')), NASAPolynomial(coeffs=[49.2619,-0.011801,1.73631e-06,-2.42589e-10,2.04225e-14,-47621.7,-256.909], Tmin=(2711.71,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-135.752,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsOsH) + group(Cds-OdCsH) + radical(CCOJ) + radical(CCOJ)"""),
)
species(
label = 'O(4)',
structure = SMILES('[O]'),
E0 = (243.005,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (15.9994,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(665.16,'J/mol'), sigma=(2.75,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,29226.7,5.11107], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,29226.7,5.11107], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(243.005,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""O""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'CCC1OC[C]1O[CH]CCC=O(20416)',
structure = SMILES('CCC1OC[C]1O[CH]CCC=O'),
E0 = (-114.995,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,2782.5,750,1395,475,1775,1000,3025,407.5,1350,352.5,2750,2950,3150,900,1000,1100,2750,2800,2850,1350,1500,750,1050,1375,1000,300,800,800,800,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (170.206,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.81982,0.135273,-0.000143253,7.98382e-08,-1.37989e-11,-13627.6,43.9919], Tmin=(100,'K'), Tmax=(697.299,'K')), NASAPolynomial(coeffs=[14.1712,0.0618968,-2.48934e-05,4.42766e-09,-2.96444e-13,-16303.9,-30.5898], Tmin=(697.299,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-114.995,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(602.799,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(Cs-CsCsOsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + ring(Oxetane) + radical(CCsJOCs) + radical(C2CsJOCs)"""),
)
species(
label = 'N2',
structure = SMILES('N#N'),
E0 = (-8.69489,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (28.0135,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(810.913,'J/mol'), sigma=(3.621,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""PrimaryTransportLibrary"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.61263,-0.00100893,2.49898e-06,-1.43376e-09,2.58636e-13,-1051.1,2.6527], Tmin=(100,'K'), Tmax=(1817.04,'K')), NASAPolynomial(coeffs=[2.9759,0.00164141,-7.19722e-07,1.25378e-10,-7.91526e-15,-1025.84,5.53757], Tmin=(1817.04,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-8.69489,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""N2""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'Ne',
structure = SMILES('[Ne]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (20.1797,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1235.53,'J/mol'), sigma=(3.758e-10,'m'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with fixed Lennard Jones Parameters. This is the fallback method! Try improving transport databases!"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ne""", comment="""Thermo library: primaryThermoLibrary"""),
)
transitionState(
label = 'TS1',
E0 = (-269.977,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS2',
E0 = (-243.591,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS3',
E0 = (-253.172,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS4',
E0 = (-174.844,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS5',
E0 = (-208.82,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS6',
E0 = (-181.023,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS7',
E0 = (-196.221,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS8',
E0 = (-203.062,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS9',
E0 = (-241.028,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS10',
E0 = (-105.598,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS11',
E0 = (-111.941,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS12',
E0 = (-103.702,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS13',
E0 = (-84.4766,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS14',
E0 = (-82.2195,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS15',
E0 = (-190.218,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS16',
E0 = (-167.832,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS17',
E0 = (-135.856,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS18',
E0 = (-141.2,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS19',
E0 = (-205.543,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS20',
E0 = (-205.251,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS21',
E0 = (-237.24,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS22',
E0 = (-224.953,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS23',
E0 = (-202.928,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS24',
E0 = (-190.272,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS25',
E0 = (-163.076,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS26',
E0 = (97.0675,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS27',
E0 = (-78.7661,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS28',
E0 = (-234.283,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS29',
E0 = (-196.164,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS30',
E0 = (-193.394,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS31',
E0 = (-206.577,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS32',
E0 = (-235.459,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS33',
E0 = (-235.459,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS34',
E0 = (173.665,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS35',
E0 = (75.1156,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS36',
E0 = (-118.247,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS37',
E0 = (-261.693,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS38',
E0 = (85.4193,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS39',
E0 = (-124.321,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS40',
E0 = (155.941,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS41',
E0 = (128.01,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
reaction(
label = 'reaction1',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CCC1OCC1=O(3198)', 'O=CCCC=O(5767)'],
transitionState = 'TS1',
kinetics = Arrhenius(A=(5e+12,'s^-1'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Exact match found for rate rule [RJJ]
Euclidian distance = 0
family: 1,4_Linear_birad_scission"""),
)
reaction(
label = 'reaction2',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CCC1OC[C]1OC1CCC([O])O1(20383)'],
transitionState = 'TS2',
kinetics = Arrhenius(A=(132522,'s^-1'), n=1.48406, Ea=(26.3859,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R6_SSS;multiplebond_intra;radadd_intra] for rate rule [R6_SSS_CO;carbonylbond_intra_H;radadd_intra_O]
Euclidian distance = 2.44948974278
family: Intra_R_Add_Exocyclic"""),
)
reaction(
label = 'reaction3',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CCC1OCC12OC([O])CCC2[O](20384)'],
transitionState = 'TS3',
kinetics = Arrhenius(A=(2020,'s^-1'), n=1.67, Ea=(16.8054,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R7_SSSS;multiplebond_intra;radadd_intra_csNdNd] for rate rule [R7_SSSS_CO;carbonylbond_intra_H;radadd_intra_csNdNd]
Euclidian distance = 2.2360679775
family: Intra_R_Add_Exocyclic
Ea raised from 12.9 to 16.8 kJ/mol to match endothermicity of reaction."""),
)
reaction(
label = 'reaction4',
reactants = ['H(3)', 'CCC1OCC=1OC([O])CCC=O(20385)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS4',
kinetics = Arrhenius(A=(170.641,'m^3/(mol*s)'), n=1.56204, Ea=(11.2897,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Cds_Cds;HJ] for rate rule [Cds-OsCs_Cds;HJ]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction5',
reactants = ['H(3)', 'CCC1OC[C]1OC(=O)CCC=O(20386)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS5',
kinetics = Arrhenius(A=(1.83701,'m^3/(mol*s)'), n=1.71338, Ea=(46.5052,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [CO_O;HJ] + [CO-NdNd_O;YJ] for rate rule [CO-NdNd_O;HJ]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction6',
reactants = ['H(3)', 'CCC1OC=C1OC([O])CCC=O(20387)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS6',
kinetics = Arrhenius(A=(2.182e+10,'cm^3/(mol*s)'), n=0.859, Ea=(6.76971,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2000,'K'), comment="""Estimated using an average for rate rule [Cds-OsH_Cds;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction7',
reactants = ['CCC1OCC1=O(3198)', '[O][CH]CCC=O(5764)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS7',
kinetics = Arrhenius(A=(32300,'cm^3/(mol*s)'), n=2.98, Ea=(33.0536,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [Od_CO-CsCs;YJ] for rate rule [Od_CO-CsCs;CJ]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction8',
reactants = ['C2H5(29)', '[O]C(CCC=O)OC1=COC1(20388)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS8',
kinetics = Arrhenius(A=(0.00139222,'m^3/(mol*s)'), n=2.42243, Ea=(22.5521,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Cds_Cds;CsJ-CsHH] for rate rule [Cds-OsH_Cds;CsJ-CsHH]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction9',
reactants = ['CH2CH2CHO(560)', 'CCC1OC[C]1OC=O(5315)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS9',
kinetics = Arrhenius(A=(7.94e+10,'cm^3/(mol*s)'), n=0, Ea=(28.0328,'kJ/mol'), T0=(1,'K'), Tmin=(333,'K'), Tmax=(363,'K'), comment="""Estimated using template [CO_O;CsJ-CsHH] for rate rule [CO-NdH_O;CsJ-CsHH]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction10',
reactants = ['CC[C]1OCC1OC([O])CCC=O(20389)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS10',
kinetics = Arrhenius(A=(7.01161e+09,'s^-1'), n=1.05, Ea=(164.379,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R2H_S;C_rad_out_NonDe;Cs_H_out_NonDe] for rate rule [R2H_S_cy4;C_rad_out_NDMustO;Cs_H_out_NDMustO]
Euclidian distance = 1.73205080757
family: intra_H_migration"""),
)
reaction(
label = 'reaction11',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CCC1OC[C]1O[C](O)CCC=O(20390)'],
transitionState = 'TS11',
kinetics = Arrhenius(A=(1.43381e+07,'s^-1'), n=1.70481, Ea=(158.036,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R2H_S;Y_rad_out;Cs_H_out_NonDe] for rate rule [R2H_S;O_rad_out;Cs_H_out_NDMustO]
Euclidian distance = 1.41421356237
family: intra_H_migration"""),
)
reaction(
label = 'reaction12',
reactants = ['CCC1O[CH]C1OC([O])CCC=O(20391)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS12',
kinetics = Arrhenius(A=(1.14713e+11,'s^-1'), n=0.563333, Ea=(166.523,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R2H_S;C_rad_out_1H;Cs_H_out_NonDe] for rate rule [R2H_S_cy4;C_rad_out_H/NonDeO;Cs_H_out_NDMustO]
Euclidian distance = 1.73205080757
family: intra_H_migration"""),
)
reaction(
label = 'reaction13',
reactants = ['CCC1OCC1O[C]([O])CCC=O(20392)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS13',
kinetics = Arrhenius(A=(4.97e+09,'s^-1'), n=1.01, Ea=(160.958,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using an average for rate rule [R3H_SS_O;Y_rad_out;Cs_H_out_Cs2]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction14',
reactants = ['C[CH]C1OCC1OC([O])CCC=O(20393)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS14',
kinetics = Arrhenius(A=(7.66994e+09,'s^-1'), n=0.768667, Ea=(168.559,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R3H_SS;C_rad_out_H/NonDeC;Cs_H_out_NonDe] for rate rule [R3H_SS_23cy4;C_rad_out_H/NonDeC;Cs_H_out_NDMustO]
Euclidian distance = 1.41421356237
family: intra_H_migration"""),
)
reaction(
label = 'reaction15',
reactants = ['CCC1OC[C]1OC(O)[CH]CC=O(20394)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS15',
kinetics = Arrhenius(A=(5.71,'s^-1'), n=3.021, Ea=(105.562,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""From training reaction 319 used for R3H_SS_Cs;C_rad_out_H/NonDeC;O_H_out
Exact match found for rate rule [R3H_SS_Cs;C_rad_out_H/NonDeC;O_H_out]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction16',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CCC1OC[C]1OC(O)C[CH]C=O(20395)'],
transitionState = 'TS16',
kinetics = Arrhenius(A=(363473,'s^-1'), n=1.92229, Ea=(102.145,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4H_SSS;Y_rad_out;Cs_H_out_H/OneDe] for rate rule [R4H_SSS;O_rad_out;Cs_H_out_H/CO]
Euclidian distance = 1.41421356237
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction17',
reactants = ['[CH2]CC1OCC1OC([O])CCC=O(20396)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS17',
kinetics = Arrhenius(A=(1.86e+10,'s^-1'), n=0.58, Ea=(109.579,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R4H_SSS;C_rad_out_2H;Cs_H_out_NonDe] for rate rule [R4H_SSS;C_rad_out_2H;Cs_H_out_NDMustO]
Euclidian distance = 1.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction18',
reactants = ['CCC1OCC1OC([O])[CH]CC=O(20397)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS18',
kinetics = Arrhenius(A=(1.86e+10,'s^-1'), n=0.58, Ea=(109.579,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R4H_SSS;C_rad_out_single;Cs_H_out_Cs2] for rate rule [R4H_SSS;C_rad_out_H/NonDeC;Cs_H_out_Cs2]
Euclidian distance = 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction19',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CC[C]1OC[C]1OC(O)CCC=O(20398)'],
transitionState = 'TS19',
kinetics = Arrhenius(A=(1.9e+07,'s^-1'), n=1.1, Ea=(64.4336,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R5Hall;O_rad_out;Cs_H_out_NDMustO] for rate rule [R5HJ_3;O_rad_out;Cs_H_out_NDMustO]
Euclidian distance = 1.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction20',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CCC1O[CH][C]1OC(O)CCC=O(20399)'],
transitionState = 'TS20',
kinetics = Arrhenius(A=(23000,'s^-1'), n=2.11, Ea=(64.7265,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R5Hall;O_rad_out;Cs_H_out_H/NonDeO] for rate rule [R5HJ_3;O_rad_out;Cs_H_out_H/NonDeO]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction21',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CCC1OC[C]1OC(O)CC[C]=O(20400)'],
transitionState = 'TS21',
kinetics = Arrhenius(A=(214655,'s^-1'), n=1.70206, Ea=(32.737,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5H_CCC;O_rad_out;XH_out] for rate rule [R5H_CCC_O;O_rad_out;CO_H_out]
Euclidian distance = 1.41421356237
family: intra_H_migration"""),
)
reaction(
label = 'reaction22',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CCC1OCC1OC([O])C[CH]C=O(20401)'],
transitionState = 'TS22',
kinetics = Arrhenius(A=(12720.5,'s^-1'), n=1.89333, Ea=(45.0245,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5H_SSSS;C_rad_out_single;Cs_H_out_H/OneDe] for rate rule [R5H_SSSS_OCC;C_rad_out_Cs2;Cs_H_out_H/CO]
Euclidian distance = 3.31662479036
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction23',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CCC1OCC1OC([O])CC[C]=O(20402)'],
transitionState = 'TS23',
kinetics = Arrhenius(A=(274.273,'s^-1'), n=2.52222, Ea=(67.0486,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R6H_SSSSS_OO;C_rad_out_single;XH_out] for rate rule [R6H_SSSSS_OO;C_rad_out_Cs2;CO_H_out]
Euclidian distance = 3.16227766017
family: intra_H_migration"""),
)
reaction(
label = 'reaction24',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['C[CH]C1OC[C]1OC(O)CCC=O(20403)'],
transitionState = 'TS24',
kinetics = Arrhenius(A=(3.12e+09,'s^-1'), n=0, Ea=(79.7052,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R6Hall;O_rad_out;Cs_H_out_H/NonDeC] for rate rule [R6HJ_3;O_rad_out;Cs_H_out_H/NonDeC]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction25',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['[CH2]CC1OC[C]1OC(O)CCC=O(20404)'],
transitionState = 'TS25',
kinetics = Arrhenius(A=(5.85e+08,'s^-1'), n=0, Ea=(106.901,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R7Hall;O_rad_out;Cs_H_out_2H] for rate rule [R7HJ_3;O_rad_out;Cs_H_out_2H]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction26',
reactants = ['[O][CH]CCC=O(5764)', 'CCC1OC[C]1[O](5301)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS26',
kinetics = Arrhenius(A=(7.46075e+06,'m^3/(mol*s)'), n=0.027223, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;Y_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -14.4 to 0 kJ/mol."""),
)
reaction(
label = 'reaction27',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CCC1OC[C]1OC1CC[CH]OO1(20405)'],
transitionState = 'TS27',
kinetics = Arrhenius(A=(463580,'s^-1'), n=1.14062, Ea=(191.211,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R6_SSS;multiplebond_intra;radadd_intra] for rate rule [R6_SSS_CO;carbonyl_intra_H;radadd_intra_O]
Euclidian distance = 2.44948974278
family: Intra_R_Add_Endocyclic
Ea raised from 187.7 to 191.2 kJ/mol to match endothermicity of reaction."""),
)
reaction(
label = 'reaction28',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CCC1OCC12O[CH]CCC([O])O2(20406)'],
transitionState = 'TS28',
kinetics = Arrhenius(A=(5.59423e+07,'s^-1'), n=0.9735, Ea=(35.6937,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [R7_linear;multiplebond_intra;radadd_intra_csNdNd] + [R7_linear;carbonyl_intra_H;radadd_intra] for rate rule [R7_linear;carbonyl_intra_H;radadd_intra_csNdNd]
Euclidian distance = 2.0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction29',
reactants = ['C=C(OC([O])CCC=O)C([O])CC(20407)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS29',
kinetics = Arrhenius(A=(3.21748e+08,'s^-1'), n=0.95, Ea=(124.892,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4_S_D;doublebond_intra_secNd_2H;radadd_intra] for rate rule [R4_S_D;doublebond_intra_secNd_2H;radadd_intra_O]
Euclidian distance = 1.0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction30',
reactants = ['CCC=C(C[O])OC([O])CCC=O(20408)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS30',
kinetics = Arrhenius(A=(3.21748e+08,'s^-1'), n=0.95, Ea=(124.892,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4_S_D;doublebond_intra_secNd;radadd_intra] for rate rule [R4_S_D;doublebond_intra_secNd_HNd;radadd_intra_O]
Euclidian distance = 1.41421356237
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction31',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CCC1OCC1OC(=O)CCC=O(20409)'],
transitionState = 'TS31',
kinetics = Arrhenius(A=(7.437e+08,'s^-1'), n=1.045, Ea=(63.4002,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3radExo;Y_rad_NDe;XH_Rrad]
Euclidian distance = 0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction32',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CCC1OCC=1OC(O)CCC=O(20410)'],
transitionState = 'TS32',
kinetics = Arrhenius(A=(3.21e+09,'s^-1'), n=0.137, Ea=(34.518,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5;Y_rad;XH_Rrad_NDe] for rate rule [R5radEndo;Y_rad;XH_Rrad_NDe]
Euclidian distance = 1.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction33',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CCC1OC=C1OC(O)CCC=O(20411)'],
transitionState = 'TS33',
kinetics = Arrhenius(A=(6.42e+09,'s^-1'), n=0.137, Ea=(34.518,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5;Y_rad;XH_Rrad_NDe] for rate rule [R5radEndo;Y_rad;XH_Rrad_NDe]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction34',
reactants = ['CH2(S)(23)', 'CC1OC[C]1OC([O])CCC=O(20412)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS34',
kinetics = Arrhenius(A=(1.31021e+06,'m^3/(mol*s)'), n=0.189, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [carbene;C_pri] for rate rule [carbene;C_pri/NonDeC]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 3.0
family: 1,2_Insertion_carbene
Ea raised from -1.5 to 0 kJ/mol."""),
)
reaction(
label = 'reaction35',
reactants = ['CO(12)', 'CCC([O])O[C]1COC1CC(13615)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS35',
kinetics = Arrhenius(A=(274200,'cm^3/(mol*s)'), n=2.53, Ea=(357.732,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 4 used for CO;C_pri/NonDeC
Exact match found for rate rule [CO;C_pri/NonDeC]
Euclidian distance = 0
Multiplied by reaction path degeneracy 3.0
family: 1,2_Insertion_CO"""),
)
reaction(
label = 'reaction36',
reactants = ['CCC1([CH]OC1)OC([O])CCC=O(20413)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS36',
kinetics = Arrhenius(A=(1.33e+08,'s^-1'), n=1.36, Ea=(157.318,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [cCs(-R!HR!H)CJ;CsJ;C]
Euclidian distance = 0
family: 1,2_shiftC"""),
)
reaction(
label = 'reaction37',
reactants = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
products = ['CCC1OCC12OC(CCC=O)O2(20305)'],
transitionState = 'TS37',
kinetics = Arrhenius(A=(1.62e+12,'s^-1'), n=-0.305, Ea=(8.28432,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4_SSS;C_rad_out_single;Ypri_rad_out] for rate rule [R4_SSS;C_rad_out_Cs2;Opri_rad]
Euclidian distance = 3.16227766017
family: Birad_recombination"""),
)
reaction(
label = 'reaction38',
reactants = ['C=COCC([O])O[C]1COC1CC(20414)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS38',
kinetics = Arrhenius(A=(7040,'s^-1'), n=2.66, Ea=(313.8,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 7 used for R_ROR;R1_doublebond_CH2;R2_doublebond_H;R_O_C
Exact match found for rate rule [R_ROR;R1_doublebond_CH2;R2_doublebond_H;R_O_C]
Euclidian distance = 0
family: ketoenol"""),
)
reaction(
label = 'reaction39',
reactants = ['CCC1OC[C]1OC([O])CC=CO(20415)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS39',
kinetics = Arrhenius(A=(605.045,'s^-1'), n=2.96, Ea=(143.867,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R_ROR;R1_doublebond;R2_doublebond_H;R_O_H] for rate rule [R_ROR;R1_doublebond_CHR;R2_doublebond_H;R_O_H]
Euclidian distance = 1.0
family: ketoenol"""),
)
reaction(
label = 'reaction40',
reactants = ['CCC1[C]CO1(5334)', '[O]C([O])CCC=O(18653)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS40',
kinetics = Arrhenius(A=(2711.41,'m^3/(mol*s)'), n=1.40819, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""Estimated using an average for rate rule [O_rad/NonDe;Birad]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: Birad_R_Recombination
Ea raised from -12.0 to 0 kJ/mol."""),
)
reaction(
label = 'reaction41',
reactants = ['O(4)', 'CCC1OC[C]1O[CH]CCC=O(20416)'],
products = ['CCC1OC[C]1OC([O])CCC=O(20302)'],
transitionState = 'TS41',
kinetics = Arrhenius(A=(2085.55,'m^3/(mol*s)'), n=1.09077, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""Estimated using template [Y_rad;O_birad] for rate rule [C_rad/H/CsO;O_birad]
Euclidian distance = 4.0
family: Birad_R_Recombination
Ea raised from -8.3 to 0 kJ/mol."""),
)
# Pressure-dependent reaction network definition (auto-generated by RMG-Py).
# The single isomer is the radical whose reactions are listed above; the
# reactant channel is the bimolecular entry into the network.
network(
    label = '3631',
    isomers = [
        'CCC1OC[C]1OC([O])CCC=O(20302)',
    ],
    reactants = [
        ('CCC1OCC1=O(3198)', 'O=CCCC=O(5767)'),
    ],
    bathGas = {
        'N2': 0.5,
        'Ne': 0.5,
    },
)

# Master-equation solver settings for the pressure-dependence calculation:
# temperature/pressure grids, energy graining, and the collision model.
pressureDependence(
    label = '3631',
    Tmin = (300,'K'),
    Tmax = (2000,'K'),
    Tcount = 8,
    Tlist = ([302.47,323.145,369.86,455.987,609.649,885.262,1353.64,1896.74],'K'),
    Pmin = (0.01,'bar'),
    Pmax = (100,'bar'),
    Pcount = 5,
    Plist = ([0.0125282,0.0667467,1,14.982,79.8202],'bar'),
    maximumGrainSize = (0.5,'kcal/mol'),
    minimumGrainCount = 250,
    method = 'modified strong collision',
    interpolationModel = ('Chebyshev', 6, 4),
    activeKRotor = True,
    activeJRotor = True,
    rmgmode = True,
)
|
# WSGI entry point: builds the flaskbb application object served by the host.
from flaskbb import create_app
#from flaskbb.configs.production import ProductionConfig
from flaskbb.configs.development import DevelopmentConfig
# Swap the commented pair to deploy with the production configuration.
#flaskbb = create_app(config=ProductionConfig())
flaskbb = create_app(config=DevelopmentConfig())
|
from django.conf.urls import url
from . import views
from shop.views import IndexView

app_name = 'shop'

# URL routes for the shop app.
# BUG FIX: each non-include pattern is now terminated with '$'. The original
# unanchored regexes (e.g. r'^register/') matched ANY path that merely
# started with that prefix, silently shadowing deeper URLs.
urlpatterns = [
    url(r'^$', IndexView.as_view(), name='index'),
    url(r'^register/$', views.RegisterCreate.as_view(), name='register'),
    url(r'^coffee/$', views.coffee_view, name='coffee_view'),
    url(r'^baked/$', views.baked_view, name='baked'),
    url(r'^contacts/$', views.contacts_view, name='contacts'),
]
|
from django.contrib import admin
from .models import Blog, Keyword, Reply
class BlogAdmin(admin.ModelAdmin):
    """Admin configuration for Blog entries."""
    # Columns shown on the Blog change-list page.
    list_display = (
        'title',
        'date',
        'image',
    )
    # Sidebar filter by publication date.
    list_filter = ('date',)
class ReplyAdmin(admin.ModelAdmin):
    """Admin configuration for Reply entries."""
    # Columns shown on the Reply change-list page.
    list_display = (
        'blog',
        'author',
        'date',
    )
    # Sidebar filter by reply date.
    list_filter = ('date',)
# Register the models with the default admin site; Keyword gets the
# stock ModelAdmin since it needs no custom display.
admin.site.register(Blog, BlogAdmin)
admin.site.register(Reply, ReplyAdmin)
admin.site.register(Keyword)
|
import datetime
import numpy as np
import tensorflow as tf
from tensorflow import keras
from auto_forecaster.io import TextForecast, NetCDF
class ForecastRegion:
    """A named forecast region that loads paired NetCDF inputs and
    text-forecast outputs and tokenises the output texts for training.
    """

    def __init__(self, name, region_id, spot_range):
        """
        Parameters
        ----------
        name: str
            the short-form name of this region
        region_id: int
            an arbitrary region ID for this region
        spot_range: list of ForecastPoint
            a list of all ForecastPoints for this region
        """
        self.name = name
        self.region_id = region_id
        self.spot_range = spot_range
        # data management
        self.data = {}          # date_time -> {"input": ..., "output": ...}
        self.params = []        # parameters last loaded by process_data()
        self.tokeniser = None   # fitted keras Tokenizer, created lazily
        # file management / progress bookkeeping
        self.total_files = 0
        self.files_loaded = 0
        self.adjust_total = 0
        self.adjust_loaded = 0
        self.loader = None

    def set_loader(self, loader):
        """Attach the loader object used for progress reporting."""
        self.loader = loader

    def add_spot_to_region(self, spot_point):
        """
        Adds a ForecastPoint to this ForecastRegion

        Parameters
        ----------
        spot_point: ForecastPoint
            the ForecastPoint to add to this region
        """
        self.spot_range.append(spot_point)

    def calc_total_files(self, params_to_load, mode="train"):
        """Pre-compute ``self.total_files`` for progress reporting.

        Parameters
        ----------
        params_to_load: list of str or None
            parameters that will be loaded; None means "all available".
        mode: str
            "train" (default) or "test".
        """
        split = "test" if mode == "test" else "train"
        data = TextForecast.get_files_from_region(self, split)
        if params_to_load is None:
            # find all parameters available
            params_to_load = NetCDF.get_all_params(list(data))
        # BUG FIX: the original referenced ``parameters_to_load``, which was
        # only bound inside the None branch, so any explicit parameter list
        # raised NameError here.
        # 36 = entries per file (forecast hours) — assumption, TODO confirm.
        self.total_files = len(data) * 36 * len(params_to_load)

    def process_data(self, mode="train", parameters_to_load=None):
        """Load input/output pairs into ``self.data`` and tokenise outputs.

        Parameters
        ----------
        mode: str
            "train" (default) or "test" data set.
        parameters_to_load: list of str or None
            NetCDF parameters to load; None means "all available".
        """
        split = "test" if mode == "test" else "train"
        data = TextForecast.get_files_from_region(self, split)
        if parameters_to_load is None:
            # find all parameters available
            parameters_to_load = NetCDF.get_all_params(list(data))
        self.params = parameters_to_load
        training_for_tokeniser = []
        for date_time in data:
            # Input covers the window [T (snapped to the hour), T + 37h).
            time_to_use = date_time.replace(minute=0)
            max_time_to_use = date_time + datetime.timedelta(hours=37)
            input_data = NetCDF.open_from_forecast_region(
                self, parameters_to_load, [time_to_use, max_time_to_use]
            )
            # Matching text-forecast output for the same issue time.
            output_data = TextForecast.open_file(data[date_time])
            training_for_tokeniser.append(output_data)
            self.data[date_time] = {
                "input": input_data,
                "output": output_data,
            }
        if not self.tokeniser:
            # Fit once on all output texts; keep case (lower=False) and keep
            # '.' out of the filter list so sentence boundaries survive.
            self.tokeniser = keras.preprocessing.text.Tokenizer(
                filters='!"#$%&()*+,-/:;<=>?@[\\]^_`{|}~\t\n',
                lower=False)
            self.tokeniser.fit_on_texts(training_for_tokeniser)
        for dates in self.data:
            # BUG FIX: ``np.int`` was deprecated in NumPy 1.20 and removed in
            # 1.24; the builtin ``int`` is the documented replacement.
            self.data[dates]["output"] = np.asarray(
                self.tokeniser.texts_to_sequences(
                    [self.data[dates]["output"]]
                ), dtype=int
            )
|
import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm
def opendata(file):
    """Load a whitespace-separated data file.

    Each line is ``label f1 f2 ...``. Returns ``(X, Y)`` where ``X`` is
    the feature matrix (one row per line, first column dropped) and ``Y``
    the label vector, both numpy arrays.

    BUG FIX: the original never closed the file handle and crashed with
    IndexError on blank lines; use a context manager and skip empty lines.
    """
    X = []
    Y = []
    with open(file, "r") as data:
        for line in data:
            fields = line.split()
            if not fields:
                continue  # tolerate blank / trailing lines
            Y.append(float(fields[0]))
            X.append([float(num) for num in fields[1:]])
    return (np.array(X), np.array(Y))
(train_X, train_Y) = opendata("./features.train")
(test_X, test_Y) = opendata("./features.test")

# Binary relabelling: digit 2 -> +1, everything else -> -1.
train_Y1 = [1 if label == 2 else -1 for label in train_Y]

# Fit a linear SVM for each C = 10**c and record the weight norm ||w||.
C = [-5, -3, -1, 1, 3]
W = []
for exponent in C:
    model = svm.SVC(C=pow(10, exponent), kernel='linear')
    model.fit(train_X, train_Y1)
    weights = model.coef_[0]
    W.append(np.sqrt(np.dot(weights, weights)))
W = np.array(W)
C = np.array(C)

# Plot ||w|| against log10(C).
plt.plot(C, W)
plt.title("$||w||$ vs $log_{10}C$")
plt.xlabel('$log_{10}C$')
plt.ylabel('$||w||$')
plt.show()
|
# Read three numbers and report the largest ("maior") and smallest ("menor").
n1 = float(input('Insira um numero:'))
n2 = float(input('Insira um numero:'))
n3 = float(input('Insira um numero:'))
# BUG FIX: the original if/elif ladder mishandled ties — e.g. inputs
# 1, 1, 0 fell through to the final else and reported "maior: 0".
# The built-ins cover every ordering, including equal values.
print('maior:', max(n1, n2, n3))
print('menor:', min(n1, n2, n3))
import os
import re
import unittest
from brdm.NcbiBlastData import NcbiBlastData
class TestNcbiBlastData(unittest.TestCase):
    """Ordered integration tests for NcbiBlastData (download, unzip and
    README handling of NCBI nr/nt BLAST archives). Methods are named
    test_1..test_5 so the default alphabetical run order chains them.
    """

    @classmethod
    def setUpClass(cls):
        """Build one shared fixture from the config next to this file."""
        dir_path = os.path.dirname(os.path.realpath(__file__))
        cls.fixture = NcbiBlastData('{}/test_config.yaml'.format(dir_path))
    '''
    @classmethod
    def tearDownClass(self):
        if os.path.exists(self.fixture.destination_dir):
            shutil.rmtree(self.fixture.destination_dir)
        if os.path.exists(self.fixture.backup_dir):
            shutil.rmtree(self.fixture.backup_dir)
        pass
    '''

    def test_1_get_all_file(self):
        """Every file in the remote folder listing must be an nr/nt archive."""
        print('Get ncbi nrnt blast file list...')
        folder_url = os.path.join(self.fixture.login_url,
                                  self.fixture.download_folder)
        file_list = self.fixture.get_all_file(folder_url)
        self.assertGreater(len(file_list), 0, 'File list is empty')
        # BUG FIX: the original pattern "n[r|t]" also matched a literal '|'
        # after 'n'; "n[rt]" matches exactly the intended nr/nt prefixes.
        file_match = [name for name in file_list if re.match("n[rt]", name)]
        self.assertEqual(len(file_list), len(file_match),
                         'Missing some nrnt files')

    def test_2_update(self, files=2):
        """update() should fetch the requested number of files."""
        print('Update ncbi nrnt blast...')
        success = self.fixture.update(file_number=files)
        self.assertTrue(success, 'NCBI nrnt update did not return True.')

    def test_3_unzip(self):
        """unzip() should succeed on the archives fetched by test_2."""
        print('Unzip ncbi nrnt blast...')
        success = self.fixture.unzip()
        self.assertTrue(success, 'NCBI nrnt unzip did not return True.')

    def test_4_readme(self):
        """Both the NCBI README and the RDM README must exist on disk."""
        print('Check readme files...')
        ncbi_readme = os.path.join(self.fixture.destination_dir,
                                   self.fixture.info_file_name)
        self.assertTrue(os.path.isfile(ncbi_readme),
                        'Cannot find NCBI README file.')
        readme_file = os.path.join(self.fixture.destination_dir,
                                   self.fixture.config['readme_file'])
        self.assertTrue(os.path.isfile(readme_file),
                        'Cannot find RDM README+ file.')

    def test_5_download(self):
        """download() must place non-empty remote files in destination_dir."""
        print("Check file download...")
        folder_url = os.path.join(self.fixture.login_url,
                                  self.fixture.download_folder)
        file_list = self.fixture.get_all_file(folder_url)
        start_time = os.path.getctime(self.fixture.destination_dir)
        self.fixture.download(download_file_number=2)
        end_time = os.path.getctime(self.fixture.destination_dir)
        self.assertGreater(end_time, start_time, "No new files downloaded")
        directory_list = os.listdir(self.fixture.destination_dir)
        download_file_size = 0
        self.assertFalse(set(directory_list).isdisjoint(set(file_list)),
                         'Expected download file not found')
        for directory_file in directory_list:
            if directory_file in file_list:
                # BUG FIX: getsize() was called with the bare filename,
                # which resolves against the CWD; join with destination_dir
                # so the actual downloaded file is measured.
                full_path = os.path.join(self.fixture.destination_dir,
                                         directory_file)
                download_file_size = os.path.getsize(full_path)
        self.assertGreater(download_file_size, 0,
                           'Downloaded file is empty')
# Run the suite when executed directly.
if __name__ == '__main__':
    unittest.main()
|
#
# Bisection search to guess and check from lecture 3. Code
# still break under negative input and numbers from 0 to 1
#
# Define main function
def main():
# Put down some constants and storing variables first
epsilon = 0.01
num_guesses = 0
low = 0
# Prompting user for input and do some initial calculations
cube = int(input('Input cube: '))
high = cube
guess = (high + low)/2.0
# Start guessing with approximation using bisection search algorithm
while abs(guess**3 - cube) >= epsilon:
if guess**3 < cube:
low = guess
else:
high = guess
guess = (high + low)/2.0
num_guesses += 1
# Display result and terminate
print('Total guess time is', num_guesses)
print(guess, 'is close to the cube root of', cube)
return None
# Invoke main
# NOTE: runs interactively on import/execution (prompts on stdin).
main()
# Generated by Django 2.0.3 on 2018-03-14 23:50
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: renames the misspelled CourseOrg field
    'adress' to 'address'. Implemented as RemoveField + AddField, so any
    existing values in 'adress' are NOT carried over to 'address'.
    """
    dependencies = [
        ('organization', '0010_teacher_image'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='courseorg',
            name='adress',
        ),
        migrations.AddField(
            model_name='courseorg',
            name='address',
            field=models.CharField(blank=True, max_length=160, null=True, verbose_name='所在地址'),
        ),
    ]
|
# Exercise banner.
print("Solo menores a siete")
def porcentaje(a, nt):
    """Return what percentage ``a`` represents of the total ``nt``."""
    return (a * 100) / nt
def par(numero):
    """Return True when ``numero`` is even.

    Idiom fix: return the boolean expression directly instead of the
    explicit ``if ... return True / return False`` ladder.
    """
    return numero % 2 == 0
def multiplo(n1, n2):
    """Return True when ``n1`` is an exact multiple of ``n2``."""
    return n1 % n2 == 0
# Read a zero-terminated sequence, counting evens, totals, numbers whose
# last digit is 4 or 5, and tracking the smallest multiple of 3 seen.
n = int(input("Cargue una secuencia de números. Termina en cero: "))
cont_pares = 0
cont_total = 0
cont_digito = 0
flag_menor = False
while n != 0:
    cont_total += 1
    if par(n):
        cont_pares += 1
    # Last digit of n (NOTE(review): n % 10 assumes n > 0 — confirm inputs).
    digito = n
    if n > 10:
        digito = n % 10
    if digito == 4 or digito == 5:
        cont_digito += 1
    if multiplo(n, 3):
        # Track the smallest multiple of 3 seen so far.
        if not flag_menor or n < menor:
            menor = n
            flag_menor = True
    # BUG FIX: the original never read the next value inside the loop, so
    # any non-zero first input looped forever.
    n = int(input("Cargue una secuencia de números. Termina en cero: "))
import numpy as np
import math
import time
import numbapro
from numbapro import vectorize
from numba import autojit, jit
from blackscholes import black_scholes
#import logging; logging.getLogger().setLevel(logging.WARNING)
# Model parameters shared by every option evaluation below.
RISKFREE = 0.02     # annual risk-free interest rate
VOLATILITY = 0.30   # annual volatility of the underlying
@jit('f8(f8)')
def normcdf(d):
    """Cumulative standard normal distribution, via a polynomial
    approximation (Abramowitz & Stegun-style rational polynomial —
    presumably; confirm against the original CUDA sample this mirrors).
    """
    # Polynomial coefficients of the approximation.
    A1 = 0.31938153
    A2 = -0.356563782
    A3 = 1.781477937
    A4 = -1.821255978
    A5 = 1.330274429
    RSQRT2PI = 0.39894228040143267793994605993438  # 1 / sqrt(2*pi)
    K = 1.0 / (1.0 + 0.2316419 * math.fabs(d))
    ret_val = (RSQRT2PI * math.exp(-0.5 * d * d) *
               (K * (A1 + K * (A2 + K * (A3 + K * (A4 + K * A5))))))
    # The polynomial is evaluated on |d|; mirror the result for d > 0.
    if d > 0:
        ret_val = 1.0 - ret_val
    return ret_val
@vectorize(['f8(f8,f8,f8,f8,f8)', 'f4(f4,f4,f4,f4,f4)'])
def black_scholes(S, K, T, R, V):
    """Black-Scholes price of a European call option (vectorized kernel).

    S: spot price, K: strike, T: years to expiry, R: risk-free rate,
    V: volatility. Returns the call price.

    NOTE(review): this definition shadows the ``black_scholes`` imported
    from the ``blackscholes`` module above — confirm that is intended.
    """
    sqrtT = math.sqrt(T)
    d1 = (math.log(S / K) + (R + 0.5 * V * V) * T) / (V * sqrtT)
    d2 = d1 - V * sqrtT
    cndd1 = normcdf(d1)
    cndd2 = normcdf(d2)
    expRT = math.exp((-1. * R) * T)
    # BUG FIX: the put price was computed but never used; dropped the dead
    # local so this is a call-only kernel.
    callResult = (S * cndd1 - K * expRT * cndd2)
    return callResult
def randfloat(rand_var, low, high):
    """Map uniform-[0, 1] sample(s) ``rand_var`` onto [low, high]."""
    weight = 1.0 - rand_var
    return weight * low + rand_var * high
def main(*args):
    """Price OPT_N randomly-generated options; returns (call_prices, None).

    ``*args`` is accepted for CLI pass-through but never used.
    """
    OPT_N = 40000
    # NOTE(review): callResult/putResult buffers are allocated but never
    # used by the vectorized call below — presumably leftovers from an
    # out-parameter API. Confirm before removing.
    callResult = np.zeros(OPT_N)
    putResult = -np.ones(OPT_N)
    # Random market inputs drawn uniformly from fixed ranges.
    stockPrice = randfloat(np.random.random(OPT_N), 5.0, 30.0)
    optionStrike = randfloat(np.random.random(OPT_N), 1.0, 100.0)
    optionYears = randfloat(np.random.random(OPT_N), 0.25, 10.0)
    c = black_scholes(stockPrice, optionStrike, optionYears, RISKFREE, VOLATILITY)
    p = None
    return c, p
if __name__ == "__main__":
    import sys
    # Forward CLI arguments (currently ignored by main).
    c,p = main(*sys.argv[1:])
|
from pyspark import SparkContext
from pyspark.sql import SQLContext
from pyspark.sql.types import *
sc = SparkContext("local","Reading_csv")

# Column definitions for cars.csv in file order: (name, Spark SQL type).
_cols = [
    ("make", StringType()), ("fueltype", StringType()),
    ("numofdoors", StringType()), ("bodystyle", StringType()),
    ("drivewheels", StringType()), ("enginelocation", StringType()),
    ("wheelbase", FloatType()), ("length", FloatType()),
    ("width", FloatType()), ("height", FloatType()),
    ("curbweight", IntegerType()), ("enginesize", IntegerType()),
    ("horsepower", IntegerType()), ("peakrpm", IntegerType()),
    ("citympg", IntegerType()), ("highwaympg", IntegerType()),
    ("price", IntegerType()),
]
schema1 = StructType([StructField(name, kind, True) for name, kind in _cols])
sqlContext = SQLContext(sc)

# Read the first eight rows, keeping only the size-related columns.
file1 = sqlContext.read.csv('/Abhilash/cars.csv', header="true", schema=schema1)
file2 = file1.limit(8)
file3 = file2.select('make', 'length', 'width', 'height')
file3.show()

# Per-make minimum length and maximum width, then joined back on make.
file4 = file3.groupBy('make').min('length')
file4.show()
file5 = file3.groupBy('make').max('width')
file5.show()
file6 = file5.join(file4, file4.make == file5.make, "inner")
file6.show()
#file4.printSchema()
|
from motive_data_helpers import *
import quaternion as q
import pandas as pd
import os
import matplotlib.pyplot as plt
from scipy.io import savemat
import pdb
def quat_to_matlab(file_q):
    """Dump the quaternion columns of a Motive take to a MATLAB .mat file."""
    take = readfile(file_q, 'stop2')
    cols = ['Rotation' + axis for axis in ['W','X','Y','Z']]
    quats = take[cols].values
    # Same basename, .csv extension swapped for .mat.
    savemat(file_q[:-3] + 'mat', dict(q=quats))
def compare_files(file_e, file_q, description, option):
    """Plot roll/pitch/yaw derived from a quaternion take against a
    reference Euler-angle source; shows one figure per angle.

    Parameters
    ----------
    file_e : str
        Euler reference file ('optitrack' CSV take or 'matlab' dump).
    file_q : str
        quaternion take, read with readfile().
    description : dict
        plot titles keyed by 'phi' / 'theta' / 'psi'.
    option : str
        'optitrack' or 'matlab' — layout of file_e.
    """
    data_q = readfile(file_q, 'stop2')
    if option == 'optitrack':
        # Motive axis mapping: X -> roll, Z -> pitch, -Y -> yaw.
        data_e = readfile(file_e, 'stop2')
        data_e['phi'] = data_e['RotationX']
        data_e['theta'] = data_e['RotationZ']
        data_e['psi'] = -data_e['RotationY']
    elif option == 'matlab':
        data_e = pd.read_csv(file_e, header=None, names=['phi','theta','psi'])
        # Reuse timestamps from the quaternion take (assumes both files
        # have the same number of rows — TODO confirm).
        data_e['Time'] = data_q.Time
    # NOTE(review): despite its name, euler2quat here yields the three
    # Euler angles from the quaternion columns — confirm in motive_data_helpers.
    data_q['phi'], data_q['theta'], data_q['psi'] = euler2quat(data_q)
    # Phi
    plt.plot(data_q.Time, data_q.phi)
    plt.plot(data_e.Time, data_e.phi)
    plt.legend(['from_quat', 'from_euler'])
    plt.xlabel('time')
    plt.ylabel('$\phi$')
    plt.title(description['phi'])
    plt.show()
    # Theta
    plt.plot(data_q.Time, data_q.theta)
    plt.plot(data_e.Time, data_e.theta)
    plt.legend(['from_quat', 'from_euler'])
    plt.xlabel('time')
    plt.ylabel('$\\theta$')
    plt.title(description['theta'])
    plt.show()
    # Psi
    plt.plot(data_q.Time, data_q.psi)
    plt.plot(data_e.Time, data_e.psi)
    plt.legend(['from_quat', 'from_euler'])
    plt.xlabel('time')
    plt.ylabel('$\psi$')
    plt.title(description['psi'])
    plt.show()
if __name__ == '__main__':
    # Case 1: large calibration sweeps on each axis.
    # NOTE(review): 'pith' in the titles is likely a typo for 'pitch'
    # (runtime strings left unchanged here).
    # file_e = os.path.join(os.getcwd(), 'calib_euler/take1_eul.csv')
    file_e = os.path.join(os.getcwd(), 'calib_euler/take1_matlab.csv')
    file_q = os.path.join(os.getcwd(), 'calib_euler/take1_quat.csv')
    description = {
        'phi':'roll: +45, +90, -45, -90',
        'theta':'pith: +45, +90, -45, -90',
        'psi':'yaw: +90, 0, -90, -180, -270, -360'
    }
    compare_files(file_e, file_q, description,option='matlab')
    # case 2: small approximate rotations.
    file_q = os.path.join(os.getcwd(), 'calib_euler/take2_quat.csv')
    # file_e = os.path.join(os.getcwd(), 'calib_euler/take2_eul.csv')
    file_e = os.path.join(os.getcwd(), 'calib_euler/take2_matlab.csv')
    description = {
        'phi': 'roll: 30, 30, 30 (approx)',
        'theta': 'pith: 30, 30, 30 (approx)',
        'psi': 'yaw: 30, -30, -30 (approx)'
    }
    compare_files(file_e, file_q, description, option='matlab')
import cfg
import user_info
import bl_as_modul
import aiosqlite
from telethon.tl import types
from telethon.tl.types import (
KeyboardButtonRow,
KeyboardButtonCallback,
)
from datetime import date, datetime, timedelta
mdate_db = cfg.user_db # './mdate.db'
bot = bl_as_modul.client
# Inline keyboard laid out as an analog clock face for picking an AM hour
# (1-12).  Blank " " buttons with callback data b"s" are inert spacers that
# shape the circle; "PM" (b"timeam_pm") switches to the 13-24 keyboard.
tempAM_but = types.ReplyInlineMarkup(
    rows=[
        # 12 at the top of the dial
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="12", data=b"timeam12"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s")]),
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="11", data=b"timeam11"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" 1", data=b"timeam1"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s")]),
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="10", data=b"timeam10"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" 2", data=b"timeam2"), KeyboardButtonCallback(text=" ", data=b"s")]),
        # 9 and 3 on the horizontal axis of the dial
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" 9", data=b"timeam9"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" 3", data=b"timeam3"), KeyboardButtonCallback(text=" ", data=b"s")]),
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" 8", data=b"timeam8"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" 4", data=b"timeam4"), KeyboardButtonCallback(text=" ", data=b"s")]),
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" 7", data=b"timeam7"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" 5", data=b"timeam5"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s")]),
        # 6 at the bottom, with the PM switch in the corner
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" 6", data=b"timeam6"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="PM", data=b"timeam_pm")]),
    ]
)
# Clock-face keyboard for the PM hours (13-24); mirrors tempAM_but but with
# 24 at the top.  "AM" (b"timepm_am") switches back to the 1-12 keyboard.
tempPM_but = types.ReplyInlineMarkup(
    rows=[
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="24", data=b"timepm24"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s")]),
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="23", data=b"timepm23"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="13", data=b"timepm13"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s")]),
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="22", data=b"timepm22"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="14", data=b"timepm14"), KeyboardButtonCallback(text=" ", data=b"s")]),
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="21", data=b"timepm21"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="15", data=b"timepm15"), KeyboardButtonCallback(text=" ", data=b"s")]),
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="20", data=b"timepm20"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="16", data=b"timepm16"), KeyboardButtonCallback(text=" ", data=b"s")]),
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="19", data=b"timepm19"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="17", data=b"timepm17"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s")]),
        # 18 at the bottom, with the AM switch in the corner
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text="AM", data=b"timepm_am"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="18", data=b"timepm18"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s")]),
    ]
)
# Clock-face keyboard for minutes in 5-minute steps (0-55).
# "Re" (b"timere") restarts hour selection; "Ok" (b"timeok_tm") confirms.
min_but = types.ReplyInlineMarkup(
    rows=[
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" 0", data=b"timem0"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s")]),
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="55", data=b"timem55"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" 5", data=b"timem5"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s")]),
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="50", data=b"timem50"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="10", data=b"timem10"), KeyboardButtonCallback(text=" ", data=b"s")]),
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="45", data=b"timem45"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="15", data=b"timem15"), KeyboardButtonCallback(text=" ", data=b"s")]),
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="40", data=b"timem40"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="20", data=b"timem20"), KeyboardButtonCallback(text=" ", data=b"s")]),
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="35", data=b"timem35"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="25", data=b"timem25"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s")]),
        # Bottom row: restart / 30 minutes / confirm
        KeyboardButtonRow(buttons=[KeyboardButtonCallback(text="Re", data=b"timere"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="30", data=b"timem30"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text=" ", data=b"s"), KeyboardButtonCallback(text="Ok", data=b"timeok_tm")]),
    ]
)
async def sl_time(event):
    """Handle every callback press of the clock-face time picker.

    Hour buttons (b"timeam*"/b"timepm*") set ``hr``; minute buttons
    (b"timem*") set ``mnt``; AM/PM/Re switch keyboards and "Ok" finishes
    the dialog.  The sentinels hr=25 / mnt=61 mean "no hour / no minute
    was picked by this press".  When a value was picked, it is written
    into the user's pending ``messdate`` row (send time in 'sch' mode,
    delete time in 'del' mode).
    """
    sender = await event.get_sender()
    channel = sender.id  # 275965108
    hr = 25    # sentinel: no hour picked on this callback
    mnt = 61   # sentinel: no minute picked on this callback
    # --- keyboard switching -------------------------------------------
    if event.data == b"timeam_pm":
        await event.edit("PM", buttons=tempPM_but)
    elif event.data == b"timepm_am":
        await event.edit("AM", buttons=tempAM_but)
    elif event.data == b"timere":
        await event.edit("pm", buttons=tempPM_but)
    # --- AM hours (12, 1-11) ------------------------------------------
    elif event.data == b"timeam12":
        await event.edit("12 часов. выберете минуты", buttons=min_but)
        hr = 12
    elif event.data == b"timeam1":
        await event.edit("1 час. выберете минуты", buttons=min_but)
        hr = 1
    elif event.data == b"timeam2":
        await event.edit("2 часа. выберете минуты", buttons=min_but)
        hr = 2
    elif event.data == b"timeam3":
        await event.edit("3 часа. выберете минуты", buttons=min_but)
        hr = 3
    elif event.data == b"timeam4":
        await event.edit("4 часа. выберете минуты", buttons=min_but)
        hr = 4
    elif event.data == b"timeam5":
        await event.edit("5 часов. выберете минуты", buttons=min_but)
        hr = 5
    elif event.data == b"timeam6":
        await event.edit("6 часов. выберете минуты", buttons=min_but)
        hr = 6
    elif event.data == b"timeam7":
        await event.edit("7 часов. выберете минуты", buttons=min_but)
        hr = 7
    elif event.data == b"timeam8":
        await event.edit("8 часов. выберете минуты", buttons=min_but)
        hr = 8
    elif event.data == b"timeam9":
        await event.edit("9 часов. выберете минуты", buttons=min_but)
        hr = 9
    elif event.data == b"timeam10":
        await event.edit("10 часов. выберете минуты", buttons=min_but)
        hr = 10
    elif event.data == b"timeam11":
        await event.edit("11 часов. выберете минуты", buttons=min_but)
        hr = 11
    # --- PM hours (13-24) ---------------------------------------------
    elif event.data == b"timepm23":
        await event.edit("23 часа. выберете минуты", buttons=min_but)
        hr = 23
    elif event.data == b"timepm24":
        await event.edit("24 часа. выберете минуты", buttons=min_but)
        hr = 24
    elif event.data == b"timepm13":
        await event.edit("13 часов. выберете минуты", buttons=min_but)
        hr = 13
    elif event.data == b"timepm14":
        await event.edit("14 часов. выберете минуты", buttons=min_but)
        hr = 14
    elif event.data == b"timepm15":
        await event.edit("15 часов. выберете минуты", buttons=min_but)
        hr = 15
    elif event.data == b"timepm16":
        await event.edit("16 часов. выберете минуты", buttons=min_but)
        hr = 16
    elif event.data == b"timepm17":
        await event.edit("17 часов. выберете минуты", buttons=min_but)
        hr = 17
    elif event.data == b"timepm18":
        await event.edit("18 часов. выберете минуты", buttons=min_but)
        hr = 18
    elif event.data == b"timepm19":
        await event.edit("19 часов. выберете минуты", buttons=min_but)
        hr = 19
    elif event.data == b"timepm20":
        await event.edit("20 часов. выберете минуты", buttons=min_but)
        hr = 20
    elif event.data == b"timepm21":
        await event.edit("21 час. выберете минуты", buttons=min_but)
        hr = 21
    elif event.data == b"timepm22":
        await event.edit("22 часа. выберете минуты", buttons=min_but)
        hr = 22
    # --- minutes (5-minute steps) -------------------------------------
    elif event.data == b"timem0":
        await event.edit(" 0 минут", buttons=min_but)
        mnt = 0
    elif event.data == b"timem5":
        await event.edit(" 5 минут", buttons=min_but)
        mnt = 5
    elif event.data == b"timem10":
        await event.edit(" 10 минут", buttons=min_but)
        mnt = 10
    elif event.data == b"timem15":
        await event.edit(" 15 минут", buttons=min_but)
        mnt = 15
    elif event.data == b"timem20":
        await event.edit(" 20 минут", buttons=min_but)
        mnt = 20
    elif event.data == b"timem25":
        await event.edit(" 25 минут", buttons=min_but)
        mnt = 25
    elif event.data == b"timem30":
        await event.edit(" 30 минут", buttons=min_but)
        mnt = 30
    elif event.data == b"timem35":
        await event.edit(" 35 минут", buttons=min_but)
        mnt = 35
    elif event.data == b"timem40":
        await event.edit(" 40 минут", buttons=min_but)
        mnt = 40
    elif event.data == b"timem45":
        await event.edit(" 45 минут", buttons=min_but)
        mnt = 45
    elif event.data == b"timem50":
        await event.edit(" 50 минут", buttons=min_but)
        mnt = 50
    elif event.data == b"timem55":
        await event.edit(" 55 минут", buttons=min_but)
        mnt = 55
    # --- confirmation -------------------------------------------------
    elif event.data == b"timeok_tm":
        await event.edit("Данные внесены")
        # m = await bot.send_message(channel, 'можно формировать следушее сообщение')
        await user_info.snd_chl_s(event)
        # break
    # --- persist the picked value into the pending messdate row -------
    if not (hr == 25 and mnt == 61):
        conn = await user_info.create_connection()
        t = await user_info.find_user(conn, channel, '', 1)
        await user_info.close_connection(conn)
        conn_d = await create_conn_date()
        if t[12] == 0:
            u = await find_date(conn_d, t[0], t[1], t[3])  # pkanal is used for uniqueness; on Ok it is replaced by the id
        else:
            u = await find_date(conn_d, t[0], t[1], t[12])
        while (True):
            try:
                if event.data.startswith(b"timeam") or event.data.startswith(b"timepm"):
                    # bias_hour = datetime.now().hour - datetime.utcnow().hour
                    # TODO: add a check for the roll-over to the next day / Moscow time
                    if t[4] == 'sch' or t[4].startswith('sch'):  # scheduled send: hour goes into hr_snd
                        await update_info(conn_d, u[0], u[1], u[2], u[3], hr, u[5], u[6], u[7], u[8], u[9], u[10],
                                          u[11], u[12], u[13], u[14], 0)
                        await close_connection_d(conn_d)
                    elif t[4].startswith('del'):
                        # delete mode: hour goes into hr_del instead
                        await update_info(conn_d, u[0], u[1], u[2], u[3], u[4], u[5], u[6], u[7], u[8], hr, u[10],
                                          u[11], u[12], u[13], u[14], 0)
                        await close_connection_d(conn_d)
                    break
                elif event.data.startswith(b"timem"):
                    if t[4] == 'sch' or t[4].startswith('sch'):  # scheduled send: minute goes into min_snd
                        await update_info(conn_d, u[0], u[1], u[2], mnt, u[4], u[5], u[6], u[7], u[8], u[9], u[10],
                                          u[11], u[12], u[13], u[14], 0)
                        await close_connection_d(conn_d)
                    elif t[4].startswith('del'):  # instant-send-then-delete: minute goes into min_del
                        await update_info(conn_d, u[0], u[1], u[2], u[3], u[4], u[5], u[6], u[7], mnt, u[9], u[10],
                                          u[11], u[12], u[13], u[14], 0)
                        await close_connection_d(conn_d)
                    break
            except Exception:  # original comment: "Why?" -- swallows all errors silently
                # if event.data.startswith(b"timeam") or event.data.startswith(b"timepm"):
                #     await add_mess_string(event.chat_id, event.chat_id, event.chat_id, hr, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, '')
                break
            break
sql_create_mess_table = """ CREATE TABLE IF NOT EXISTS messdate (
bot_chat_id integer,
channel_chat_id integer,
unic_mess_id integer,
min_snd integer,
hr_snd integer,
day_snd integer,
month_snd integer,
year_snd integer,
min_del integer,
hr_del integer,
day_del integer,
month_del integer,
year_del integer,
telo text,
but_text
); """
async def create_conn_date():
    """Open the message-date DB and ensure the ``messdate`` table exists.

    Returns a ``(cursor, db)`` pair; callers index it as ``conn_d[0]``
    (cursor) and ``conn_d[1]`` (connection).
    """
    db = await aiosqlite.connect(mdate_db)
    # CREATE TABLE IF NOT EXISTS is idempotent, so the previous manual
    # sqlite_master existence check was redundant.  The original also
    # never committed the DDL; commit here so the table is durable.
    cursor = await db.execute(sql_create_mess_table)
    await db.commit()
    return cursor, db
async def find_date(cursor, n0, n1, n2):
    """Fetch one messdate row keyed by (bot_chat_id, channel_chat_id,
    unic_mess_id); returns the row tuple or None when absent.

    ``cursor`` is the (cursor, db) pair produced by create_conn_date().
    """
    key = (n0, n1, n2)
    await cursor[0].execute('SELECT * FROM messdate WHERE bot_chat_id=? and channel_chat_id=? and unic_mess_id=?', key)
    return await cursor[0].fetchone()
async def add_mess_string(n0, n1, n2, n3, n4, n5, n6, n7, n8, n9, n10, n11, n12, n13, n14):
    """Insert one scheduling row (15 positional column values) into messdate."""
    row = (n0, n1, n2, n3, n4, n5, n6, n7, n8, n9, n10, n11, n12, n13, n14)
    async with aiosqlite.connect(mdate_db) as db:
        # Parameterized insert; column order matches the CREATE TABLE above.
        await db.execute('INSERT INTO messdate VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', row)
        await db.commit()
async def close_connection_d(conn_d):
    """Close both halves of the (cursor, db) pair from create_conn_date()."""
    for handle in conn_d[:2]:
        await handle.close()
async def update_info(conn_d, n0, n1, n2, n3, n4, n5, n6, n7, n8, n9, n10, n11, n12, n13, n14, dop):
    """Replace one messdate row: delete the old key, then insert new values.

    When ``dop`` is 0 the delete key is (n0, n1, n2); otherwise ``dop``
    overrides the unique-message id of the row being removed (the stored
    placeholder id differs from the real message id).
    """
    new_row = (n0, n1, n2, n3, n4, n5, n6, n7, n8, n9, n10, n11, n12, n13, n14)
    old_key = (n0, n1, n2) if dop == 0 else (n0, n1, dop)
    await conn_d[0].execute('DELETE FROM messdate WHERE bot_chat_id = ? and channel_chat_id = ? and unic_mess_id = ?', old_key)
    await conn_d[1].commit()
    await conn_d[0].execute('INSERT INTO messdate VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', new_row)
    await conn_d[1].commit()
async def dmy(result, event):  # original comment: enter the date, first step of a deferred message
    """Store the picked calendar date for the user's pending message.

    ``result`` is a date/datetime chosen by the user.  In 'sch' mode a new
    messdate row is created with the send date; in 'del' mode the delete
    date columns of the existing row are updated instead.
    """
    conn_d = await create_conn_date()
    ic = result.timetuple()
    sender = await event.get_sender()
    channel = sender.id
    conn = await user_info.create_connection()
    u = await user_info.find_user(conn, channel, '', 1)
    await user_info.close_connection(conn)
    if u[4] == 'sch' or u[4].startswith('sch'):  # deferred (scheduled) message
        await add_mess_string(u[0], u[1], u[3], 0, 0, ic.tm_mday, ic.tm_mon, ic.tm_year, 0, 0, 0, 0, 0, u[5], u[11])
    elif u[4].startswith('del'):  # send instantly, then delete later
        if u[12] != 0:
            t = await find_date(conn_d, u[0], u[1], u[12])
        else:
            t = await find_date(conn_d, u[0], u[1], u[3])  # original comment: "there is a bug here"
        try:
            await update_info(conn_d, t[0], t[1], t[2], t[3], t[4], t[5], t[6], t[7], t[8], t[9],
                              ic.tm_mday, ic.tm_mon, ic.tm_year, t[13], t[14], 0)
        except Exception:
            # No existing row (t is None -> indexing raised): create one
            # with only the delete date filled in.
            await add_mess_string(u[0], u[1], u[3], 0, 0, 0, 0, 0, 0, 0, ic.tm_mday, ic.tm_mon, ic.tm_year,
                                  u[5], u[11])
    await close_connection_d(conn_d)
async def un_mes(event, rec, msg_id):
    """Re-key the pending messdate row with the real recipient/message id.

    The placeholder row is stored with the chat id in all three key
    columns; once the message is actually sent, the channel (``rec``) and
    the real Telegram ``msg_id`` replace the placeholders (the old unique
    id, ``u[2]``, is passed as ``dop`` so update_info deletes that row).
    """
    conn_d = await create_conn_date()
    u = await find_date(conn_d, event.chat_id, event.chat_id, event.chat_id)
    await update_info(conn_d, event.chat_id, rec, msg_id, u[3], u[4], u[5], u[6], u[7], u[8], u[9], u[10], u[11], u[12],
                      u[13], u[14], u[2])
    # Fix: the original leaked the connection pair opened above.
    await close_connection_d(conn_d)
|
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 11 09:39:40 2017
@author: MichaelEK
"""
from shapely.geometry import Point, LineString, Polygon
def getPolyCoords(row, coord_type, geom='geometry'):
    """Return the coordinates ('x' or 'y') of the edges of a Polygon
    exterior as a list; any other coord_type yields None."""
    ring = row[geom].exterior
    axis = {'x': 0, 'y': 1}.get(coord_type)
    if axis is not None:
        return list(ring.coords.xy[axis])
def getPointCoords(row, coord_type, geom='geometry'):
    """Return the 'x' or 'y' coordinate of a Point geometry;
    any other coord_type yields None."""
    point = row[geom]
    if coord_type == 'x':
        return point.x
    if coord_type == 'y':
        return point.y
def getLineCoords(row, coord_type, geom='geometry'):
    """Return a list of coordinates ('x' or 'y') of a LineString geometry;
    any other coord_type yields None."""
    axes = {'x': 0, 'y': 1}
    if coord_type in axes:
        return list(row[geom].coords.xy[axes[coord_type]])
def getCoords(gdf):
    """
    Add 'x' and 'y' coordinate columns for Bokeh plotting to a copy of a
    geodataframe of Points, LineStrings, or Polygons.
    """
    out = gdf.copy()
    xs, ys = [], []
    for idx in out.index:
        geom = out.geometry[idx]
        if isinstance(geom, Point):
            x_val, y_val = geom.x, geom.y
        elif isinstance(geom, LineString):
            x_val = list(geom.coords.xy[0])
            y_val = list(geom.coords.xy[1])
        elif isinstance(geom, Polygon):
            # Only the exterior ring is exported; holes are ignored.
            x_val = list(geom.exterior.coords.xy[0])
            y_val = list(geom.exterior.coords.xy[1])
        else:
            raise TypeError('Index ' + str(idx) + ' is not a shapely Point, LineString, or Polygon')
        xs.append(x_val)
        ys.append(y_val)
    out.loc[:, 'x'] = xs
    out.loc[:, 'y'] = ys
    return out
|
# Function that returns the maximum of two numbers
def max(A, B):
    """Return the larger of A and B (ties return B).

    NOTE: this deliberately keeps the script's original name, which
    shadows the builtin ``max`` at module level.
    """
    return A if A > B else B
# Simple CLI driver: read two integers and print the larger one.
# NOTE(review): runs at import time; a __main__ guard would avoid
# prompting when this module is imported.
A = int(input("Ingrese el primer numero: "))
B = int(input("Ingrese el segundo numero: " ))
print (max(A,B))
|
import os
import sys
sys.path.append("./")
import hashlib
import requests
import numpy as np
from utils import pdf_utils
from search_engines import common
from html.parser import HTMLParser
from enum import Enum
class Paper(Enum):
    # Positions (in start-tag order after each <dt>) at which the paper
    # title text and the paper's href link are expected by MyHTMLParser.
    # NOTE(review): the values 3 and 6 mirror the scraped page's markup
    # on openaccess.thecvf.com -- re-verify if the site layout changes.
    TITLE = 3
    LINK = 6
class MyHTMLParser(HTMLParser):
    """Stateful scraper for openaccess.thecvf.com listing pages.

    Each paper entry starts at a <dt> tag; the title text and the PDF
    href are picked out by counting start tags since that <dt> (see the
    Paper enum).  Everything inside <form> subtrees is skipped.  The
    verbose print() calls are intentional debug output.
    """

    def __init__(self):
        HTMLParser.__init__(self)
        self.parsing_paper = False   # True while inside a <dt>-opened paper entry
        self.papers_titles = []      # collected title strings
        self.papers_links = []       # collected hrefs (parallel to titles)
        self.first_word = []         # scratch; only referenced in commented-out code
        self.counter = 0             # start tags seen since the last <dt>
        self.skip_enabled = False    # True while inside a <form> subtree
        self.link_cnt = 0            # NOTE(review): never used below
        self.title_cnt = 0           # NOTE(review): never used below

    def handle_starttag(self, tag, attrs):
        # <dt> marks the beginning of a new paper entry; reset the counter.
        if tag == "dt":
            print("New Paper starts\n--------------------------")
            self.parsing_paper = True
            self.counter = 0
        elif tag == "form":
            self.skip_enabled = True
            return
        elif self.skip_enabled:
            return
        # elif tag == 'dd':
        print("{}: Encountered a start tag:".format(self.counter + 1), tag, attrs)
        self.counter += 1
        # The link is expected to be the lone href attribute of the
        # Paper.LINK-th start tag after the <dt>.
        if (
            self.parsing_paper
            and len(attrs) == 1
            and attrs[0][0] == "href"
            and self.counter == Paper.LINK.value
        ):
            link = attrs[0][1]
            self.papers_links.append(link)
            # Sanity check: titles and links must advance in lock step.
            if len(self.papers_links) != len(self.papers_titles):
                print(self.papers_titles[-1], self.papers_titles[-2])
                print("That should not happen")
            # if not self.first_word[-1] in link:
            # print(self.first_word[-1],self.papers_titles[-1])

    def handle_endtag(self, tag):
        if tag == "form":
            self.skip_enabled = False
            return
        elif self.skip_enabled:
            return
        print("Encountered an end tag :", tag)

    def handle_data(self, data):
        if self.skip_enabled:
            return
        print("Encountered some data :", data)
        # Title text arrives at the Paper.TITLE-th start tag; a title that
        # the parser split across chunks is glued onto the previous entry.
        if self.parsing_paper and len(data) > 3 and self.counter == Paper.TITLE.value:
            pre_len = len(self.papers_titles)
            if len(self.papers_titles) == len(self.papers_links):
                self.papers_titles.append(data)
            else:
                self.papers_titles[-1] += data
            # self.first_word.append(data.split()[0])
            # post_len = len(self.papers_titles)
            # if post_len != pre_len + 1:
            # print(data)
def read_papers(openaccess_url, csv_path="./papers_iccv2021.csv",
                output_pdf="./iccv_2021.pdf", tmp_save_dir="./tmp"):
    """Scrape paper titles/links from an openaccess.thecvf.com listing,
    download each PDF (skipping ones already cached), write an index CSV
    and merge everything into one PDF.

    Parameters are backward compatible: the new keyword arguments default
    to the previously hard-coded output paths.

    Raises SystemExit when the scraped titles and links get out of sync
    (the original called quit(), which raises the same exception).
    """
    import pandas as pd  # local import kept from the original; only needed here

    response = requests.get(openaccess_url)
    parser = MyHTMLParser()
    parser.feed(str(response.content))
    # (removed a dead `np.argmin(...)` expression whose result was unused)
    print(parser.papers_titles)
    print(
        "found {} paper title and {} paper links".format(
            len(parser.papers_titles), len(parser.papers_links)
        )
    )
    if len(parser.papers_titles) != len(parser.papers_links):
        print("Something is wrong")
        raise SystemExit(None)  # explicit form of the original quit()
    # Hoisted out of the loop: the cache directory only needs creating once.
    os.makedirs(tmp_save_dir, exist_ok=True)
    saved_papers = []
    papers_hash = []
    for paper_title, paper_link in zip(parser.papers_titles, parser.papers_links):
        # Hash of the title doubles as a stable on-disk file name.
        title_hash = hashlib.sha1(paper_title.encode("utf-8")).hexdigest()
        papers_hash.append(title_hash)
        save_filepath = "{}/{}.pdf".format(tmp_save_dir, title_hash)
        saved_papers.append(save_filepath)
        if os.path.exists(save_filepath):
            continue  # already downloaded on a previous run
        common.download_pdf(
            "http://openaccess.thecvf.com/{}".format(paper_link), save_filepath
        )
    csv_data = {
        "Title": parser.papers_titles,
        "Link": parser.papers_links,
        "Name": papers_hash,
    }
    pd.DataFrame.from_dict(csv_data).to_csv(csv_path)
    pdf_utils.merge_files(saved_papers, output_pdf)
if __name__ == "__main__":
    # Scrape the full ICCV 2021 listing (day=all); network-bound.
    read_papers("https://openaccess.thecvf.com/ICCV2021?day=all")
|
# Copyright (C) 2009 Internet Systems Consortium.
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""This module holds the BindCmdInterpreter class. This provides the
core functionality for bundyctl. It maintains a session with
bundy-cmdctl, holds local configuration and module information, and
handles command line interface commands"""
import sys
from cmd import Cmd
from bundyctl.exception import *
from bundyctl.moduleinfo import *
from bundyctl.cmdparse import BindCmdParser
from bundyctl import command_sets
from xml.dom import minidom
import bundy.config
import bundy.cc.data
import http.client
import json
import inspect
import pprint
import ssl, socket
import os, time, random, re
import os.path
import getpass
from hashlib import sha1
import csv
import pwd
import getpass
import copy
import errno
try:
from collections import OrderedDict
except ImportError:
from bundyctl.mycollections import OrderedDict
# if we have readline support, use that, otherwise use normal stdio
try:
import readline
# Only consider spaces as word boundaries; identifiers can contain
# '/' and '[]', and configuration item names can in theory use any
# printable character. See the discussion in tickets #1345 and
# #2254 for more information.
readline.set_completer_delims(' ')
my_readline = readline.get_line_buffer
except ImportError:
my_readline = sys.stdin.readline
# Used for tab-completion of 'identifiers' (i.e. config values)
# If a command parameter has this name, the tab completion hints
# are derived from config data
CFGITEM_IDENTIFIER_PARAM = 'identifier'
CSV_FILE_NAME = 'default_user.csv'
CONFIG_MODULE_NAME = 'config'
CONST_BINDCTL_HELP = """
usage: <module name> <command name> [param1 = value1 [, param2 = value2]]
Type Tab character to get the hint of module/command/parameters.
Type \"help(? h)\" for help on bundyctl.
Type \"<module_name> help\" for help on the specific module.
Type \"<module_name> <command_name> help\" for help on the specific command.
\nAvailable module names: """
class ValidatedHTTPSConnection(http.client.HTTPSConnection):
    '''Overrides HTTPSConnection to support certificate
    validation against a caller-supplied CA file.'''

    def __init__(self, host, ca_certs):
        # ca_certs: path to a PEM CA bundle, or None to skip validation.
        http.client.HTTPSConnection.__init__(self, host)
        self.ca_certs = ca_certs

    def connect(self):
        ''' Overrides the connect() so that we do
        certificate validation. '''
        sock = socket.create_connection((self.host, self.port),
                                        self.timeout)
        if self._tunnel_host:
            self.sock = sock
            self._tunnel()
        # Build an explicit SSLContext: ssl.wrap_socket() was deprecated
        # in Python 3.7 and removed in 3.12.  Behavior is preserved from
        # the original: the peer certificate is verified only when a CA
        # file was supplied, and the hostname is never checked.
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        context.check_hostname = False
        if self.ca_certs:
            context.verify_mode = ssl.CERT_REQUIRED
            context.load_verify_locations(cafile=self.ca_certs)
        else:
            context.verify_mode = ssl.CERT_NONE
        if self.cert_file:
            # Present the configured client certificate/key pair.
            context.load_cert_chain(self.cert_file, self.key_file)
        self.sock = context.wrap_socket(sock)
class BindCmdInterpreter(Cmd):
"""simple bundyctl example."""
    def __init__(self, server_port='localhost:8080', pem_file=None,
                 csv_file_dir=None):
        '''Set up the interpreter and the HTTPS session to bundy-cmdctl.

        server_port:  "host:port" string of the cmdctl daemon.
        pem_file:     CA certificate file for validating cmdctl, or None
                      to skip certificate validation.
        csv_file_dir: directory holding the saved-credentials CSV file;
                      defaults to ~/.bundy/ of the invoking user.
        '''
        Cmd.__init__(self)
        self.location = ""
        self.prompt_end = '> '
        # Only show a prompt when stdin is an interactive terminal.
        if sys.stdin.isatty():
            self.prompt = self.prompt_end
        else:
            self.prompt = ""
        self.ruler = '-'
        self.modules = OrderedDict()
        self.add_module_info(ModuleInfo("help", desc = "Get help for bundyctl."))
        self.server_port = server_port
        self.conn = ValidatedHTTPSConnection(self.server_port,
                                             ca_certs=pem_file)
        self.session_id = self._get_session_id()
        self.config_data = None
        if csv_file_dir is not None:
            self.csv_file_dir = csv_file_dir
        else:
            # Default location; note the trailing separator -- consumers
            # concatenate file names directly onto this path.
            self.csv_file_dir = pwd.getpwnam(getpass.getuser()).pw_dir + \
                os.sep + '.bundy' + os.sep
    def _print(self, *args):
        '''Simple wrapper around calls to print that can be overridden in
        unit tests; all user-visible output should go through here.'''
        print(*args)
def _get_session_id(self):
'''Generate one session id for the connection. '''
rand = os.urandom(16)
now = time.time()
session_id = sha1(("%s%s%s" %(rand, now,
socket.gethostname())).encode())
digest = session_id.hexdigest()
return digest
    def run(self):
        '''Parse commands from user and send them to cmdctl.

        Returns a process exit code: 0 on a clean exit (including Ctrl-C),
        1 on login failure or connection errors.'''
        # Show helper warning about a well known issue. We only do this
        # when stdin is attached to a terminal, because otherwise it doesn't
        # matter and is just noisy, and could even be harmful if the output
        # is processed by a script that expects a specific format.
        if my_readline == sys.stdin.readline and sys.stdin.isatty():
            sys.stdout.write("""\
WARNING: The Python readline module isn't available, so some command line
         editing features (including command history management) will not
         work. See the BUNDY guide for more details.\n\n""")
        try:
            if not self.login_to_cmdctl():
                return 1
            # Main REPL; returns when the user quits.
            self.cmdloop()
            self._print('\nExit from bundyctl')
            return 0
        except FailToLogin as err:
            # error already printed when this was raised, ignoring
            return 1
        except KeyboardInterrupt:
            self._print('\nExit from bundyctl')
            return 0
        except socket.error as err:
            self._print('Failed to send request, the connection is closed')
            return 1
        except http.client.CannotSendRequest:
            self._print('Can not send request, the connection is busy')
            return 1
def _get_saved_user_info(self, dir, file_name):
''' Read all the available username and password pairs saved in
file(path is "dir + file_name"), Return value is one list of elements
['name', 'password'], If get information failed, empty list will be
returned.'''
if (not dir) or (not os.path.exists(dir)):
return []
try:
csvfile = None
users = []
csvfile = open(dir + file_name)
users_info = csv.reader(csvfile)
for row in users_info:
users.append([row[0], row[1]])
except (IOError, IndexError) as err:
self._print("Error reading saved username and password "
"from %s%s: %s" % (dir, file_name, err))
finally:
if csvfile:
csvfile.close()
return users
def _save_user_info(self, username, passwd, dir, file_name):
''' Save username and password in file "dir + file_name"
If it's saved properly, return True, or else return False. '''
try:
if not os.path.exists(dir):
os.mkdir(dir, 0o700)
csvfilepath = dir + file_name
csvfile = open(csvfilepath, 'w')
os.chmod(csvfilepath, 0o600)
writer = csv.writer(csvfile)
writer.writerow([username, passwd])
csvfile.close()
except IOError as err:
self._print("Error saving user information:", err)
self._print("user info file name: %s%s" % (dir, file_name))
return False
return True
    def _try_login(self, username, password):
        '''
        Attempts to log into cmdctl by sending a POST with the given
        username and password. On success of the POST (not the login,
        but the network operation), it returns a tuple (response, data).
        We check for some failures such as SSL errors and socket errors
        which could happen due to the environment in which BUNDY runs.
        On failure, it raises a FailToLogin exception and prints some
        information on the failure. This call is essentially 'private',
        but made 'protected' for easier testing.
        '''
        param = {'username': username, 'password' : password}
        try:
            response = self.send_POST('/login', param)
            data = response.read().decode()
            # return here (will raise error after try block)
            return (response, data)
        except (ssl.SSLError, socket.error) as err:
            self._print('Error while sending login information:', err)
            pass
        # Only reached when the POST itself failed above.
        raise FailToLogin()
    def login_to_cmdctl(self):
        '''Login to cmdctl with the username and password given by
        the user. After the login is sucessful, the username and
        password will be saved in 'default_user.csv', when run the next
        time, username and password saved in 'default_user.csv' will be
        used first.

        Returns True on successful login, False otherwise (including a
        non-interactive session with no valid stored credentials).
        '''
        # Look at existing username/password combinations and try to log in
        users = self._get_saved_user_info(self.csv_file_dir, CSV_FILE_NAME)
        for row in users:
            response, data = self._try_login(row[0], row[1])
            if response.status == http.client.OK:
                # Is interactive?
                if sys.stdin.isatty():
                    self._print(data + ' login as ' + row[0])
                return True
        # No valid logins were found, prompt the user for a username/password
        count = 0
        if not os.path.exists(self.csv_file_dir + CSV_FILE_NAME):
            self._print('\nNo stored password file found.\n\n'
                        'When the system is first set up you need to create '
                        'at least one user account.\n'
                        'For information on how to set up a BUNDY system, '
                        'please check see the\n'
                        'BUNDY Guide: \n\n'
                        'http://bundy.bundy.org/docs/bundy-guide.html#quick-start-auth-dns\n\n'
                        'If a user account has been set up, please check the '
                        'bundy-cmdctl log for other\n'
                        'information.\n')
        else:
            self._print('Login failed: either the user name or password is '
                        'invalid.\n')
        # If this was not an interactive session do not prompt for login info.
        if not sys.stdin.isatty():
            return False
        # Interactive prompt; give up after three failed attempts.
        while True:
            count = count + 1
            if count > 3:
                self._print("Too many authentication failures")
                return False
            username = input("Username: ")
            passwd = getpass.getpass()
            response, data = self._try_login(username, passwd)
            self._print(data)
            if response.status == http.client.OK:
                # Remember the working credentials for the next run.
                self._save_user_info(username, passwd, self.csv_file_dir,
                                     CSV_FILE_NAME)
                return True
def _update_commands(self):
'''Update the commands of all modules. '''
for module_name in self.config_data.get_config_item_list():
self._prepare_module_commands(self.config_data.get_module_spec(module_name))
def _send_message(self, url, body):
headers = {"cookie" : self.session_id}
self.conn.request('GET', url, body, headers)
res = self.conn.getresponse()
return res.status, res.read()
def send_GET(self, url, body = None):
    '''Send GET request to cmdctl; the session id goes in the "cookie"
    header. Returns the decoded JSON reply, or {} when the reply body
    is empty.
    '''
    status, reply = self._send_message(url, body)
    # A 401 means the session expired; re-authenticate once and retry.
    if status == http.client.UNAUTHORIZED and self.login_to_cmdctl():
        status, reply = self._send_message(url, body)
    return json.loads(reply.decode()) if reply else {}
def send_POST(self, url, post_param=None):
    '''Send POST request to cmdctl; the session id goes in the "cookie"
    header.

    URL format: /module_name/command_name. Command parameters (a dict,
    if given and non-empty) are JSON-encoded as the request body.
    Returns the raw HTTP response object.
    '''
    body = json.dumps(post_param) if post_param else None
    self.conn.request('POST', url, body, {"cookie": self.session_id})
    return self.conn.getresponse()
def _update_all_modules_info(self):
    '''Fetch every module's specification file and configuration data
    from cmdctl.

    Must be run before interpreting a command line or doing tab
    completion, so that bundyctl and cmdctl agree on the available
    modules. This may not be the best way to keep them in sync, but
    it works.
    '''
    if self.config_data is None:
        self.config_data = bundy.config.UIModuleCCSession(self)
    else:
        self.config_data.update_specs_and_config()
    self._update_commands()
def precmd(self, line):
    '''Cmd hook run before each command: refresh module info unless
    the session is ending (EOF).'''
    if line == 'EOF':
        return line
    self._update_all_modules_info()
    return line
def postcmd(self, stop, line):
    '''Update the prompt after every command, but only when output is
    an interactive tty; always pass the stop flag through.'''
    interactive = sys.stdin.isatty()
    if interactive:
        self.prompt = self.location + self.prompt_end
    return stop
def _prepare_module_commands(self, module_spec):
    '''Build a ModuleInfo (with all its commands and parameters) from
    a module specification and register it with add_module_info().'''
    module = ModuleInfo(name=module_spec.get_module_name(),
                        desc=module_spec.get_module_description())
    for command_spec in module_spec.get_commands_spec():
        command = CommandInfo(name=command_spec["command_name"],
                              desc=command_spec["command_description"])
        for arg_spec in command_spec["command_args"]:
            param = ParamInfo(name=arg_spec["item_name"],
                              type=arg_spec["item_type"],
                              optional=bool(arg_spec["item_optional"]),
                              param_spec=arg_spec)
            # Default value and description are optional in the spec.
            if "item_default" in arg_spec:
                param.default = arg_spec["item_default"]
            if "item_description" in arg_spec:
                param.desc = arg_spec["item_description"]
            command.add_param(param)
        module.add_command(command)
    self.add_module_info(module)
def _validate_cmd(self, cmd):
    '''validate the parameters and merge some parameters together,
    merge algorithm is based on the command line syntax, later, if
    a better command line syntax come out, this function should be
    updated first.

    Raises a Cmd*SyntaxError subclass when the module, command, or a
    parameter is unknown or a mandatory parameter is missing. On
    success, cmd.params is normalized in place: positional (integer
    keyed) arguments are renamed to their declared parameter names and
    values are converted to the type declared in the spec.
    '''
    if not cmd.module in self.modules:
        raise CmdUnknownModuleSyntaxError(cmd.module)
    module_info = self.modules[cmd.module]
    if not module_info.has_command_with_name(cmd.command):
        raise CmdUnknownCmdSyntaxError(cmd.module, cmd.command)
    command_info = module_info.get_command_with_name(cmd.command)
    manda_params = command_info.get_mandatory_param_names()
    all_params = command_info.get_param_names()
    # If help is entered, don't do further parameter validation.
    for val in cmd.params.keys():
        if val == "help":
            return
    # Work on a copy so merging does not disturb iteration below.
    params = cmd.params.copy()
    if not params and manda_params:
        raise CmdMissParamSyntaxError(cmd.module, cmd.command, manda_params[0])
    elif params and not all_params:
        raise CmdUnknownParamSyntaxError(cmd.module, cmd.command,
                                         list(params.keys())[0])
    elif params:
        param_name = None
        param_count = len(params)
        for name in params:
            # either the name of the parameter must be known, or
            # the 'name' must be an integer (ie. the position of
            # an unnamed argument
            if type(name) == int:
                # lump all extraneous arguments together as one big final one
                # todo: check if last param type is a string?
                while (param_count > 2 and
                       param_count > len(command_info.params) - 1):
                    params[param_count - 2] += " " + params[param_count - 1]
                    del(params[param_count - 1])
                    param_count = len(params)
                    cmd.params = params.copy()
                # (-1, help is always in the all_params list)
                if name >= len(all_params) - 1:
                    # add to last known param
                    if param_name:
                        cmd.params[param_name] += cmd.params[name]
                    else:
                        raise CmdUnknownParamSyntaxError(cmd.module, cmd.command, cmd.params[name])
                else:
                    # replace the numbered items by named items
                    param_name = command_info.get_param_name_by_position(name, param_count)
                    cmd.params[param_name] = cmd.params[name]
                    del cmd.params[name]
            elif not name in all_params:
                raise CmdUnknownParamSyntaxError(cmd.module, cmd.command, name)
        # A mandatory parameter may be present either by name or by
        # its position among the unnamed arguments.
        param_nr = 0
        for name in manda_params:
            if not name in params and not param_nr in params:
                raise CmdMissParamSyntaxError(cmd.module, cmd.command, name)
            param_nr += 1
    # Convert parameter value according to parameter spec
    # file. Ignore check for commands belonging to module 'config'
    # or 'execute'.
    if cmd.module != CONFIG_MODULE_NAME and\
       cmd.module != command_sets.EXECUTE_MODULE_NAME:
        for param_name in cmd.params:
            param_spec = command_info.get_param_with_name(param_name).param_spec
            try:
                cmd.params[param_name] = bundy.config.config_data.convert_type(param_spec, cmd.params[param_name])
            except bundy.cc.data.DataTypeError as e:
                raise bundy.cc.data.DataTypeError('Invalid parameter value for \"%s\", the type should be \"%s\" \n'
                                                  % (param_name, param_spec['item_type']) + str(e))
def _handle_cmd(self, cmd):
    '''Dispatch a parsed user command to the appropriate handler:
    help, config, execute, or a generic module command.'''
    if cmd.command == "help" or "help" in cmd.params.keys():
        self._handle_help(cmd)
        return
    if cmd.module == CONFIG_MODULE_NAME:
        self.apply_config_cmd(cmd)
    elif cmd.module == command_sets.EXECUTE_MODULE_NAME:
        self.apply_execute_cmd(cmd)
    else:
        self.apply_cmd(cmd)
def add_module_info(self, module_info):
    '''Register (or replace) the information about one module, keyed
    by its name.'''
    self.modules[module_info.name] = module_info
def get_module_names(self):
    '''Return a list with the names of all known modules.'''
    return [name for name in self.modules]
#override methods in cmd
def default(self, line):
    '''Cmd hook: any input not matching a do_* method is parsed and
    executed as a bundyctl command.'''
    self._parse_cmd(line)
def emptyline(self):
    # Cmd hook: an empty input line does nothing. (The default Cmd
    # behaviour would repeat the previous command.)
    pass
def do_help(self, name):
    '''Print the general bundyctl help text followed by a short,
    wrapped description of every known module.

    Long module names get their own line with the description starting
    on the next line; short names are padded into a two-column layout.
    '''
    self._print(CONST_BINDCTL_HELP)
    for k in self.modules.values():
        n = k.get_name()
        if len(n) >= CONST_BINDCTL_HELP_INDENT_WIDTH:
            # Name too wide for the column: print it alone, then the
            # wrapped description underneath.
            self._print(" %s" % n)
            self._print(textwrap.fill(k.get_desc(),
                        initial_indent=" ",
                        subsequent_indent=" " +
                        " " * CONST_BINDCTL_HELP_INDENT_WIDTH,
                        width=70))
        else:
            # Name fits: pad it out to the column width and wrap the
            # description beside it.
            self._print(textwrap.fill("%s%s%s" %
                        (k.get_name(),
                         " "*(CONST_BINDCTL_HELP_INDENT_WIDTH -
                              len(k.get_name())),
                         k.get_desc()),
                        initial_indent=" ",
                        subsequent_indent=" " +
                        " " * CONST_BINDCTL_HELP_INDENT_WIDTH,
                        width=70))
def onecmd(self, line):
    '''Execute one command line. 'EOF' or 'quit' closes the connection
    and stops the loop; 'h' is shorthand for 'help'.'''
    if line == 'EOF' or line.lower() == "quit":
        self.conn.close()
        return True
    Cmd.onecmd(self, 'help' if line == 'h' else line)
def _get_identifier_startswith(self, id_text):
    """Return the tab-completion hints for identifiers starting with
    id_text.

    Parameters:
    id_text (string): the currently entered identifier part, which
                      is to be completed.
    """
    # Strip starting "/" from id_text
    if id_text.startswith('/'):
        id_text = id_text[1:]
    # Get all items from the given module (up to the first /).
    # Fix: the local was named 'list', shadowing the builtin.
    item_list = self.config_data.get_config_item_list(
        id_text.rpartition("/")[0], recurse=True)
    # filter out all possibilities that don't match currently entered
    # text part
    return [val for val in item_list if val.startswith(id_text)]
def _cmd_has_identifier_param(self, cmd):
    """
    Returns True if the given (parsed) command is known and has a
    parameter which points to a config data identifier.

    Parameters:
    cmd (cmdparse.BindCmdParser): command context, including given params
    """
    module_info = self.modules.get(cmd.module)
    if module_info is None:
        return False
    command = module_info.get_command_with_name(cmd.command)
    return command.has_param_with_name(CFGITEM_IDENTIFIER_PARAM)
def complete(self, text, state):
    """
    Returns tab-completion hints. See the python documentation of the
    readline and Cmd modules for more information.

    The first time this is called (within one 'completer' action), it
    has state 0, and a list of possible completions is made. This list
    is stored; complete() will then be called with increasing values of
    state, until it returns None. For each call it returns the state'th
    element of the hints it collected in the first call.

    The hints list contents depend on which part of the full command
    line; if no module is given yet, it will list all modules. If a
    module is given, but no command, it will complete with module
    commands. If both have been given, it will create the hints based on
    the command parameters.

    If module and command have already been specified, and the command
    has a parameter 'identifier', the configuration data is used to
    create the hints list.

    Parameters:
    text (string): The text entered so far in the 'current' part of
                   the command (module, command, parameters)
    state (int): state used in the readline tab-completion logic;
                 0 on first call, increasing by one until there are
                 no (more) hints to return.

    Returns the string value of the hints list with index 'state',
    or None if no (more) hints are available.
    """
    if state == 0:
        # First call of this completer action: build the hints list.
        self._update_all_modules_info()
        text = text.strip()
        hints = []
        cur_line = my_readline()
        try:
            # Parse whatever has been typed so far; the specific parse
            # error tells us which part of the command is incomplete.
            cmd = BindCmdParser(cur_line)
            if not cmd.params and text:
                hints = self._get_command_startswith(cmd.module, text)
            elif self._cmd_has_identifier_param(cmd):
                # If the command has an argument that is a configuration
                # identifier (currently, this is only a subset of
                # the config commands), then don't tab-complete with
                # hints derived from command parameters, but from
                # possible configuration identifiers.
                #
                # This solves the issue reported in #2254, where
                # there were hints such as 'argument' and 'identifier'.
                #
                # Since they are replaced, the tab-completion no longer
                # adds 'help' as an option (but it still works)
                #
                # Also, currently, tab-completion does not work
                # together with 'config go' (it does not take 'current
                # position' into account). But config go currently has
                # problems by itself, unrelated to completion.
                hints = self._get_identifier_startswith(text)
            else:
                hints = self._get_param_startswith(cmd.module, cmd.command,
                                                   text)
        except CmdModuleNameFormatError:
            # No module typed yet: offer all module names.
            if not text:
                hints = self.get_module_names()
        except CmdMissCommandNameFormatError as e:
            if not text.strip():    # command name is empty
                hints = self.modules[e.module].get_command_names()
            else:
                hints = self._get_module_startswith(text)
        except CmdCommandNameFormatError as e:
            if e.module in self.modules:
                hints = self._get_command_startswith(e.module, text)
        except CmdParamFormatError as e:
            hints = self._get_param_startswith(e.module, e.command, text)
        except BindCtlException:
            hints = []
        self.hint = hints
    # Subsequent calls walk the cached hints until exhausted.
    if state < len(self.hint):
        return self.hint[state]
    else:
        return None
def _get_module_startswith(self, text):
    '''Return the known module names that start with text.'''
    return [name for name in self.modules if name.startswith(text)]
def _get_command_startswith(self, module, text):
    '''Return the commands of the given module that start with text,
    or [] when the module is unknown.'''
    if module not in self.modules:
        return []
    names = self.modules[module].get_command_names()
    return [command for command in names if command.startswith(text)]
def _get_param_startswith(self, module, command, text):
    '''Return parameter-name completion hints for module/command.

    When exactly one hint remains and it is not "help", a " =" suffix
    is appended so the user can type the value directly. An unknown
    module or command yields [].
    '''
    if module not in self.modules:
        return []
    module_info = self.modules[module]
    if command not in module_info.get_command_names():
        return []
    params = module_info.get_command_with_name(command).get_param_names()
    if text:
        hints = [p for p in params if p.startswith(text)]
    else:
        hints = list(params)
    if len(hints) == 1 and hints[0] != "help":
        hints[0] = hints[0] + " ="
    return hints
def _parse_cmd(self, line):
    '''Parse one command line, validate it, and execute it, printing
    any resulting error instead of letting it propagate to the Cmd
    main loop.

    Fix: the original had two handlers for bundy.cc.data.DataTypeError;
    the second one was unreachable and has been removed.
    '''
    try:
        cmd = BindCmdParser(line)
        self._validate_cmd(cmd)
        self._handle_cmd(cmd)
    except (IOError, http.client.HTTPException) as err:
        self._print('Error: ', err)
    except BindCtlException as err:
        self._print("Error! ", err)
        self._print_correct_usage(err)
    except bundy.cc.data.DataTypeError as err:
        self._print("Error! ", err)
    except bundy.cc.data.DataNotFoundError as dnfe:
        self._print("Error: " + str(dnfe))
    except bundy.cc.data.DataAlreadyPresentError as dape:
        self._print("Error: " + str(dape))
    except KeyError as ke:
        self._print("Error: missing " + str(ke))
def _print_correct_usage(self, ept):
    '''Print the usage help appropriate for the given syntax error:
    general help, module help, or command help.'''
    if isinstance(ept, CmdUnknownModuleSyntaxError):
        self.do_help(None)
    elif isinstance(ept, CmdUnknownCmdSyntaxError):
        self.modules[ept.module].module_help()
    elif isinstance(ept, (CmdMissParamSyntaxError,
                          CmdUnknownParamSyntaxError)):
        self.modules[ept.module].command_help(ept.command)
def _append_space_to_hint(self):
    """Append one space at the end of every completion hint."""
    self.hint = [hint + " " for hint in self.hint]
def _handle_help(self, cmd):
    '''Show module-level help, or command-level help when a specific
    command other than "help" was requested.'''
    module_info = self.modules[cmd.module]
    if cmd.command == "help":
        module_info.module_help()
    else:
        module_info.command_help(cmd.command)
def apply_config_cmd(self, cmd):
    '''Handles a configuration command.
    Raises a DataTypeError if a wrong value is set.
    Raises a DataNotFoundError if a wrong identifier is used.
    Raises a KeyError if the command was not complete
    '''
    # The working identifier starts at the current 'config go'
    # location and is extended/replaced by an explicit parameter.
    identifier = self.location
    if 'identifier' in cmd.params:
        if not identifier.endswith("/"):
            identifier += "/"
        if cmd.params['identifier'].startswith("/"):
            # An absolute identifier replaces the current location.
            identifier = cmd.params['identifier']
        else:
            # A '[index]' suffix attaches directly, without the '/'.
            if cmd.params['identifier'].startswith('['):
                identifier = identifier[:-1]
            identifier += cmd.params['identifier']
        # Check if the module is known; for unknown modules
        # we currently deny setting preferences, as we have
        # no way yet to determine if they are ok.
        module_name = identifier.split('/')[1]
        if module_name != "" and (self.config_data is None or \
           not self.config_data.have_specification(module_name)):
            self._print("Error: Module '" + module_name +
                        "' unknown or not running")
            return
    if cmd.command == "show":
        # check if we have the 'all' argument
        show_all = False
        if 'argument' in cmd.params:
            if cmd.params['argument'] == 'all':
                show_all = True
            elif 'identifier' not in cmd.params:
                # no 'all', no identifier, assume this is the
                #identifier
                identifier += cmd.params['argument']
            else:
                self._print("Error: unknown argument " +
                            cmd.params['argument'] +
                            ", or multiple identifiers given")
                return
        values = self.config_data.get_value_maps(identifier, show_all)
        # One output line per item: name, value (when shown), type,
        # and default/modified markers.
        for value_map in values:
            line = value_map['name']
            if value_map['type'] in [ 'module', 'map' ]:
                line += "/"
            elif value_map['type'] == 'list' \
                 and value_map['value'] != []:
                # do not print content of non-empty lists if
                # we have more data to show
                line += "/"
            else:
                # if type is named_set, don't print value if None
                # (it is either {} meaning empty, or None, meaning
                # there actually is data, but not to be shown with
                # the current command
                if value_map['type'] == 'named_set' and\
                   value_map['value'] is None:
                    line += "/\t"
                else:
                    line += "\t" + json.dumps(value_map['value'])
            line += "\t" + value_map['type']
            line += "\t"
            if value_map['default']:
                line += "(default)"
            if value_map['modified']:
                line += "(modified)"
            self._print(line)
    elif cmd.command == "show_json":
        if identifier == "":
            self._print("Need at least the module to show the "
                        "configuration in JSON format")
        else:
            # NOTE(review): the returned default flag is unused here.
            data, default = self.config_data.get_value(identifier)
            self._print(json.dumps(data))
    elif cmd.command == "add":
        self.config_data.add_value(identifier,
                                   cmd.params.get('value_or_name'),
                                   cmd.params.get('value_for_set'))
    elif cmd.command == "remove":
        if 'value' in cmd.params:
            self.config_data.remove_value(identifier, cmd.params['value'])
        else:
            self.config_data.remove_value(identifier, None)
    elif cmd.command == "set":
        if 'identifier' not in cmd.params:
            self._print("Error: missing identifier or value")
        else:
            parsed_value = None
            try:
                parsed_value = json.loads(cmd.params['value'])
            except Exception as exc:
                # ok could be an unquoted string, interpret as such
                parsed_value = cmd.params['value']
            self.config_data.set_value(identifier, parsed_value)
    elif cmd.command == "unset":
        self.config_data.unset(identifier)
    elif cmd.command == "revert":
        self.config_data.clear_local_changes()
    elif cmd.command == "commit":
        try:
            self.config_data.commit()
        except bundy.config.ModuleCCSessionError as mcse:
            self._print(str(mcse))
    elif cmd.command == "diff":
        self._print(self.config_data.get_local_changes())
    elif cmd.command == "go":
        self.go(identifier)
def go(self, identifier):
    '''Handles the config go command: change the 'current' location
    within the configuration tree. '..' is interpreted as 'up one
    level'. Prints an error and stays put when the target does not
    exist.'''
    new_location = ""
    for part in bundy.cc.data.split_identifier(identifier):
        if part == "..":
            # Drop the last path component to move up one level.
            new_location = new_location.rpartition("/")[0]
        else:
            new_location += "/" + part
    # check if exists, if not, revert and error
    value, _default = self.config_data.get_value(new_location)
    if value is None:
        self._print("Error: " + identifier + " not found")
        return
    self.location = new_location
def apply_execute_cmd(self, command):
    '''Handles the 'execute' command, which executes a number of
    (preset) statements. The command set to execute is either read
    from a file (e.g. 'execute file <file>') or one of the sets
    defined in command_sets.py.'''
    if command.command == 'file':
        try:
            with open(command.params['filename']) as command_file:
                commands = command_file.readlines()
        except IOError as ioe:
            self._print("Error: " + str(ioe))
            return
    elif command_sets.has_command_set(command.command):
        commands = command_sets.get_commands(command.command)
    else:
        # Should not be reachable; parser should've caught this
        raise Exception("Unknown execute command type " + command.command)
    # Either display the commands or run them, depending on 'show'.
    if command.params.get('show') == 'show':
        self.__show_execute_commands(commands)
    else:
        self.__apply_execute_commands(commands)
def __show_execute_commands(self, commands):
    '''Prints the command list without executing any of them.'''
    for command_line in commands:
        self._print(command_line.strip())
def __apply_execute_commands(self, commands):
    '''Applies the configuration commands from the given iterator.

    This is the method that catches, comments, echo statements, and
    other directives. All commands not filtered by this method are
    interpreted as if they are directly entered in an active session.

    Lines starting with any of the following characters are not
    passed directly:
    # - These are comments
    ! - These are directives
        !echo: print the rest of the line
        !verbose on/off: print the commands themselves too
    Unknown directives are ignored (with a warning)

    The execution is stopped if there are any errors.

    Fix: the regex patterns are now raw strings; '\\s' in a plain
    string literal is an invalid escape sequence (SyntaxWarning on
    recent Python versions).
    '''
    verbose = False
    try:
        for line in commands:
            line = line.strip()
            if verbose:
                self._print(line)
            if line.startswith('#') or len(line) == 0:
                continue
            elif line.startswith('!'):
                if re.match(r'^!echo ', line, re.I) and len(line) > 6:
                    self._print(line[6:])
                elif re.match(r'^!verbose\s+on\s*$', line, re.I):
                    verbose = True
                elif re.match(r'^!verbose\s+off$', line, re.I):
                    verbose = False
                else:
                    self._print("Warning: ignoring unknown directive: " +
                                line)
            else:
                cmd = BindCmdParser(line)
                self._validate_cmd(cmd)
                self._handle_cmd(cmd)
    except (bundy.config.ModuleCCSessionError,
            IOError, http.client.HTTPException,
            BindCtlException, bundy.cc.data.DataTypeError,
            bundy.cc.data.DataNotFoundError,
            bundy.cc.data.DataAlreadyPresentError,
            KeyError) as err:
        self._print('Error: ', err)
        self._print()
        self._print('Depending on the contents of the script, and which')
        self._print('commands it has called, there can be committed and')
        self._print('local changes. It is advised to check your settings')
        self._print(', and revert local changes with "config revert".')
def apply_cmd(self, cmd):
    '''Handles a general module command: POST it to cmdctl and
    pretty-print the JSON reply, if there is one.

    Fix: removed a dead local that JSON-encoded cmd.params but was
    never used — send_POST performs the encoding itself.
    '''
    url = '/' + cmd.module + '/' + cmd.command
    reply = self.send_POST(url, cmd.params)
    data = reply.read().decode()
    # The reply is a string containing JSON data,
    # parse it, then prettyprint
    if data != "" and data != "{}":
        self._print(json.dumps(json.loads(data), sort_keys=True,
                    indent=4))
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
from flask_login import LoginManager
from flask_mail import Mail
import json
import os

app = Flask(__name__, template_folder='templates')
# SECURITY: secrets were hard-coded in source; they are now read from the
# environment, falling back to the previous values so existing deployments
# keep working. Rotate these credentials and set the env vars in production.
app.config['SECRET_KEY'] = os.environ.get(
    'SECRET_KEY', '7976b37fadec64fb4ceae186f7533ec4')
app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get(
    'SQLALCHEMY_DATABASE_URI', 'sqlite:///test.db')
db = SQLAlchemy(app)
bcrypt = Bcrypt(app)
login_manager = LoginManager(app)

# Outgoing mail: Gmail SMTP with STARTTLS.
app.config['MAIL_SERVER'] = 'smtp.googlemail.com'
app.config['MAIL_PORT'] = 587
app.config['MAIL_USE_TLS'] = True
app.config['MAIL_USERNAME'] = os.environ.get(
    'MAIL_USERNAME', 'foodbytesverify@gmail.com')
app.config['MAIL_PASSWORD'] = os.environ.get(
    'MAIL_PASSWORD', 'Admin123@')
mail = Mail(app)

# Imported last to avoid a circular import (routes needs `app`).
from foodapp import routes
#!/usr/bin/env python
import datetime
import qwiic_ccs811
import time
import sys
def runExample():
    """Read the CCS811 sensor a few times (letting the readings settle),
    then print one CSV line: ISO timestamp, CO2, TVOC.

    Fixes: idiomatic `not ... connected` instead of `== False`, and the
    copy-pasted read/sleep triplet replaced by a loop with the same
    read-sleep-read-sleep-read sequence.
    """
    mySensor = qwiic_ccs811.QwiicCcs811()

    if not mySensor.connected:
        print("The Qwiic CCS811 device isn't connected to the system. Please check your connection", \
            file=sys.stderr)
        return

    mySensor.begin()

    # Take three readings with a 3 s pause between them; the sensor's
    # algorithm output needs a little time to stabilize.
    for reading in range(3):
        if reading:
            time.sleep(3)
        mySensor.read_algorithm_results()

    print(datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S"), end=",")
    print("%.3f" % mySensor.CO2, end=",")
    print("%.3f" % mySensor.TVOC)
if __name__ == '__main__':
    try:
        runExample()
    except (KeyboardInterrupt, SystemExit) as exErr:
        # Ctrl-C or an explicit exit ends the example cleanly.
        print("\nEnding Basic Example")
        sys.exit(0)
|
"""
Module for wizard dialogs.
"""
import wx
from .input import Dialog
class WizardPage(wx.Panel):
    """One page of a Wizard: a bold title, a separator line, and an
    empty 'canvas' panel that callers populate with their controls."""

    def __init__(self, parent, title):
        wx.Panel.__init__(self, parent)
        self.sizer = wx.BoxSizer(wx.VERTICAL)
        self.SetSizer(self.sizer)
        # Large bold heading at the top of the page.
        self.ctrl_title = wx.StaticText(self, label=title)
        self.ctrl_title.SetFont(wx.Font(18, wx.SWISS, wx.NORMAL, wx.BOLD))
        # Content area; callers add their own widgets to this panel.
        self.canvas = wx.Panel(self)
        self.sizer.Add(self.ctrl_title, 0, wx.ALIGN_CENTRE|wx.ALL, 5)
        self.sizer.Add(wx.StaticLine(self, -1), 0, wx.EXPAND|wx.ALL, 5)
        self.sizer.Add(self.canvas, 1, wx.EXPAND|wx.ALL, 5)
class Wizard(Dialog):
    """A multi-page dialog with Back/Forward navigation, an Apply
    button enabled only on the last page, and Cancel.

    Pages are created via AddPage(); all pages share one sizer and
    only the current page is shown.
    """

    def __init__(self, *args, **kwargs):
        self.pages = []          # WizardPage panels, in display order
        self.cur_page = 0        # index of the currently visible page
        super(Wizard, self).__init__(*args, **kwargs)

    def CreateContentArea(self):
        """Create a sizer with the content area controls."""
        # Nesting a panel is necessary since automatic validation uses only
        # the validators of *child* windows.
        self.ContentArea = wx.Panel(self)
        self.content_sizer = wx.BoxSizer(wx.VERTICAL)
        self.ContentArea.SetSizer(self.content_sizer)
        return self.ContentArea

    # add prev/next buttons
    def CreateButtonArea(self):
        """Create the Back/Forward/Apply/Cancel button row."""
        self.button_sizer = wx.BoxSizer(wx.HORIZONTAL)
        self.prev_button = wx.Button(self, wx.ID_BACKWARD, label="&Back")
        self.next_button = wx.Button(self, wx.ID_FORWARD, label="&Forward")
        self.finish_button = wx.Button(self, wx.ID_APPLY, label="&Apply")
        self.cancel_button = wx.Button(self, wx.ID_CANCEL, label="&Cancel")
        self.prev_button.Bind(wx.EVT_BUTTON, self.OnPrevButton)
        self.next_button.Bind(wx.EVT_BUTTON, self.OnNextButton)
        self.finish_button.Bind(wx.EVT_BUTTON, self.OnFinishButton)
        # UpdateUI events keep the buttons enabled/disabled as the
        # current page changes.
        self.prev_button.Bind(wx.EVT_UPDATE_UI, self.UpdatePrevButton)
        self.next_button.Bind(wx.EVT_UPDATE_UI, self.UpdateNextButton)
        self.finish_button.Bind(wx.EVT_UPDATE_UI, self.UpdateFinishButton)
        self.button_sizer.Add(self.prev_button, 0, wx.ALL|wx.ALIGN_RIGHT, 5)
        self.button_sizer.Add(self.next_button, 0, wx.ALL|wx.ALIGN_RIGHT, 5)
        self.button_sizer.Add(self.finish_button, 0, wx.ALL|wx.ALIGN_RIGHT, 5)
        self.button_sizer.AddSpacer(10)
        self.button_sizer.Add(self.cancel_button, 0, wx.ALL|wx.ALIGN_RIGHT, 5)
        return self.button_sizer

    def Fit(self):
        # Equalize page sizes first so the dialog fits the largest page.
        self._UpdateSize()
        super(Wizard, self).Fit()

    def AddPage(self, title):
        """Append a new WizardPage with the given title and return it."""
        panel = WizardPage(self.ContentArea, title)
        self.content_sizer.Add(panel, 2, wx.EXPAND)
        self.pages.append(panel)
        if len(self.pages) > 1:
            # hide all panels after the first one
            self.content_sizer.Hide(panel)
        return panel

    def _UpdateSize(self):
        # Give every page the min size of the largest page, so the
        # dialog does not jump around when switching pages.
        min_w, min_h = 0, 0
        for page in self.pages:
            w, h = page.GetSizer().GetMinSize()
            if w > min_w:
                min_w = w
            if h > min_h:
                min_h = h
        for page in self.pages:
            page.GetSizer().SetMinSize((min_w, min_h))

    def OnNextButton(self, event):
        if self.CanForward():
            self.NextPage()

    def OnPrevButton(self, event):
        if self.CanBack():
            self.PrevPage()

    def GoToPage(self, page):
        """Switch the visible page to the given index.

        Raises ValueError for an out-of-range index; no-op when the
        requested page is already shown.
        """
        if page == self.cur_page:
            return
        if page < 0 or page >= len(self.pages):
            raise ValueError("Invalid page index: {}".format(page))
        old_page, new_page = self.cur_page, page
        # Freeze/Thaw avoids flicker while pages are swapped.
        self.Freeze()
        self.content_sizer.Hide(old_page)
        self.content_sizer.Show(new_page)
        self.cur_page = new_page
        self.content_sizer.Layout()
        self.Thaw()

    def NextPage(self):
        self.GoToPage(self.cur_page + 1)

    def PrevPage(self):
        self.GoToPage(self.cur_page - 1)

    def OnFinishButton(self, event):
        # Subclasses override this to apply the wizard's result.
        pass

    def CanBack(self):
        return self.cur_page > 0

    def CanForward(self):
        return self.cur_page < len(self.pages)-1

    def CanApply(self):
        # Apply is only offered on the last page.
        return self.cur_page == len(self.pages)-1

    def UpdatePrevButton(self, event):
        event.Enable(self.CanBack())

    def UpdateNextButton(self, event):
        event.Enable(self.CanForward())

    def UpdateFinishButton(self, event):
        event.Enable(self.CanApply())
|
# -*- coding:utf-8 -*-
from django.db import models
from django.contrib.auth.models import User
from backend.core.models import ModelDateMixin, ModelUserMixin, ProjectManage, VersionManage, ModuleManage, TaskManage
class PageManage(ModelUserMixin, ModelDateMixin):
    """Shared table: page management.

    NOTE: verbose_name values are user-facing admin labels and are
    kept in Chinese intentionally.
    """
    version = models.ForeignKey(VersionManage, null=True, on_delete=models.SET_NULL, verbose_name="版本号")
    module = models.ForeignKey(ModuleManage, null=True, on_delete=models.SET_NULL, verbose_name="模块名称")
    platform = models.CharField(max_length=20, blank=True, null=True, verbose_name='平台类型')
    project = models.ForeignKey(ProjectManage, null=True, on_delete=models.SET_NULL, verbose_name="项目名称")
    name = models.CharField(max_length=50, verbose_name='页面名称')

    class Meta:
        # A page name is unique within its project/version/module.
        unique_together = (("project", "version","module", "name"),)
        ordering = ('name', )
        verbose_name = '页面管理'
        verbose_name_plural = verbose_name
class ElementManage(models.Model):
    """Shared table: UI element management (locators on a page)."""
    page = models.ForeignKey(PageManage, null=True, on_delete=models.SET_NULL, verbose_name="页面名称")
    element = models.CharField(max_length=100, verbose_name='元素对象')
    # Locator strategy, e.g. 'id' by default.
    find_type = models.CharField(max_length=30, default='id', verbose_name='查找方式')
    content = models.CharField(max_length=100, verbose_name="元素内容")
    comment = models.TextField(blank=True, null=True, verbose_name="元素备注")

    class Meta:
        # The same element may appear once per locator strategy per page.
        unique_together = (("page", "element", "find_type"),)
        ordering = ('element', )
        verbose_name = '元素对象'
        verbose_name_plural = verbose_name
class SvnPath(models.Model):
    """UI table: stores SVN repository URLs."""
    url = models.CharField(max_length=150, unique=True, verbose_name='url地址')
    # NOTE(review): this FK targets ModuleManage although the field is
    # named 'project' and labeled as a project name — confirm whether
    # ProjectManage was intended.
    project = models.ForeignKey(ModuleManage, null=True, on_delete=models.SET_NULL, verbose_name="项目名称")
class UICase(ModelUserMixin, ModelDateMixin):
    """UI table: test cases (driven by imported TestNG XML files)."""
    xml_comment = models.CharField(max_length=100, verbose_name='xml描述')
    name = models.CharField(max_length=50, verbose_name='用例名称')
    testng_xml = models.CharField(max_length=50, verbose_name='导入的xml文件')
    jenkins_url = models.TextField(blank=True, null=True, verbose_name="jenkins的url")
    # Free-text run state, e.g. running / compile failed.
    status = models.CharField(max_length=100, verbose_name='运行状态(运行中、编译失败等)')
    project = models.ForeignKey(ProjectManage, null=True, on_delete=models.SET_NULL, verbose_name="项目名称")
    task = models.ForeignKey(TaskManage, null=True, on_delete=models.SET_NULL, verbose_name="任务名称")

    class Meta:
        # A case name is unique within its project and task.
        unique_together = (("project", "name", "task"),)
        ordering = ('name', )
        verbose_name = '用例名称'
        verbose_name_plural = verbose_name
class UITestResult(ModelDateMixin):
    """UI table: test results (one row per executed test job)."""
    job_name = models.CharField(max_length=50, verbose_name='测试方法名称')
    case = models.ForeignKey(UICase, null=True, on_delete=models.SET_NULL, verbose_name="用例名称")
    module = models.ForeignKey(ModuleManage, null=True, on_delete=models.SET_NULL, verbose_name="模块名称")
    job = models.CharField(max_length=50, verbose_name='任务job名')
    excutor = models.ForeignKey(User, null=True, on_delete=models.SET_NULL, verbose_name="执行人")
    start_time = models.DateTimeField(editable=True, null=True, verbose_name="开始时间")
    # Fix: end_time's verbose_name previously said "开始时间" (start
    # time), a copy-paste error; corrected to "结束时间" (end time).
    end_time = models.DateTimeField(editable=True, null=True, verbose_name="结束时间")
    result = models.CharField(max_length=20, verbose_name='用例运行结果')
    times = models.CharField(max_length=10, verbose_name='任务id(运行次数)')
    os = models.CharField(max_length=10, verbose_name='操作系统')
    excute_platform = models.CharField(max_length=20, verbose_name='运行平台')
    log_id = models.CharField(max_length=50, verbose_name='mongo日志id')
class UICaseStep(models.Model):
    """UI table: the steps that make up one test case run."""
    test_result = models.ForeignKey(UITestResult, null=True, on_delete=models.SET_NULL, verbose_name="用例名称")
    name = models.CharField(max_length=50, verbose_name='用例名称')
    code = models.CharField(max_length=50, verbose_name='用例code')
    comment = models.TextField(blank=True, null=True, verbose_name="用例描述")
    step = models.TextField(blank=True, null=True, verbose_name="执行步骤")
    check_point = models.TextField(blank=True, null=True, verbose_name="检查点")
class UITestStepResult(models.Model):
    """UI table: per-step results, with failure artifacts (screenshot,
    video, Mongo log reference)."""
    test_result = models.ForeignKey(UITestResult, null=True, on_delete=models.SET_NULL, verbose_name="测试结果id")
    img_path = models.TextField(blank=True, null=True, verbose_name="存储错误图片地址")
    video_path = models.TextField(blank=True, null=True, verbose_name="存储视频地址")
    log_id = models.CharField(max_length=50, verbose_name='mongo日志id')
    result = models.CharField(max_length=20, verbose_name='步骤运行结果')
class DeviceManage(ModelDateMixin):
    """App table: managed test devices (phones) and their hardware
    info plus current scheduling state."""
    name = models.CharField(max_length=50, verbose_name='设备名称')
    device_id = models.CharField(max_length=30, unique=True, verbose_name='设备id')
    brand = models.CharField(max_length=20, verbose_name='设备品牌')
    phone_model = models.CharField(max_length=50, verbose_name='设备机型')
    phone_sys_ver = models.CharField(max_length=100, verbose_name='设备系统版本')
    cpu_info = models.CharField(max_length=30, verbose_name='设备cpu核数')
    mem_info = models.CharField(max_length=50, verbose_name='设备内存信息')
    image_link = models.CharField(max_length=100, verbose_name='设备图片地址')
    resolution_info = models.CharField(max_length=20, verbose_name='设备分辨率')
    wireless_ip = models.CharField(unique=True, max_length=30, verbose_name='ip地址')
    wireless_port = models.IntegerField(verbose_name='无线端口')
    # Scheduling state; 'Ready' means available for a new task.
    status = models.CharField(max_length=20, default='Ready')
    run_info = models.CharField(max_length=50, blank=True, null=True, verbose_name='执行任务信息')
|
from cooperative_transport.utils import saturation
import numpy as np
def proportional_control(k_p, r, y, u_max, avoid_overturning):
    """Implement proportional control law.

    Arguments:
    k_p (float): Proportional gain
    r (float): reference signal
    y (float): system output signal
    u_max (float): maximum control effort
    avoid_overturning (bool): if True avoids rotation greater than pi

    Returns the proportional control effort, passed through the
    project's `saturation` utility with limit u_max.
    """
    error = r - y
    # For angular signals, wrap the error so the controller takes the
    # short way around rather than rotating more than half a turn.
    if avoid_overturning and abs(error) > np.pi:
        error -= 2 * np.pi * np.sign(error)
    return saturation(k_p * error, u_max)
|
'''
Created on 13 May 2016
@author: Sam
'''
# Python 3.5
if __name__ == '__main__':
    # Read 5 contestants, each line holding 4 judge scores; report the
    # 1-based index and total of the highest scorer (earliest on ties).
    best = (0, 0)  # (contestant index, best total so far)
    for contestant in range(5):
        judge_scores = input().split()
        total = sum(int(judge_scores[i]) for i in range(4))
        # Strict '>' keeps the earliest contestant on equal totals.
        if total > best[1]:
            best = (contestant, total)
    print(best[0] + 1, best[1])
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.